Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-16 01:16:56 +00:00)

Compare commits: chat-route ... key-action (26 commits)
| Author | SHA1 | Date |
|---|---|---|
| | b43e895110 | |
| | c838eb65a4 | |
| | 8a0713ce7b | |
| | c832797416 | |
| | 790c3587a1 | |
| | acf634c64a | |
| | 76cf169080 | |
| | b01163dcde | |
| | b4313ab6dd | |
| | 6c3e1e68e7 | |
| | 8956fb6142 | |
| | 8347077747 | |
| | dba56071d7 | |
| | bc5157c8ed | |
| | ab8a1bcd1c | |
| | 5db9105f61 | |
| | a9a338dcba | |
| | c61f6ce071 | |
| | 43ba22b339 | |
| | fba7925ca3 | |
| | 754def5512 | |
| | 4acfb9a1ec | |
| | 3ed3188f54 | |
| | 69a51d4a70 | |
| | c52f9f8c16 | |
| | 0848635efc | |
.github/workflows/publish-docker-images.yml (vendored): 17 changes
@@ -104,20 +104,3 @@ jobs:
repository: meilisearch/meilisearch-cloud
event-type: cloud-docker-build
client-payload: '{ "meilisearch_version": "${{ github.ref_name }}", "stable": "${{ steps.check-tag-format.outputs.stable }}" }'

# Send notification to Swarmia to notify of a deployment: https://app.swarmia.com
- name: Send deployment to Swarmia
if: github.event_name == 'push' && success()
run: |
JSON_STRING=$( jq --null-input --compact-output \
--arg version "${{ github.ref_name }}" \
--arg appName "meilisearch" \
--arg environment "production" \
--arg commitSha "${{ github.sha }}" \
--arg repositoryFullName "${{ github.repository }}" \
'{"version": $version, "appName": $appName, "environment": $environment, "commitSha": $commitSha, "repositoryFullName": $repositoryFullName}' )

curl -H "Authorization: ${{ secrets.SWARMIA_DEPLOYMENTS_AUTHORIZATION }}" \
-H "Content-Type: application/json" \
-d "$JSON_STRING" \
https://hook.swarmia.com/deployments
Cargo.lock (generated): 591 changes. File diff suppressed because it is too large.
@@ -22,7 +22,7 @@ members = [
]

[workspace.package]
version = "1.15.0"
version = "1.14.0"
authors = [
"Quentin de Quelen <quentin@dequelen.me>",
"Clément Renault <clement@meilisearch.com>",

@@ -41,7 +41,7 @@
- [**Movies**](https://where2watch.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=organization) — An application to help you find streaming platforms to watch movies using [hybrid search](https://www.meilisearch.com/solutions/hybrid-search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos).
- [**Ecommerce**](https://ecommerce.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Ecommerce website using disjunctive [facets](https://www.meilisearch.com/docs/learn/fine_tuning_results/faceted_search?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos), range and rating filtering, and pagination.
- [**Songs**](https://music.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Search through 47 million of songs.
- [**SaaS**](https://saas.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Search for contacts, deals, and companies in this [multi-tenant](https://www.meilisearch.com/docs/learn/security/multitenancy_tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) CRM application.
- [**SaaS**](https://saas.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) — Search for contacts, deals, and companies in this [multi-tenant](https://www.meilisearch.com/docs/learn/security/multitenancy_tenant_tokens?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=demos) CRM application.

See the list of all our example apps in our [demos repository](https://github.com/meilisearch/demos).

@@ -99,7 +99,7 @@ If you want to know more about the kind of data we collect and what we use it fo

## 📫 Get in touch!

Meilisearch is a search engine created by [Meili](https://www.meilisearch.com/careers), a software development company headquartered in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)
Meilisearch is a search engine created by [Meili]([https://www.welcometothejungle.com/en/companies/meilisearch](https://www.meilisearch.com/careers)), a software development company headquartered in France and with team members all over the world. Want to know more about us? [Check out our blog!](https://blog.meilisearch.com/?utm_campaign=oss&utm_source=github&utm_medium=meilisearch&utm_content=contact)

🗞 [Subscribe to our newsletter](https://meilisearch.us2.list-manage.com/subscribe?u=27870f7b71c908a8b359599fb&id=79582d828e) if you don't want to miss any updates! We promise we won't clutter your mailbox: we only send one edition every two months.

@@ -31,7 +31,7 @@ anyhow = "1.0.95"
bytes = "1.9.0"
convert_case = "0.6.0"
flate2 = "1.0.35"
reqwest = { version = "0.12.15", features = ["blocking", "rustls-tls"], default-features = false }
reqwest = { version = "0.12.12", features = ["blocking", "rustls-tls"], default-features = false }

[features]
default = ["milli/all-tokenizations"]

@@ -373,7 +373,6 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
},
disable_on_words: typo.disable_on_words.into(),
disable_on_attributes: typo.disable_on_attributes.into(),
disable_on_numbers: v6::Setting::NotSet,
}),
v5::Setting::Reset => v6::Setting::Reset,
v5::Setting::NotSet => v6::Setting::NotSet,
@@ -398,7 +397,6 @@ impl<T> From<v5::Settings<T>> for v6::Settings<v6::Unchecked> {
search_cutoff_ms: v6::Setting::NotSet,
facet_search: v6::Setting::NotSet,
prefix_search: v6::Setting::NotSet,
chat: v6::Setting::NotSet,
_kind: std::marker::PhantomData,
}
}
@@ -2,7 +2,6 @@ use std::fmt::Display;

use meilisearch_types::batches::BatchId;
use meilisearch_types::error::{Code, ErrorCode};
use meilisearch_types::milli::index::RollbackOutcome;
use meilisearch_types::tasks::{Kind, Status};
use meilisearch_types::{heed, milli};
use thiserror::Error;
@@ -151,24 +150,8 @@ pub enum Error {
CorruptedTaskQueue,
#[error(transparent)]
DatabaseUpgrade(Box<Self>),
#[error("Failed to rollback for index `{index}`: {rollback_outcome} ")]
RollbackFailed { index: String, rollback_outcome: RollbackOutcome },
#[error(transparent)]
UnrecoverableError(Box<Self>),
#[error("The index scheduler is in version v{}.{}.{}, but Meilisearch is in version v{}.{}.{}.\n - hint: start the correct version of Meilisearch, or consider updating your database. See also <https://www.meilisearch.com/docs/learn/update_and_migration/updating>",
index_scheduler_version.0, index_scheduler_version.1, index_scheduler_version.2,
package_version.0, package_version.1, package_version.2)]
IndexSchedulerVersionMismatch {
index_scheduler_version: (u32, u32, u32),
package_version: (u32, u32, u32),
},
#[error("Index `{index}` is in version v{}.{}.{}, but Meilisearch is in version v{}.{}.{}.\n - note: this is an internal error, please consider filing a bug report: <https://github.com/meilisearch/meilisearch/issues/new?template=bug_report.md>",
index_version.0, index_version.1, index_version.2, package_version.0, package_version.1, package_version.2)]
IndexVersionMismatch {
index: String,
index_version: (u32, u32, u32),
package_version: (u32, u32, u32),
},
#[error(transparent)]
HeedTransaction(heed::Error),

@@ -226,9 +209,6 @@ impl Error {
| Error::CorruptedTaskQueue
| Error::DatabaseUpgrade(_)
| Error::UnrecoverableError(_)
| Error::IndexSchedulerVersionMismatch { .. }
| Error::IndexVersionMismatch { .. }
| Error::RollbackFailed { .. }
| Error::HeedTransaction(_) => false,
#[cfg(test)]
Error::PlannedFailure => false,
@@ -294,10 +274,7 @@ impl ErrorCode for Error {
Error::CorruptedTaskQueue => Code::Internal,
Error::CorruptedDump => Code::Internal,
Error::DatabaseUpgrade(_) => Code::Internal,
Error::RollbackFailed { .. } => Code::Internal,
Error::UnrecoverableError(_) => Code::Internal,
Error::IndexSchedulerVersionMismatch { .. } => Code::Internal,
Error::IndexVersionMismatch { .. } => Code::Internal,
Error::CreateBatch(_) => Code::Internal,

// This one should never be seen by the end user

@@ -7,7 +7,6 @@ use meilisearch_types::heed::types::{SerdeJson, Str};
use meilisearch_types::heed::{Database, Env, RoTxn, RwTxn, WithoutTls};
use meilisearch_types::milli;
use meilisearch_types::milli::database_stats::DatabaseStats;
use meilisearch_types::milli::index::RollbackOutcome;
use meilisearch_types::milli::update::IndexerConfig;
use meilisearch_types::milli::{FieldDistribution, Index};
use serde::{Deserialize, Serialize};
@@ -432,51 +431,6 @@ impl IndexMapper {
Ok(index)
}

pub fn rollback_index(
&self,
rtxn: &RoTxn,
name: &str,
to: (u32, u32, u32),
) -> Result<RollbackOutcome> {
// remove any currently updating index to make sure that we aren't keeping a reference to the index somewhere
drop(self.currently_updating_index.write().unwrap().take());

let uuid = self
.index_mapping
.get(rtxn, name)?
.ok_or_else(|| Error::IndexNotFound(name.to_string()))?;

// take the lock to make sure noone is messing with the indexes while we rollback
// this will block any search or other operation, but we are rollbacking so this is probably acceptable.
let mut index_map = self.index_map.write().unwrap();

'close_index: loop {
match index_map.get(&uuid) {
Available(_) => {
index_map.close_for_resize(&uuid, self.enable_mdb_writemap, 0);
// index should now be `Closing`; try again
continue;
}
// index already closed
Missing => break 'close_index,
// closing requested by this thread or another one; wait for closing to complete, then exit
Closing(closing_index) => {
if closing_index.wait_timeout(Duration::from_secs(100)).is_none() {
// release the lock so it doesn't get poisoned
drop(index_map);
panic!("cannot close index")
}
break;
}
BeingDeleted => return Err(Error::IndexNotFound(name.to_string())),
};
}

let index_path = self.base_path.join(uuid.to_string());
Index::rollback(milli::heed::EnvOpenOptions::new().read_txn_without_tls(), index_path, to)
.map_err(|err| crate::Error::from_milli(err, Some(name.to_string())))
}

/// Attempts `f` for each index that exists in the index mapper.
///
/// It is preferable to use this function rather than a loop that opens all indexes, as a way to avoid having all indexes opened,
@@ -41,8 +41,11 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
let mut snap = String::new();

let indx_sched_version = version.get_version(&rtxn).unwrap();
let latest_version =
(versioning::VERSION_MAJOR, versioning::VERSION_MINOR, versioning::VERSION_PATCH);
let latest_version = (
versioning::VERSION_MAJOR.parse().unwrap(),
versioning::VERSION_MINOR.parse().unwrap(),
versioning::VERSION_PATCH.parse().unwrap(),
);
if indx_sched_version != Some(latest_version) {
snap.push_str(&format!("index scheduler running on version {indx_sched_version:?}\n"));
}

@@ -28,7 +28,6 @@ mod lru;
mod processing;
mod queue;
mod scheduler;
mod settings;
#[cfg(test)]
mod test_utils;
pub mod upgrade;
@@ -54,8 +53,8 @@ use flate2::Compression;
use meilisearch_types::batches::Batch;
use meilisearch_types::features::{InstanceTogglableFeatures, Network, RuntimeTogglableFeatures};
use meilisearch_types::heed::byteorder::BE;
use meilisearch_types::heed::types::{SerdeJson, Str, I128};
use meilisearch_types::heed::{self, Database, Env, RoTxn, Unspecified, WithoutTls};
use meilisearch_types::heed::types::I128;
use meilisearch_types::heed::{self, Env, RoTxn, WithoutTls};
use meilisearch_types::milli::index::IndexEmbeddingConfig;
use meilisearch_types::milli::update::IndexerConfig;
use meilisearch_types::milli::vector::{Embedder, EmbedderOptions, EmbeddingConfigs};
@@ -143,8 +142,6 @@ pub struct IndexScheduler {
/// The list of tasks currently processing
pub(crate) processing_tasks: Arc<RwLock<ProcessingTasks>>,

/// The main database that also has the chat settings.
pub main: Database<Str, Unspecified>,
/// A database containing only the version of the index-scheduler
pub version: versioning::Versioning,
/// The queue containing both the tasks and the batches.
@@ -199,7 +196,7 @@ impl IndexScheduler {
version: self.version.clone(),
queue: self.queue.private_clone(),
scheduler: self.scheduler.private_clone(),
main: self.main.clone(),

index_mapper: self.index_mapper.clone(),
cleanup_enabled: self.cleanup_enabled,
webhook_url: self.webhook_url.clone(),
@@ -270,7 +267,6 @@ impl IndexScheduler {
let features = features::FeatureData::new(&env, &mut wtxn, options.instance_features)?;
let queue = Queue::new(&env, &mut wtxn, &options)?;
let index_mapper = IndexMapper::new(&env, &mut wtxn, &options, budget)?;
let chat_settings = env.create_database(&mut wtxn, Some("chat-settings"))?;
wtxn.commit()?;

// allow unreachable_code to get rids of the warning in the case of a test build.
@@ -294,7 +290,6 @@ impl IndexScheduler {
#[cfg(test)]
run_loop_iteration: Arc::new(RwLock::new(0)),
features,
chat_settings,
};

this.run();
@@ -403,9 +398,9 @@ impl IndexScheduler {
Ok(Ok(TickOutcome::StopProcessingForever)) => break,
Ok(Err(e)) => {
tracing::error!("{e}");
// Wait when an irrecoverable error occurs.
// Wait one second when an irrecoverable error occurs.
if !e.is_recoverable() {
std::thread::sleep(Duration::from_secs(10));
std::thread::sleep(Duration::from_secs(1));
}
}
Err(_panic) => {
@@ -862,18 +857,6 @@ impl IndexScheduler {
.collect();
res.map(EmbeddingConfigs::new)
}

pub fn chat_settings(&self) -> Result<Option<serde_json::Value>> {
let rtxn = self.env.read_txn().map_err(Error::HeedTransaction)?;
self.chat_settings.get(&rtxn, "main").map_err(Into::into)
}

pub fn put_chat_settings(&self, settings: &serde_json::Value) -> Result<()> {
let mut wtxn = self.env.write_txn().map_err(Error::HeedTransaction)?;
self.chat_settings.put(&mut wtxn, "main", settings)?;
wtxn.commit().map_err(Error::HeedTransaction)?;
Ok(())
}
}

/// The outcome of calling the [`IndexScheduler::tick`] function.
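The new `chat_settings` / `put_chat_settings` helpers above store the chat configuration as a single JSON value under the `"main"` key of the dedicated `chat-settings` database. A minimal usage sketch, assuming an already-built `IndexScheduler`; the payload shape shown is illustrative and not taken from this diff:

```rust
use serde_json::json;

// Hypothetical caller; `scheduler` is an `IndexScheduler` built as in the hunks above.
fn roundtrip_chat_settings(scheduler: &IndexScheduler) {
    // Illustrative payload only; the real schema lives in the ChatSettings type shown later.
    let settings = json!({
        "source": "openAi",
        "prompts": { "system": "You are a search assistant." }
    });

    // Writes inside its own write transaction and commits before returning.
    scheduler.put_chat_settings(&settings).unwrap();

    // Reads back the raw JSON value; `None` means nothing was ever stored.
    let stored = scheduler.chat_settings().unwrap();
    assert_eq!(stored, Some(settings));
}
```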
@@ -74,7 +74,6 @@ make_enum_progress! {
make_enum_progress! {
pub enum TaskCancelationProgress {
RetrievingTasks,
CancelingUpgrade,
UpdatingTasks,
}
}

@@ -423,8 +423,7 @@ impl IndexScheduler {
}

/// Create the next batch to be processed;
/// 0. We get the *last* task to cancel.
/// 1. We get the tasks to upgrade.
/// 1. We get the *last* task to cancel.
/// 2. We get the *next* task to delete.
/// 3. We get the *next* snapshot to process.
/// 4. We get the *next* dump to process.
@@ -444,20 +443,7 @@ impl IndexScheduler {
let count_total_enqueued = enqueued.len();
let failed = &self.queue.tasks.get_status(rtxn, Status::Failed)?;

// 0. we get the last task to cancel.
let to_cancel = self.queue.tasks.get_kind(rtxn, Kind::TaskCancelation)? & enqueued;
if let Some(task_id) = to_cancel.max() {
let mut task =
self.queue.tasks.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
current_batch.processing(Some(&mut task));
current_batch.reason(BatchStopReason::TaskCannotBeBatched {
kind: Kind::TaskCancelation,
id: task_id,
});
return Ok(Some((Batch::TaskCancelation { task }, current_batch)));
}

// 1. We upgrade the instance
// 0. The priority over everything is to upgrade the instance
// There shouldn't be multiple upgrade tasks but just in case we're going to batch all of them at the same time
let upgrade = self.queue.tasks.get_kind(rtxn, Kind::UpgradeDatabase)? & (enqueued | failed);
if !upgrade.is_empty() {
@@ -473,21 +459,17 @@ impl IndexScheduler {
return Ok(Some((Batch::UpgradeDatabase { tasks }, current_batch)));
}

// check the version of the scheduler here.
// if the version is not the current, refuse to batch any additional task.
let version = self.version.get_version(rtxn)?;
let package_version = (
meilisearch_types::versioning::VERSION_MAJOR,
meilisearch_types::versioning::VERSION_MINOR,
meilisearch_types::versioning::VERSION_PATCH,
);
if version != Some(package_version) {
return Err(Error::UnrecoverableError(Box::new(
Error::IndexSchedulerVersionMismatch {
index_scheduler_version: version.unwrap_or((1, 12, 0)),
package_version,
},
)));
// 1. we get the last task to cancel.
let to_cancel = self.queue.tasks.get_kind(rtxn, Kind::TaskCancelation)? & enqueued;
if let Some(task_id) = to_cancel.max() {
let mut task =
self.queue.tasks.get_task(rtxn, task_id)?.ok_or(Error::CorruptedTaskQueue)?;
current_batch.processing(Some(&mut task));
current_batch.reason(BatchStopReason::TaskCannotBeBatched {
kind: Kind::TaskCancelation,
id: task_id,
});
return Ok(Some((Batch::TaskCancelation { task }, current_batch)));
}

// 2. we get the next task to delete
@@ -6,8 +6,7 @@ use meilisearch_types::batches::{BatchEnqueuedAt, BatchId};
use meilisearch_types::heed::{RoTxn, RwTxn};
use meilisearch_types::milli::progress::{Progress, VariableNameStep};
use meilisearch_types::milli::{self, ChannelCongestion};
use meilisearch_types::tasks::{Details, IndexSwap, Kind, KindWithContent, Status, Task};
use meilisearch_types::versioning::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
use meilisearch_types::tasks::{Details, IndexSwap, KindWithContent, Status, Task};
use milli::update::Settings as MilliSettings;
use roaring::RoaringBitmap;

@@ -145,22 +144,11 @@ impl IndexScheduler {
self.index_mapper.index(&rtxn, &index_uid)?
};

let mut index_wtxn = index.write_txn()?;

let index_version = index.get_version(&index_wtxn)?.unwrap_or((1, 12, 0));
let package_version = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH);
if index_version != package_version {
return Err(Error::IndexVersionMismatch {
index: index_uid,
index_version,
package_version,
});
}

// the index operation can take a long time, so save this handle to make it available to the search for the duration of the tick
self.index_mapper
.set_currently_updating_index(Some((index_uid.clone(), index.clone())));

let mut index_wtxn = index.write_txn()?;
let pre_commit_dabases_sizes = index.database_sizes(&index_wtxn)?;
let (tasks, congestion) =
self.apply_index_operation(&mut index_wtxn, &index, op, &progress)?;
@@ -365,11 +353,9 @@ impl IndexScheduler {
let KindWithContent::UpgradeDatabase { from } = tasks.last().unwrap().kind else {
unreachable!();
};

let ret = catch_unwind(AssertUnwindSafe(|| self.process_upgrade(from, progress)));
match ret {
Ok(Ok(())) => (),
Ok(Err(Error::AbortedTask)) => return Err(Error::AbortedTask),
Ok(Err(e)) => return Err(Error::DatabaseUpgrade(Box::new(e))),
Err(e) => {
let msg = match e.downcast_ref::<&'static str>() {
@@ -667,79 +653,17 @@ impl IndexScheduler {
progress: &Progress,
) -> Result<Vec<Task>> {
progress.update_progress(TaskCancelationProgress::RetrievingTasks);
let mut tasks_to_cancel = RoaringBitmap::new();

let enqueued_tasks = &self.queue.tasks.get_status(rtxn, Status::Enqueued)?;

// 0. Check if any upgrade task was matched.
// If so, we cancel all the failed or enqueued upgrade tasks.
let upgrade_tasks = &self.queue.tasks.get_kind(rtxn, Kind::UpgradeDatabase)?;
let is_canceling_upgrade = !matched_tasks.is_disjoint(upgrade_tasks);
if is_canceling_upgrade {
let failed_tasks = self.queue.tasks.get_status(rtxn, Status::Failed)?;
tasks_to_cancel |= upgrade_tasks & (enqueued_tasks | failed_tasks);
}
// 1. Remove from this list the tasks that we are not allowed to cancel
// Notice that only the _enqueued_ ones are cancelable and we should
// have already aborted the indexation of the _processing_ ones
tasks_to_cancel |= enqueued_tasks & matched_tasks;
let cancelable_tasks = self.queue.tasks.get_status(rtxn, Status::Enqueued)?;
let tasks_to_cancel = cancelable_tasks & matched_tasks;

// 2. If we're canceling an upgrade, attempt the rollback
if let Some(latest_upgrade_task) = (&tasks_to_cancel & upgrade_tasks).max() {
progress.update_progress(TaskCancelationProgress::CancelingUpgrade);

let task = self.queue.tasks.get_task(rtxn, latest_upgrade_task)?.unwrap();
let Some(Details::UpgradeDatabase { from, to }) = task.details else {
unreachable!("wrong details for upgrade task {latest_upgrade_task}")
};

// check that we are rollbacking an upgrade to the current Meilisearch
let bin_major: u32 = meilisearch_types::versioning::VERSION_MAJOR;
let bin_minor: u32 = meilisearch_types::versioning::VERSION_MINOR;
let bin_patch: u32 = meilisearch_types::versioning::VERSION_PATCH;

if to == (bin_major, bin_minor, bin_patch) {
tracing::warn!(
"Rollbacking from v{}.{}.{} to v{}.{}.{}",
to.0,
to.1,
to.2,
from.0,
from.1,
from.2
);
match std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
self.process_rollback(from, progress)
})) {
Ok(Ok(())) => {}
Ok(Err(err)) => return Err(Error::DatabaseUpgrade(Box::new(err))),
Err(e) => {
let msg = match e.downcast_ref::<&'static str>() {
Some(s) => *s,
None => match e.downcast_ref::<String>() {
Some(s) => &s[..],
None => "Box<dyn Any>",
},
};
return Err(Error::DatabaseUpgrade(Box::new(Error::ProcessBatchPanicked(
msg.to_string(),
))));
}
}
} else {
tracing::debug!(
"Not rollbacking an upgrade targetting the earlier version v{}.{}.{}",
bin_major,
bin_minor,
bin_patch
)
}
}

// 3. We now have a list of tasks to cancel, cancel them
let (task_progress, progress_obj) = AtomicTaskStep::new(tasks_to_cancel.len() as u32);
progress.update_progress(progress_obj);

// 2. We now have a list of tasks to cancel, cancel them
let mut tasks = self.queue.tasks.get_existing_tasks(
rtxn,
tasks_to_cancel.iter().inspect(|_| {
@@ -12,14 +12,10 @@ impl IndexScheduler {
#[cfg(test)]
self.maybe_fail(crate::test_utils::FailureLocation::ProcessUpgrade)?;

enum UpgradeIndex {}
let indexes = self.index_names()?;

for (i, uid) in indexes.iter().enumerate() {
let must_stop_processing = self.scheduler.must_stop_processing.clone();

if must_stop_processing.get() {
return Err(Error::AbortedTask);
}
progress.update_progress(VariableNameStep::<UpgradeIndex>::new(
format!("Upgrading index `{uid}`"),
i as u32,
@@ -31,7 +27,6 @@ impl IndexScheduler {
&mut index_wtxn,
&index,
db_version,
|| must_stop_processing.get(),
progress.clone(),
)
.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
@@ -51,42 +46,4 @@ impl IndexScheduler {

Ok(())
}

pub fn process_rollback(&self, db_version: (u32, u32, u32), progress: &Progress) -> Result<()> {
let mut wtxn = self.env.write_txn()?;
tracing::info!(?db_version, "roll back index scheduler version");
self.version.set_version(&mut wtxn, db_version)?;
let db_path = self.scheduler.version_file_path.parent().unwrap();
wtxn.commit()?;

let indexes = self.index_names()?;

tracing::info!("roll backing all indexes");
for (i, uid) in indexes.iter().enumerate() {
progress.update_progress(VariableNameStep::<UpgradeIndex>::new(
format!("Rollbacking index `{uid}`"),
i as u32,
indexes.len() as u32,
));
let index_schd_rtxn = self.env.read_txn()?;

let rollback_outcome =
self.index_mapper.rollback_index(&index_schd_rtxn, uid, db_version)?;
if !rollback_outcome.succeeded() {
return Err(crate::Error::RollbackFailed { index: uid.clone(), rollback_outcome });
}
}

tracing::info!(?db_path, ?db_version, "roll back version file");
meilisearch_types::versioning::create_version_file(
db_path,
db_version.0,
db_version.1,
db_version.2,
)?;

Ok(())
}
}

enum UpgradeIndex {}
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 14, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
[timestamp] [4,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.14.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task", }
1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "task with id 1 of type `indexCreation` cannot be batched", }
2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 2 of type `indexCreation` cannot be batched", }
3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "task with id 3 of type `indexCreation` cannot be batched", }

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 14, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
----------------------------------------------------------------------
### Status:
enqueued [0,]

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 14, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 14, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
----------------------------------------------------------------------
### Status:
@@ -37,7 +37,7 @@ catto [1,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.14.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 14, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
----------------------------------------------------------------------
@@ -40,7 +40,7 @@ doggo [2,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.14.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]

@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 14, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
3 {uid: 3, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -43,7 +43,7 @@ doggo [2,3,]
[timestamp] [0,]
----------------------------------------------------------------------
### All Batches:
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task", }
0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.14.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task", }
----------------------------------------------------------------------
### Batch to tasks mapping:
0 [0,]
@@ -1,432 +0,0 @@
use std::collections::{BTreeMap, BTreeSet};
use std::convert::Infallible;
use std::fmt;
use std::marker::PhantomData;
use std::num::NonZeroUsize;
use std::ops::{ControlFlow, Deref};
use std::str::FromStr;

use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
use fst::IntoStreamer;
use milli::disabled_typos_terms::DisabledTyposTerms;
use milli::index::{IndexEmbeddingConfig, PrefixSearch};
use milli::proximity::ProximityPrecision;
use milli::update::Setting;
use milli::{FilterableAttributesRule, Index};
use serde::{Deserialize, Serialize, Serializer};
use utoipa::ToSchema;

use crate::deserr::DeserrJsonError;
use crate::error::deserr_codes::*;
use crate::heed::RoTxn;
use crate::IndexScheduler;

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase, where_predicate = __Deserr_E: deserr::MergeWithError<DeserrJsonError<InvalidSettingsTypoTolerance>>)]
pub struct PromptsSettings {
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
#[schema(value_type = Option<String>)]
pub system: Setting<String>,

#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
#[schema(value_type = Option<MinWordSizeTyposSetting>, example = json!({ "oneTypo": 5, "twoTypo": 9 }))]
pub search_description: Setting<String>,

#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
#[schema(value_type = Option<String>)]
pub search_q_param: Setting<String>,

#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default)]
#[schema(value_type = Option<String>)]
pub pre_query: Setting<String>,
}

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub enum ChatSource {
#[default]
OpenAi,
}

/// Holds all the settings for an index. `T` can either be `Checked` if they represents settings
/// whose validity is guaranteed, or `Unchecked` if they need to be validated. In the later case, a
/// call to `check` will return a `Settings<Checked>` from a `Settings<Unchecked>`.
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
#[serde(
deny_unknown_fields,
rename_all = "camelCase",
bound(serialize = "T: Serialize", deserialize = "T: Deserialize<'static>")
)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
#[schema(rename_all = "camelCase")]
pub struct ChatSettings<T> {
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsDisplayedAttributes>)]
#[schema(value_type = Option<String>)]
pub source: Setting<ChatSource>,

#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
#[schema(value_type = Option<String>)]
pub base_api: Setting<String>,

#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
#[schema(value_type = Option<String>)]
pub api_key: Setting<String>,

#[serde(default, skip_serializing_if = "Setting::is_not_set")]
#[deserr(default, error = DeserrJsonError<InvalidSettingsFilterableAttributes>)]
#[schema(value_type = Option<PromptsSettings>)]
pub prompts: Setting<PromptsSettings>,

#[serde(skip)]
#[deserr(skip)]
pub _kind: PhantomData<T>,
}

impl<T> ChatSettings<T> {
pub fn hide_secrets(&mut self) {
match &mut self.api_key {
Setting::Set(key) => Self::hide_secrets(key),
Setting::Reset => todo!(),
Setting::NotSet => todo!(),
}
}

fn hide_secret(secret: &mut String) {
match secret.len() {
x if x < 10 => {
secret.replace_range(.., "XXX...");
}
x if x < 20 => {
secret.replace_range(2.., "XXXX...");
}
x if x < 30 => {
secret.replace_range(3.., "XXXXX...");
}
_x => {
secret.replace_range(5.., "XXXXXX...");
}
}
}
}

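For reference, the masking rule in the removed `hide_secret` helper keeps a length-dependent prefix of the key and replaces the rest with a fixed placeholder. A standalone sketch of that rule (thresholds copied from the code above; the function name is hypothetical):

```rust
// Standalone restatement of the masking thresholds used above, for illustration only.
fn mask_api_key(secret: &mut String) {
    match secret.len() {
        x if x < 10 => secret.replace_range(.., "XXX..."),
        x if x < 20 => secret.replace_range(2.., "XXXX..."),
        x if x < 30 => secret.replace_range(3.., "XXXXX..."),
        _ => secret.replace_range(5.., "XXXXXX..."),
    }
}

fn main() {
    let mut short = String::from("abcdef"); // fewer than 10 chars: fully masked
    let mut long = String::from("sk-1234567890abcdefghijklmnopqrstuv"); // 30+ chars: keep 5
    mask_api_key(&mut short);
    mask_api_key(&mut long);
    assert_eq!(short, "XXX...");
    assert_eq!(long, "sk-12XXXXXX...");
}
```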
impl ChatSettings<Checked> {
pub fn cleared() -> ChatSettings<Checked> {
ChatSettings {
source: Setting::Reset,
base_api: Setting::Reset,
api_key: Setting::Reset,
prompts: Setting::Reset,
_kind: PhantomData,
}
}

pub fn into_unchecked(self) -> ChatSettings<Unchecked> {
let Self { source, base_api, api_key, prompts, _kind } = self;
ChatSettings { source, base_api, api_key, prompts, _kind: PhantomData }
}
}

impl ChatSettings<Unchecked> {
pub fn check(self) -> ChatSettings<Checked> {
ChatSettings {
source: self.source,
base_api: self.base_api,
api_key: self.api_key,
prompts: self.prompts,
_kind: PhantomData,
}
}

pub fn validate(self) -> Result<Self, milli::Error> {
self.validate_prompt_settings()?;
self.validate_global_settings()
}

fn validate_global_settings(mut self) -> Result<Self, milli::Error> {
// Check that the ApiBase is a valid URL
Ok(self)
}

fn validate_prompt_settings(mut self) -> Result<Self, milli::Error> {
// TODO
// let Setting::Set(mut configs) = self.embedders else { return Ok(self) };
// for (name, config) in configs.iter_mut() {
// let config_to_check = std::mem::take(config);
// let checked_config =
// milli::update::validate_embedding_settings(config_to_check.inner, name)?;
// *config = SettingEmbeddingSettings { inner: checked_config };
// }
// self.embedders = Setting::Set(configs);
Ok(self)
}

pub fn merge(&mut self, other: &Self) {
// For most settings only the latest version is kept
*self = Self {
source: other.source.or(self.source),
base_api: other.base_api.or(self.base_api),
api_key: other.api_key.or(self.api_key),
prompts: match (self.prompts, other.prompts) {
(Setting::NotSet, set) | (set, Setting::NotSet) => set,
(Setting::Set(_) | Setting::Reset, Setting::Reset) => Setting::Reset,
(Setting::Reset, Setting::Set(set)) => Setting::Set(set),
// If both are set we must merge the prompts settings
(Setting::Set(this), Setting::Set(other)) => Setting::Set(PromptsSettings {
system: other.system.or(system),
search_description: other.search_description.or(search_description),
search_q_param: other.search_q_param.or(search_q_param),
pre_query: other.pre_query.or(pre_query),
}),
},

_kind: PhantomData,
}
}
}

pub fn apply_settings_to_builder(
settings: &ChatSettings<Checked>,
// TODO we must not store this into milli but in the index scheduler
builder: &mut milli::update::Settings,
) {
let ChatSettings { source, base_api, api_key, prompts, _kind } = settings;

match source.deref() {
Setting::Set(ref names) => builder.set_searchable_fields(names.clone()),
Setting::Reset => builder.reset_searchable_fields(),
Setting::NotSet => (),
}

match displayed_attributes.deref() {
Setting::Set(ref names) => builder.set_displayed_fields(names.clone()),
Setting::Reset => builder.reset_displayed_fields(),
Setting::NotSet => (),
}

match filterable_attributes {
Setting::Set(ref facets) => {
builder.set_filterable_fields(facets.clone().into_iter().collect())
}
Setting::Reset => builder.reset_filterable_fields(),
Setting::NotSet => (),
}

match sortable_attributes {
Setting::Set(ref fields) => builder.set_sortable_fields(fields.iter().cloned().collect()),
Setting::Reset => builder.reset_sortable_fields(),
Setting::NotSet => (),
}

match ranking_rules {
Setting::Set(ref criteria) => {
builder.set_criteria(criteria.iter().map(|c| c.clone().into()).collect())
}
Setting::Reset => builder.reset_criteria(),
Setting::NotSet => (),
}

match stop_words {
Setting::Set(ref stop_words) => builder.set_stop_words(stop_words.clone()),
Setting::Reset => builder.reset_stop_words(),
Setting::NotSet => (),
}

match non_separator_tokens {
Setting::Set(ref non_separator_tokens) => {
builder.set_non_separator_tokens(non_separator_tokens.clone())
}
Setting::Reset => builder.reset_non_separator_tokens(),
Setting::NotSet => (),
}

match separator_tokens {
Setting::Set(ref separator_tokens) => {
builder.set_separator_tokens(separator_tokens.clone())
}
Setting::Reset => builder.reset_separator_tokens(),
Setting::NotSet => (),
}

match dictionary {
Setting::Set(ref dictionary) => builder.set_dictionary(dictionary.clone()),
Setting::Reset => builder.reset_dictionary(),
Setting::NotSet => (),
}

match synonyms {
Setting::Set(ref synonyms) => builder.set_synonyms(synonyms.clone().into_iter().collect()),
Setting::Reset => builder.reset_synonyms(),
Setting::NotSet => (),
}

match distinct_attribute {
Setting::Set(ref attr) => builder.set_distinct_field(attr.clone()),
Setting::Reset => builder.reset_distinct_field(),
Setting::NotSet => (),
}

match proximity_precision {
Setting::Set(ref precision) => builder.set_proximity_precision((*precision).into()),
Setting::Reset => builder.reset_proximity_precision(),
Setting::NotSet => (),
}

match localized_attributes_rules {
Setting::Set(ref rules) => builder
.set_localized_attributes_rules(rules.iter().cloned().map(|r| r.into()).collect()),
Setting::Reset => builder.reset_localized_attributes_rules(),
Setting::NotSet => (),
}

match typo_tolerance {
Setting::Set(ref value) => {
match value.enabled {
Setting::Set(val) => builder.set_autorize_typos(val),
Setting::Reset => builder.reset_authorize_typos(),
Setting::NotSet => (),
}

match value.min_word_size_for_typos {
Setting::Set(ref setting) => {
match setting.one_typo {
Setting::Set(val) => builder.set_min_word_len_one_typo(val),
Setting::Reset => builder.reset_min_word_len_one_typo(),
Setting::NotSet => (),
}
match setting.two_typos {
Setting::Set(val) => builder.set_min_word_len_two_typos(val),
Setting::Reset => builder.reset_min_word_len_two_typos(),
Setting::NotSet => (),
}
}
Setting::Reset => {
builder.reset_min_word_len_one_typo();
builder.reset_min_word_len_two_typos();
}
Setting::NotSet => (),
}

match value.disable_on_words {
Setting::Set(ref words) => {
builder.set_exact_words(words.clone());
}
Setting::Reset => builder.reset_exact_words(),
Setting::NotSet => (),
}

match value.disable_on_attributes {
Setting::Set(ref words) => {
builder.set_exact_attributes(words.iter().cloned().collect())
}
Setting::Reset => builder.reset_exact_attributes(),
Setting::NotSet => (),
}

match value.disable_on_numbers {
Setting::Set(val) => builder.set_disable_on_numbers(val),
Setting::Reset => builder.reset_disable_on_numbers(),
Setting::NotSet => (),
}
}
Setting::Reset => {
// all typo settings need to be reset here.
builder.reset_authorize_typos();
builder.reset_min_word_len_one_typo();
builder.reset_min_word_len_two_typos();
builder.reset_exact_words();
builder.reset_exact_attributes();
}
Setting::NotSet => (),
}

match faceting {
Setting::Set(FacetingSettings { max_values_per_facet, sort_facet_values_by }) => {
match max_values_per_facet {
Setting::Set(val) => builder.set_max_values_per_facet(*val),
Setting::Reset => builder.reset_max_values_per_facet(),
Setting::NotSet => (),
}
match sort_facet_values_by {
Setting::Set(val) => builder.set_sort_facet_values_by(
val.iter().map(|(name, order)| (name.clone(), (*order).into())).collect(),
),
Setting::Reset => builder.reset_sort_facet_values_by(),
Setting::NotSet => (),
}
}
Setting::Reset => {
builder.reset_max_values_per_facet();
builder.reset_sort_facet_values_by();
}
Setting::NotSet => (),
}

match pagination {
Setting::Set(ref value) => match value.max_total_hits {
Setting::Set(val) => builder.set_pagination_max_total_hits(val),
Setting::Reset => builder.reset_pagination_max_total_hits(),
Setting::NotSet => (),
},
Setting::Reset => builder.reset_pagination_max_total_hits(),
Setting::NotSet => (),
}

match embedders {
Setting::Set(value) => builder.set_embedder_settings(
value.iter().map(|(k, v)| (k.clone(), v.inner.clone())).collect(),
),
Setting::Reset => builder.reset_embedder_settings(),
Setting::NotSet => (),
}

match search_cutoff_ms {
Setting::Set(cutoff) => builder.set_search_cutoff(*cutoff),
Setting::Reset => builder.reset_search_cutoff(),
Setting::NotSet => (),
}

match prefix_search {
Setting::Set(prefix_search) => {
builder.set_prefix_search(PrefixSearch::from(*prefix_search))
}
Setting::Reset => builder.reset_prefix_search(),
Setting::NotSet => (),
}

match facet_search {
Setting::Set(facet_search) => builder.set_facet_search(*facet_search),
Setting::Reset => builder.reset_facet_search(),
Setting::NotSet => (),
}

match chat {
Setting::Set(chat) => builder.set_chat(chat.clone()),
Setting::Reset => builder.reset_chat(),
Setting::NotSet => (),
}
}

pub enum SecretPolicy {
RevealSecrets,
HideSecrets,
}

pub fn settings(
index_scheduler: &IndexScheduler,
rtxn: &RoTxn,
secret_policy: SecretPolicy,
) -> Result<Settings<Checked>, milli::Error> {
let mut settings = index_scheduler.chat_settings(rtxn)?;
if let SecretPolicy::HideSecrets = secret_policy {
settings.hide_secrets()
}
Ok(settings)
}
@@ -1,3 +0,0 @@
mod chat;

pub use chat::ChatSettings;
@@ -114,8 +114,12 @@ impl IndexScheduler {
auto_upgrade: true, // Don't cost much and will ensure the happy path works
embedding_cache_cap: 10,
};
let version = configuration(&mut options).unwrap_or({
(versioning::VERSION_MAJOR, versioning::VERSION_MINOR, versioning::VERSION_PATCH)
let version = configuration(&mut options).unwrap_or_else(|| {
(
versioning::VERSION_MAJOR.parse().unwrap(),
versioning::VERSION_MINOR.parse().unwrap(),
versioning::VERSION_PATCH.parse().unwrap(),
)
});

std::fs::create_dir_all(&options.auth_path).unwrap();
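The switch from `unwrap_or(...)` to `unwrap_or_else(|| ...)` in the hunk above matters because `unwrap_or` evaluates its argument eagerly, so the three `parse().unwrap()` calls would run (and could panic) even when `configuration` already returned a version. A tiny illustration of the difference, independent of the code above:

```rust
fn expensive_default() -> u32 {
    println!("computing default");
    42
}

fn main() {
    let v = Some(7u32);
    // Eager: `expensive_default()` runs even though its result is never used.
    let _a = v.unwrap_or(expensive_default());
    // Lazy: the closure only runs when `v` is `None`.
    let _b = v.unwrap_or_else(expensive_default);
}
```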
@@ -28,17 +28,12 @@ pub fn upgrade_index_scheduler(
let current_minor = to.1;
let current_patch = to.2;

let upgrade_functions: &[&dyn UpgradeIndexScheduler] = &[
// This is the last upgrade function, it will be called when the index is up to date.
// any other upgrade function should be added before this one.
&ToCurrentNoOp {},
];
let upgrade_functions: &[&dyn UpgradeIndexScheduler] = &[&ToCurrentNoOp {}];

let start = match from {
(1, 12, _) => 0,
(1, 13, _) => 0,
(1, 14, _) => 0,
(1, 15, _) => 0,
(major, minor, patch) => {
if major > current_major
|| (major == current_major && minor > current_minor)
@@ -109,6 +104,10 @@ impl UpgradeIndexScheduler for ToCurrentNoOp {
}

fn target_version(&self) -> (u32, u32, u32) {
(VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
(
VERSION_MAJOR.parse().unwrap(),
VERSION_MINOR.parse().unwrap(),
VERSION_PATCH.parse().unwrap(),
)
}
}

@@ -39,9 +39,9 @@ impl Versioning {
}

pub fn set_current_version(&self, wtxn: &mut RwTxn) -> Result<(), heed::Error> {
let major = versioning::VERSION_MAJOR;
let minor = versioning::VERSION_MINOR;
let patch = versioning::VERSION_PATCH;
let major = versioning::VERSION_MAJOR.parse().unwrap();
let minor = versioning::VERSION_MINOR.parse().unwrap();
let patch = versioning::VERSION_PATCH.parse().unwrap();
self.set_version(wtxn, (major, minor, patch))
}

@@ -64,9 +64,9 @@ impl Versioning {
};
wtxn.commit()?;

let bin_major: u32 = versioning::VERSION_MAJOR;
let bin_minor: u32 = versioning::VERSION_MINOR;
let bin_patch: u32 = versioning::VERSION_PATCH;
let bin_major: u32 = versioning::VERSION_MAJOR.parse().unwrap();
let bin_minor: u32 = versioning::VERSION_MINOR.parse().unwrap();
let bin_patch: u32 = versioning::VERSION_PATCH.parse().unwrap();
let to = (bin_major, bin_minor, bin_patch);

if from != to {
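Several hunks above (the snapshot helper, `target_version`, and `Versioning`) replace direct use of `VERSION_MAJOR` / `VERSION_MINOR` / `VERSION_PATCH` with `.parse().unwrap()`, which suggests these constants become string literals rather than `u32`s on one side of this compare. A hedged sketch of that assumed pattern; the constant definitions below are assumptions, not taken from the diff:

```rust
// Assumption: the constants are now &str, e.g. generated from Cargo metadata.
const VERSION_MAJOR: &str = env!("CARGO_PKG_VERSION_MAJOR");
const VERSION_MINOR: &str = env!("CARGO_PKG_VERSION_MINOR");
const VERSION_PATCH: &str = env!("CARGO_PKG_VERSION_PATCH");

fn binary_version() -> (u32, u32, u32) {
    // Parsing only fails if the strings are not numeric, hence the unwraps.
    (
        VERSION_MAJOR.parse().unwrap(),
        VERSION_MINOR.parse().unwrap(),
        VERSION_PATCH.parse().unwrap(),
    )
}
```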
@@ -351,7 +351,6 @@ pub struct IndexSearchRules {
fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
store.put_api_key(Key::default_admin())?;
store.put_api_key(Key::default_search())?;
store.put_api_key(Key::default_chat())?;

Ok(())
}

@@ -87,7 +87,7 @@ impl HeedAuthStore {

let mut actions = HashSet::new();
for action in &key.actions {
match action {
match *action {
Action::All => actions.extend(enum_iterator::all::<Action>()),
Action::DocumentsAll => {
actions.extend(
@@ -110,23 +110,11 @@ impl HeedAuthStore {
Action::SettingsAll => {
actions.extend([Action::SettingsGet, Action::SettingsUpdate].iter());
}
Action::DumpsAll => {
actions.insert(Action::DumpsCreate);
}
Action::SnapshotsAll => {
actions.insert(Action::SnapshotsCreate);
}
Action::TasksAll => {
actions.extend([Action::TasksGet, Action::TasksDelete, Action::TasksCancel]);
}
Action::StatsAll => {
actions.insert(Action::StatsGet);
}
Action::MetricsAll => {
actions.insert(Action::MetricsGet);
}
other => {
actions.insert(*other);
actions.insert(other);
}
}
}
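One small detail in the hunk above: the loop now matches on `*action` and inserts `other` directly, instead of matching on the reference and dereferencing inside the arm. With a `Copy` type the two forms are equivalent; matching on the dereferenced value just moves the copy to one place. A minimal illustration with a stand-in enum (not the real `Action` type):

```rust
use std::collections::HashSet;

// Stand-in for the real `Action`; only here to show the two match styles.
#[derive(Clone, Copy, PartialEq, Eq, Hash)]
enum Action { All, StatsGet, MetricsGet }

fn main() {
    let key_actions = vec![Action::StatsGet, Action::MetricsGet];
    let mut actions = HashSet::new();
    for action in &key_actions {
        // `*action` copies the value out of the `&Action`, so the arms bind owned values
        // and `actions.insert(other)` needs no extra dereference.
        match *action {
            Action::All => { actions.extend([Action::StatsGet, Action::MetricsGet]); }
            other => { actions.insert(other); }
        }
    }
    assert_eq!(actions.len(), 2);
}
```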
@@ -275,18 +263,24 @@ impl HeedAuthStore {
/// optionally on a specific index, for a given key.
pub struct KeyIdActionCodec;

impl KeyIdActionCodec {
fn action_parts_to_32bits([p1, p2, p3, p4]: &[u8; 4]) -> u32 {
((*p1 as u32) << 24) | ((*p2 as u32) << 16) | ((*p3 as u32) << 8) | (*p4 as u32)
}
}

impl<'a> heed::BytesDecode<'a> for KeyIdActionCodec {
type DItem = (KeyId, Action, Option<&'a [u8]>);

fn bytes_decode(bytes: &'a [u8]) -> StdResult<Self::DItem, BoxedError> {
let (key_id_bytes, action_bytes) = try_split_array_at(bytes).ok_or(SliceTooShortError)?;
let (&action_byte, index) =
match try_split_array_at(action_bytes).ok_or(SliceTooShortError)? {
([action], []) => (action, None),
([action], index) => (action, Some(index)),
let (action_bits, index) =
match try_split_array_at::<u8, 4>(action_bytes).ok_or(SliceTooShortError)? {
(action_parts, []) => (Self::action_parts_to_32bits(action_parts), None),
(action_parts, index) => (Self::action_parts_to_32bits(action_parts), Some(index)),
};
let key_id = Uuid::from_bytes(*key_id_bytes);
let action = Action::from_repr(action_byte).ok_or(InvalidActionError { action_byte })?;
let action = Action::from_bits(action_bits).ok_or(InvalidActionError { action_bits })?;

Ok((key_id, action, index))
}
@@ -299,7 +293,7 @@ impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
let mut bytes = Vec::new();

bytes.extend_from_slice(key_id.as_bytes());
let action_bytes = u8::to_be_bytes(action.repr());
let action_bytes = u32::to_be_bytes(action.bits());
bytes.extend_from_slice(&action_bytes);
if let Some(index) = index {
bytes.extend_from_slice(index);
@@ -314,9 +308,9 @@ impl<'a> heed::BytesEncode<'a> for KeyIdActionCodec {
pub struct SliceTooShortError;

#[derive(Error, Debug)]
#[error("cannot construct a valid Action from {action_byte}")]
#[error("cannot construct a valid Action from {action_bits}")]
pub struct InvalidActionError {
pub action_byte: u8,
pub action_bits: u32,
}

pub fn generate_key_as_hexa(uid: Uuid, master_key: &[u8]) -> String {

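The codec change above widens the stored action from one byte to four: `bytes_encode` now writes `u32::to_be_bytes(action.bits())`, and `bytes_decode` rebuilds the value with `action_parts_to_32bits`. A self-contained sketch of that round trip, with illustrative values rather than code from this diff:

```rust
// Sketch only: encode a u32 action value as four big-endian bytes and decode it
// back with the same shifts that `action_parts_to_32bits` uses.
fn encode_action(bits: u32) -> [u8; 4] {
    u32::to_be_bytes(bits)
}

fn decode_action([p1, p2, p3, p4]: [u8; 4]) -> u32 {
    ((p1 as u32) << 24) | ((p2 as u32) << 16) | ((p3 as u32) << 8) | (p4 as u32)
}

fn main() {
    // Example value: bits 1 and 12 set (DocumentsAdd | SettingsGet in the new layout).
    let bits = 0b0001_0000_0000_0010u32;
    assert_eq!(decode_action(encode_action(bits)), bits);
}
```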
@@ -42,6 +42,7 @@ time = { version = "0.3.37", features = [
tokio = "1.43"
utoipa = { version = "5.3.1", features = ["macros"] }
uuid = { version = "1.11.0", features = ["serde", "v4"] }
bitflags = "2.6.0"

[dev-dependencies]
# fixed version due to format breakages in v1.40

@@ -22,7 +22,7 @@ pub struct BatchView {
#[serde(with = "time::serde::rfc3339::option", default)]
pub finished_at: Option<OffsetDateTime>,
#[serde(default = "meilisearch_types::batches::default_stop_reason")]
pub batch_creation_complete: String,
pub batcher_stopped_because: String,
}

impl BatchView {
@@ -35,7 +35,7 @@ impl BatchView {
duration: batch.finished_at.map(|finished_at| finished_at - batch.started_at),
started_at: batch.started_at,
finished_at: batch.finished_at,
batch_creation_complete: batch.stop_reason.clone(),
batcher_stopped_because: batch.stop_reason.clone(),
}
}
}

@@ -387,8 +387,7 @@ VectorEmbeddingError , InvalidRequest , BAD_REQUEST ;
NotFoundSimilarId , InvalidRequest , BAD_REQUEST ;
InvalidDocumentEditionContext , InvalidRequest , BAD_REQUEST ;
InvalidDocumentEditionFunctionFilter , InvalidRequest , BAD_REQUEST ;
EditDocumentsByFunctionError , InvalidRequest , BAD_REQUEST ;
InvalidSettingsIndexChat , InvalidRequest , BAD_REQUEST
EditDocumentsByFunctionError , InvalidRequest , BAD_REQUEST
}

impl ErrorCode for JoinError {

@@ -2,10 +2,11 @@ use std::convert::Infallible;
|
||||
use std::hash::Hash;
|
||||
use std::str::FromStr;
|
||||
|
||||
use deserr::{DeserializeError, Deserr, MergeWithError, ValuePointerRef};
|
||||
use bitflags::{bitflags, Flags};
|
||||
use deserr::{take_cf_content, DeserializeError, Deserr, MergeWithError, ValuePointerRef};
|
||||
use enum_iterator::Sequence;
|
||||
use milli::update::Setting;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
use time::format_description::well_known::Rfc3339;
|
||||
use time::macros::{format_description, time};
|
||||
use time::{Date, OffsetDateTime, PrimitiveDateTime};
|
||||
@@ -158,21 +159,6 @@ impl Key {
|
||||
updated_at: now,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn default_chat() -> Self {
|
||||
let now = OffsetDateTime::now_utc();
|
||||
let uid = Uuid::new_v4();
|
||||
Self {
|
||||
name: Some("Default Chat API Key".to_string()),
|
||||
description: Some("Use it to chat and search from the frontend".to_string()),
|
||||
uid,
|
||||
actions: vec![Action::Chat, Action::Search],
|
||||
indexes: vec![IndexUidPattern::all()],
|
||||
expires_at: None,
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn parse_expiration_date(
|
||||
@@ -210,227 +196,307 @@ fn parse_expiration_date(
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(
|
||||
Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash, Sequence, Deserr, ToSchema,
|
||||
)]
|
||||
#[repr(u8)]
|
||||
pub enum Action {
|
||||
#[serde(rename = "*")]
|
||||
#[deserr(rename = "*")]
|
||||
All = 0,
|
||||
#[serde(rename = "search")]
|
||||
#[deserr(rename = "search")]
|
||||
Search,
|
||||
#[serde(rename = "documents.*")]
|
||||
#[deserr(rename = "documents.*")]
|
||||
DocumentsAll,
|
||||
#[serde(rename = "documents.add")]
|
||||
#[deserr(rename = "documents.add")]
|
||||
DocumentsAdd,
|
||||
#[serde(rename = "documents.get")]
|
||||
#[deserr(rename = "documents.get")]
|
||||
DocumentsGet,
|
||||
#[serde(rename = "documents.delete")]
|
||||
#[deserr(rename = "documents.delete")]
|
||||
DocumentsDelete,
|
||||
#[serde(rename = "indexes.*")]
|
||||
#[deserr(rename = "indexes.*")]
|
||||
IndexesAll,
|
||||
#[serde(rename = "indexes.create")]
|
||||
#[deserr(rename = "indexes.create")]
|
||||
IndexesAdd,
|
||||
#[serde(rename = "indexes.get")]
|
||||
#[deserr(rename = "indexes.get")]
|
||||
IndexesGet,
|
||||
#[serde(rename = "indexes.update")]
|
||||
#[deserr(rename = "indexes.update")]
|
||||
IndexesUpdate,
|
||||
#[serde(rename = "indexes.delete")]
|
||||
#[deserr(rename = "indexes.delete")]
|
||||
IndexesDelete,
|
||||
#[serde(rename = "indexes.swap")]
|
||||
#[deserr(rename = "indexes.swap")]
|
||||
IndexesSwap,
|
||||
#[serde(rename = "tasks.*")]
|
||||
#[deserr(rename = "tasks.*")]
|
||||
TasksAll,
|
||||
#[serde(rename = "tasks.cancel")]
|
||||
#[deserr(rename = "tasks.cancel")]
|
||||
TasksCancel,
|
||||
#[serde(rename = "tasks.delete")]
|
||||
#[deserr(rename = "tasks.delete")]
|
||||
TasksDelete,
|
||||
#[serde(rename = "tasks.get")]
|
||||
#[deserr(rename = "tasks.get")]
|
||||
TasksGet,
|
||||
#[serde(rename = "settings.*")]
|
||||
#[deserr(rename = "settings.*")]
|
||||
SettingsAll,
|
||||
#[serde(rename = "settings.get")]
|
||||
#[deserr(rename = "settings.get")]
|
||||
SettingsGet,
|
||||
#[serde(rename = "settings.update")]
|
||||
#[deserr(rename = "settings.update")]
|
||||
SettingsUpdate,
|
||||
#[serde(rename = "stats.*")]
|
||||
#[deserr(rename = "stats.*")]
|
||||
StatsAll,
|
||||
#[serde(rename = "stats.get")]
|
||||
#[deserr(rename = "stats.get")]
|
||||
StatsGet,
|
||||
#[serde(rename = "metrics.*")]
|
||||
#[deserr(rename = "metrics.*")]
|
||||
MetricsAll,
|
||||
#[serde(rename = "metrics.get")]
|
||||
#[deserr(rename = "metrics.get")]
|
||||
MetricsGet,
|
||||
#[serde(rename = "dumps.*")]
|
||||
#[deserr(rename = "dumps.*")]
|
||||
DumpsAll,
|
||||
#[serde(rename = "dumps.create")]
|
||||
#[deserr(rename = "dumps.create")]
|
||||
DumpsCreate,
|
||||
#[serde(rename = "snapshots.*")]
|
||||
#[deserr(rename = "snapshots.*")]
|
||||
SnapshotsAll,
|
||||
#[serde(rename = "snapshots.create")]
|
||||
#[deserr(rename = "snapshots.create")]
|
||||
SnapshotsCreate,
|
||||
#[serde(rename = "version")]
|
||||
#[deserr(rename = "version")]
|
||||
Version,
|
||||
#[serde(rename = "keys.create")]
|
||||
#[deserr(rename = "keys.create")]
|
||||
KeysAdd,
|
||||
#[serde(rename = "keys.get")]
|
||||
#[deserr(rename = "keys.get")]
|
||||
KeysGet,
|
||||
#[serde(rename = "keys.update")]
|
||||
#[deserr(rename = "keys.update")]
|
||||
KeysUpdate,
|
||||
#[serde(rename = "keys.delete")]
|
||||
#[deserr(rename = "keys.delete")]
|
||||
KeysDelete,
|
||||
#[serde(rename = "experimental.get")]
|
||||
#[deserr(rename = "experimental.get")]
|
||||
ExperimentalFeaturesGet,
|
||||
#[serde(rename = "experimental.update")]
|
||||
#[deserr(rename = "experimental.update")]
|
||||
ExperimentalFeaturesUpdate,
|
||||
#[serde(rename = "network.get")]
|
||||
#[deserr(rename = "network.get")]
|
||||
NetworkGet,
|
||||
#[serde(rename = "network.update")]
|
||||
#[deserr(rename = "network.update")]
|
||||
NetworkUpdate,
|
||||
#[serde(rename = "chat.get")]
|
||||
#[deserr(rename = "chat.get")]
|
||||
Chat,
|
||||
#[serde(rename = "chatSettings.*")]
|
||||
#[deserr(rename = "chatSettings.*")]
|
||||
ChatSettingsAll,
|
||||
#[serde(rename = "chatSettings.get")]
|
||||
#[deserr(rename = "chatSettings.get")]
|
||||
ChatSettingsGet,
|
||||
#[serde(rename = "chatSettings.update")]
|
||||
#[deserr(rename = "chatSettings.update")]
|
||||
ChatSettingsUpdate,
|
||||
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, PartialOrd, Ord, ToSchema)]
#[repr(transparent)]
pub struct Action(u32);

bitflags! {
// NOTE: For `Sequence` impl to work, the values of these must be in ascending order
impl Action: u32 {
const Search = 1;
// Documents
const DocumentsAdd = 1 << 1;
const DocumentsGet = 1 << 2;
const DocumentsDelete = 1 << 3;
const DocumentsAll = Self::DocumentsAdd.bits() | Self::DocumentsGet.bits() | Self::DocumentsDelete.bits();
// Indexes
const IndexesAdd = 1 << 4;
const IndexesGet = 1 << 5;
const IndexesUpdate = 1 << 6;
const IndexesDelete = 1 << 7;
const IndexesSwap = 1 << 8;
const IndexesAll = Self::IndexesAdd.bits() | Self::IndexesGet.bits() | Self::IndexesUpdate.bits() | Self::IndexesDelete.bits() | Self::IndexesSwap.bits();
// Tasks
const TasksCancel = 1 << 9;
const TasksDelete = 1 << 10;
const TasksGet = 1 << 11;
const TasksAll = Self::TasksCancel.bits() | Self::TasksDelete.bits() | Self::TasksGet.bits();
// Settings
const SettingsGet = 1 << 12;
const SettingsUpdate = 1 << 13;
const SettingsAll = Self::SettingsGet.bits() | Self::SettingsUpdate.bits();
// Stats
const StatsGet = 1 << 14;
const StatsAll = Self::StatsGet.bits();
// Metrics
const MetricsGet = 1 << 15;
const MetricsAll = Self::MetricsGet.bits();
// Dumps
const DumpsCreate = 1 << 16;
const DumpsAll = Self::DumpsCreate.bits();
// Snapshots
const SnapshotsCreate = 1 << 17;
const SnapshotsAll = Self::SnapshotsCreate.bits();
// Keys without an "all" version
const Version = 1 << 18;
const KeysAdd = 1 << 19;
const KeysGet = 1 << 20;
const KeysUpdate = 1 << 21;
const KeysDelete = 1 << 22;
// Experimental Features
const ExperimentalFeaturesGet = 1 << 23;
const ExperimentalFeaturesUpdate = 1 << 24;
// Network
const NetworkGet = 1 << 25;
const NetworkUpdate = 1 << 26;
// All
const All = 0xFFFFFFFF >> (32 - 1 - 26);
}
}

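A small illustration of what the flag encoding above buys (assumption: the `bitflags` 2.x API; this is not code from the diff): composite actions such as `DocumentsAll` are plain bit unions, so membership checks become `contains` calls instead of `match` arms, and `All` is simply the mask of all 27 defined bits, since `0xFFFFFFFF >> (32 - 1 - 26)` keeps bits 0 through 26.

```rust
// Sketch using the classic bitflags 2.x form with a reduced set of the flags above.
use bitflags::bitflags;

bitflags! {
    #[derive(Debug, Clone, Copy, PartialEq, Eq)]
    pub struct Action: u32 {
        const Search = 1;
        const DocumentsAdd = 1 << 1;
        const DocumentsGet = 1 << 2;
        const DocumentsDelete = 1 << 3;
        const DocumentsAll = Self::DocumentsAdd.bits() | Self::DocumentsGet.bits() | Self::DocumentsDelete.bits();
    }
}

fn main() {
    // A composite flag contains each of its parts.
    assert!(Action::DocumentsAll.contains(Action::DocumentsGet));
    // A key holding several actions is just a union of bits.
    let key_actions = Action::Search | Action::DocumentsAdd;
    assert!(key_actions.contains(Action::Search));
    assert!(!key_actions.contains(Action::DocumentsDelete));
    // The `All` mask from the diff keeps bits 0..=26.
    assert_eq!(0xFFFF_FFFFu32 >> (32 - 1 - 26), (1u32 << 27) - 1);
}
```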
impl Action {
|
||||
pub const fn from_repr(repr: u8) -> Option<Self> {
|
||||
use actions::*;
|
||||
match repr {
|
||||
ALL => Some(Self::All),
|
||||
SEARCH => Some(Self::Search),
|
||||
DOCUMENTS_ALL => Some(Self::DocumentsAll),
|
||||
DOCUMENTS_ADD => Some(Self::DocumentsAdd),
|
||||
DOCUMENTS_GET => Some(Self::DocumentsGet),
|
||||
DOCUMENTS_DELETE => Some(Self::DocumentsDelete),
|
||||
INDEXES_ALL => Some(Self::IndexesAll),
|
||||
INDEXES_CREATE => Some(Self::IndexesAdd),
|
||||
INDEXES_GET => Some(Self::IndexesGet),
|
||||
INDEXES_UPDATE => Some(Self::IndexesUpdate),
|
||||
INDEXES_DELETE => Some(Self::IndexesDelete),
|
||||
INDEXES_SWAP => Some(Self::IndexesSwap),
|
||||
TASKS_ALL => Some(Self::TasksAll),
|
||||
TASKS_CANCEL => Some(Self::TasksCancel),
|
||||
TASKS_DELETE => Some(Self::TasksDelete),
|
||||
TASKS_GET => Some(Self::TasksGet),
|
||||
SETTINGS_ALL => Some(Self::SettingsAll),
|
||||
SETTINGS_GET => Some(Self::SettingsGet),
|
||||
SETTINGS_UPDATE => Some(Self::SettingsUpdate),
|
||||
CHAT_SETTINGS_ALL => Some(Self::ChatSettingsAll),
|
||||
CHAT_SETTINGS_GET => Some(Self::ChatSettingsGet),
|
||||
CHAT_SETTINGS_UPDATE => Some(Self::ChatSettingsUpdate),
|
||||
STATS_ALL => Some(Self::StatsAll),
|
||||
STATS_GET => Some(Self::StatsGet),
|
||||
METRICS_ALL => Some(Self::MetricsAll),
|
||||
METRICS_GET => Some(Self::MetricsGet),
|
||||
DUMPS_ALL => Some(Self::DumpsAll),
|
||||
DUMPS_CREATE => Some(Self::DumpsCreate),
|
||||
SNAPSHOTS_CREATE => Some(Self::SnapshotsCreate),
|
||||
VERSION => Some(Self::Version),
|
||||
KEYS_CREATE => Some(Self::KeysAdd),
|
||||
KEYS_GET => Some(Self::KeysGet),
|
||||
KEYS_UPDATE => Some(Self::KeysUpdate),
|
||||
KEYS_DELETE => Some(Self::KeysDelete),
|
||||
EXPERIMENTAL_FEATURES_GET => Some(Self::ExperimentalFeaturesGet),
|
||||
EXPERIMENTAL_FEATURES_UPDATE => Some(Self::ExperimentalFeaturesUpdate),
|
||||
NETWORK_GET => Some(Self::NetworkGet),
|
||||
NETWORK_UPDATE => Some(Self::NetworkUpdate),
|
||||
CHAT => Some(Self::Chat),
|
||||
_otherwise => None,
|
||||
}
|
||||
const SERDE_MAP_ARR: [(&'static str, Self); 36] = [
|
||||
("search", Self::Search),
|
||||
("documents.add", Self::DocumentsAdd),
|
||||
("documents.get", Self::DocumentsGet),
|
||||
("documents.delete", Self::DocumentsDelete),
|
||||
("documents.*", Self::DocumentsAll),
|
||||
("indexes.create", Self::IndexesAdd),
|
||||
("indexes.get", Self::IndexesGet),
|
||||
("indexes.update", Self::IndexesUpdate),
|
||||
("indexes.delete", Self::IndexesDelete),
|
||||
("indexes.swap", Self::IndexesSwap),
|
||||
("indexes.*", Self::IndexesAll),
|
||||
("tasks.cancel", Self::TasksCancel),
|
||||
("tasks.delete", Self::TasksDelete),
|
||||
("tasks.get", Self::TasksGet),
|
||||
("tasks.*", Self::TasksAll),
|
||||
("settings.get", Self::SettingsGet),
|
||||
("settings.update", Self::SettingsUpdate),
|
||||
("settings.*", Self::SettingsAll),
|
||||
("stats.get", Self::StatsGet),
|
||||
("stats.*", Self::StatsAll),
|
||||
("metrics.get", Self::MetricsGet),
|
||||
("metrics.*", Self::MetricsAll),
|
||||
("dumps.create", Self::DumpsCreate),
|
||||
("dumps.*", Self::DumpsAll),
|
||||
("snapshots.create", Self::SnapshotsCreate),
|
||||
("snapshots.*", Self::SnapshotsAll),
|
||||
("version", Self::Version),
|
||||
("keys.create", Self::KeysAdd),
|
||||
("keys.get", Self::KeysGet),
|
||||
("keys.update", Self::KeysUpdate),
|
||||
("keys.delete", Self::KeysDelete),
|
||||
("experimental.get", Self::ExperimentalFeaturesGet),
|
||||
("experimental.update", Self::ExperimentalFeaturesUpdate),
|
||||
("network.get", Self::NetworkGet),
|
||||
("network.update", Self::NetworkUpdate),
|
||||
("*", Self::All),
|
||||
];
|
||||
|
||||
fn get_action(v: &str) -> Option<Action> {
|
||||
Self::SERDE_MAP_ARR
|
||||
.iter()
|
||||
.find(|(serde_name, _)| &v == serde_name)
|
||||
.map(|(_, action)| *action)
|
||||
}
|
||||
|
||||
pub const fn repr(&self) -> u8 {
|
||||
*self as u8
|
||||
fn get_action_serde_name(v: &Action) -> &'static str {
|
||||
Self::SERDE_MAP_ARR
|
||||
.iter()
|
||||
.find(|(_, action)| v == action)
|
||||
.map(|(serde_name, _)| serde_name)
|
||||
.expect("an action is missing a matching serialized value")
|
||||
}
|
||||
|
||||
// when we remove "all" flags, this will give us the exact index
|
||||
fn get_potential_index(&self) -> usize {
|
||||
if self.is_empty() {
|
||||
return 0;
|
||||
}
|
||||
|
||||
// most significant bit for u32
|
||||
let msb = 1u32 << (31 - self.bits().leading_zeros());
|
||||
|
||||
// index of the single set bit
|
||||
msb.trailing_zeros() as usize
|
||||
}
|
||||
}
|
||||
|
||||
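To make the `get_potential_index` helper above concrete, a worked example (assumed free-function form, not code from this diff): for a single-bit flag such as `TasksGet = 1 << 11`, `leading_zeros()` is 20, so the most significant bit is `1 << 11` and the returned index is 11; for a composite flag the index of its highest bit comes back, which is why the `Sequence` implementation further down only treats it as a starting point to scan from.

```rust
// Sketch of the bit arithmetic used by `get_potential_index`.
fn potential_index(bits: u32) -> usize {
    if bits == 0 {
        return 0;
    }
    let msb = 1u32 << (31 - bits.leading_zeros()); // highest set bit as a mask
    msb.trailing_zeros() as usize                  // position of that bit
}

fn main() {
    assert_eq!(potential_index(1 << 11), 11);               // a TasksGet-like single flag
    assert_eq!(potential_index((1 << 9) | (1 << 11)), 11);  // a TasksAll-like composite
}
```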
pub mod actions {
|
||||
use super::Action::*;
|
||||
use super::Action as A;
|
||||
|
||||
pub(crate) const ALL: u8 = All.repr();
|
||||
pub const SEARCH: u8 = Search.repr();
|
||||
pub const DOCUMENTS_ALL: u8 = DocumentsAll.repr();
|
||||
pub const DOCUMENTS_ADD: u8 = DocumentsAdd.repr();
|
||||
pub const DOCUMENTS_GET: u8 = DocumentsGet.repr();
|
||||
pub const DOCUMENTS_DELETE: u8 = DocumentsDelete.repr();
|
||||
pub const INDEXES_ALL: u8 = IndexesAll.repr();
|
||||
pub const INDEXES_CREATE: u8 = IndexesAdd.repr();
|
||||
pub const INDEXES_GET: u8 = IndexesGet.repr();
|
||||
pub const INDEXES_UPDATE: u8 = IndexesUpdate.repr();
|
||||
pub const INDEXES_DELETE: u8 = IndexesDelete.repr();
|
||||
pub const INDEXES_SWAP: u8 = IndexesSwap.repr();
|
||||
pub const TASKS_ALL: u8 = TasksAll.repr();
|
||||
pub const TASKS_CANCEL: u8 = TasksCancel.repr();
|
||||
pub const TASKS_DELETE: u8 = TasksDelete.repr();
|
||||
pub const TASKS_GET: u8 = TasksGet.repr();
|
||||
pub const SETTINGS_ALL: u8 = SettingsAll.repr();
|
||||
pub const SETTINGS_GET: u8 = SettingsGet.repr();
|
||||
pub const SETTINGS_UPDATE: u8 = SettingsUpdate.repr();
|
||||
pub const STATS_ALL: u8 = StatsAll.repr();
|
||||
pub const STATS_GET: u8 = StatsGet.repr();
|
||||
pub const METRICS_ALL: u8 = MetricsAll.repr();
|
||||
pub const METRICS_GET: u8 = MetricsGet.repr();
|
||||
pub const DUMPS_ALL: u8 = DumpsAll.repr();
|
||||
pub const DUMPS_CREATE: u8 = DumpsCreate.repr();
|
||||
pub const SNAPSHOTS_CREATE: u8 = SnapshotsCreate.repr();
|
||||
pub const VERSION: u8 = Version.repr();
|
||||
pub const KEYS_CREATE: u8 = KeysAdd.repr();
|
||||
pub const KEYS_GET: u8 = KeysGet.repr();
|
||||
pub const KEYS_UPDATE: u8 = KeysUpdate.repr();
|
||||
pub const KEYS_DELETE: u8 = KeysDelete.repr();
|
||||
pub const EXPERIMENTAL_FEATURES_GET: u8 = ExperimentalFeaturesGet.repr();
|
||||
pub const EXPERIMENTAL_FEATURES_UPDATE: u8 = ExperimentalFeaturesUpdate.repr();
|
||||
pub const SEARCH: u32 = A::Search.bits();
|
||||
|
||||
pub const NETWORK_GET: u8 = NetworkGet.repr();
|
||||
pub const NETWORK_UPDATE: u8 = NetworkUpdate.repr();
|
||||
pub const DOCUMENTS_ADD: u32 = A::DocumentsAdd.bits();
|
||||
pub const DOCUMENTS_GET: u32 = A::DocumentsGet.bits();
|
||||
pub const DOCUMENTS_DELETE: u32 = A::DocumentsDelete.bits();
|
||||
pub const DOCUMENTS_ALL: u32 = A::DocumentsAll.bits();
|
||||
|
||||
pub const CHAT: u8 = Chat.repr();
|
||||
pub const CHAT_SETTINGS_ALL: u8 = ChatSettingsAll.repr();
|
||||
pub const CHAT_SETTINGS_GET: u8 = ChatSettingsGet.repr();
|
||||
pub const CHAT_SETTINGS_UPDATE: u8 = ChatSettingsUpdate.repr();
|
||||
pub const INDEXES_CREATE: u32 = A::IndexesAdd.bits();
|
||||
pub const INDEXES_GET: u32 = A::IndexesGet.bits();
|
||||
pub const INDEXES_UPDATE: u32 = A::IndexesUpdate.bits();
|
||||
pub const INDEXES_DELETE: u32 = A::IndexesDelete.bits();
|
||||
pub const INDEXES_SWAP: u32 = A::IndexesSwap.bits();
|
||||
pub const INDEXES_ALL: u32 = A::IndexesAll.bits();
|
||||
|
||||
pub const TASKS_CANCEL: u32 = A::TasksCancel.bits();
|
||||
pub const TASKS_DELETE: u32 = A::TasksDelete.bits();
|
||||
pub const TASKS_GET: u32 = A::TasksGet.bits();
|
||||
pub const TASKS_ALL: u32 = A::TasksAll.bits();
|
||||
|
||||
pub const SETTINGS_GET: u32 = A::SettingsGet.bits();
|
||||
pub const SETTINGS_UPDATE: u32 = A::SettingsUpdate.bits();
|
||||
pub const SETTINGS_ALL: u32 = A::SettingsAll.bits();
|
||||
|
||||
pub const STATS_GET: u32 = A::StatsGet.bits();
|
||||
pub const STATS_ALL: u32 = A::StatsAll.bits();
|
||||
|
||||
pub const METRICS_GET: u32 = A::MetricsGet.bits();
|
||||
pub const METRICS_ALL: u32 = A::MetricsAll.bits();
|
||||
|
||||
pub const DUMPS_CREATE: u32 = A::DumpsCreate.bits();
|
||||
pub const DUMPS_ALL: u32 = A::DumpsAll.bits();
|
||||
|
||||
pub const SNAPSHOTS_CREATE: u32 = A::SnapshotsCreate.bits();
|
||||
pub const SNAPSHOTS_ALL: u32 = A::SnapshotsAll.bits();
|
||||
|
||||
pub const VERSION: u32 = A::Version.bits();
|
||||
|
||||
pub const KEYS_CREATE: u32 = A::KeysAdd.bits();
|
||||
pub const KEYS_GET: u32 = A::KeysGet.bits();
|
||||
pub const KEYS_UPDATE: u32 = A::KeysUpdate.bits();
|
||||
pub const KEYS_DELETE: u32 = A::KeysDelete.bits();
|
||||
|
||||
pub const EXPERIMENTAL_FEATURES_GET: u32 = A::ExperimentalFeaturesGet.bits();
|
||||
pub const EXPERIMENTAL_FEATURES_UPDATE: u32 = A::ExperimentalFeaturesUpdate.bits();
|
||||
|
||||
pub const NETWORK_GET: u32 = A::NetworkGet.bits();
|
||||
pub const NETWORK_UPDATE: u32 = A::NetworkUpdate.bits();
|
||||
|
||||
pub const ALL: u32 = A::All.bits();
|
||||
}
|
||||
|
||||
impl<E: DeserializeError> Deserr<E> for Action {
|
||||
fn deserialize_from_value<V: deserr::IntoValue>(
|
||||
value: deserr::Value<V>,
|
||||
location: deserr::ValuePointerRef<'_>,
|
||||
) -> Result<Self, E> {
|
||||
match value {
|
||||
deserr::Value::String(s) => match Self::get_action(&s) {
|
||||
Some(action) => Ok(action),
|
||||
None => Err(deserr::take_cf_content(E::error::<std::convert::Infallible>(
|
||||
None,
|
||||
deserr::ErrorKind::UnknownValue {
|
||||
value: &s,
|
||||
accepted: &Self::SERDE_MAP_ARR.map(|(ser_action, _)| ser_action),
|
||||
},
|
||||
location,
|
||||
))),
|
||||
},
|
||||
_ => Err(take_cf_content(E::error(
|
||||
None,
|
||||
deserr::ErrorKind::IncorrectValueKind {
|
||||
actual: value,
|
||||
accepted: &[deserr::ValueKind::String],
|
||||
},
|
||||
location,
|
||||
))),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for Action {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
serializer.serialize_str(Self::get_action_serde_name(self))
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for Action {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
struct Visitor;
|
||||
impl serde::de::Visitor<'_> for Visitor {
|
||||
type Value = Action;
|
||||
|
||||
fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
|
||||
write!(formatter, "the name of a valid action (string)")
|
||||
}
|
||||
|
||||
fn visit_str<E>(self, s: &str) -> Result<Self::Value, E>
|
||||
where
|
||||
E: serde::de::Error,
|
||||
{
|
||||
match Self::Value::get_action(s) {
|
||||
Some(action) => Ok(action),
|
||||
None => Err(E::invalid_value(serde::de::Unexpected::Str(s), &"a valid action")),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
deserializer.deserialize_str(Visitor)
|
||||
}
|
||||
}
|
||||
|
||||
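The manual `Serialize`/`Deserialize` implementations above route through `SERDE_MAP_ARR`, so each action still (de)serializes to the same string names the old enum used. A usage sketch that would sit in this crate's tests (it assumes the `Action` type from this file and `serde_json` as a dev-dependency):

```rust
#[test]
fn action_serde_round_trip() {
    // Known names map to the matching flag.
    let action: Action = serde_json::from_str("\"documents.add\"").unwrap();
    assert_eq!(action, Action::DocumentsAdd);
    assert_eq!(serde_json::to_string(&Action::DocumentsAdd).unwrap(), "\"documents.add\"");
    // Unknown names are rejected by the visitor with its "a valid action" error.
    assert!(serde_json::from_str::<Action>("\"documents.rename\"").is_err());
}
```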
// TODO: Once "all" type flags are removed, simplify
|
||||
// Essentially `get_potential_index` will give the exact index, +1 the exact next, -1 the exact previous
|
||||
impl Sequence for Action {
|
||||
const CARDINALITY: usize = Self::FLAGS.len();
|
||||
|
||||
fn next(&self) -> Option<Self> {
|
||||
let mut potential_next_index = self.get_potential_index() + 1;
|
||||
|
||||
loop {
|
||||
if let Some(next_flag) = Self::FLAGS.get(potential_next_index).map(|v| v.value()) {
|
||||
if next_flag > self {
|
||||
return Some(*next_flag);
|
||||
}
|
||||
|
||||
potential_next_index += 1;
|
||||
} else {
|
||||
return None;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn previous(&self) -> Option<Self> {
|
||||
// -2 because of "all" type flags that represent a single flag, otherwise -1 would suffice
|
||||
let initial_potential_index = self.get_potential_index();
|
||||
if initial_potential_index == 0 {
|
||||
return None;
|
||||
}
|
||||
|
||||
let mut potential_previous_index: usize =
|
||||
if initial_potential_index == 1 { 0 } else { initial_potential_index - 2 };
|
||||
|
||||
let mut previous_item: Option<Self> = None;
|
||||
let mut pre_previous_item: Option<Self> = None;
|
||||
|
||||
loop {
|
||||
if let Some(next_flag) = Self::FLAGS.get(potential_previous_index).map(|v| v.value()) {
|
||||
if next_flag > self {
|
||||
return pre_previous_item;
|
||||
}
|
||||
|
||||
pre_previous_item = previous_item;
|
||||
previous_item = Some(*next_flag);
|
||||
potential_previous_index += 1;
|
||||
} else {
|
||||
return pre_previous_item;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn first() -> Option<Self> {
|
||||
Self::FLAGS.first().map(|v| *v.value())
|
||||
}
|
||||
|
||||
fn last() -> Option<Self> {
|
||||
Self::FLAGS.last().map(|v| *v.value())
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,16 +8,13 @@ use std::str::FromStr;
|
||||
|
||||
use deserr::{DeserializeError, Deserr, ErrorKind, MergeWithError, ValuePointerRef};
|
||||
use fst::IntoStreamer;
|
||||
use milli::disabled_typos_terms::DisabledTyposTerms;
|
||||
use milli::index::{IndexEmbeddingConfig, PrefixSearch};
|
||||
use milli::proximity::ProximityPrecision;
|
||||
pub use milli::update::ChatSettings;
|
||||
use milli::update::Setting;
|
||||
use milli::{Criterion, CriterionError, FilterableAttributesRule, Index, DEFAULT_VALUES_PER_FACET};
|
||||
use serde::{Deserialize, Serialize, Serializer};
|
||||
use utoipa::ToSchema;
|
||||
|
||||
use super::{Checked, Unchecked};
|
||||
use crate::deserr::DeserrJsonError;
|
||||
use crate::error::deserr_codes::*;
|
||||
use crate::facet_values_sort::FacetValuesSort;
|
||||
@@ -107,10 +104,6 @@ pub struct TypoSettings {
|
||||
#[deserr(default)]
|
||||
#[schema(value_type = Option<BTreeSet<String>>, example = json!(["uuid", "url"]))]
|
||||
pub disable_on_attributes: Setting<BTreeSet<String>>,
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default)]
|
||||
#[schema(value_type = Option<bool>, example = json!(true))]
|
||||
pub disable_on_numbers: Setting<bool>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
|
||||
@@ -201,86 +194,72 @@ pub struct Settings<T> {
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsDisplayedAttributes>)]
|
||||
#[schema(value_type = Option<Vec<String>>, example = json!(["id", "title", "description", "url"]))]
|
||||
pub displayed_attributes: WildcardSetting,
|
||||
|
||||
/// Fields in which to search for matching query words sorted by order of importance.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsSearchableAttributes>)]
|
||||
#[schema(value_type = Option<Vec<String>>, example = json!(["title", "description"]))]
|
||||
pub searchable_attributes: WildcardSetting,
|
||||
|
||||
/// Attributes to use for faceting and filtering. See [Filtering and Faceted Search](https://www.meilisearch.com/docs/learn/filtering_and_sorting/search_with_facet_filters).
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsFilterableAttributes>)]
|
||||
#[schema(value_type = Option<Vec<FilterableAttributesRule>>, example = json!(["release_date", "genre"]))]
|
||||
pub filterable_attributes: Setting<Vec<FilterableAttributesRule>>,
|
||||
|
||||
/// Attributes to use when sorting search results.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsSortableAttributes>)]
|
||||
#[schema(value_type = Option<Vec<String>>, example = json!(["release_date"]))]
|
||||
pub sortable_attributes: Setting<BTreeSet<String>>,
|
||||
|
||||
/// List of ranking rules sorted by order of importance. The order is customizable.
|
||||
/// [A list of ordered built-in ranking rules](https://www.meilisearch.com/docs/learn/relevancy/relevancy).
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsRankingRules>)]
|
||||
#[schema(value_type = Option<Vec<String>>, example = json!([RankingRuleView::Words, RankingRuleView::Typo, RankingRuleView::Proximity, RankingRuleView::Attribute, RankingRuleView::Exactness]))]
|
||||
pub ranking_rules: Setting<Vec<RankingRuleView>>,
|
||||
|
||||
/// List of words ignored when present in search queries.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsStopWords>)]
|
||||
#[schema(value_type = Option<Vec<String>>, example = json!(["the", "a", "them", "their"]))]
|
||||
pub stop_words: Setting<BTreeSet<String>>,
|
||||
|
||||
/// List of characters not delimiting where one term begins and ends.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsNonSeparatorTokens>)]
|
||||
#[schema(value_type = Option<Vec<String>>, example = json!([" ", "\n"]))]
|
||||
pub non_separator_tokens: Setting<BTreeSet<String>>,
|
||||
|
||||
/// List of characters delimiting where one term begins and ends.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsSeparatorTokens>)]
|
||||
#[schema(value_type = Option<Vec<String>>, example = json!(["S"]))]
|
||||
pub separator_tokens: Setting<BTreeSet<String>>,
|
||||
|
||||
/// List of strings Meilisearch should parse as a single term.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsDictionary>)]
|
||||
#[schema(value_type = Option<Vec<String>>, example = json!(["iPhone pro"]))]
|
||||
pub dictionary: Setting<BTreeSet<String>>,
|
||||
|
||||
/// List of associated words treated similarly. A word associated to an array of word as synonyms.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsSynonyms>)]
|
||||
#[schema(value_type = Option<BTreeMap<String, Vec<String>>>, example = json!({ "he": ["she", "they", "them"], "phone": ["iPhone", "android"]}))]
|
||||
pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
|
||||
|
||||
/// Search returns documents with distinct (different) values of the given field.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsDistinctAttribute>)]
|
||||
#[schema(value_type = Option<String>, example = json!("sku"))]
|
||||
pub distinct_attribute: Setting<String>,
|
||||
|
||||
/// Precision level when calculating the proximity ranking rule.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsProximityPrecision>)]
|
||||
#[schema(value_type = Option<String>, example = json!(ProximityPrecisionView::ByAttribute))]
|
||||
pub proximity_precision: Setting<ProximityPrecisionView>,
|
||||
|
||||
/// Customize typo tolerance feature.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsTypoTolerance>)]
|
||||
#[schema(value_type = Option<TypoSettings>, example = json!({ "enabled": true, "disableOnAttributes": ["title"]}))]
|
||||
pub typo_tolerance: Setting<TypoSettings>,
|
||||
|
||||
/// Faceting settings.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsFaceting>)]
|
||||
#[schema(value_type = Option<FacetingSettings>, example = json!({ "maxValuesPerFacet": 10, "sortFacetValuesBy": { "genre": FacetValuesSort::Count }}))]
|
||||
pub faceting: Setting<FacetingSettings>,
|
||||
|
||||
/// Pagination settings.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsPagination>)]
|
||||
@@ -292,34 +271,24 @@ pub struct Settings<T> {
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsEmbedders>)]
|
||||
#[schema(value_type = Option<BTreeMap<String, SettingEmbeddingSettings>>)]
|
||||
pub embedders: Setting<BTreeMap<String, SettingEmbeddingSettings>>,
|
||||
|
||||
/// Maximum duration of a search query.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsSearchCutoffMs>)]
|
||||
#[schema(value_type = Option<u64>, example = json!(50))]
|
||||
pub search_cutoff_ms: Setting<u64>,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsLocalizedAttributes>)]
|
||||
#[schema(value_type = Option<Vec<LocalizedAttributesRuleView>>, example = json!(50))]
|
||||
pub localized_attributes: Setting<Vec<LocalizedAttributesRuleView>>,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsFacetSearch>)]
|
||||
#[schema(value_type = Option<bool>, example = json!(true))]
|
||||
pub facet_search: Setting<bool>,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsPrefixSearch>)]
|
||||
#[schema(value_type = Option<PrefixSearchSettings>, example = json!("Hemlo"))]
|
||||
pub prefix_search: Setting<PrefixSearchSettings>,
|
||||
|
||||
/// Customize the chat prompting.
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsIndexChat>)]
|
||||
#[schema(value_type = Option<ChatSettings>)]
|
||||
pub chat: Setting<ChatSettings>,
|
||||
|
||||
#[serde(skip)]
|
||||
#[deserr(skip)]
|
||||
pub _kind: PhantomData<T>,
|
||||
@@ -385,7 +354,6 @@ impl Settings<Checked> {
|
||||
localized_attributes: Setting::Reset,
|
||||
facet_search: Setting::Reset,
|
||||
prefix_search: Setting::Reset,
|
||||
chat: Setting::Reset,
|
||||
_kind: PhantomData,
|
||||
}
|
||||
}
|
||||
@@ -412,7 +380,6 @@ impl Settings<Checked> {
|
||||
localized_attributes: localized_attributes_rules,
|
||||
facet_search,
|
||||
prefix_search,
|
||||
chat,
|
||||
_kind,
|
||||
} = self;
|
||||
|
||||
@@ -437,7 +404,6 @@ impl Settings<Checked> {
|
||||
localized_attributes: localized_attributes_rules,
|
||||
facet_search,
|
||||
prefix_search,
|
||||
chat,
|
||||
_kind: PhantomData,
|
||||
}
|
||||
}
|
||||
@@ -488,7 +454,6 @@ impl Settings<Unchecked> {
|
||||
localized_attributes: self.localized_attributes,
|
||||
facet_search: self.facet_search,
|
||||
prefix_search: self.prefix_search,
|
||||
chat: self.chat,
|
||||
_kind: PhantomData,
|
||||
}
|
||||
}
|
||||
@@ -563,9 +528,8 @@ impl Settings<Unchecked> {
|
||||
Setting::Set(this)
|
||||
}
|
||||
},
|
||||
facet_search: other.facet_search.or(self.facet_search),
|
||||
prefix_search: other.prefix_search.or(self.prefix_search),
|
||||
chat: other.chat.clone().or(self.chat.clone()),
|
||||
facet_search: other.facet_search.or(self.facet_search),
|
||||
_kind: PhantomData,
|
||||
}
|
||||
}
|
||||
@@ -604,7 +568,6 @@ pub fn apply_settings_to_builder(
|
||||
localized_attributes: localized_attributes_rules,
|
||||
facet_search,
|
||||
prefix_search,
|
||||
chat,
|
||||
_kind,
|
||||
} = settings;
|
||||
|
||||
@@ -738,12 +701,6 @@ pub fn apply_settings_to_builder(
|
||||
Setting::Reset => builder.reset_exact_attributes(),
|
||||
Setting::NotSet => (),
|
||||
}
|
||||
|
||||
match value.disable_on_numbers {
|
||||
Setting::Set(val) => builder.set_disable_on_numbers(val),
|
||||
Setting::Reset => builder.reset_disable_on_numbers(),
|
||||
Setting::NotSet => (),
|
||||
}
|
||||
}
|
||||
Setting::Reset => {
|
||||
// all typo settings need to be reset here.
|
||||
@@ -815,12 +772,6 @@ pub fn apply_settings_to_builder(
|
||||
Setting::Reset => builder.reset_facet_search(),
|
||||
Setting::NotSet => (),
|
||||
}
|
||||
|
||||
match chat {
|
||||
Setting::Set(chat) => builder.set_chat(chat.clone()),
|
||||
Setting::Reset => builder.reset_chat(),
|
||||
Setting::NotSet => (),
|
||||
}
|
||||
}
|
||||
|
||||
pub enum SecretPolicy {
|
||||
@@ -875,14 +826,12 @@ pub fn settings(
|
||||
};
|
||||
|
||||
let disabled_attributes = index.exact_attributes(rtxn)?.into_iter().map(String::from).collect();
|
||||
let DisabledTyposTerms { disable_on_numbers } = index.disabled_typos_terms(rtxn)?;
|
||||
|
||||
let typo_tolerance = TypoSettings {
|
||||
enabled: Setting::Set(index.authorize_typos(rtxn)?),
|
||||
min_word_size_for_typos: Setting::Set(min_typo_word_len),
|
||||
disable_on_words: Setting::Set(disabled_words),
|
||||
disable_on_attributes: Setting::Set(disabled_attributes),
|
||||
disable_on_numbers: Setting::Set(disable_on_numbers),
|
||||
};
|
||||
|
||||
let faceting = FacetingSettings {
|
||||
@@ -918,11 +867,14 @@ pub fn settings(
|
||||
})
|
||||
.collect();
|
||||
let embedders = Setting::Set(embedders);
|
||||
|
||||
let search_cutoff_ms = index.search_cutoff(rtxn)?;
|
||||
|
||||
let localized_attributes_rules = index.localized_attributes_rules(rtxn)?;
|
||||
|
||||
let prefix_search = index.prefix_search(rtxn)?.map(PrefixSearchSettings::from);
|
||||
|
||||
let facet_search = index.facet_search(rtxn)?;
|
||||
let chat = index.chat_config(rtxn).map(ChatSettings::from)?;
|
||||
|
||||
let mut settings = Settings {
|
||||
displayed_attributes: match displayed_attributes {
|
||||
@@ -960,9 +912,8 @@ pub fn settings(
|
||||
Some(rules) => Setting::Set(rules.into_iter().map(|r| r.into()).collect()),
|
||||
None => Setting::Reset,
|
||||
},
|
||||
facet_search: Setting::Set(facet_search),
|
||||
prefix_search: Setting::Set(prefix_search.unwrap_or_default()),
|
||||
chat: Setting::Set(chat),
|
||||
facet_search: Setting::Set(facet_search),
|
||||
_kind: PhantomData,
|
||||
};
|
||||
|
||||
@@ -1190,7 +1141,6 @@ pub(crate) mod test {
|
||||
search_cutoff_ms: Setting::NotSet,
|
||||
facet_search: Setting::NotSet,
|
||||
prefix_search: Setting::NotSet,
|
||||
chat: Setting::NotSet,
|
||||
_kind: PhantomData::<Unchecked>,
|
||||
};
|
||||
|
||||
@@ -1222,8 +1172,6 @@ pub(crate) mod test {
|
||||
search_cutoff_ms: Setting::NotSet,
|
||||
facet_search: Setting::NotSet,
|
||||
prefix_search: Setting::NotSet,
|
||||
chat: Setting::NotSet,
|
||||
|
||||
_kind: PhantomData::<Unchecked>,
|
||||
};
|
||||
|
||||
|
||||
@@ -272,9 +272,9 @@ impl KindWithContent {
|
||||
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
|
||||
from: (from.0, from.1, from.2),
|
||||
to: (
|
||||
versioning::VERSION_MAJOR,
|
||||
versioning::VERSION_MINOR,
|
||||
versioning::VERSION_PATCH,
|
||||
versioning::VERSION_MAJOR.parse().unwrap(),
|
||||
versioning::VERSION_MINOR.parse().unwrap(),
|
||||
versioning::VERSION_PATCH.parse().unwrap(),
|
||||
),
|
||||
}),
|
||||
}
|
||||
@@ -338,9 +338,9 @@ impl KindWithContent {
|
||||
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
|
||||
from: *from,
|
||||
to: (
|
||||
versioning::VERSION_MAJOR,
|
||||
versioning::VERSION_MINOR,
|
||||
versioning::VERSION_PATCH,
|
||||
versioning::VERSION_MAJOR.parse().unwrap(),
|
||||
versioning::VERSION_MINOR.parse().unwrap(),
|
||||
versioning::VERSION_PATCH.parse().unwrap(),
|
||||
),
|
||||
}),
|
||||
}
|
||||
@@ -386,9 +386,9 @@ impl From<&KindWithContent> for Option<Details> {
|
||||
KindWithContent::UpgradeDatabase { from } => Some(Details::UpgradeDatabase {
|
||||
from: *from,
|
||||
to: (
|
||||
versioning::VERSION_MAJOR,
|
||||
versioning::VERSION_MINOR,
|
||||
versioning::VERSION_PATCH,
|
||||
versioning::VERSION_MAJOR.parse().unwrap(),
|
||||
versioning::VERSION_MINOR.parse().unwrap(),
|
||||
versioning::VERSION_PATCH.parse().unwrap(),
|
||||
),
|
||||
}),
|
||||
}
|
||||
|
||||
@@ -8,7 +8,9 @@ use tempfile::NamedTempFile;
|
||||
/// The name of the file that contains the version of the database.
|
||||
pub const VERSION_FILE_NAME: &str = "VERSION";
|
||||
|
||||
pub use milli::constants::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
|
||||
pub static VERSION_MAJOR: &str = env!("CARGO_PKG_VERSION_MAJOR");
|
||||
pub static VERSION_MINOR: &str = env!("CARGO_PKG_VERSION_MINOR");
|
||||
pub static VERSION_PATCH: &str = env!("CARGO_PKG_VERSION_PATCH");
|
||||
|
||||
/// Persists the version of the current Meilisearch binary to a VERSION file
|
||||
pub fn create_current_version_file(db_path: &Path) -> anyhow::Result<()> {
|
||||
@@ -17,9 +19,9 @@ pub fn create_current_version_file(db_path: &Path) -> anyhow::Result<()> {
|
||||
|
||||
pub fn create_version_file(
|
||||
db_path: &Path,
|
||||
major: u32,
|
||||
minor: u32,
|
||||
patch: u32,
|
||||
major: &str,
|
||||
minor: &str,
|
||||
patch: &str,
|
||||
) -> anyhow::Result<()> {
|
||||
let version_path = db_path.join(VERSION_FILE_NAME);
|
||||
// In order to persist the file later we must create it in the `data.ms` and not in `/tmp`
|
||||
|
||||
@@ -32,7 +32,6 @@ async-trait = "0.1.85"
|
||||
bstr = "1.11.3"
|
||||
byte-unit = { version = "5.1.6", features = ["serde"] }
|
||||
bytes = "1.9.0"
|
||||
bumpalo = "3.16.0"
|
||||
clap = { version = "4.5.24", features = ["derive", "env"] }
|
||||
crossbeam-channel = "0.5.15"
|
||||
deserr = { version = "0.6.3", features = ["actix-web"] }
|
||||
@@ -49,7 +48,6 @@ is-terminal = "0.4.13"
|
||||
itertools = "0.14.0"
|
||||
jsonwebtoken = "9.3.0"
|
||||
lazy_static = "1.5.0"
|
||||
liquid = "0.26.9"
|
||||
meilisearch-auth = { path = "../meilisearch-auth" }
|
||||
meilisearch-types = { path = "../meilisearch-types" }
|
||||
mimalloc = { version = "0.1.43", default-features = false }
|
||||
@@ -113,8 +111,6 @@ utoipa = { version = "5.3.1", features = [
|
||||
"openapi_extensions",
|
||||
] }
|
||||
utoipa-scalar = { version = "0.3.0", optional = true, features = ["actix-web"] }
|
||||
async-openai = { git = "https://github.com/meilisearch/async-openai", branch = "optional-type-function" }
|
||||
actix-web-lab = { version = "0.24.1", default-features = false }
|
||||
|
||||
[dev-dependencies]
|
||||
actix-rt = "2.10.0"
|
||||
|
||||
@@ -4,7 +4,6 @@ use std::marker::PhantomData;
|
||||
use std::ops::Deref;
|
||||
use std::pin::Pin;
|
||||
|
||||
use actix_web::http::header::AUTHORIZATION;
|
||||
use actix_web::web::Data;
|
||||
use actix_web::FromRequest;
|
||||
pub use error::AuthenticationError;
|
||||
@@ -95,44 +94,36 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
|
||||
_payload: &mut actix_web::dev::Payload,
|
||||
) -> Self::Future {
|
||||
match req.app_data::<Data<AuthController>>().cloned() {
|
||||
Some(auth) => match extract_token_from_request(req) {
|
||||
Ok(Some(token)) => {
|
||||
// TODO: find a less hardcoded way?
|
||||
let index = req.match_info().get("index_uid");
|
||||
Box::pin(Self::auth_bearer(
|
||||
auth,
|
||||
token.to_string(),
|
||||
index.map(String::from),
|
||||
req.app_data::<D>().cloned(),
|
||||
))
|
||||
}
|
||||
Ok(None) => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
|
||||
Err(e) => Box::pin(err(e.into())),
|
||||
Some(auth) => match req
|
||||
.headers()
|
||||
.get("Authorization")
|
||||
.map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
|
||||
{
|
||||
Some(mut type_token) => match type_token.next() {
|
||||
Some("Bearer") => {
|
||||
// TODO: find a less hardcoded way?
|
||||
let index = req.match_info().get("index_uid");
|
||||
match type_token.next() {
|
||||
Some(token) => Box::pin(Self::auth_bearer(
|
||||
auth,
|
||||
token.to_string(),
|
||||
index.map(String::from),
|
||||
req.app_data::<D>().cloned(),
|
||||
)),
|
||||
None => Box::pin(err(AuthenticationError::InvalidToken.into())),
|
||||
}
|
||||
}
|
||||
_otherwise => {
|
||||
Box::pin(err(AuthenticationError::MissingAuthorizationHeader.into()))
|
||||
}
|
||||
},
|
||||
None => Box::pin(Self::auth_token(auth, req.app_data::<D>().cloned())),
|
||||
},
|
||||
None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
|
||||
}
|
||||
}
|
||||
}
|
||||

pub fn extract_token_from_request(
req: &actix_web::HttpRequest,
) -> Result<Option<&str>, AuthenticationError> {
match req
.headers()
.get(AUTHORIZATION)
.map(|type_token| type_token.to_str().unwrap_or_default().splitn(2, ' '))
{
Some(mut type_token) => match type_token.next() {
Some("Bearer") => match type_token.next() {
Some(token) => Ok(Some(token)),
None => Err(AuthenticationError::InvalidToken),
},
_otherwise => Err(AuthenticationError::MissingAuthorizationHeader),
},
None => Ok(None),
}
}

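A small usage sketch of the extracted helper above (it assumes actix-web's test request builder and would live in this crate's tests, not in the diff): the function returns `Ok(None)` when no `Authorization` header is present, `Ok(Some(token))` for a well-formed `Bearer <token>` header, and an error otherwise.

```rust
#[test]
fn bearer_token_extraction() {
    use actix_web::test::TestRequest;

    // A well-formed Bearer header yields the raw token.
    let req = TestRequest::default()
        .insert_header(("Authorization", "Bearer my-api-key"))
        .to_http_request();
    assert_eq!(extract_token_from_request(&req).unwrap(), Some("my-api-key"));

    // No header at all is not an error: the caller falls back to `auth_token`.
    let no_header = TestRequest::default().to_http_request();
    assert_eq!(extract_token_from_request(&no_header).unwrap(), None);
}
```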
pub trait Policy {
fn authenticate(
auth: Data<AuthController>,
@@ -180,7 +171,7 @@ pub mod policies {
#[error("Could not decode tenant token, {0}.")]
CouldNotDecodeTenantToken(jsonwebtoken::errors::Error),
#[error("Invalid action `{0}`.")]
InternalInvalidAction(u8),
InternalInvalidAction(u32),
}

impl From<jsonwebtoken::errors::Error> for AuthError {
@@ -223,14 +214,14 @@ pub mod policies {
Ok(api_key_uid)
}

fn is_keys_action(action: u8) -> bool {
fn is_keys_action(action: u32) -> bool {
use actions::*;
matches!(action, KEYS_GET | KEYS_CREATE | KEYS_UPDATE | KEYS_DELETE)
}

pub struct ActionPolicy<const A: u8>;
pub struct ActionPolicy<const A: u32>;

impl<const A: u8> Policy for ActionPolicy<A> {
impl<const A: u32> Policy for ActionPolicy<A> {
/// Attempts to grant authentication from a bearer token (that can be a tenant token or an API key), the requested Action,
/// and a list of requested indexes.
///
@@ -264,7 +255,7 @@ pub mod policies {
};

// check that the indexes are allowed
let action = Action::from_repr(A).ok_or(AuthError::InternalInvalidAction(A))?;
let action = Action::from_bits(A).ok_or(AuthError::InternalInvalidAction(A))?;
let auth_filter = auth
.get_key_filters(key_uuid, search_rules)
.map_err(|_e| AuthError::InvalidApiKey)?;
@@ -303,13 +294,13 @@ pub mod policies {
}
}

impl<const A: u8> ActionPolicy<A> {
impl<const A: u32> ActionPolicy<A> {
fn authenticate_tenant_token(
auth: &AuthController,
token: &str,
) -> Result<TenantTokenOutcome, AuthError> {
// Only search and chat actions can be accessed by a tenant token.
if A != actions::SEARCH && A != actions::CHAT {
// Only search action can be accessed by a tenant token.
if A != actions::SEARCH {
return Ok(TenantTokenOutcome::NotATenantToken);
}

@@ -235,7 +235,10 @@ pub fn setup_meilisearch(opt: &Opt) -> anyhow::Result<(Arc<IndexScheduler>, Arc<
|
||||
auto_upgrade: opt.experimental_dumpless_upgrade,
|
||||
embedding_cache_cap: opt.experimental_embedding_cache_entries,
|
||||
};
|
||||
let binary_version = (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH);
|
||||
let bin_major: u32 = VERSION_MAJOR.parse().unwrap();
|
||||
let bin_minor: u32 = VERSION_MINOR.parse().unwrap();
|
||||
let bin_patch: u32 = VERSION_PATCH.parse().unwrap();
|
||||
let binary_version = (bin_major, bin_minor, bin_patch);
|
||||
|
||||
let empty_db = is_empty_db(&opt.db_path);
|
||||
let (index_scheduler, auth_controller) = if let Some(ref snapshot_path) = opt.import_snapshot {
|
||||
|
||||
@@ -1,560 +0,0 @@
|
||||
use std::cell::RefCell;
|
||||
use std::collections::HashMap;
|
||||
use std::mem;
|
||||
use std::sync::RwLock;
|
||||
use std::time::Duration;
|
||||
|
||||
use actix_web::web::{self, Data};
|
||||
use actix_web::{Either, HttpRequest, HttpResponse, Responder};
|
||||
use actix_web_lab::sse::{self, Event, Sse};
|
||||
use async_openai::config::OpenAIConfig;
|
||||
use async_openai::types::{
|
||||
ChatCompletionMessageToolCall, ChatCompletionMessageToolCallChunk,
|
||||
ChatCompletionRequestAssistantMessageArgs, ChatCompletionRequestMessage,
|
||||
ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent,
|
||||
ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
|
||||
ChatCompletionStreamResponseDelta, ChatCompletionToolArgs, ChatCompletionToolType,
|
||||
CreateChatCompletionRequest, FinishReason, FunctionCall, FunctionCallStream,
|
||||
FunctionObjectArgs,
|
||||
};
|
||||
use async_openai::Client;
|
||||
use bumpalo::Bump;
|
||||
use futures::StreamExt;
|
||||
use index_scheduler::IndexScheduler;
|
||||
use meilisearch_auth::AuthController;
|
||||
use meilisearch_types::error::ResponseError;
|
||||
use meilisearch_types::heed::RoTxn;
|
||||
use meilisearch_types::keys::actions;
|
||||
use meilisearch_types::milli::index::ChatConfig;
|
||||
use meilisearch_types::milli::prompt::{Prompt, PromptData};
|
||||
use meilisearch_types::milli::update::new::document::DocumentFromDb;
|
||||
use meilisearch_types::milli::{
|
||||
DocumentId, FieldIdMapWithMetadata, GlobalFieldsIdsMap, MetadataBuilder, TimeBudget,
|
||||
};
|
||||
use meilisearch_types::Index;
|
||||
use serde::Deserialize;
|
||||
use serde_json::json;
|
||||
use tokio::runtime::Handle;
|
||||
use tokio::sync::mpsc::error::SendError;
|
||||
|
||||
use super::settings::chat::{ChatPrompts, GlobalChatSettings};
|
||||
use crate::error::MeilisearchHttpError;
|
||||
use crate::extractors::authentication::policies::ActionPolicy;
|
||||
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
|
||||
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
|
||||
use crate::routes::indexes::search::search_kind;
|
||||
use crate::search::{
|
||||
add_search_rules, prepare_search, search_from_kind, HybridQuery, MatchingStrategy, SearchQuery,
|
||||
SemanticRatio,
|
||||
};
|
||||
use crate::search_queue::SearchQueue;
|
||||
|
||||
const EMBEDDER_NAME: &str = "openai";
|
||||
const SEARCH_IN_INDEX_FUNCTION_NAME: &str = "_meiliSearchInIndex";
|
||||
|
||||
pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(web::resource("/completions").route(web::post().to(chat)));
|
||||
}
|
||||
|
||||
/// Get a chat completion
|
||||
async fn chat(
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT }>, Data<IndexScheduler>>,
|
||||
auth_ctrl: web::Data<AuthController>,
|
||||
req: HttpRequest,
|
||||
search_queue: web::Data<SearchQueue>,
|
||||
web::Json(chat_completion): web::Json<CreateChatCompletionRequest>,
|
||||
) -> impl Responder {
|
||||
// To enable later on, when the feature will be experimental
|
||||
// index_scheduler.features().check_chat("Using the /chat route")?;
|
||||
|
||||
assert_eq!(
|
||||
chat_completion.n.unwrap_or(1),
|
||||
1,
|
||||
"Meilisearch /chat only support one completion at a time (n = 1, n = null)"
|
||||
);
|
||||
|
||||
if chat_completion.stream.unwrap_or(false) {
|
||||
Either::Right(
|
||||
streamed_chat(index_scheduler, auth_ctrl, req, search_queue, chat_completion).await,
|
||||
)
|
||||
} else {
|
||||
Either::Left(
|
||||
non_streamed_chat(index_scheduler, auth_ctrl, req, search_queue, chat_completion).await,
|
||||
)
|
||||
}
|
||||
}
|
||||
|
||||
/// Setup search tool in chat completion request
|
||||
fn setup_search_tool(
|
||||
index_scheduler: &Data<IndexScheduler>,
|
||||
filters: &meilisearch_auth::AuthFilter,
|
||||
chat_completion: &mut CreateChatCompletionRequest,
|
||||
prompts: &ChatPrompts,
|
||||
) -> Result<(), ResponseError> {
|
||||
let tools = chat_completion.tools.get_or_insert_default();
|
||||
if tools.iter().find(|t| t.function.name == SEARCH_IN_INDEX_FUNCTION_NAME).is_some() {
|
||||
panic!("{SEARCH_IN_INDEX_FUNCTION_NAME} function already set");
|
||||
}
|
||||
|
||||
let index_uids: Vec<_> = index_scheduler
|
||||
.index_names()?
|
||||
.into_iter()
|
||||
.filter(|index_uid| filters.is_index_authorized(&index_uid))
|
||||
.collect();
|
||||
|
||||
let tool = ChatCompletionToolArgs::default()
|
||||
.r#type(ChatCompletionToolType::Function)
|
||||
.function(
|
||||
FunctionObjectArgs::default()
|
||||
.name(SEARCH_IN_INDEX_FUNCTION_NAME)
|
||||
.description(&prompts.search_description)
|
||||
.parameters(json!({
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"index_uid": {
|
||||
"type": "string",
|
||||
"enum": index_uids,
|
||||
"description": prompts.search_index_uid_param,
|
||||
},
|
||||
"q": {
|
||||
// Unfortunately, Mistral does not support an array of types, here.
|
||||
// "type": ["string", "null"],
|
||||
"type": "string",
|
||||
"description": prompts.search_q_param,
|
||||
}
|
||||
},
|
||||
"required": ["index_uid", "q"],
|
||||
"additionalProperties": false,
|
||||
}))
|
||||
.strict(true)
|
||||
.build()
|
||||
.unwrap(),
|
||||
)
|
||||
.build()
|
||||
.unwrap();
|
||||
tools.push(tool);
|
||||
chat_completion.messages.insert(
|
||||
0,
|
||||
ChatCompletionRequestMessage::System(ChatCompletionRequestSystemMessage {
|
||||
content: ChatCompletionRequestSystemMessageContent::Text(prompts.system.clone()),
|
||||
name: None,
|
||||
}),
|
||||
);
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Process search request and return formatted results
|
||||
async fn process_search_request(
|
||||
index_scheduler: &GuardedData<ActionPolicy<{ actions::CHAT }>, Data<IndexScheduler>>,
|
||||
auth_ctrl: web::Data<AuthController>,
|
||||
search_queue: &web::Data<SearchQueue>,
|
||||
auth_token: &str,
|
||||
index_uid: String,
|
||||
q: Option<String>,
|
||||
) -> Result<(Index, String), ResponseError> {
|
||||
let mut query = SearchQuery {
|
||||
q,
|
||||
hybrid: Some(HybridQuery {
|
||||
semantic_ratio: SemanticRatio::default(),
|
||||
embedder: EMBEDDER_NAME.to_string(),
|
||||
}),
|
||||
limit: 20,
|
||||
matching_strategy: MatchingStrategy::Frequency,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let auth_filter = ActionPolicy::<{ actions::SEARCH }>::authenticate(
|
||||
auth_ctrl,
|
||||
auth_token,
|
||||
Some(index_uid.as_str()),
|
||||
)?;
|
||||
|
||||
// Tenant token search_rules.
|
||||
if let Some(search_rules) = auth_filter.get_index_search_rules(&index_uid) {
|
||||
add_search_rules(&mut query.filter, search_rules);
|
||||
}
|
||||
|
||||
// TBD
|
||||
// let mut aggregate = SearchAggregator::<SearchPOST>::from_query(&query);
|
||||
|
||||
let index = index_scheduler.index(&index_uid)?;
|
||||
let search_kind =
|
||||
search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index)?;
|
||||
|
||||
let permit = search_queue.try_get_search_permit().await?;
|
||||
let features = index_scheduler.features();
|
||||
let index_cloned = index.clone();
|
||||
let search_result = tokio::task::spawn_blocking(move || -> Result<_, ResponseError> {
|
||||
let rtxn = index_cloned.read_txn()?;
|
||||
let time_budget = match index_cloned
|
||||
.search_cutoff(&rtxn)
|
||||
.map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.clone())))?
|
||||
{
|
||||
Some(cutoff) => TimeBudget::new(Duration::from_millis(cutoff)),
|
||||
None => TimeBudget::default(),
|
||||
};
|
||||
|
||||
let (search, _is_finite_pagination, _max_total_hits, _offset) =
|
||||
prepare_search(&index_cloned, &rtxn, &query, &search_kind, time_budget, features)?;
|
||||
|
||||
search_from_kind(index_uid, search_kind, search)
|
||||
.map(|(search_results, _)| search_results)
|
||||
.map_err(ResponseError::from)
|
||||
})
|
||||
.await;
|
||||
permit.drop().await;
|
||||
|
||||
let search_result = search_result?;
|
||||
if let Ok(ref search_result) = search_result {
|
||||
// aggregate.succeed(search_result);
|
||||
if search_result.degraded {
|
||||
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
|
||||
}
|
||||
}
|
||||
// analytics.publish(aggregate, &req);
|
||||
|
||||
let search_result = search_result?;
|
||||
let rtxn = index.read_txn()?;
|
||||
let render_alloc = Bump::new();
|
||||
let formatted = format_documents(&rtxn, &index, &render_alloc, search_result.documents_ids)?;
|
||||
let text = formatted.join("\n");
|
||||
drop(rtxn);
|
||||
|
||||
Ok((index, text))
|
||||
}

async fn non_streamed_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT }>, Data<IndexScheduler>>,
    auth_ctrl: web::Data<AuthController>,
    req: HttpRequest,
    search_queue: web::Data<SearchQueue>,
    mut chat_completion: CreateChatCompletionRequest,
) -> Result<HttpResponse, ResponseError> {
    let filters = index_scheduler.filters();

    let chat_settings = match index_scheduler.chat_settings().unwrap() {
        Some(value) => serde_json::from_value(value).unwrap(),
        None => GlobalChatSettings::default(),
    };

    let mut config = OpenAIConfig::default();
    if let Some(api_key) = chat_settings.api_key.as_ref() {
        config = config.with_api_key(api_key);
    }
    if let Some(base_api) = chat_settings.base_api.as_ref() {
        config = config.with_api_base(base_api);
    }
    let client = Client::with_config(config);

    let auth_token = extract_token_from_request(&req)?.unwrap();
    setup_search_tool(&index_scheduler, filters, &mut chat_completion, &chat_settings.prompts)?;

    let mut response;
    loop {
        response = client.chat().create(chat_completion.clone()).await.unwrap();

        let choice = &mut response.choices[0];
        match choice.finish_reason {
            Some(FinishReason::ToolCalls) => {
                let tool_calls = mem::take(&mut choice.message.tool_calls).unwrap_or_default();

                let (meili_calls, other_calls): (Vec<_>, Vec<_>) = tool_calls
                    .into_iter()
                    .partition(|call| call.function.name == SEARCH_IN_INDEX_FUNCTION_NAME);

                chat_completion.messages.push(
                    ChatCompletionRequestAssistantMessageArgs::default()
                        .tool_calls(meili_calls.clone())
                        .build()
                        .unwrap()
                        .into(),
                );

                for call in meili_calls {
                    let result = match serde_json::from_str(&call.function.arguments) {
                        Ok(SearchInIndexParameters { index_uid, q }) => process_search_request(
                            &index_scheduler,
                            auth_ctrl.clone(),
                            &search_queue,
                            &auth_token,
                            index_uid,
                            q,
                        )
                        .await
                        .map_err(|e| e.to_string()),
                        Err(err) => Err(err.to_string()),
                    };

                    let text = match result {
                        Ok((_, text)) => text,
                        Err(err) => err,
                    };

                    chat_completion.messages.push(ChatCompletionRequestMessage::Tool(
                        ChatCompletionRequestToolMessage {
                            tool_call_id: call.id.clone(),
                            content: ChatCompletionRequestToolMessageContent::Text(format!(
                                "{}\n\n{text}",
                                chat_settings.prompts.pre_query
                            )),
                        },
                    ));
                }

                // Let the client call other tools by themselves
                if !other_calls.is_empty() {
                    response.choices[0].message.tool_calls = Some(other_calls);
                    break;
                }
            }
            _ => break,
        }
    }

    Ok(HttpResponse::Ok().json(response))
}
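// Flow note (illustrative, not part of the original route): each `ToolCalls` round above
// appends one assistant message echoing the Meilisearch tool calls plus one tool message
// per call containing `prompts.pre_query` followed by the rendered documents, then
// re-sends the completion so the model can answer from the injected context or request
// more searches; remaining non-Meilisearch tool calls are handed back to the client.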

async fn streamed_chat(
    index_scheduler: GuardedData<ActionPolicy<{ actions::CHAT }>, Data<IndexScheduler>>,
    auth_ctrl: web::Data<AuthController>,
    req: HttpRequest,
    search_queue: web::Data<SearchQueue>,
    mut chat_completion: CreateChatCompletionRequest,
) -> Result<impl Responder, ResponseError> {
    let filters = index_scheduler.filters();

    let chat_settings = match index_scheduler.chat_settings().unwrap() {
        Some(value) => serde_json::from_value(value).unwrap(),
        None => GlobalChatSettings::default(),
    };

    let mut config = OpenAIConfig::default();
    if let Some(api_key) = chat_settings.api_key.as_ref() {
        config = config.with_api_key(api_key);
    }
    if let Some(base_api) = chat_settings.base_api.as_ref() {
        config = config.with_api_base(base_api);
    }

    let auth_token = extract_token_from_request(&req)?.unwrap().to_string();
    setup_search_tool(&index_scheduler, filters, &mut chat_completion, &chat_settings.prompts)?;

    let (tx, rx) = tokio::sync::mpsc::channel(10);
    let _join_handle = Handle::current().spawn(async move {
        let client = Client::with_config(config.clone());
        let mut global_tool_calls = HashMap::<u32, Call>::new();
        let mut finish_reason = None;

        // Limit the number of internal calls to satisfy the search requests of the LLM
        'main: for _ in 0..20 {
            let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
            while let Some(result) = response.next().await {
                match result {
                    Ok(resp) => {
                        let choice = &resp.choices[0];
                        finish_reason = choice.finish_reason;

                        #[allow(deprecated)]
                        let ChatCompletionStreamResponseDelta {
                            content,
                            // Using deprecated field but keeping for compatibility
                            function_call: _,
                            ref tool_calls,
                            role: _,
                            refusal: _,
                        } = &choice.delta;

                        if content.is_some() {
                            if let Err(SendError(_)) = tx.send(Event::Data(sse::Data::new_json(&resp).unwrap())).await {
                                return;
                            }
                        }

                        match tool_calls {
                            Some(tool_calls) => {
                                for chunk in tool_calls {
                                    let ChatCompletionMessageToolCallChunk {
                                        index,
                                        id,
                                        r#type: _,
                                        function,
                                    } = chunk;
                                    let FunctionCallStream { name, arguments } =
                                        function.as_ref().unwrap();
                                    global_tool_calls
                                        .entry(*index)
                                        .and_modify(|call| call.append(arguments.as_ref().unwrap()))
                                        .or_insert_with(|| Call {
                                            id: id.as_ref().unwrap().clone(),
                                            function_name: name.as_ref().unwrap().clone(),
                                            arguments: arguments.as_ref().unwrap().clone(),
                                        });
                                }
                            }
                            None if !global_tool_calls.is_empty() => {
                                let (meili_calls, _other_calls): (Vec<_>, Vec<_>) =
                                    mem::take(&mut global_tool_calls)
                                        .into_values()
                                        .map(|call| ChatCompletionMessageToolCall {
                                            id: call.id,
                                            r#type: Some(ChatCompletionToolType::Function),
                                            function: FunctionCall {
                                                name: call.function_name,
                                                arguments: call.arguments,
                                            },
                                        })
                                        .partition(|call| call.function.name == SEARCH_IN_INDEX_FUNCTION_NAME);

                                chat_completion.messages.push(
                                    ChatCompletionRequestAssistantMessageArgs::default()
                                        .tool_calls(meili_calls.clone())
                                        .build()
                                        .unwrap()
                                        .into(),
                                );

                                for call in meili_calls {
                                    if let Err(SendError(_)) = tx.send(Event::Data(
                                        sse::Data::new_json(json!({
                                            "object": "chat.completion.tool.call",
                                            "tool": call,
                                        }))
                                        .unwrap(),
                                    ))
                                    .await {
                                        return;
                                    }

                                    let result = match serde_json::from_str(&call.function.arguments) {
                                        Ok(SearchInIndexParameters { index_uid, q }) => process_search_request(
                                            &index_scheduler,
                                            auth_ctrl.clone(),
                                            &search_queue,
                                            &auth_token,
                                            index_uid,
                                            q,
                                        ).await.map_err(|e| e.to_string()),
                                        Err(err) => Err(err.to_string()),
                                    };

                                    let is_error = result.is_err();
                                    let text = match result {
                                        Ok((_, text)) => text,
                                        Err(err) => err,
                                    };

                                    let tool = ChatCompletionRequestToolMessage {
                                        tool_call_id: call.id.clone(),
                                        content: ChatCompletionRequestToolMessageContent::Text(
                                            format!("{}\n\n{text}", chat_settings.prompts.pre_query),
                                        ),
                                    };

                                    if let Err(SendError(_)) = tx.send(Event::Data(
                                        sse::Data::new_json(json!({
                                            "object": if is_error {
                                                "chat.completion.tool.error"
                                            } else {
                                                "chat.completion.tool.output"
                                            },
                                            "tool": ChatCompletionRequestToolMessage {
                                                tool_call_id: call.id,
                                                content: ChatCompletionRequestToolMessageContent::Text(
                                                    text,
                                                ),
                                            },
                                        }))
                                        .unwrap(),
                                    ))
                                    .await {
                                        return;
                                    }

                                    chat_completion.messages.push(ChatCompletionRequestMessage::Tool(tool));
                                }
                            }
                            None => (),
                        }
                    }
                    Err(err) => {
                        tracing::error!("{err:?}");
                        if let Err(SendError(_)) = tx.send(Event::Data(sse::Data::new_json(&json!({
                            "object": "chat.completion.error",
                            "tool": err.to_string(),
                        })).unwrap())).await {
                            return;
                        }

                        break 'main;
                    }
                }
            }

            // We must stop if the finish reason is not something we can solve with Meilisearch
            if finish_reason.map_or(true, |fr| fr != FinishReason::ToolCalls) {
                break;
            }
        }

        let _ = tx.send(Event::Data(sse::Data::new("[DONE]")));
    });

    Ok(Sse::from_infallible_receiver(rx).with_retry_duration(Duration::from_secs(10)))
}
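// Client-side note (sketch, not part of the original route): besides the plain OpenAI
// stream chunks, the SSE channel above emits custom JSON events whose `object` field is
// one of "chat.completion.tool.call", "chat.completion.tool.output",
// "chat.completion.tool.error" or "chat.completion.error", and the stream is terminated
// with a literal "[DONE]" data event.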

/// The structure used to aggregate the function calls to make.
#[derive(Debug)]
struct Call {
    id: String,
    function_name: String,
    arguments: String,
}

impl Call {
    fn append(&mut self, arguments: &str) {
        self.arguments.push_str(arguments);
    }
}
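// Minimal sketch (not in the original file): how the streaming loop above rebuilds a
// complete tool call from partial argument chunks. The tool name used here is purely
// hypothetical.
#[cfg(test)]
mod call_accumulation_sketch {
    use super::Call;

    #[test]
    fn chunks_concatenate_into_full_arguments() {
        let mut call = Call {
            id: "call_0".to_string(),
            function_name: "searchInIndex".to_string(), // hypothetical tool name
            arguments: String::new(),
        };
        // Chunks arrive in order; `append` simply concatenates them.
        for chunk in [r#"{"index_uid":"movies","#, r#""q":"space opera"}"#] {
            call.append(chunk);
        }
        assert_eq!(call.arguments, r#"{"index_uid":"movies","q":"space opera"}"#);
    }
}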

#[derive(Deserialize)]
struct SearchInIndexParameters {
    /// The index uid to search in.
    index_uid: String,
    /// The query parameter to use.
    q: Option<String>,
}
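// Minimal sketch (not in the original file): the arguments string emitted by the LLM is
// parsed exactly like the routes above do it. The field names here follow this struct;
// the names actually advertised to the model come from `setup_search_tool`, defined
// elsewhere in this module.
#[cfg(test)]
mod search_parameters_sketch {
    use super::SearchInIndexParameters;

    #[test]
    fn tool_arguments_deserialize() {
        let raw = r#"{"index_uid":"movies","q":"space opera"}"#;
        let SearchInIndexParameters { index_uid, q } = serde_json::from_str(raw).unwrap();
        assert_eq!(index_uid, "movies");
        assert_eq!(q.as_deref(), Some("space opera"));
    }
}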

fn format_documents<'t, 'doc>(
    rtxn: &RoTxn<'t>,
    index: &Index,
    doc_alloc: &'doc Bump,
    internal_docids: Vec<DocumentId>,
) -> Result<Vec<&'doc str>, ResponseError> {
    let ChatConfig { prompt: PromptData { template, max_bytes }, .. } = index.chat_config(rtxn)?;

    let prompt = Prompt::new(template, max_bytes).unwrap();
    let fid_map = index.fields_ids_map(rtxn)?;
    let metadata_builder = MetadataBuilder::from_index(index, rtxn)?;
    let fid_map_with_meta = FieldIdMapWithMetadata::new(fid_map.clone(), metadata_builder);
    let global = RwLock::new(fid_map_with_meta);
    let gfid_map = RefCell::new(GlobalFieldsIdsMap::new(&global));

    let external_ids: Vec<String> = index
        .external_id_of(rtxn, internal_docids.iter().copied())?
        .into_iter()
        .collect::<Result<_, _>>()?;

    let mut renders = Vec::new();
    for (docid, external_docid) in internal_docids.into_iter().zip(external_ids) {
        let document = match DocumentFromDb::new(docid, rtxn, index, &fid_map)? {
            Some(doc) => doc,
            None => continue,
        };

        let text = prompt.render_document(&external_docid, document, &gfid_map, doc_alloc).unwrap();
        renders.push(text);
    }

    Ok(renders)
}
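// Template note (assumption, not taken from this diff): `PromptData::template` is
// expected to hold a document template in the same templating language used elsewhere
// in Meilisearch (Liquid-style), so a chat configuration could render each hit with
// something along the lines of:
//
//     {% for field in fields %}{{ field.name }}: {{ field.value }}
//     {% endfor %}
//
// `max_bytes` then caps the size of each rendered document before the results are
// joined with newlines and handed back to the model as tool output.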
@@ -6,7 +6,7 @@ use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::settings::{
    settings, ChatSettings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
    settings, SecretPolicy, SettingEmbeddingSettings, Settings, Unchecked,
};
use meilisearch_types::tasks::KindWithContent;
use tracing::debug;
@@ -508,17 +508,6 @@ make_setting_routes!(
        camelcase_attr: "prefixSearch",
        analytics: PrefixSearchAnalytics
    },
    {
        route: "/chat",
        update_verb: put,
        value_type: ChatSettings,
        err_type: meilisearch_types::deserr::DeserrJsonError<
            meilisearch_types::error::deserr_codes::InvalidSettingsIndexChat,
        >,
        attr: chat,
        camelcase_attr: "chat",
        analytics: ChatAnalytics
    },
);

#[utoipa::path(
@@ -608,7 +597,6 @@ pub async fn update_all(
            ),
            facet_search: FacetSearchAnalytics::new(new_settings.facet_search.as_ref().set()),
            prefix_search: PrefixSearchAnalytics::new(new_settings.prefix_search.as_ref().set()),
            chat: ChatAnalytics::new(new_settings.chat.as_ref().set()),
        },
        &req,
    );

@@ -10,8 +10,8 @@ use meilisearch_types::locales::{Locale, LocalizedAttributesRuleView};
use meilisearch_types::milli::update::Setting;
use meilisearch_types::milli::FilterableAttributesRule;
use meilisearch_types::settings::{
    ChatSettings, FacetingSettings, PaginationSettings, PrefixSearchSettings,
    ProximityPrecisionView, RankingRuleView, SettingEmbeddingSettings, TypoSettings,
    FacetingSettings, PaginationSettings, PrefixSearchSettings, ProximityPrecisionView,
    RankingRuleView, SettingEmbeddingSettings, TypoSettings,
};
use serde::Serialize;

@@ -39,7 +39,6 @@ pub struct SettingsAnalytics {
    pub non_separator_tokens: NonSeparatorTokensAnalytics,
    pub facet_search: FacetSearchAnalytics,
    pub prefix_search: PrefixSearchAnalytics,
    pub chat: ChatAnalytics,
}

impl Aggregate for SettingsAnalytics {
@@ -199,7 +198,6 @@ impl Aggregate for SettingsAnalytics {
                set: new.prefix_search.set | self.prefix_search.set,
                value: new.prefix_search.value.or(self.prefix_search.value),
            },
            chat: ChatAnalytics { set: new.chat.set | self.chat.set },
        })
    }

@@ -676,18 +674,3 @@ impl PrefixSearchAnalytics {
        SettingsAnalytics { prefix_search: self, ..Default::default() }
    }
}

#[derive(Serialize, Default)]
pub struct ChatAnalytics {
    pub set: bool,
}

impl ChatAnalytics {
    pub fn new(settings: Option<&ChatSettings>) -> Self {
        Self { set: settings.is_some() }
    }

    pub fn into_settings(self) -> SettingsAnalytics {
        SettingsAnalytics { chat: self, ..Default::default() }
    }
}

@@ -52,7 +52,6 @@ const PAGINATION_DEFAULT_LIMIT_FN: fn() -> usize = || 20;

mod api_key;
pub mod batches;
pub mod chat;
mod dump;
pub mod features;
pub mod indexes;
@@ -62,7 +61,6 @@ mod multi_search;
mod multi_search_analytics;
pub mod network;
mod open_api_utils;
pub mod settings;
mod snapshot;
mod swap_indexes;
pub mod tasks;
@@ -115,9 +113,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
        .service(web::scope("/swap-indexes").configure(swap_indexes::configure))
        .service(web::scope("/metrics").configure(metrics::configure))
        .service(web::scope("/experimental-features").configure(features::configure))
        .service(web::scope("/network").configure(network::configure))
        .service(web::scope("/chat").configure(chat::configure))
        .service(web::scope("/settings/chat").configure(settings::chat::configure));
        .service(web::scope("/network").configure(network::configure));

    #[cfg(feature = "swagger")]
    {

@@ -1,107 +0,0 @@
use actix_web::web::{self, Data};
use actix_web::HttpResponse;
use index_scheduler::IndexScheduler;
use meilisearch_types::error::ResponseError;
use meilisearch_types::keys::actions;
use serde::{Deserialize, Serialize};

use crate::extractors::authentication::policies::ActionPolicy;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;

pub fn configure(cfg: &mut web::ServiceConfig) {
    cfg.service(
        web::resource("")
            .route(web::get().to(get_settings))
            .route(web::patch().to(SeqHandler(patch_settings))),
    );
}

async fn get_settings(
    index_scheduler: GuardedData<
        ActionPolicy<{ actions::CHAT_SETTINGS_GET }>,
        Data<IndexScheduler>,
    >,
) -> Result<HttpResponse, ResponseError> {
    let settings = match index_scheduler.chat_settings()? {
        Some(value) => serde_json::from_value(value).unwrap(),
        None => GlobalChatSettings::default(),
    };
    Ok(HttpResponse::Ok().json(settings))
}

async fn patch_settings(
    index_scheduler: GuardedData<
        ActionPolicy<{ actions::CHAT_SETTINGS_UPDATE }>,
        Data<IndexScheduler>,
    >,
    web::Json(chat_settings): web::Json<GlobalChatSettings>,
) -> Result<HttpResponse, ResponseError> {
    let chat_settings = serde_json::to_value(chat_settings).unwrap();
    index_scheduler.put_chat_settings(&chat_settings)?;
    Ok(HttpResponse::Ok().finish())
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct GlobalChatSettings {
    pub source: String,
    pub base_api: Option<String>,
    pub api_key: Option<String>,
    pub prompts: ChatPrompts,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct ChatPrompts {
    pub system: String,
    pub search_description: String,
    pub search_q_param: String,
    pub search_index_uid_param: String,
    pub pre_query: String,
}

#[derive(Debug, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct ChatIndexSettings {
    pub description: String,
    pub document_template: String,
}

const DEFAULT_SYSTEM_MESSAGE: &str = "You are a highly capable research assistant with access to powerful search tools. IMPORTANT INSTRUCTIONS:\
    1. When answering questions, you MUST make multiple tool calls (at least 2-3) to gather comprehensive information.\
    2. Use different search queries for each tool call - vary keywords, rephrase questions, and explore different semantic angles to ensure broad coverage.\
    3. Always explicitly announce BEFORE making each tool call by saying: \"I'll search for [specific information] now.\"\
    4. Combine information from ALL tool calls to provide complete, nuanced answers rather than relying on a single source.\
    5. For complex topics, break down your research into multiple targeted queries rather than using a single generic search.";

/// The default description of the searchInIndex tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_TOOL_DESCRIPTION: &str =
    "Search the database for relevant JSON documents using an optional query.";
/// The default description of the searchInIndex `q` parameter tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_Q_PARAMETER_TOOL_DESCRIPTION: &str =
    "The search query string used to find relevant documents in the index. \
    This should contain keywords or phrases that best represent what the user is looking for. \
    More specific queries will yield more precise results.";
/// The default description of the searchInIndex `index` parameter tool provided to OpenAI.
const DEFAULT_SEARCH_IN_INDEX_INDEX_PARAMETER_TOOL_DESCRIPTION: &str =
    "The name of the index to search within. An index is a collection of documents organized for search. \
    Selecting the right index ensures the most relevant results for the user query";

impl Default for GlobalChatSettings {
    fn default() -> Self {
        GlobalChatSettings {
            source: "openAi".to_string(),
            base_api: None,
            api_key: None,
            prompts: ChatPrompts {
                system: DEFAULT_SYSTEM_MESSAGE.to_string(),
                search_description: DEFAULT_SEARCH_IN_INDEX_TOOL_DESCRIPTION.to_string(),
                search_q_param: DEFAULT_SEARCH_IN_INDEX_Q_PARAMETER_TOOL_DESCRIPTION.to_string(),
                search_index_uid_param: DEFAULT_SEARCH_IN_INDEX_INDEX_PARAMETER_TOOL_DESCRIPTION
                    .to_string(),
                pre_query: "".to_string(),
            },
        }
    }
}
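// Minimal sketch (not part of the original diff): serializing the default settings shows
// the camelCase payload shape accepted by the PATCH handler above; only field names
// defined in this module are relied upon.
#[cfg(test)]
mod chat_settings_shape_sketch {
    use super::GlobalChatSettings;

    #[test]
    fn default_settings_serialize_to_camel_case() {
        let value = serde_json::to_value(GlobalChatSettings::default()).unwrap();
        assert_eq!(value["source"], "openAi");
        assert!(value["baseApi"].is_null());
        assert!(value["apiKey"].is_null());
        assert!(value["prompts"]["searchQParam"].is_string());
        assert!(value["prompts"]["preQuery"].is_string());
    }
}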
@@ -1 +0,0 @@
pub mod chat;
@@ -882,7 +882,7 @@ pub fn add_search_rules(filter: &mut Option<Value>, rules: IndexSearchRules) {
    }
}

pub fn prepare_search<'t>(
fn prepare_search<'t>(
    index: &'t Index,
    rtxn: &'t RoTxn,
    query: &'t SearchQuery,

@@ -421,7 +421,7 @@ async fn error_add_api_key_invalid_parameters_actions() {
|
||||
meili_snap::snapshot!(code, @"400 Bad Request");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response, { ".createdAt" => "[ignored]", ".updatedAt" => "[ignored]" }), @r###"
|
||||
{
|
||||
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`",
|
||||
"message": "Unknown value `doc.add` at `.actions[0]`: expected one of `search`, `documents.add`, `documents.get`, `documents.delete`, `documents.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `indexes.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `tasks.*`, `settings.get`, `settings.update`, `settings.*`, `stats.get`, `stats.*`, `metrics.get`, `metrics.*`, `dumps.create`, `dumps.*`, `snapshots.create`, `snapshots.*`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `*`",
|
||||
"code": "invalid_api_key_actions",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
|
||||
@@ -820,22 +820,6 @@ async fn list_api_keys() {
|
||||
"createdAt": "[ignored]",
|
||||
"updatedAt": "[ignored]"
|
||||
},
|
||||
{
|
||||
"name": "Default Chat API Key",
|
||||
"description": "Use it to chat and search from the frontend",
|
||||
"key": "[ignored]",
|
||||
"uid": "[ignored]",
|
||||
"actions": [
|
||||
"search",
|
||||
"chat.get"
|
||||
],
|
||||
"indexes": [
|
||||
"*"
|
||||
],
|
||||
"expiresAt": null,
|
||||
"createdAt": "[ignored]",
|
||||
"updatedAt": "[ignored]"
|
||||
},
|
||||
{
|
||||
"name": "Default Search API Key",
|
||||
"description": "Use it to search from the frontend",
|
||||
|
||||
@@ -93,7 +93,7 @@ async fn create_api_key_bad_actions() {
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(json_string!(response), @r###"
|
||||
{
|
||||
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `*`, `search`, `documents.*`, `documents.add`, `documents.get`, `documents.delete`, `indexes.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `tasks.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `settings.*`, `settings.get`, `settings.update`, `stats.*`, `stats.get`, `metrics.*`, `metrics.get`, `dumps.*`, `dumps.create`, `snapshots.*`, `snapshots.create`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`",
|
||||
"message": "Unknown value `doggo` at `.actions[0]`: expected one of `search`, `documents.add`, `documents.get`, `documents.delete`, `documents.*`, `indexes.create`, `indexes.get`, `indexes.update`, `indexes.delete`, `indexes.swap`, `indexes.*`, `tasks.cancel`, `tasks.delete`, `tasks.get`, `tasks.*`, `settings.get`, `settings.update`, `settings.*`, `stats.get`, `stats.*`, `metrics.get`, `metrics.*`, `dumps.create`, `dumps.*`, `snapshots.create`, `snapshots.*`, `version`, `keys.create`, `keys.get`, `keys.update`, `keys.delete`, `experimental.get`, `experimental.update`, `network.get`, `network.update`, `*`",
|
||||
"code": "invalid_api_key_actions",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_api_key_actions"
|
||||
|
||||
@@ -310,7 +310,7 @@ async fn test_summarized_document_addition_or_update() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -353,7 +353,7 @@ async fn test_summarized_document_addition_or_update() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -398,7 +398,7 @@ async fn test_summarized_delete_documents_by_batch() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -440,7 +440,7 @@ async fn test_summarized_delete_documents_by_batch() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -488,7 +488,7 @@ async fn test_summarized_delete_documents_by_filter() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -532,7 +532,7 @@ async fn test_summarized_delete_documents_by_filter() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -576,7 +576,7 @@ async fn test_summarized_delete_documents_by_filter() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -622,7 +622,7 @@ async fn test_summarized_delete_document_by_id() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -664,7 +664,7 @@ async fn test_summarized_delete_document_by_id() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -731,7 +731,7 @@ async fn test_summarized_settings_update() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -773,7 +773,7 @@ async fn test_summarized_index_creation() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 0 of type `indexCreation` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 0 of type `indexCreation` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -812,7 +812,7 @@ async fn test_summarized_index_creation() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 1 of type `indexCreation` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 1 of type `indexCreation` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -964,7 +964,7 @@ async fn test_summarized_index_update() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 0 of type `indexUpdate` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 0 of type `indexUpdate` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1003,7 +1003,7 @@ async fn test_summarized_index_update() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 1 of type `indexUpdate` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 1 of type `indexUpdate` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1043,7 +1043,7 @@ async fn test_summarized_index_update() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 3 of type `indexUpdate` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 3 of type `indexUpdate` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1082,7 +1082,7 @@ async fn test_summarized_index_update() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 4 of type `indexUpdate` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 4 of type `indexUpdate` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1134,7 +1134,7 @@ async fn test_summarized_index_swap() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 0 of type `indexSwap` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 0 of type `indexSwap` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
|
||||
@@ -1177,7 +1177,7 @@ async fn test_summarized_index_swap() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 1 of type `indexCreation` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 1 of type `indexCreation` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1224,7 +1224,7 @@ async fn test_summarized_batch_cancelation() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 1 of type `taskCancelation` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 1 of type `taskCancelation` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1271,7 +1271,7 @@ async fn test_summarized_batch_deletion() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "a batch of tasks of type `taskDeletion` cannot be batched with any other type of task"
|
||||
"batcherStoppedBecause": "a batch of tasks of type `taskDeletion` cannot be batched with any other type of task"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
@@ -1313,7 +1313,7 @@ async fn test_summarized_dump_creation() {
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "task with id 0 of type `dumpCreation` cannot be batched"
|
||||
"batcherStoppedBecause": "task with id 0 of type `dumpCreation` cannot be batched"
|
||||
}
|
||||
"###);
|
||||
}
|
||||
|
||||
@@ -87,8 +87,7 @@ async fn import_dump_v1_movie_raw() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -261,8 +260,7 @@ async fn import_dump_v1_movie_with_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -434,8 +432,7 @@ async fn import_dump_v1_rubygems_with_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -593,8 +590,7 @@ async fn import_dump_v2_movie_raw() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -764,8 +760,7 @@ async fn import_dump_v2_movie_with_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -934,8 +929,7 @@ async fn import_dump_v2_rubygems_with_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -1093,8 +1087,7 @@ async fn import_dump_v3_movie_raw() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -1264,8 +1257,7 @@ async fn import_dump_v3_movie_with_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -1434,8 +1426,7 @@ async fn import_dump_v3_rubygems_with_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -1593,8 +1584,7 @@ async fn import_dump_v4_movie_raw() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -1764,8 +1754,7 @@ async fn import_dump_v4_movie_with_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -1934,8 +1923,7 @@ async fn import_dump_v4_rubygems_with_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -2224,8 +2212,7 @@ async fn import_dump_v6_containing_experimental_features() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -2457,8 +2444,7 @@ async fn generate_and_import_dump_containing_vectors() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
|
||||
@@ -27,7 +27,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
|
||||
"duration": "[date]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "batched all enqueued tasks"
|
||||
"batcherStoppedBecause": "batched all enqueued tasks"
|
||||
},
|
||||
{
|
||||
"uid": 1,
|
||||
@@ -51,7 +51,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
|
||||
"duration": "PT0.144827890S",
|
||||
"startedAt": "2025-02-04T10:15:21.275640274Z",
|
||||
"finishedAt": "2025-02-04T10:15:21.420468164Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 0,
|
||||
@@ -72,7 +72,7 @@ source: crates/meilisearch/tests/dumps/mod.rs
|
||||
"duration": "PT0.032902186S",
|
||||
"startedAt": "2025-02-04T10:14:43.559526162Z",
|
||||
"finishedAt": "2025-02-04T10:14:43.592428348Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 3,
|
||||
|
||||
@@ -1976,93 +1976,3 @@ async fn change_facet_casing() {
|
||||
})
|
||||
.await;
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
async fn test_exact_typos_terms() {
|
||||
let documents = json!([
|
||||
{
|
||||
"id": 0,
|
||||
"title": "The zeroth document 1298484",
|
||||
},
|
||||
{
|
||||
"id": 1,
|
||||
"title": "The first document 234342",
|
||||
"nested": {
|
||||
"object": "field 22231",
|
||||
"machin": "bidule 23443.32111",
|
||||
},
|
||||
},
|
||||
{
|
||||
"id": 2,
|
||||
"title": "The second document 3398499",
|
||||
"nested": [
|
||||
"array",
|
||||
{
|
||||
"object": "field 23245121,23223",
|
||||
},
|
||||
{
|
||||
"prout": "truc 123980612321",
|
||||
"machin": "lol 12345645333447879",
|
||||
},
|
||||
],
|
||||
},
|
||||
{
|
||||
"id": 3,
|
||||
"title": "The third document 12333",
|
||||
"nested": "I lied 98878",
|
||||
},
|
||||
]);
|
||||
|
||||
// Test prefix search
|
||||
test_settings_documents_indexing_swapping_and_search(
|
||||
&documents,
|
||||
&json!({
|
||||
"searchableAttributes": ["title", "nested.object", "nested.machin"],
|
||||
"typoTolerance": {
|
||||
"enabled": true,
|
||||
"disableOnNumbers": true
|
||||
}
|
||||
}),
|
||||
&json!({"q": "12345"}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
snapshot!(json_string!(response["hits"]), @r###"
|
||||
[
|
||||
{
|
||||
"id": 2,
|
||||
"title": "The second document 3398499",
|
||||
"nested": [
|
||||
"array",
|
||||
{
|
||||
"object": "field 23245121,23223"
|
||||
},
|
||||
{
|
||||
"prout": "truc 123980612321",
|
||||
"machin": "lol 12345645333447879"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
"###);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
|
||||
// Test typo search
|
||||
test_settings_documents_indexing_swapping_and_search(
|
||||
&documents,
|
||||
&json!({
|
||||
"searchableAttributes": ["title", "nested.object", "nested.machin"],
|
||||
"typoTolerance": {
|
||||
"enabled": true,
|
||||
"disableOnNumbers": true
|
||||
}
|
||||
}),
|
||||
&json!({"q": "123457"}),
|
||||
|response, code| {
|
||||
assert_eq!(code, 200, "{}", response);
|
||||
snapshot!(json_string!(response["hits"]), @r###"[]"###);
|
||||
},
|
||||
)
|
||||
.await;
|
||||
}
|
||||
|
||||
@@ -274,7 +274,7 @@ async fn settings_bad_typo_tolerance() {
|
||||
snapshot!(code, @"400 Bad Request");
|
||||
snapshot!(json_string!(response), @r###"
|
||||
{
|
||||
"message": "Unknown field `typoTolerance`: expected one of `enabled`, `minWordSizeForTypos`, `disableOnWords`, `disableOnAttributes`, `disableOnNumbers`",
|
||||
"message": "Unknown field `typoTolerance`: expected one of `enabled`, `minWordSizeForTypos`, `disableOnWords`, `disableOnAttributes`",
|
||||
"code": "invalid_settings_typo_tolerance",
|
||||
"type": "invalid_request",
|
||||
"link": "https://docs.meilisearch.com/errors#invalid_settings_typo_tolerance"
|
||||
|
||||
@@ -179,7 +179,7 @@ test_setting_routes!(
|
||||
{
|
||||
setting: typo_tolerance,
|
||||
update_verb: patch,
|
||||
default_value: {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": [], "disableOnNumbers": false}
|
||||
default_value: {"enabled": true, "minWordSizeForTypos": {"oneTypo": 5, "twoTypos": 9}, "disableOnWords": [], "disableOnAttributes": []}
|
||||
},
|
||||
);
|
||||
|
||||
@@ -276,7 +276,7 @@ async fn secrets_are_hidden_in_settings() {
|
||||
|
||||
let (response, code) = index.settings().await;
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r###"
|
||||
meili_snap::snapshot!(meili_snap::json_string!(response), @r#"
|
||||
{
|
||||
"displayedAttributes": [
|
||||
"*"
|
||||
@@ -308,8 +308,7 @@ async fn secrets_are_hidden_in_settings() {
|
||||
"twoTypos": 9
|
||||
},
|
||||
"disableOnWords": [],
|
||||
"disableOnAttributes": [],
|
||||
"disableOnNumbers": false
|
||||
"disableOnAttributes": []
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 100,
|
||||
@@ -338,7 +337,7 @@ async fn secrets_are_hidden_in_settings() {
|
||||
"facetSearch": true,
|
||||
"prefixSearch": "indexingTime"
|
||||
}
|
||||
"###);
|
||||
"#);
|
||||
|
||||
let (response, code) = server.get_task(settings_update_uid).await;
|
||||
meili_snap::snapshot!(code, @"200 OK");
|
||||
|
||||
@@ -43,7 +43,7 @@ async fn version_too_old() {
|
||||
std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
|
||||
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
|
||||
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
|
||||
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.0");
|
||||
snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.14.0");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
@@ -54,11 +54,11 @@ async fn version_requires_downgrade() {
|
||||
std::fs::create_dir_all(&db_path).unwrap();
|
||||
let major = meilisearch_types::versioning::VERSION_MAJOR;
|
||||
let minor = meilisearch_types::versioning::VERSION_MINOR;
|
||||
let patch = meilisearch_types::versioning::VERSION_PATCH + 1;
|
||||
let patch = meilisearch_types::versioning::VERSION_PATCH.parse::<u32>().unwrap() + 1;
|
||||
std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
|
||||
let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
|
||||
let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
|
||||
snapshot!(err, @"Database version 1.15.1 is higher than the Meilisearch version 1.15.0. Downgrade is not supported");
|
||||
snapshot!(err, @"Database version 1.14.1 is higher than the Meilisearch version 1.14.0. Downgrade is not supported");
|
||||
}
|
||||
|
||||
#[actix_rt::test]
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
---
|
||||
source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
snapshot_kind: text
|
||||
---
|
||||
{
|
||||
"displayedAttributes": [
|
||||
@@ -48,8 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
],
|
||||
"disableOnAttributes": [
|
||||
"surname"
|
||||
],
|
||||
"disableOnNumbers": false
|
||||
]
|
||||
},
|
||||
"faceting": {
|
||||
"maxValuesPerFacet": 99,
|
||||
|
||||
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"progress": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.15.0"
|
||||
"upgradeTo": "v1.14.0"
|
||||
},
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
@@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
|
||||
"batcherStoppedBecause": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
|
||||
},
|
||||
{
|
||||
"uid": 23,
|
||||
@@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.004146631S",
|
||||
"startedAt": "2025-01-23T11:38:57.012591321Z",
|
||||
"finishedAt": "2025-01-23T11:38:57.016737952Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 22,
|
||||
@@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.102738497S",
|
||||
"startedAt": "2025-01-23T11:36:22.551906856Z",
|
||||
"finishedAt": "2025-01-23T11:36:22.654645353Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 21,
|
||||
@@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.005108474S",
|
||||
"startedAt": "2025-01-23T11:36:04.132670526Z",
|
||||
"finishedAt": "2025-01-23T11:36:04.137779Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 20,
|
||||
@@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.027954894S",
|
||||
"startedAt": "2025-01-23T11:35:53.631082795Z",
|
||||
"finishedAt": "2025-01-23T11:35:53.659037689Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 19,
|
||||
@@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.006903297S",
|
||||
"startedAt": "2025-01-20T11:50:52.874106134Z",
|
||||
"finishedAt": "2025-01-20T11:50:52.881009431Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 18,
|
||||
@@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000481257S",
|
||||
"startedAt": "2025-01-20T11:48:04.92820416Z",
|
||||
"finishedAt": "2025-01-20T11:48:04.928685417Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 17,
|
||||
@@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000407005S",
|
||||
"startedAt": "2025-01-20T11:47:53.509403957Z",
|
||||
"finishedAt": "2025-01-20T11:47:53.509810962Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 16,
|
||||
@@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000403716S",
|
||||
"startedAt": "2025-01-20T11:47:48.430653005Z",
|
||||
"finishedAt": "2025-01-20T11:47:48.431056721Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 15,
|
||||
@@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000417016S",
|
||||
"startedAt": "2025-01-20T11:47:42.429678617Z",
|
||||
"finishedAt": "2025-01-20T11:47:42.430095633Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 14,
|
||||
@@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT12.086284842S",
|
||||
"startedAt": "2025-01-20T11:47:03.092181576Z",
|
||||
"finishedAt": "2025-01-20T11:47:15.178466418Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 13,
|
||||
@@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.011506614S",
|
||||
"startedAt": "2025-01-16T17:18:43.29334923Z",
|
||||
"finishedAt": "2025-01-16T17:18:43.304855844Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 12,
|
||||
@@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007640163S",
|
||||
"startedAt": "2025-01-16T17:02:52.539749853Z",
|
||||
"finishedAt": "2025-01-16T17:02:52.547390016Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 11,
|
||||
@@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007307840S",
|
||||
"startedAt": "2025-01-16T17:01:14.112756687Z",
|
||||
"finishedAt": "2025-01-16T17:01:14.120064527Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 10,
|
||||
@@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007391353S",
|
||||
"startedAt": "2025-01-16T17:00:29.201180268Z",
|
||||
"finishedAt": "2025-01-16T17:00:29.208571621Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 9,
|
||||
@@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007445825S",
|
||||
"startedAt": "2025-01-16T17:00:15.77629445Z",
|
||||
"finishedAt": "2025-01-16T17:00:15.783740275Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 8,
|
||||
@@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.012020083S",
|
||||
"startedAt": "2025-01-16T16:59:42.744086671Z",
|
||||
"finishedAt": "2025-01-16T16:59:42.756106754Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 7,
|
||||
@@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007440092S",
|
||||
"startedAt": "2025-01-16T16:58:41.2155771Z",
|
||||
"finishedAt": "2025-01-16T16:58:41.223017192Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 6,
|
||||
@@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007565161S",
|
||||
"startedAt": "2025-01-16T16:54:51.940332781Z",
|
||||
"finishedAt": "2025-01-16T16:54:51.947897942Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 5,
|
||||
@@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.016307263S",
|
||||
"startedAt": "2025-01-16T16:53:19.913351957Z",
|
||||
"finishedAt": "2025-01-16T16:53:19.92965922Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 23,
|
||||
|
||||
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"progress": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.15.0"
|
||||
"upgradeTo": "v1.14.0"
|
||||
},
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
@@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
|
||||
"batcherStoppedBecause": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
|
||||
},
|
||||
{
|
||||
"uid": 23,
|
||||
@@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.004146631S",
|
||||
"startedAt": "2025-01-23T11:38:57.012591321Z",
|
||||
"finishedAt": "2025-01-23T11:38:57.016737952Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 22,
|
||||
@@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.102738497S",
|
||||
"startedAt": "2025-01-23T11:36:22.551906856Z",
|
||||
"finishedAt": "2025-01-23T11:36:22.654645353Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 21,
|
||||
@@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.005108474S",
|
||||
"startedAt": "2025-01-23T11:36:04.132670526Z",
|
||||
"finishedAt": "2025-01-23T11:36:04.137779Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 20,
|
||||
@@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.027954894S",
|
||||
"startedAt": "2025-01-23T11:35:53.631082795Z",
|
||||
"finishedAt": "2025-01-23T11:35:53.659037689Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 19,
|
||||
@@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.006903297S",
|
||||
"startedAt": "2025-01-20T11:50:52.874106134Z",
|
||||
"finishedAt": "2025-01-20T11:50:52.881009431Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 18,
|
||||
@@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000481257S",
|
||||
"startedAt": "2025-01-20T11:48:04.92820416Z",
|
||||
"finishedAt": "2025-01-20T11:48:04.928685417Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 17,
|
||||
@@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000407005S",
|
||||
"startedAt": "2025-01-20T11:47:53.509403957Z",
|
||||
"finishedAt": "2025-01-20T11:47:53.509810962Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 16,
|
||||
@@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000403716S",
"startedAt": "2025-01-20T11:47:48.430653005Z",
"finishedAt": "2025-01-20T11:47:48.431056721Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 15,
@@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.000417016S",
"startedAt": "2025-01-20T11:47:42.429678617Z",
"finishedAt": "2025-01-20T11:47:42.430095633Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 14,
@@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT12.086284842S",
"startedAt": "2025-01-20T11:47:03.092181576Z",
"finishedAt": "2025-01-20T11:47:15.178466418Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 13,
@@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.011506614S",
"startedAt": "2025-01-16T17:18:43.29334923Z",
"finishedAt": "2025-01-16T17:18:43.304855844Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 12,
@@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007640163S",
"startedAt": "2025-01-16T17:02:52.539749853Z",
"finishedAt": "2025-01-16T17:02:52.547390016Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 11,
@@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007307840S",
"startedAt": "2025-01-16T17:01:14.112756687Z",
"finishedAt": "2025-01-16T17:01:14.120064527Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 10,
@@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007391353S",
"startedAt": "2025-01-16T17:00:29.201180268Z",
"finishedAt": "2025-01-16T17:00:29.208571621Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 9,
@@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007445825S",
"startedAt": "2025-01-16T17:00:15.77629445Z",
"finishedAt": "2025-01-16T17:00:15.783740275Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 8,
@@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.012020083S",
"startedAt": "2025-01-16T16:59:42.744086671Z",
"finishedAt": "2025-01-16T16:59:42.756106754Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 7,
@@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007440092S",
"startedAt": "2025-01-16T16:58:41.2155771Z",
"finishedAt": "2025-01-16T16:58:41.223017192Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 6,
@@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.007565161S",
"startedAt": "2025-01-16T16:54:51.940332781Z",
"finishedAt": "2025-01-16T16:54:51.947897942Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
},
{
"uid": 5,
@@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
"duration": "PT0.016307263S",
"startedAt": "2025-01-16T16:53:19.913351957Z",
"finishedAt": "2025-01-16T16:53:19.92965922Z",
"batchCreationComplete": "unspecified"
"batcherStoppedBecause": "unspecified"
}
],
"total": 23,
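Note: the snapshot hunks in this file (and in the copies of it below) all record the same two renames — the batch field `batchCreationComplete` becomes `batcherStoppedBecause`, and the upgrade target drops from v1.15.0 to v1.14.0. A minimal sketch of the serialized batch metadata after the rename, assuming a plain serde model; the type and field names here are illustrative, not Meilisearch's actual definitions:

use serde::Serialize;

#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct BatchView {
    uid: u32,
    duration: String,   // ISO 8601 duration, e.g. "PT0.000417016S"
    started_at: String, // RFC 3339 timestamp
    finished_at: String,
    // Previously serialized as `batchCreationComplete`; the values are unchanged,
    // e.g. "unspecified" for ordinary batches.
    batcher_stopped_because: String,
}

fn main() {
    let batch = BatchView {
        uid: 16,
        duration: "PT0.000403716S".into(),
        started_at: "2025-01-20T11:47:48.430653005Z".into(),
        finished_at: "2025-01-20T11:47:48.431056721Z".into(),
        batcher_stopped_because: "unspecified".into(),
    };
    println!("{}", serde_json::to_string_pretty(&batch).unwrap());
}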
|
||||
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"progress": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.15.0"
|
||||
"upgradeTo": "v1.14.0"
|
||||
},
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
@@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
|
||||
"batcherStoppedBecause": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
|
||||
},
|
||||
{
|
||||
"uid": 23,
|
||||
@@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.004146631S",
|
||||
"startedAt": "2025-01-23T11:38:57.012591321Z",
|
||||
"finishedAt": "2025-01-23T11:38:57.016737952Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 22,
|
||||
@@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.102738497S",
|
||||
"startedAt": "2025-01-23T11:36:22.551906856Z",
|
||||
"finishedAt": "2025-01-23T11:36:22.654645353Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 21,
|
||||
@@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.005108474S",
|
||||
"startedAt": "2025-01-23T11:36:04.132670526Z",
|
||||
"finishedAt": "2025-01-23T11:36:04.137779Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 20,
|
||||
@@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.027954894S",
|
||||
"startedAt": "2025-01-23T11:35:53.631082795Z",
|
||||
"finishedAt": "2025-01-23T11:35:53.659037689Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 19,
|
||||
@@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.006903297S",
|
||||
"startedAt": "2025-01-20T11:50:52.874106134Z",
|
||||
"finishedAt": "2025-01-20T11:50:52.881009431Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 18,
|
||||
@@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000481257S",
|
||||
"startedAt": "2025-01-20T11:48:04.92820416Z",
|
||||
"finishedAt": "2025-01-20T11:48:04.928685417Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 17,
|
||||
@@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000407005S",
|
||||
"startedAt": "2025-01-20T11:47:53.509403957Z",
|
||||
"finishedAt": "2025-01-20T11:47:53.509810962Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 16,
|
||||
@@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000403716S",
|
||||
"startedAt": "2025-01-20T11:47:48.430653005Z",
|
||||
"finishedAt": "2025-01-20T11:47:48.431056721Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 15,
|
||||
@@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000417016S",
|
||||
"startedAt": "2025-01-20T11:47:42.429678617Z",
|
||||
"finishedAt": "2025-01-20T11:47:42.430095633Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 14,
|
||||
@@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT12.086284842S",
|
||||
"startedAt": "2025-01-20T11:47:03.092181576Z",
|
||||
"finishedAt": "2025-01-20T11:47:15.178466418Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 13,
|
||||
@@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.011506614S",
|
||||
"startedAt": "2025-01-16T17:18:43.29334923Z",
|
||||
"finishedAt": "2025-01-16T17:18:43.304855844Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 12,
|
||||
@@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007640163S",
|
||||
"startedAt": "2025-01-16T17:02:52.539749853Z",
|
||||
"finishedAt": "2025-01-16T17:02:52.547390016Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 11,
|
||||
@@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007307840S",
|
||||
"startedAt": "2025-01-16T17:01:14.112756687Z",
|
||||
"finishedAt": "2025-01-16T17:01:14.120064527Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 10,
|
||||
@@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007391353S",
|
||||
"startedAt": "2025-01-16T17:00:29.201180268Z",
|
||||
"finishedAt": "2025-01-16T17:00:29.208571621Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 9,
|
||||
@@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007445825S",
|
||||
"startedAt": "2025-01-16T17:00:15.77629445Z",
|
||||
"finishedAt": "2025-01-16T17:00:15.783740275Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 8,
|
||||
@@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.012020083S",
|
||||
"startedAt": "2025-01-16T16:59:42.744086671Z",
|
||||
"finishedAt": "2025-01-16T16:59:42.756106754Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 7,
|
||||
@@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007440092S",
|
||||
"startedAt": "2025-01-16T16:58:41.2155771Z",
|
||||
"finishedAt": "2025-01-16T16:58:41.223017192Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 6,
|
||||
@@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007565161S",
|
||||
"startedAt": "2025-01-16T16:54:51.940332781Z",
|
||||
"finishedAt": "2025-01-16T16:54:51.947897942Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 5,
|
||||
@@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.016307263S",
|
||||
"startedAt": "2025-01-16T16:53:19.913351957Z",
|
||||
"finishedAt": "2025-01-16T16:53:19.92965922Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 23,
|
||||
|
||||
@@ -29,7 +29,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 1,
|
||||
|
||||
@@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 0,
|
||||
@@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.111055654S",
|
||||
"startedAt": "2025-01-16T16:45:16.020248085Z",
|
||||
"finishedAt": "2025-01-16T16:45:16.131303739Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 2,
|
||||
|
||||
@@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 0,
|
||||
@@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.111055654S",
|
||||
"startedAt": "2025-01-16T16:45:16.020248085Z",
|
||||
"finishedAt": "2025-01-16T16:45:16.131303739Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 2,
|
||||
|
||||
@@ -25,7 +25,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 0,
|
||||
@@ -49,7 +49,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.111055654S",
|
||||
"startedAt": "2025-01-16T16:45:16.020248085Z",
|
||||
"finishedAt": "2025-01-16T16:45:16.131303739Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 2,
|
||||
|
||||
@@ -30,7 +30,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 1,
|
||||
|
||||
@@ -30,7 +30,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 1,
|
||||
|
||||
@@ -29,7 +29,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 1,
|
||||
|
||||
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.15.0"
|
||||
"upgradeTo": "v1.14.0"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
||||
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.15.0"
|
||||
"upgradeTo": "v1.14.0"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
||||
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.15.0"
|
||||
"upgradeTo": "v1.14.0"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
||||
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"progress": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.15.0"
|
||||
"upgradeTo": "v1.14.0"
|
||||
},
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
@@ -24,7 +24,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "[duration]",
|
||||
"startedAt": "[date]",
|
||||
"finishedAt": "[date]",
|
||||
"batchCreationComplete": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
|
||||
"batcherStoppedBecause": "a batch of tasks of type `upgradeDatabase` cannot be batched with any other type of task"
|
||||
},
|
||||
{
|
||||
"uid": 23,
|
||||
@@ -47,7 +47,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.004146631S",
|
||||
"startedAt": "2025-01-23T11:38:57.012591321Z",
|
||||
"finishedAt": "2025-01-23T11:38:57.016737952Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 22,
|
||||
@@ -71,7 +71,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.102738497S",
|
||||
"startedAt": "2025-01-23T11:36:22.551906856Z",
|
||||
"finishedAt": "2025-01-23T11:36:22.654645353Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 21,
|
||||
@@ -95,7 +95,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.005108474S",
|
||||
"startedAt": "2025-01-23T11:36:04.132670526Z",
|
||||
"finishedAt": "2025-01-23T11:36:04.137779Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 20,
|
||||
@@ -119,7 +119,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.027954894S",
|
||||
"startedAt": "2025-01-23T11:35:53.631082795Z",
|
||||
"finishedAt": "2025-01-23T11:35:53.659037689Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 19,
|
||||
@@ -142,7 +142,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.006903297S",
|
||||
"startedAt": "2025-01-20T11:50:52.874106134Z",
|
||||
"finishedAt": "2025-01-20T11:50:52.881009431Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 18,
|
||||
@@ -171,7 +171,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000481257S",
|
||||
"startedAt": "2025-01-20T11:48:04.92820416Z",
|
||||
"finishedAt": "2025-01-20T11:48:04.928685417Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 17,
|
||||
@@ -194,7 +194,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000407005S",
|
||||
"startedAt": "2025-01-20T11:47:53.509403957Z",
|
||||
"finishedAt": "2025-01-20T11:47:53.509810962Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 16,
|
||||
@@ -217,7 +217,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000403716S",
|
||||
"startedAt": "2025-01-20T11:47:48.430653005Z",
|
||||
"finishedAt": "2025-01-20T11:47:48.431056721Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 15,
|
||||
@@ -240,7 +240,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.000417016S",
|
||||
"startedAt": "2025-01-20T11:47:42.429678617Z",
|
||||
"finishedAt": "2025-01-20T11:47:42.430095633Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 14,
|
||||
@@ -264,7 +264,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT12.086284842S",
|
||||
"startedAt": "2025-01-20T11:47:03.092181576Z",
|
||||
"finishedAt": "2025-01-20T11:47:15.178466418Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 13,
|
||||
@@ -296,7 +296,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.011506614S",
|
||||
"startedAt": "2025-01-16T17:18:43.29334923Z",
|
||||
"finishedAt": "2025-01-16T17:18:43.304855844Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 12,
|
||||
@@ -324,7 +324,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007640163S",
|
||||
"startedAt": "2025-01-16T17:02:52.539749853Z",
|
||||
"finishedAt": "2025-01-16T17:02:52.547390016Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 11,
|
||||
@@ -347,7 +347,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007307840S",
|
||||
"startedAt": "2025-01-16T17:01:14.112756687Z",
|
||||
"finishedAt": "2025-01-16T17:01:14.120064527Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 10,
|
||||
@@ -375,7 +375,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007391353S",
|
||||
"startedAt": "2025-01-16T17:00:29.201180268Z",
|
||||
"finishedAt": "2025-01-16T17:00:29.208571621Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 9,
|
||||
@@ -403,7 +403,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007445825S",
|
||||
"startedAt": "2025-01-16T17:00:15.77629445Z",
|
||||
"finishedAt": "2025-01-16T17:00:15.783740275Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 8,
|
||||
@@ -436,7 +436,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.012020083S",
|
||||
"startedAt": "2025-01-16T16:59:42.744086671Z",
|
||||
"finishedAt": "2025-01-16T16:59:42.756106754Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 7,
|
||||
@@ -463,7 +463,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007440092S",
|
||||
"startedAt": "2025-01-16T16:58:41.2155771Z",
|
||||
"finishedAt": "2025-01-16T16:58:41.223017192Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 6,
|
||||
@@ -490,7 +490,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007565161S",
|
||||
"startedAt": "2025-01-16T16:54:51.940332781Z",
|
||||
"finishedAt": "2025-01-16T16:54:51.947897942Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 5,
|
||||
@@ -516,7 +516,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.016307263S",
|
||||
"startedAt": "2025-01-16T16:53:19.913351957Z",
|
||||
"finishedAt": "2025-01-16T16:53:19.92965922Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 4,
|
||||
@@ -540,7 +540,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.087655941S",
|
||||
"startedAt": "2025-01-16T16:52:32.631145531Z",
|
||||
"finishedAt": "2025-01-16T16:52:32.718801472Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 3,
|
||||
@@ -565,7 +565,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.007593573S",
|
||||
"startedAt": "2025-01-16T16:47:53.677901409Z",
|
||||
"finishedAt": "2025-01-16T16:47:53.685494982Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 2,
|
||||
@@ -591,7 +591,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.017769760S",
|
||||
"startedAt": "2025-01-16T16:47:41.211587682Z",
|
||||
"finishedAt": "2025-01-16T16:47:41.229357442Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 1,
|
||||
@@ -615,7 +615,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.066095506S",
|
||||
"startedAt": "2025-01-16T16:47:10.217299609Z",
|
||||
"finishedAt": "2025-01-16T16:47:10.283395115Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
},
|
||||
{
|
||||
"uid": 0,
|
||||
@@ -639,7 +639,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"duration": "PT0.111055654S",
|
||||
"startedAt": "2025-01-16T16:45:16.020248085Z",
|
||||
"finishedAt": "2025-01-16T16:45:16.131303739Z",
|
||||
"batchCreationComplete": "unspecified"
|
||||
"batcherStoppedBecause": "unspecified"
|
||||
}
|
||||
],
|
||||
"total": 25,
|
||||
|
||||
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"upgradeFrom": "v1.12.0",
|
||||
"upgradeTo": "v1.15.0"
|
||||
"upgradeTo": "v1.14.0"
|
||||
},
|
||||
"error": null,
|
||||
"duration": "[duration]",
|
||||
|
||||
@@ -49,10 +49,15 @@ impl OfflineUpgrade {
const LAST_SUPPORTED_UPGRADE_TO_VERSION: &str = "1.12.7";

let upgrade_list = [
(v1_9_to_v1_10 as fn(&Path, u32, u32, u32) -> Result<(), anyhow::Error>, 1, 10, 0),
(v1_10_to_v1_11, 1, 11, 0),
(v1_11_to_v1_12, 1, 12, 0),
(v1_12_to_v1_12_3, 1, 12, 3),
(
v1_9_to_v1_10 as fn(&Path, u32, u32, u32) -> Result<(), anyhow::Error>,
"1",
"10",
"0",
),
(v1_10_to_v1_11, "1", "11", "0"),
(v1_11_to_v1_12, "1", "12", "0"),
(v1_12_to_v1_12_3, "1", "12", "3"),
];

let no_upgrade: usize = upgrade_list.len();
@@ -90,8 +95,13 @@ impl OfflineUpgrade {

if start_at == no_upgrade {
println!("No upgrade operation to perform, writing VERSION file");
create_version_file(&self.db_path, target_major, target_minor, target_patch)
.context("while writing VERSION file after the upgrade")?;
create_version_file(
&self.db_path,
&target_major.to_string(),
&target_minor.to_string(),
&target_patch.to_string(),
)
.context("while writing VERSION file after the upgrade")?;
println!("Success");
return Ok(());
}
||||
@@ -1,13 +1,6 @@
pub const VERSION_MAJOR: u32 = parse_u32(env!("CARGO_PKG_VERSION_MAJOR"));
pub const VERSION_MINOR: u32 = parse_u32(env!("CARGO_PKG_VERSION_MINOR"));
pub const VERSION_PATCH: u32 = parse_u32(env!("CARGO_PKG_VERSION_PATCH"));

const fn parse_u32(s: &str) -> u32 {
    match u32::from_str_radix(s, 10) {
        Ok(version) => version,
        Err(_) => panic!("could not parse as u32"),
    }
}
pub static VERSION_MAJOR: &str = env!("CARGO_PKG_VERSION_MAJOR");
pub static VERSION_MINOR: &str = env!("CARGO_PKG_VERSION_MINOR");
pub static VERSION_PATCH: &str = env!("CARGO_PKG_VERSION_PATCH");

pub const RESERVED_VECTORS_FIELD_NAME: &str = "_vectors";
pub const RESERVED_GEO_FIELD_NAME: &str = "_geo";
|
||||
@@ -1,50 +0,0 @@
|
||||
use heed::{
|
||||
types::{SerdeJson, Str},
|
||||
RoTxn, RwTxn,
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
use crate::{index::main_key, Index};
|
||||
|
||||
#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq, Default)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct DisabledTyposTerms {
|
||||
pub disable_on_numbers: bool,
|
||||
}
|
||||
|
||||
impl Index {
|
||||
pub fn disabled_typos_terms(&self, txn: &RoTxn<'_>) -> heed::Result<DisabledTyposTerms> {
|
||||
self.main
|
||||
.remap_types::<Str, SerdeJson<DisabledTyposTerms>>()
|
||||
.get(txn, main_key::DISABLED_TYPOS_TERMS)
|
||||
.map(|option| option.unwrap_or_default())
|
||||
}
|
||||
|
||||
pub(crate) fn put_disabled_typos_terms(
|
||||
&self,
|
||||
txn: &mut RwTxn<'_>,
|
||||
disabled_typos_terms: &DisabledTyposTerms,
|
||||
) -> heed::Result<()> {
|
||||
self.main.remap_types::<Str, SerdeJson<DisabledTyposTerms>>().put(
|
||||
txn,
|
||||
main_key::DISABLED_TYPOS_TERMS,
|
||||
disabled_typos_terms,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub(crate) fn delete_disabled_typos_terms(&self, txn: &mut RwTxn<'_>) -> heed::Result<()> {
|
||||
self.main
|
||||
.remap_types::<Str, SerdeJson<DisabledTyposTerms>>()
|
||||
.delete(txn, main_key::DISABLED_TYPOS_TERMS)?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl DisabledTyposTerms {
|
||||
pub fn is_exact(&self, word: &str) -> bool {
|
||||
// If disable_on_numbers is true, we disable the word if it contains only numbers or punctuation
|
||||
self.disable_on_numbers && word.chars().all(|c| c.is_numeric() || c.is_ascii_punctuation())
|
||||
}
|
||||
}
|
||||
@@ -32,13 +32,13 @@ impl ExternalDocumentsIds {
|
||||
&self,
|
||||
rtxn: &RoTxn<'_>,
|
||||
external_id: A,
|
||||
) -> heed::Result<Option<DocumentId>> {
|
||||
) -> heed::Result<Option<u32>> {
|
||||
self.0.get(rtxn, external_id.as_ref())
|
||||
}
|
||||
|
||||
/// An helper function to debug this type, returns an `HashMap` of both,
|
||||
/// soft and hard fst maps, combined.
|
||||
pub fn to_hash_map(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashMap<String, DocumentId>> {
|
||||
pub fn to_hash_map(&self, rtxn: &RoTxn<'_>) -> heed::Result<HashMap<String, u32>> {
|
||||
let mut map = HashMap::default();
|
||||
for result in self.0.iter(rtxn)? {
|
||||
let (external, internal) = result?;
|
||||
|
||||
@@ -7,7 +7,6 @@ use crate::FieldId;
mod global;
pub mod metadata;
pub use global::GlobalFieldsIdsMap;
pub use metadata::{FieldIdMapWithMetadata, MetadataBuilder};

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct FieldsIdsMap {

File diff suppressed because it is too large
@@ -12,7 +12,6 @@ mod asc_desc;
mod attribute_patterns;
mod criterion;
pub mod database_stats;
pub mod disabled_typos_terms;
mod error;
mod external_documents_ids;
pub mod facet;
@@ -52,19 +51,18 @@ pub use search::new::{
};
use serde_json::Value;
pub use thread_pool_no_abort::{PanicCatched, ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};
pub use {arroy, charabia as tokenizer, heed, rhai};
pub use {charabia as tokenizer, heed, rhai};

pub use self::asc_desc::{AscDesc, AscDescError, Member, SortError};
pub use self::attribute_patterns::{AttributePatterns, PatternMatch};
pub use self::attribute_patterns::AttributePatterns;
pub use self::attribute_patterns::PatternMatch;
pub use self::criterion::{default_criteria, Criterion, CriterionError};
pub use self::error::{
    Error, FieldIdMapMissingEntry, InternalError, SerializationError, UserError,
};
pub use self::external_documents_ids::ExternalDocumentsIds;
pub use self::fieldids_weights_map::FieldidsWeightsMap;
pub use self::fields_ids_map::{
    FieldIdMapWithMetadata, FieldsIdsMap, GlobalFieldsIdsMap, MetadataBuilder,
};
pub use self::fields_ids_map::{FieldsIdsMap, GlobalFieldsIdsMap};
pub use self::filterable_attributes_rules::{
    FilterFeatures, FilterableAttributesFeatures, FilterableAttributesPatterns,
    FilterableAttributesRule,
@@ -85,6 +83,8 @@ pub use self::search::{
};
pub use self::update::ChannelCongestion;

pub use arroy;

pub type Result<T> = std::result::Result<T, error::Error>;

pub type Attribute = u32;
@@ -105,10 +105,10 @@ impl Prompt {
|
||||
max_bytes,
|
||||
};
|
||||
|
||||
// // render template with special object that's OK with `doc.*` and `fields.*`
|
||||
// this.template
|
||||
// .render(&template_checker::TemplateChecker)
|
||||
// .map_err(NewPromptError::invalid_fields_in_template)?;
|
||||
// render template with special object that's OK with `doc.*` and `fields.*`
|
||||
this.template
|
||||
.render(&template_checker::TemplateChecker)
|
||||
.map_err(NewPromptError::invalid_fields_in_template)?;
|
||||
|
||||
Ok(this)
|
||||
}
|
||||
|
||||
@@ -1,11 +1,10 @@
|
||||
use std::collections::BTreeSet;
|
||||
use std::fmt::{Debug, Display};
|
||||
use std::ops::Bound::{self, Excluded, Included, Unbounded};
|
||||
use std::ops::Bound::{self, Excluded, Included};
|
||||
|
||||
use either::Either;
|
||||
pub use filter_parser::{Condition, Error as FPError, FilterCondition, Token};
|
||||
use heed::types::LazyDecode;
|
||||
use heed::BytesEncode;
|
||||
use memchr::memmem::Finder;
|
||||
use roaring::{MultiOps, RoaringBitmap};
|
||||
use serde_json::Value;
|
||||
@@ -15,7 +14,7 @@ use crate::constants::RESERVED_GEO_FIELD_NAME;
|
||||
use crate::error::{Error, UserError};
|
||||
use crate::filterable_attributes_rules::{filtered_matching_patterns, matching_features};
|
||||
use crate::heed_codec::facet::{
|
||||
FacetGroupKey, FacetGroupKeyCodec, FacetGroupValue, FacetGroupValueCodec,
|
||||
FacetGroupKey, FacetGroupKeyCodec, FacetGroupValue, FacetGroupValueCodec, OrderedF64Codec,
|
||||
};
|
||||
use crate::index::db_name::FACET_ID_STRING_DOCIDS;
|
||||
use crate::{
|
||||
@@ -272,7 +271,7 @@ impl<'a> Filter<'a> {
|
||||
// as the facets values are all in the same database and prefixed by the
|
||||
// field id and the level.
|
||||
|
||||
let (number_bounds, (left_str, right_str)) = match operator {
|
||||
let (left, right) = match operator {
|
||||
// return an error if the filter is not allowed for this field
|
||||
Condition::GreaterThan(_)
|
||||
| Condition::GreaterThanOrEqual(_)
|
||||
@@ -306,37 +305,17 @@ impl<'a> Filter<'a> {
|
||||
));
|
||||
}
|
||||
Condition::GreaterThan(val) => {
|
||||
let number = val.parse_finite_float().ok();
|
||||
let number_bounds = number.map(|number| (Excluded(number), Included(f64::MAX)));
|
||||
let str_bounds = (Excluded(val.value()), Unbounded);
|
||||
(number_bounds, str_bounds)
|
||||
(Excluded(val.parse_finite_float()?), Included(f64::MAX))
|
||||
}
|
||||
Condition::GreaterThanOrEqual(val) => {
|
||||
let number = val.parse_finite_float().ok();
|
||||
let number_bounds = number.map(|number| (Included(number), Included(f64::MAX)));
|
||||
let str_bounds = (Included(val.value()), Unbounded);
|
||||
(number_bounds, str_bounds)
|
||||
}
|
||||
Condition::LowerThan(val) => {
|
||||
let number = val.parse_finite_float().ok();
|
||||
let number_bounds = number.map(|number| (Included(f64::MIN), Excluded(number)));
|
||||
let str_bounds = (Unbounded, Excluded(val.value()));
|
||||
(number_bounds, str_bounds)
|
||||
(Included(val.parse_finite_float()?), Included(f64::MAX))
|
||||
}
|
||||
Condition::LowerThan(val) => (Included(f64::MIN), Excluded(val.parse_finite_float()?)),
|
||||
Condition::LowerThanOrEqual(val) => {
|
||||
let number = val.parse_finite_float().ok();
|
||||
let number_bounds = number.map(|number| (Included(f64::MIN), Included(number)));
|
||||
let str_bounds = (Unbounded, Included(val.value()));
|
||||
(number_bounds, str_bounds)
|
||||
(Included(f64::MIN), Included(val.parse_finite_float()?))
|
||||
}
|
||||
Condition::Between { from, to } => {
|
||||
let from_number = from.parse_finite_float().ok();
|
||||
let to_number = to.parse_finite_float().ok();
|
||||
|
||||
let number_bounds =
|
||||
from_number.zip(to_number).map(|(from, to)| (Included(from), Included(to)));
|
||||
let str_bounds = (Included(from.value()), Included(to.value()));
|
||||
(number_bounds, str_bounds)
|
||||
(Included(from.parse_finite_float()?), Included(to.parse_finite_float()?))
|
||||
}
|
||||
Condition::Null => {
|
||||
let is_null = index.null_faceted_documents_ids(rtxn, field_id)?;
|
||||
@@ -436,47 +415,29 @@ impl<'a> Filter<'a> {
|
||||
};
|
||||
|
||||
let mut output = RoaringBitmap::new();
|
||||
|
||||
if let Some((left_number, right_number)) = number_bounds {
|
||||
Self::explore_facet_levels(
|
||||
rtxn,
|
||||
numbers_db,
|
||||
field_id,
|
||||
&left_number,
|
||||
&right_number,
|
||||
universe,
|
||||
&mut output,
|
||||
)?;
|
||||
}
|
||||
|
||||
Self::explore_facet_levels(
|
||||
Self::explore_facet_number_levels(
|
||||
rtxn,
|
||||
strings_db,
|
||||
numbers_db,
|
||||
field_id,
|
||||
&left_str,
|
||||
&right_str,
|
||||
left,
|
||||
right,
|
||||
universe,
|
||||
&mut output,
|
||||
)?;
|
||||
|
||||
Ok(output)
|
||||
}
|
||||
|
||||
/// Aggregates the documents ids that are part of the specified range automatically
|
||||
/// going deeper through the levels.
|
||||
fn explore_facet_levels<'data, BoundCodec>(
|
||||
rtxn: &'data heed::RoTxn<'data>,
|
||||
db: heed::Database<FacetGroupKeyCodec<BoundCodec>, FacetGroupValueCodec>,
|
||||
fn explore_facet_number_levels(
|
||||
rtxn: &heed::RoTxn<'_>,
|
||||
db: heed::Database<FacetGroupKeyCodec<OrderedF64Codec>, FacetGroupValueCodec>,
|
||||
field_id: FieldId,
|
||||
left: &'data Bound<<BoundCodec as heed::BytesEncode<'data>>::EItem>,
|
||||
right: &'data Bound<<BoundCodec as heed::BytesEncode<'data>>::EItem>,
|
||||
left: Bound<f64>,
|
||||
right: Bound<f64>,
|
||||
universe: Option<&RoaringBitmap>,
|
||||
output: &mut RoaringBitmap,
|
||||
) -> Result<()>
|
||||
where
|
||||
BoundCodec: for<'b> BytesEncode<'b>,
|
||||
for<'b> <BoundCodec as BytesEncode<'b>>::EItem: Sized + PartialOrd,
|
||||
{
|
||||
) -> Result<()> {
|
||||
match (left, right) {
|
||||
// lower TO upper when lower > upper must return no result
|
||||
(Included(l), Included(r)) if l > r => return Ok(()),
|
||||
@@ -485,8 +446,8 @@ impl<'a> Filter<'a> {
|
||||
(Excluded(l), Included(r)) if l >= r => return Ok(()),
|
||||
(_, _) => (),
|
||||
}
|
||||
facet_range_search::find_docids_of_facet_within_bounds::<BoundCodec>(
|
||||
rtxn, db, field_id, left, right, universe, output,
|
||||
facet_range_search::find_docids_of_facet_within_bounds::<OrderedF64Codec>(
|
||||
rtxn, db, field_id, &left, &right, universe, output,
|
||||
)?;
|
||||
|
||||
Ok(())
|
||||
@@ -1288,24 +1249,28 @@ mod tests {
|
||||
let result = filter.evaluate(&rtxn, &index).unwrap();
|
||||
assert!(result.contains(0));
|
||||
let filter = Filter::from_str("price < inf").unwrap().unwrap();
|
||||
let result = filter.evaluate(&rtxn, &index).unwrap();
|
||||
// this is allowed due to filters with strings
|
||||
assert!(result.contains(1));
|
||||
assert!(matches!(
|
||||
filter.evaluate(&rtxn, &index),
|
||||
Err(crate::Error::UserError(crate::error::UserError::InvalidFilter(_)))
|
||||
));
|
||||
|
||||
let filter = Filter::from_str("price = NaN").unwrap().unwrap();
|
||||
let result = filter.evaluate(&rtxn, &index).unwrap();
|
||||
assert!(result.is_empty());
|
||||
let filter = Filter::from_str("price < NaN").unwrap().unwrap();
|
||||
let result = filter.evaluate(&rtxn, &index).unwrap();
|
||||
assert!(result.contains(1));
|
||||
assert!(matches!(
|
||||
filter.evaluate(&rtxn, &index),
|
||||
Err(crate::Error::UserError(crate::error::UserError::InvalidFilter(_)))
|
||||
));
|
||||
|
||||
let filter = Filter::from_str("price = infinity").unwrap().unwrap();
|
||||
let result = filter.evaluate(&rtxn, &index).unwrap();
|
||||
assert!(result.contains(2));
|
||||
let filter = Filter::from_str("price < infinity").unwrap().unwrap();
|
||||
let result = filter.evaluate(&rtxn, &index).unwrap();
|
||||
assert!(result.contains(0));
|
||||
assert!(result.contains(1));
|
||||
assert!(matches!(
|
||||
filter.evaluate(&rtxn, &index),
|
||||
Err(crate::Error::UserError(crate::error::UserError::InvalidFilter(_)))
|
||||
));
|
||||
}
|
||||
|
||||
#[test]
|
||||
|
||||
@@ -8,7 +8,7 @@ use roaring::bitmap::RoaringBitmap;
|
||||
|
||||
pub use self::facet::{FacetDistribution, Filter, OrderBy, DEFAULT_VALUES_PER_FACET};
|
||||
pub use self::new::matches::{FormatOptions, MatchBounds, MatcherBuilder, MatchingWords};
|
||||
use self::new::{execute_vector_search, PartialSearchResult, VectorStoreStats};
|
||||
use self::new::{execute_vector_search, PartialSearchResult};
|
||||
use crate::filterable_attributes_rules::{filtered_matching_patterns, matching_features};
|
||||
use crate::score_details::{ScoreDetails, ScoringStrategy};
|
||||
use crate::vector::Embedder;
|
||||
@@ -269,12 +269,6 @@ impl<'a> Search<'a> {
|
||||
)?,
|
||||
};
|
||||
|
||||
if let Some(VectorStoreStats { total_time, total_queries, total_results }) =
|
||||
ctx.vector_store_stats
|
||||
{
|
||||
tracing::debug!("Vector store stats: total_time={total_time:.02?}, total_queries={total_queries}, total_results={total_results}");
|
||||
}
|
||||
|
||||
// consume context and located_query_terms to build MatchingWords.
|
||||
let matching_words = match located_query_terms {
|
||||
Some(located_query_terms) => MatchingWords::new(ctx, located_query_terms),
|
||||
|
||||
@@ -22,8 +22,6 @@ mod vector_sort;
|
||||
mod tests;
|
||||
|
||||
use std::collections::HashSet;
|
||||
use std::ops::AddAssign;
|
||||
use std::time::Duration;
|
||||
|
||||
use bucket_sort::{bucket_sort, BucketSortOutput};
|
||||
use charabia::{Language, TokenizerBuilder};
|
||||
@@ -74,7 +72,6 @@ pub struct SearchContext<'ctx> {
|
||||
pub phrase_docids: PhraseDocIdsCache,
|
||||
pub restricted_fids: Option<RestrictedFids>,
|
||||
pub prefix_search: PrefixSearch,
|
||||
pub vector_store_stats: Option<VectorStoreStats>,
|
||||
}
|
||||
|
||||
impl<'ctx> SearchContext<'ctx> {
|
||||
@@ -104,7 +101,6 @@ impl<'ctx> SearchContext<'ctx> {
|
||||
phrase_docids: <_>::default(),
|
||||
restricted_fids: None,
|
||||
prefix_search,
|
||||
vector_store_stats: None,
|
||||
})
|
||||
}
|
||||
|
||||
@@ -170,25 +166,6 @@ impl<'ctx> SearchContext<'ctx> {
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Default)]
|
||||
pub struct VectorStoreStats {
|
||||
/// The total time spent on vector search.
|
||||
pub total_time: Duration,
|
||||
/// The number of searches performed.
|
||||
pub total_queries: usize,
|
||||
/// The number of nearest neighbors found.
|
||||
pub total_results: usize,
|
||||
}
|
||||
|
||||
impl AddAssign for VectorStoreStats {
|
||||
fn add_assign(&mut self, other: Self) {
|
||||
let Self { total_time, total_queries, total_results } = self;
|
||||
*total_time += other.total_time;
|
||||
*total_queries += other.total_queries;
|
||||
*total_results += other.total_results;
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Copy, PartialEq, PartialOrd, Ord, Eq)]
|
||||
pub enum Word {
|
||||
Original(Interned<String>),
|
||||
|
||||
@@ -1,10 +1,8 @@
|
||||
use std::iter::FromIterator;
|
||||
use std::time::Instant;
|
||||
|
||||
use roaring::RoaringBitmap;
|
||||
|
||||
use super::ranking_rules::{RankingRule, RankingRuleOutput, RankingRuleQueryTrait};
|
||||
use super::VectorStoreStats;
|
||||
use crate::score_details::{self, ScoreDetails};
|
||||
use crate::vector::{ArroyWrapper, DistributionShift, Embedder};
|
||||
use crate::{DocumentId, Result, SearchContext, SearchLogger};
|
||||
@@ -55,15 +53,9 @@ impl<Q: RankingRuleQueryTrait> VectorSort<Q> {
|
||||
) -> Result<()> {
|
||||
let target = &self.target;
|
||||
|
||||
let before = Instant::now();
|
||||
let reader = ArroyWrapper::new(ctx.index.vector_arroy, self.embedder_index, self.quantized);
|
||||
let results = reader.nns_by_vector(ctx.txn, target, self.limit, Some(vector_candidates))?;
|
||||
self.cached_sorted_docids = results.into_iter();
|
||||
*ctx.vector_store_stats.get_or_insert_default() += VectorStoreStats {
|
||||
total_time: before.elapsed(),
|
||||
total_queries: 1,
|
||||
total_results: self.cached_sorted_docids.len(),
|
||||
};
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
@@ -0,0 +1,4 @@
---
source: milli/src/index.rs
---
[0, ]
@@ -0,0 +1,4 @@
---
source: milli/src/index.rs
---
[]
@@ -1,4 +0,0 @@
---
source: crates/milli/src/test_index.rs
---
[0, ]
@@ -1,4 +0,0 @@
---
source: crates/milli/src/test_index.rs
---
[]
File diff suppressed because it is too large
@@ -1,45 +0,0 @@
use deserr::Deserr;
use serde::{Deserialize, Serialize};
use utoipa::ToSchema;

use crate::index::ChatConfig;
use crate::prompt::{default_max_bytes, PromptData};
use crate::update::Setting;

#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq, Deserr, ToSchema)]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
#[deserr(deny_unknown_fields, rename_all = camelCase)]
pub struct ChatSettings {
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]
    pub description: Setting<String>,

    /// A liquid template used to render documents to a text that can be embedded.
    ///
    /// Meillisearch interpolates the template for each document and sends the resulting text to the embedder.
    /// The embedder then generates document vectors based on this text.
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]
    pub document_template: Setting<String>,

    /// Rendered texts are truncated to this size. Defaults to 400.
    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<usize>)]
    pub document_template_max_bytes: Setting<usize>,
}

impl From<ChatConfig> for ChatSettings {
    fn from(config: ChatConfig) -> Self {
        let ChatConfig { description, prompt: PromptData { template, max_bytes } } = config;
        ChatSettings {
            description: Setting::Set(description),
            document_template: Setting::Set(template),
            document_template_max_bytes: Setting::Set(
                max_bytes.unwrap_or(default_max_bytes()).get(),
            ),
        }
    }
}
// merge all deletions
|
||||
let obkv = KvReaderDelAdd::from_slice(value);
|
||||
if let Some(value) = obkv.get(DelAdd::Deletion) {
|
||||
let delete_from_exact = settings_diff.old.exact_attributes.contains(&fid)
|
||||
|| settings_diff.old.disabled_typos_terms.is_exact(w);
|
||||
let delete_from_exact = settings_diff.old.exact_attributes.contains(&fid);
|
||||
buffer.clear();
|
||||
let mut obkv = KvWriterDelAdd::new(&mut buffer);
|
||||
obkv.insert(DelAdd::Deletion, value)?;
|
||||
@@ -140,8 +139,7 @@ pub fn extract_word_docids<R: io::Read + io::Seek>(
|
||||
}
|
||||
// merge all additions
|
||||
if let Some(value) = obkv.get(DelAdd::Addition) {
|
||||
let add_in_exact = settings_diff.new.exact_attributes.contains(&fid)
|
||||
|| settings_diff.new.disabled_typos_terms.is_exact(w);
|
||||
let add_in_exact = settings_diff.new.exact_attributes.contains(&fid);
|
||||
buffer.clear();
|
||||
let mut obkv = KvWriterDelAdd::new(&mut buffer);
|
||||
obkv.insert(DelAdd::Addition, value)?;
|
||||
|
||||
@@ -273,11 +273,14 @@ pub(crate) fn write_typed_chunk_into_index(
|
||||
unreachable!();
|
||||
};
|
||||
let clonable_word_docids = unsafe { as_cloneable_grenad(&word_docids_reader) }?;
|
||||
let clonable_exact_word_docids =
|
||||
unsafe { as_cloneable_grenad(&exact_word_docids_reader) }?;
|
||||
|
||||
word_docids_builder.push(word_docids_reader.into_cursor()?);
|
||||
exact_word_docids_builder.push(exact_word_docids_reader.into_cursor()?);
|
||||
word_fid_docids_builder.push(word_fid_docids_reader.into_cursor()?);
|
||||
fst_merger_builder.push(clonable_word_docids.into_cursor()?);
|
||||
fst_merger_builder.push(clonable_exact_word_docids.into_cursor()?);
|
||||
}
|
||||
|
||||
let word_docids_merger = word_docids_builder.build();
|
||||
|
||||
@@ -1,5 +1,4 @@
|
||||
pub use self::available_ids::AvailableIds;
|
||||
pub use self::chat::ChatSettings;
|
||||
pub use self::clear_documents::ClearDocuments;
|
||||
pub use self::concurrent_available_ids::ConcurrentAvailableIds;
|
||||
pub use self::facet::bulk::FacetsUpdateBulk;
|
||||
@@ -14,7 +13,6 @@ pub use self::words_prefix_integer_docids::WordPrefixIntegerDocids;
|
||||
pub use self::words_prefixes_fst::WordsPrefixesFst;
|
||||
|
||||
mod available_ids;
|
||||
mod chat;
|
||||
mod clear_documents;
|
||||
mod concurrent_available_ids;
|
||||
pub(crate) mod del_add;
|
||||
|
||||
@@ -319,11 +319,8 @@ impl WordDocidsExtractors {
|
||||
let doc_alloc = &context.doc_alloc;
|
||||
|
||||
let exact_attributes = index.exact_attributes(rtxn)?;
|
||||
let disabled_typos_terms = index.disabled_typos_terms(rtxn)?;
|
||||
let is_exact = |fname: &str, word: &str| {
|
||||
exact_attributes.iter().any(|attr| contained_in(fname, attr))
|
||||
|| disabled_typos_terms.is_exact(word)
|
||||
};
|
||||
let is_exact_attribute =
|
||||
|fname: &str| exact_attributes.iter().any(|attr| contained_in(fname, attr));
|
||||
match document_change {
|
||||
DocumentChange::Deletion(inner) => {
|
||||
let mut token_fn = |fname: &str, fid, pos, word: &str| {
|
||||
@@ -331,7 +328,7 @@ impl WordDocidsExtractors {
|
||||
fid,
|
||||
pos,
|
||||
word,
|
||||
is_exact(fname, word),
|
||||
is_exact_attribute(fname),
|
||||
inner.docid(),
|
||||
doc_alloc,
|
||||
)
|
||||
@@ -359,7 +356,7 @@ impl WordDocidsExtractors {
|
||||
fid,
|
||||
pos,
|
||||
word,
|
||||
is_exact(fname, word),
|
||||
is_exact_attribute(fname),
|
||||
inner.docid(),
|
||||
doc_alloc,
|
||||
)
|
||||
@@ -375,7 +372,7 @@ impl WordDocidsExtractors {
|
||||
fid,
|
||||
pos,
|
||||
word,
|
||||
is_exact(fname, word),
|
||||
is_exact_attribute(fname),
|
||||
inner.docid(),
|
||||
doc_alloc,
|
||||
)
|
||||
@@ -392,7 +389,7 @@ impl WordDocidsExtractors {
|
||||
fid,
|
||||
pos,
|
||||
word,
|
||||
is_exact(fname, word),
|
||||
is_exact_attribute(fname),
|
||||
inner.docid(),
|
||||
doc_alloc,
|
||||
)
|
||||
|
||||
@@ -9,7 +9,6 @@ pub use document_operation::{DocumentOperation, PayloadStats};
|
||||
use hashbrown::HashMap;
|
||||
use heed::RwTxn;
|
||||
pub use partial_dump::PartialDump;
|
||||
pub use post_processing::recompute_word_fst_from_word_docids_database;
|
||||
pub use update_by_function::UpdateByFunction;
|
||||
pub use write::ChannelCongestion;
|
||||
use write::{build_vectors, update_index, write_to_db};
|
||||
|
||||
@@ -131,20 +131,6 @@ fn compute_word_fst(
|
||||
}
|
||||
}
|
||||
|
||||
pub fn recompute_word_fst_from_word_docids_database(index: &Index, wtxn: &mut RwTxn) -> Result<()> {
|
||||
let fst = fst::Set::default().map_data(std::borrow::Cow::Owned)?;
|
||||
let mut word_fst_builder = WordFstBuilder::new(&fst)?;
|
||||
let words = index.word_docids.iter(wtxn)?.remap_data_type::<DecodeIgnore>();
|
||||
for res in words {
|
||||
let (word, _) = res?;
|
||||
word_fst_builder.register_word(DelAdd::Addition, word.as_ref())?;
|
||||
}
|
||||
let (word_fst_mmap, _) = word_fst_builder.build(index, wtxn)?;
|
||||
index.main.remap_types::<Str, Bytes>().put(wtxn, WORDS_FST_KEY, &word_fst_mmap)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
#[tracing::instrument(level = "trace", skip_all, target = "indexing::facet_search")]
|
||||
fn compute_facet_search_database(
|
||||
index: &Index,
|
||||
|
||||
@@ -13,20 +13,19 @@ use time::OffsetDateTime;
|
||||
|
||||
use super::del_add::{DelAdd, DelAddOperation};
|
||||
use super::index_documents::{IndexDocumentsConfig, Transform};
|
||||
use super::{ChatSettings, IndexerConfig};
|
||||
use super::IndexerConfig;
|
||||
use crate::attribute_patterns::PatternMatch;
|
||||
use crate::constants::RESERVED_GEO_FIELD_NAME;
|
||||
use crate::criterion::Criterion;
|
||||
use crate::disabled_typos_terms::DisabledTyposTerms;
|
||||
use crate::error::UserError;
|
||||
use crate::fields_ids_map::metadata::{FieldIdMapWithMetadata, MetadataBuilder};
|
||||
use crate::filterable_attributes_rules::match_faceted_field;
|
||||
use crate::index::{
|
||||
ChatConfig, IndexEmbeddingConfig, PrefixSearch, DEFAULT_MIN_WORD_LEN_ONE_TYPO,
|
||||
IndexEmbeddingConfig, PrefixSearch, DEFAULT_MIN_WORD_LEN_ONE_TYPO,
|
||||
DEFAULT_MIN_WORD_LEN_TWO_TYPOS,
|
||||
};
|
||||
use crate::order_by_map::OrderByMap;
|
||||
use crate::prompt::{default_max_bytes, PromptData};
|
||||
use crate::prompt::default_max_bytes;
|
||||
use crate::proximity::ProximityPrecision;
|
||||
use crate::update::index_documents::IndexDocumentsMethod;
|
||||
use crate::update::{IndexDocuments, UpdateIndexingStep};
|
||||
@@ -170,7 +169,6 @@ pub struct Settings<'a, 't, 'i> {
|
||||
synonyms: Setting<BTreeMap<String, Vec<String>>>,
|
||||
primary_key: Setting<String>,
|
||||
authorize_typos: Setting<bool>,
|
||||
disable_on_numbers: Setting<bool>,
|
||||
min_word_len_two_typos: Setting<u8>,
|
||||
min_word_len_one_typo: Setting<u8>,
|
||||
exact_words: Setting<BTreeSet<String>>,
|
||||
@@ -185,7 +183,6 @@ pub struct Settings<'a, 't, 'i> {
|
||||
localized_attributes_rules: Setting<Vec<LocalizedAttributesRule>>,
|
||||
prefix_search: Setting<PrefixSearch>,
|
||||
facet_search: Setting<bool>,
|
||||
chat: Setting<ChatSettings>,
|
||||
}
|
||||
|
||||
impl<'a, 't, 'i> Settings<'a, 't, 'i> {
|
||||
@@ -210,7 +207,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
|
||||
synonyms: Setting::NotSet,
|
||||
primary_key: Setting::NotSet,
|
||||
authorize_typos: Setting::NotSet,
|
||||
disable_on_numbers: Setting::NotSet,
|
||||
exact_words: Setting::NotSet,
|
||||
min_word_len_two_typos: Setting::NotSet,
|
||||
min_word_len_one_typo: Setting::NotSet,
|
||||
@@ -224,7 +220,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
|
||||
localized_attributes_rules: Setting::NotSet,
|
||||
prefix_search: Setting::NotSet,
|
||||
facet_search: Setting::NotSet,
|
||||
chat: Setting::NotSet,
|
||||
indexer_config,
|
||||
}
|
||||
}
|
||||
@@ -359,14 +354,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
|
||||
self.min_word_len_one_typo = Setting::Reset;
|
||||
}
|
||||
|
||||
pub fn set_disable_on_numbers(&mut self, disable_on_numbers: bool) {
|
||||
self.disable_on_numbers = Setting::Set(disable_on_numbers);
|
||||
}
|
||||
|
||||
pub fn reset_disable_on_numbers(&mut self) {
|
||||
self.disable_on_numbers = Setting::Reset;
|
||||
}
|
||||
|
||||
pub fn set_exact_words(&mut self, words: BTreeSet<String>) {
|
||||
self.exact_words = Setting::Set(words);
|
||||
}
|
||||
@@ -455,14 +442,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
|
||||
self.facet_search = Setting::Reset;
|
||||
}
|
||||
|
||||
pub fn set_chat(&mut self, value: ChatSettings) {
|
||||
self.chat = Setting::Set(value);
|
||||
}
|
||||
|
||||
pub fn reset_chat(&mut self) {
|
||||
self.chat = Setting::Reset;
|
||||
}
|
||||
|
||||
#[tracing::instrument(
|
||||
level = "trace"
|
||||
skip(self, progress_callback, should_abort, settings_diff),
|
||||
@@ -887,24 +866,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn update_disabled_typos_terms(&mut self) -> Result<()> {
|
||||
let mut disabled_typos_terms = self.index.disabled_typos_terms(self.wtxn)?;
|
||||
match self.disable_on_numbers {
|
||||
Setting::Set(disable_on_numbers) => {
|
||||
disabled_typos_terms.disable_on_numbers = disable_on_numbers;
|
||||
}
|
||||
Setting::Reset => {
|
||||
self.index.delete_disabled_typos_terms(self.wtxn)?;
|
||||
disabled_typos_terms.disable_on_numbers =
|
||||
DisabledTyposTerms::default().disable_on_numbers;
|
||||
}
|
||||
Setting::NotSet => (),
|
||||
}
|
||||
|
||||
self.index.put_disabled_typos_terms(self.wtxn, &disabled_typos_terms)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn update_exact_words(&mut self) -> Result<()> {
|
||||
match self.exact_words {
|
||||
Setting::Set(ref mut words) => {
|
||||
@@ -1249,45 +1210,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn update_chat_config(&mut self) -> heed::Result<bool> {
|
||||
match &mut self.chat {
|
||||
Setting::Set(ChatSettings {
|
||||
description: new_description,
|
||||
document_template: new_document_template,
|
||||
document_template_max_bytes: new_document_template_max_bytes,
|
||||
}) => {
|
||||
let mut old = self.index.chat_config(self.wtxn)?;
|
||||
let ChatConfig {
|
||||
ref mut description,
|
||||
prompt: PromptData { ref mut template, ref mut max_bytes },
|
||||
} = old;
|
||||
|
||||
match new_description {
|
||||
Setting::Set(d) => *description = d.clone(),
|
||||
Setting::Reset => *description = Default::default(),
|
||||
Setting::NotSet => (),
|
||||
}
|
||||
|
||||
match new_document_template {
|
||||
Setting::Set(dt) => *template = dt.clone(),
|
||||
Setting::Reset => *template = Default::default(),
|
||||
Setting::NotSet => (),
|
||||
}
|
||||
|
||||
match new_document_template_max_bytes {
|
||||
Setting::Set(m) => *max_bytes = NonZeroUsize::new(*m),
|
||||
Setting::Reset => *max_bytes = Some(default_max_bytes()),
|
||||
Setting::NotSet => (),
|
||||
}
|
||||
|
||||
self.index.put_chat_config(self.wtxn, &old)?;
|
||||
Ok(true)
|
||||
}
|
||||
Setting::Reset => self.index.delete_chat_config(self.wtxn),
|
||||
Setting::NotSet => Ok(false),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn execute<FP, FA>(mut self, progress_callback: FP, should_abort: FA) -> Result<()>
|
||||
where
|
||||
FP: Fn(UpdateIndexingStep) + Sync,
|
||||
@@ -1324,8 +1246,6 @@ impl<'a, 't, 'i> Settings<'a, 't, 'i> {
|
||||
self.update_prefix_search()?;
|
||||
self.update_facet_search()?;
|
||||
self.update_localized_attributes_rules()?;
|
||||
self.update_disabled_typos_terms()?;
|
||||
self.update_chat_config()?;
|
||||
|
||||
let embedding_config_updates = self.update_embedding_configs()?;
|
||||
|
||||
@@ -1407,7 +1327,6 @@ impl InnerIndexSettingsDiff {
|
||||
|| old_settings.prefix_search != new_settings.prefix_search
|
||||
|| old_settings.localized_attributes_rules
|
||||
!= new_settings.localized_attributes_rules
|
||||
|| old_settings.disabled_typos_terms != new_settings.disabled_typos_terms
|
||||
};
|
||||
|
||||
let cache_exact_attributes = old_settings.exact_attributes != new_settings.exact_attributes;
|
||||
@@ -1607,7 +1526,6 @@ pub(crate) struct InnerIndexSettings {
|
||||
pub user_defined_searchable_attributes: Option<Vec<String>>,
|
||||
pub sortable_fields: HashSet<String>,
|
||||
pub exact_attributes: HashSet<FieldId>,
|
||||
pub disabled_typos_terms: DisabledTyposTerms,
|
||||
pub proximity_precision: ProximityPrecision,
|
||||
pub embedding_configs: EmbeddingConfigs,
|
||||
pub geo_fields_ids: Option<(FieldId, FieldId)>,
|
||||
@@ -1656,7 +1574,7 @@ impl InnerIndexSettings {
|
||||
.map(|fields| fields.into_iter().map(|f| f.to_string()).collect());
|
||||
let builder = MetadataBuilder::from_index(index, rtxn)?;
|
||||
let fields_ids_map = FieldIdMapWithMetadata::new(fields_ids_map, builder);
|
||||
let disabled_typos_terms = index.disabled_typos_terms(rtxn)?;
|
||||
|
||||
Ok(Self {
|
||||
stop_words,
|
||||
allowed_separators,
|
||||
@@ -1674,7 +1592,6 @@ impl InnerIndexSettings {
|
||||
geo_fields_ids,
|
||||
prefix_search,
|
||||
facet_search,
|
||||
disabled_typos_terms,
|
||||
})
|
||||
}
|
||||
|
||||
|
||||
@@ -896,7 +896,6 @@ fn test_correct_settings_init() {
|
||||
localized_attributes_rules,
|
||||
prefix_search,
|
||||
facet_search,
|
||||
disable_on_numbers,
|
||||
} = settings;
|
||||
assert!(matches!(searchable_fields, Setting::NotSet));
|
||||
assert!(matches!(displayed_fields, Setting::NotSet));
|
||||
@@ -924,7 +923,6 @@ fn test_correct_settings_init() {
|
||||
assert!(matches!(localized_attributes_rules, Setting::NotSet));
|
||||
assert!(matches!(prefix_search, Setting::NotSet));
|
||||
assert!(matches!(facet_search, Setting::NotSet));
|
||||
assert!(matches!(disable_on_numbers, Setting::NotSet));
|
||||
})
|
||||
.unwrap();
|
||||
}
|
||||
|
||||
@@ -1,14 +1,12 @@
mod v1_12;
mod v1_13;
mod v1_14;
mod v1_15;

use heed::RwTxn;
use v1_12::{V1_12_3_To_V1_13_0, V1_12_To_V1_12_3};
use v1_13::{V1_13_0_To_V1_13_1, V1_13_1_To_Latest_V1_13};
use v1_14::Latest_V1_13_To_Latest_V1_14;
use v1_15::Latest_V1_14_To_Latest_V1_15;

use crate::constants::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
use crate::progress::{Progress, VariableNameStep};
use crate::{Index, InternalError, Result};

@@ -25,16 +23,12 @@ trait UpgradeIndex {
}

/// Return true if the cached stats of the index must be regenerated
pub fn upgrade<MSP>(
pub fn upgrade(
    wtxn: &mut RwTxn,
    index: &Index,
    db_version: (u32, u32, u32),
    must_stop_processing: MSP,
    progress: Progress,
) -> Result<bool>
where
    MSP: Fn() -> bool + Sync,
{
) -> Result<bool> {
    let from = index.get_version(wtxn)?.unwrap_or(db_version);
    let upgrade_functions: &[&dyn UpgradeIndex] = &[
        &V1_12_To_V1_12_3 {},

@@ -42,10 +36,6 @@ where
        &V1_13_0_To_V1_13_1 {},
        &V1_13_1_To_Latest_V1_13 {},
        &Latest_V1_13_To_Latest_V1_14 {},
        &Latest_V1_14_To_Latest_V1_15 {},
        // This is the last upgrade function, it will be called when the index is up to date.
        // any other upgrade function should be added before this one.
        &ToCurrentNoOp {},
    ];

    let start = match from {

@@ -53,9 +43,8 @@ where
        (1, 12, 3..) => 1,
        (1, 13, 0) => 2,
        (1, 13, _) => 4,
        (1, 14, _) => 5,
        // We must handle the current version in the match because in case of a failure some index may have been upgraded but not other.
        (1, 15, _) => 6,
        (1, 14, _) => 4,
        (major, minor, patch) => {
            return Err(InternalError::CannotUpgradeToVersion(major, minor, patch).into())
        }

@@ -67,9 +56,6 @@ where
    let mut current_version = from;
    let mut regenerate_stats = false;
    for (i, upgrade) in upgrade_path.iter().enumerate() {
        if (must_stop_processing)() {
            return Err(crate::Error::InternalError(InternalError::AbortedIndexation));
        }
        let target = upgrade.target_version();
        progress.update_progress(VariableNameStep::<UpgradeVersion>::new(
            format!(

@@ -91,22 +77,3 @@ where

    Ok(regenerate_stats)
}

#[allow(non_camel_case_types)]
struct ToCurrentNoOp {}

impl UpgradeIndex for ToCurrentNoOp {
    fn upgrade(
        &self,
        _wtxn: &mut RwTxn,
        _index: &Index,
        _original: (u32, u32, u32),
        _progress: Progress,
    ) -> Result<bool> {
        Ok(false)
    }

    fn target_version(&self) -> (u32, u32, u32) {
        (VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH)
    }
}
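The hunks above change how the index upgrade chain is driven: an ordered slice of `UpgradeIndex` steps, a `match` on the stored `(major, minor, patch)` that picks where to start in that slice, and, on one side of the diff, a `must_stop_processing` hook. A minimal sketch of that dispatch pattern with stand-in types (`Step`, `upgrade_all`, and the version tuples are illustrative, not milli's API):

// Stand-in sketch of the version-dispatch upgrade chain shown above.
trait Step {
    /// Runs one migration; returns true if cached stats must be regenerated.
    fn run(&self) -> bool;
    /// Version the index is at once this step has run.
    fn target_version(&self) -> (u32, u32, u32);
}

#[allow(non_camel_case_types)]
struct V1_13_To_V1_14;
impl Step for V1_13_To_V1_14 {
    fn run(&self) -> bool { false }
    fn target_version(&self) -> (u32, u32, u32) { (1, 14, 0) }
}

#[allow(non_camel_case_types)]
struct ToCurrentNoOp;
impl Step for ToCurrentNoOp {
    fn run(&self) -> bool { false }
    fn target_version(&self) -> (u32, u32, u32) { (1, 15, 0) }
}

fn upgrade_all(from: (u32, u32, u32)) -> Result<bool, String> {
    let steps: &[&dyn Step] = &[&V1_13_To_V1_14, &ToCurrentNoOp];

    // Pick where to start in the chain based on the recorded version,
    // mirroring the `match from { ... }` in the diff.
    let start = match from {
        (1, 13, _) => 0,
        (1, 14, _) => 1,
        (major, minor, patch) => {
            return Err(format!("cannot upgrade from {major}.{minor}.{patch}"))
        }
    };

    let mut regenerate_stats = false;
    for step in &steps[start..] {
        regenerate_stats |= step.run();
        let _reached = step.target_version(); // the real code reports this via Progress
    }
    Ok(regenerate_stats)
}

fn main() {
    assert_eq!(upgrade_all((1, 13, 3)), Ok(false));
    assert!(upgrade_all((0, 9, 0)).is_err());
}

The design choice visible in the diff is the same one the sketch makes: every step knows its own target version, and only the entry point decides how far back in the chain to start, so adding a migration means appending one element before the final no-op step.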
@@ -1,6 +1,7 @@
use heed::RwTxn;

use super::UpgradeIndex;
use crate::constants::{VERSION_MAJOR, VERSION_MINOR, VERSION_PATCH};
use crate::database_stats::DatabaseStats;
use crate::progress::Progress;
use crate::{make_enum_progress, Index, Result};

@@ -50,6 +51,10 @@ impl UpgradeIndex for V1_13_1_To_Latest_V1_13 {
    }

    fn target_version(&self) -> (u32, u32, u32) {
        (1, 13, 3)
        (
            VERSION_MAJOR.parse().unwrap(),
            VERSION_MINOR.parse().unwrap(),
            VERSION_PATCH.parse().unwrap(),
        )
    }
}
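One side of this hunk hard-codes `(1, 13, 3)` while the other builds the tuple by parsing the version constants, which implies those constants are strings. A tiny sketch of that idea, with hypothetical constant values standing in for the crate's real `VERSION_MAJOR`/`VERSION_MINOR`/`VERSION_PATCH`:

// Hypothetical string constants standing in for the crate's version constants.
const VERSION_MAJOR: &str = "1";
const VERSION_MINOR: &str = "15";
const VERSION_PATCH: &str = "0";

fn current_version() -> (u32, u32, u32) {
    // Parsing panics if the constants are ever malformed, the same
    // trade-off as the `.parse().unwrap()` calls in the hunk above.
    (
        VERSION_MAJOR.parse().unwrap(),
        VERSION_MINOR.parse().unwrap(),
        VERSION_PATCH.parse().unwrap(),
    )
}

fn main() {
    assert_eq!(current_version(), (1, 15, 0));
}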
@@ -1,35 +0,0 @@
use heed::RwTxn;

use super::UpgradeIndex;
use crate::progress::Progress;
use crate::update::new::indexer::recompute_word_fst_from_word_docids_database;
use crate::{make_enum_progress, Index, Result};

#[allow(non_camel_case_types)]
pub(super) struct Latest_V1_14_To_Latest_V1_15();

impl UpgradeIndex for Latest_V1_14_To_Latest_V1_15 {
    fn upgrade(
        &self,
        wtxn: &mut RwTxn,
        index: &Index,
        _original: (u32, u32, u32),
        progress: Progress,
    ) -> Result<bool> {
        // Recompute the word FST from the word docids database.
        make_enum_progress! {
            enum TypoTolerance {
                RecomputeWordFst,
            }
        };

        progress.update_progress(TypoTolerance::RecomputeWordFst);
        recompute_word_fst_from_word_docids_database(index, wtxn)?;

        Ok(false)
    }

    fn target_version(&self) -> (u32, u32, u32) {
        (1, 15, 0)
    }
}
@@ -33,7 +33,6 @@ pub struct EmbeddingSettings {
    ///
    /// - Defaults to `openAi`
    pub source: Setting<EmbedderSource>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]

@@ -56,7 +55,6 @@ pub struct EmbeddingSettings {
    /// - For source `openAi`, defaults to `text-embedding-3-small`
    /// - For source `huggingFace`, defaults to `BAAI/bge-base-en-v1.5`
    pub model: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]

@@ -77,7 +75,6 @@ pub struct EmbeddingSettings {
    /// - When `model` is set to default, defaults to `617ca489d9e86b49b8167676d8220688b99db36e`
    /// - Otherwise, defaults to `null`
    pub revision: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<OverridePooling>)]

@@ -99,7 +96,6 @@ pub struct EmbeddingSettings {
    ///
    /// - Embedders created before this parameter was available default to `forceMean` to preserve the existing behavior.
    pub pooling: Setting<OverridePooling>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]

@@ -122,7 +118,6 @@ pub struct EmbeddingSettings {
    ///
    /// - This setting is partially hidden when returned by the settings
    pub api_key: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]

@@ -146,7 +141,6 @@ pub struct EmbeddingSettings {
    /// - For source `openAi`, the dimensions is the maximum allowed by the model.
    /// - For sources `ollama` and `rest`, the dimensions are inferred by embedding a sample text.
    pub dimensions: Setting<usize>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<bool>)]

@@ -173,7 +167,6 @@ pub struct EmbeddingSettings {
    /// first enabling it. If you are unsure of whether the performance-relevancy tradeoff is right for you,
    /// we recommend to use this parameter on a test index first.
    pub binary_quantized: Setting<bool>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<bool>)]

@@ -190,7 +183,6 @@ pub struct EmbeddingSettings {
    ///
    /// - 🏗️ When modified, embeddings are regenerated for documents whose rendering through the template produces a different text.
    pub document_template: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<usize>)]

@@ -209,7 +201,6 @@ pub struct EmbeddingSettings {
    ///
    /// - Defaults to 400
    pub document_template_max_bytes: Setting<usize>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<String>)]

@@ -228,7 +219,6 @@ pub struct EmbeddingSettings {
    /// - 🌱 When modified for source `openAi`, embeddings are never regenerated
    /// - 🏗️ When modified for sources `ollama` and `rest`, embeddings are always regenerated
    pub url: Setting<String>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<serde_json::Value>)]

@@ -246,7 +236,6 @@ pub struct EmbeddingSettings {
    ///
    /// - 🏗️ Changing the value of this parameter always regenerates embeddings
    pub request: Setting<serde_json::Value>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<serde_json::Value>)]

@@ -264,7 +253,6 @@ pub struct EmbeddingSettings {
    ///
    /// - 🏗️ Changing the value of this parameter always regenerates embeddings
    pub response: Setting<serde_json::Value>,

    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
    #[deserr(default)]
    #[schema(value_type = Option<BTreeMap<String, String>>)]
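The doc comments above describe the user-facing embedder parameters (source, model, revision, pooling, API key, dimensions, document template, URL, request/response templates). Purely as an illustration, here is a sketch of a settings payload touching a few of them, built with `serde_json`; the camelCase field names and the sample values are assumptions drawn from the doc comments, not part of this diff:

use serde_json::json;

fn main() {
    // Illustrative only: an embedder configuration sketch based on the
    // parameters documented above. Field names are assumed to be the
    // camelCase forms exposed by the settings API; values are examples.
    let embedder = json!({
        "source": "openAi",                 // documented default source
        "model": "text-embedding-3-small",  // documented default for `openAi`
        "documentTemplate": "{{ doc.title }}",
        "documentTemplateMaxBytes": 400     // documented default
    });

    println!("{}", serde_json::to_string_pretty(&embedder).unwrap());
}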