mirror of https://github.com/meilisearch/meilisearch.git
synced 2025-07-27 00:31:02 +00:00

Merge branch 'main' into request-fragments-test
.github/workflows/publish-apt-brew-pkg.yml (2 changes)

@@ -32,7 +32,7 @@ jobs:
       - name: Build deb package
         run: cargo deb -p meilisearch -o target/debian/meilisearch.deb
       - name: Upload debian pkg to release
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.11.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/debian/meilisearch.deb
.github/workflows/publish-binaries.yml (8 changes)

@@ -51,7 +51,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.11.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/meilisearch
@@ -81,7 +81,7 @@ jobs:
       # No need to upload binaries for dry run (cron)
       - name: Upload binaries to release
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.11.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/release/${{ matrix.artifact_name }}
@@ -113,7 +113,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.11.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
@@ -178,7 +178,7 @@ jobs:
       - name: Upload the binary to release
         # No need to upload binaries for dry run (cron)
         if: github.event_name == 'release'
-        uses: svenstaro/upload-release-action@2.7.0
+        uses: svenstaro/upload-release-action@2.11.1
         with:
           repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           file: target/${{ matrix.target }}/release/meilisearch
.github/workflows/test-suite.yml (10 changes)

@@ -29,7 +29,7 @@ jobs:
       - name: Setup test with Rust stable
         uses: dtolnay/rust-toolchain@1.85
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.7.8
+        uses: Swatinem/rust-cache@v2.8.0
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
         with:
@@ -51,7 +51,7 @@ jobs:
     steps:
       - uses: actions/checkout@v3
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.7.8
+        uses: Swatinem/rust-cache@v2.8.0
       - uses: dtolnay/rust-toolchain@1.85
       - name: Run cargo check without any default features
         uses: actions-rs/cargo@v1
@@ -155,7 +155,7 @@ jobs:
         apt-get install build-essential -y
       - uses: dtolnay/rust-toolchain@1.85
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.7.8
+        uses: Swatinem/rust-cache@v2.8.0
       - name: Run tests in debug
         uses: actions-rs/cargo@v1
         with:
@@ -172,7 +172,7 @@ jobs:
           profile: minimal
           components: clippy
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.7.8
+        uses: Swatinem/rust-cache@v2.8.0
       - name: Run cargo clippy
         uses: actions-rs/cargo@v1
         with:
@@ -191,7 +191,7 @@ jobs:
           override: true
           components: rustfmt
       - name: Cache dependencies
-        uses: Swatinem/rust-cache@v2.7.8
+        uses: Swatinem/rust-cache@v2.8.0
       - name: Run cargo fmt
         # Since we never ran the `build.rs` script in the benchmark directory we are missing one auto-generated import file.
         # Since we want to trigger (and fail) this action as fast as possible, instead of building the benchmark crate
Cargo.lock (34 changes)

@@ -580,7 +580,7 @@ source = "git+https://github.com/meilisearch/bbqueue#cbb87cc707b5af415ef203bdaf2
 
 [[package]]
 name = "benchmarks"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "anyhow",
  "bumpalo",
@@ -770,7 +770,7 @@ dependencies = [
 
 [[package]]
 name = "build-info"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "anyhow",
  "time",
@@ -1774,7 +1774,7 @@ dependencies = [
 
 [[package]]
 name = "dump"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "anyhow",
  "big_s",
@@ -2006,7 +2006,7 @@ checksum = "37909eebbb50d72f9059c3b6d82c0463f2ff062c9e95845c43a6c9c0355411be"
 
 [[package]]
 name = "file-store"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "tempfile",
  "thiserror 2.0.12",
@@ -2028,7 +2028,7 @@ dependencies = [
 
 [[package]]
 name = "filter-parser"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "insta",
  "nom",
@@ -2049,7 +2049,7 @@ dependencies = [
 
 [[package]]
 name = "flatten-serde-json"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "criterion",
  "serde_json",
@@ -2194,7 +2194,7 @@ dependencies = [
 
 [[package]]
 name = "fuzzers"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "arbitrary",
  "bumpalo",
@@ -2994,7 +2994,7 @@ dependencies = [
 
 [[package]]
 name = "index-scheduler"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "anyhow",
  "backoff",
@@ -3230,7 +3230,7 @@ dependencies = [
 
 [[package]]
 name = "json-depth-checker"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "criterion",
  "serde_json",
@@ -3724,7 +3724,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
 
 [[package]]
 name = "meili-snap"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "insta",
  "md5",
@@ -3735,7 +3735,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "actix-cors",
  "actix-http",
@@ -3830,7 +3830,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch-auth"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "base64 0.22.1",
  "enum-iterator",
@@ -3849,7 +3849,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch-types"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -3884,7 +3884,7 @@ dependencies = [
 
 [[package]]
 name = "meilitool"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "anyhow",
  "clap",
@@ -3918,7 +3918,7 @@ dependencies = [
 
 [[package]]
 name = "milli"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "allocator-api2 0.3.0",
  "arroy",
@@ -4470,7 +4470,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
 
 [[package]]
 name = "permissive-json-pointer"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "big_s",
  "serde_json",
@@ -7258,7 +7258,7 @@ dependencies = [
 
 [[package]]
 name = "xtask"
-version = "1.15.2"
+version = "1.16.0"
 dependencies = [
  "anyhow",
  "build-info",
@@ -22,7 +22,7 @@ members = [
 ]
 
 [workspace.package]
-version = "1.15.2"
+version = "1.16.0"
 authors = [
   "Quentin de Quelen <quentin@dequelen.me>",
   "Clément Renault <clement@meilisearch.com>",
@@ -116,6 +116,15 @@ impl DumpReader {
         }
     }
 
+    pub fn chat_completions_settings(
+        &mut self,
+    ) -> Result<Box<dyn Iterator<Item = Result<(String, v6::ChatCompletionSettings)>> + '_>> {
+        match self {
+            DumpReader::Current(current) => current.chat_completions_settings(),
+            DumpReader::Compat(_compat) => Ok(Box::new(std::iter::empty())),
+        }
+    }
+
     pub fn features(&self) -> Result<Option<v6::RuntimeTogglableFeatures>> {
         match self {
             DumpReader::Current(current) => Ok(current.features()),
@@ -1,3 +1,4 @@
+use std::ffi::OsStr;
 use std::fs::{self, File};
 use std::io::{BufRead, BufReader, ErrorKind};
 use std::path::Path;
@@ -21,6 +22,7 @@ pub type Unchecked = meilisearch_types::settings::Unchecked;
 pub type Task = crate::TaskDump;
 pub type Batch = meilisearch_types::batches::Batch;
 pub type Key = meilisearch_types::keys::Key;
+pub type ChatCompletionSettings = meilisearch_types::features::ChatCompletionSettings;
 pub type RuntimeTogglableFeatures = meilisearch_types::features::RuntimeTogglableFeatures;
 pub type Network = meilisearch_types::features::Network;
 
|
|||||||
)
|
)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
pub fn chat_completions_settings(
|
||||||
|
&mut self,
|
||||||
|
) -> Result<Box<dyn Iterator<Item = Result<(String, ChatCompletionSettings)>> + '_>> {
|
||||||
|
let entries = match fs::read_dir(self.dump.path().join("chat-completions-settings")) {
|
||||||
|
Ok(entries) => entries,
|
||||||
|
Err(e) if e.kind() == ErrorKind::NotFound => return Ok(Box::new(std::iter::empty())),
|
||||||
|
Err(e) => return Err(e.into()),
|
||||||
|
};
|
||||||
|
Ok(Box::new(
|
||||||
|
entries
|
||||||
|
.map(|entry| -> Result<Option<_>> {
|
||||||
|
let entry = entry?;
|
||||||
|
let file_name = entry.file_name();
|
||||||
|
let path = Path::new(&file_name);
|
||||||
|
if entry.file_type()?.is_file() && path.extension() == Some(OsStr::new("json"))
|
||||||
|
{
|
||||||
|
let name = path.file_stem().unwrap().to_str().unwrap().to_string();
|
||||||
|
let file = File::open(entry.path())?;
|
||||||
|
let settings = serde_json::from_reader(file)?;
|
||||||
|
Ok(Some((name, settings)))
|
||||||
|
} else {
|
||||||
|
Ok(None)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.filter_map(|entry| entry.transpose()),
|
||||||
|
))
|
||||||
|
}
|
||||||
|
|
||||||
pub fn features(&self) -> Option<RuntimeTogglableFeatures> {
|
pub fn features(&self) -> Option<RuntimeTogglableFeatures> {
|
||||||
self.features
|
self.features
|
||||||
}
|
}
|
||||||
|
@@ -5,7 +5,7 @@ use std::path::PathBuf;
 use flate2::write::GzEncoder;
 use flate2::Compression;
 use meilisearch_types::batches::Batch;
-use meilisearch_types::features::{Network, RuntimeTogglableFeatures};
+use meilisearch_types::features::{ChatCompletionSettings, Network, RuntimeTogglableFeatures};
 use meilisearch_types::keys::Key;
 use meilisearch_types::settings::{Checked, Settings};
 use serde_json::{Map, Value};
@@ -51,6 +51,10 @@ impl DumpWriter {
         KeyWriter::new(self.dir.path().to_path_buf())
     }
 
+    pub fn create_chat_completions_settings(&self) -> Result<ChatCompletionsSettingsWriter> {
+        ChatCompletionsSettingsWriter::new(self.dir.path().join("chat-completions-settings"))
+    }
+
     pub fn create_tasks_queue(&self) -> Result<TaskWriter> {
         TaskWriter::new(self.dir.path().join("tasks"))
     }
@@ -104,6 +108,24 @@ impl KeyWriter {
     }
 }
 
+pub struct ChatCompletionsSettingsWriter {
+    path: PathBuf,
+}
+
+impl ChatCompletionsSettingsWriter {
+    pub(crate) fn new(path: PathBuf) -> Result<Self> {
+        std::fs::create_dir(&path)?;
+        Ok(ChatCompletionsSettingsWriter { path })
+    }
+
+    pub fn push_settings(&mut self, name: &str, settings: &ChatCompletionSettings) -> Result<()> {
+        let mut settings_file = File::create(self.path.join(name).with_extension("json"))?;
+        serde_json::to_writer(&mut settings_file, &settings)?;
+        settings_file.flush()?;
+        Ok(())
+    }
+}
+
 pub struct TaskWriter {
     queue: BufWriter<File>,
     update_files: PathBuf,
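The two additions above form a pair: `ChatCompletionsSettingsWriter` serializes one `<name>.json` file per chat workspace into a `chat-completions-settings/` directory of the dump, and `V6Reader::chat_completions_settings` later walks that directory and deserializes each file back. Below is a minimal, self-contained sketch of the same round trip; the `WorkspaceSettings` struct, the helper names, and the temp-dir path are illustrative stand-ins, not Meilisearch types.

use std::ffi::OsStr;
use std::fs::{self, File};
use std::io::Write;
use std::path::Path;

use serde::{Deserialize, Serialize};

// Stand-in for `ChatCompletionSettings`; the real type lives in meilisearch-types.
#[derive(Debug, Serialize, Deserialize)]
struct WorkspaceSettings {
    model: String,
}

// Write one `<name>.json` per workspace, like `ChatCompletionsSettingsWriter::push_settings`.
fn write_settings(dir: &Path, name: &str, settings: &WorkspaceSettings) -> std::io::Result<()> {
    fs::create_dir_all(dir)?;
    let mut file = File::create(dir.join(name).with_extension("json"))?;
    serde_json::to_writer(&mut file, settings)?;
    file.flush()
}

// Read every `<name>.json` back, like `V6Reader::chat_completions_settings`.
fn read_settings(dir: &Path) -> std::io::Result<Vec<(String, WorkspaceSettings)>> {
    let mut out = Vec::new();
    for entry in fs::read_dir(dir)? {
        let entry = entry?;
        let path = entry.path();
        if entry.file_type()?.is_file() && path.extension() == Some(OsStr::new("json")) {
            let name = path.file_stem().unwrap().to_string_lossy().into_owned();
            let settings = serde_json::from_reader(File::open(&path)?)?;
            out.push((name, settings));
        }
    }
    Ok(out)
}

fn main() -> std::io::Result<()> {
    // Hypothetical demo directory; the dump uses `chat-completions-settings/` inside its own folder.
    let dir = std::env::temp_dir().join("chat-completions-settings-demo");
    write_settings(&dir, "default", &WorkspaceSettings { model: "example-model".into() })?;
    for (name, settings) in read_settings(&dir)? {
        println!("{name}: {settings:?}");
    }
    Ok(())
}

Storing each workspace as its own JSON file keeps the dump layout simple, and a missing directory reads naturally as an empty iterator, which is exactly how the reader handles dumps created before this feature existed.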
@@ -103,6 +103,7 @@ make_enum_progress! {
     pub enum DumpCreationProgress {
         StartTheDumpCreation,
         DumpTheApiKeys,
+        DumpTheChatCompletionSettings,
         DumpTheTasks,
         DumpTheBatches,
         DumpTheIndexes,
@@ -43,7 +43,16 @@ impl IndexScheduler {
 
         let rtxn = self.env.read_txn()?;
 
-        // 2. dump the tasks
+        // 2. dump the chat completion settings
+        // TODO should I skip the export if the chat completion has been disabled?
+        progress.update_progress(DumpCreationProgress::DumpTheChatCompletionSettings);
+        let mut dump_chat_completion_settings = dump.create_chat_completions_settings()?;
+        for result in self.chat_settings.iter(&rtxn)? {
+            let (name, chat_settings) = result?;
+            dump_chat_completion_settings.push_settings(name, &chat_settings)?;
+        }
+
+        // 3. dump the tasks
         progress.update_progress(DumpCreationProgress::DumpTheTasks);
         let mut dump_tasks = dump.create_tasks_queue()?;
 
@@ -81,7 +90,7 @@ impl IndexScheduler {
 
             let mut dump_content_file = dump_tasks.push_task(&t.into())?;
 
-            // 2.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
+            // 3.1. Dump the `content_file` associated with the task if there is one and the task is not finished yet.
             if let Some(content_file) = content_file {
                 if self.scheduler.must_stop_processing.get() {
                     return Err(Error::AbortedTask);
@@ -105,7 +114,7 @@ impl IndexScheduler {
         }
         dump_tasks.flush()?;
 
-        // 3. dump the batches
+        // 4. dump the batches
         progress.update_progress(DumpCreationProgress::DumpTheBatches);
         let mut dump_batches = dump.create_batches_queue()?;
 
@@ -138,7 +147,7 @@ impl IndexScheduler {
         }
         dump_batches.flush()?;
 
-        // 4. Dump the indexes
+        // 5. Dump the indexes
         progress.update_progress(DumpCreationProgress::DumpTheIndexes);
         let nb_indexes = self.index_mapper.index_mapping.len(&rtxn)? as u32;
         let mut count = 0;
@@ -175,7 +184,7 @@ impl IndexScheduler {
             let documents = index
                 .all_documents(&rtxn)
                 .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
-            // 4.1. Dump the documents
+            // 5.1. Dump the documents
             for ret in documents {
                 if self.scheduler.must_stop_processing.get() {
                     return Err(Error::AbortedTask);
@@ -233,7 +242,7 @@ impl IndexScheduler {
                 atomic.fetch_add(1, Ordering::Relaxed);
             }
 
-            // 4.2. Dump the settings
+            // 5.2. Dump the settings
             let settings = meilisearch_types::settings::settings(
                 index,
                 &rtxn,
@@ -244,7 +253,7 @@ impl IndexScheduler {
             Ok(())
         })?;
 
-        // 5. Dump experimental feature settings
+        // 6. Dump experimental feature settings
         progress.update_progress(DumpCreationProgress::DumpTheExperimentalFeatures);
         let features = self.features().runtime_features();
         dump.create_experimental_features(features)?;
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, batch_uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggo` already exists.", error_code: "index_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_already_exists" }, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -57,7 +57,7 @@ girafo: { number_of_documents: 0, field_distribution: {} }
 [timestamp] [4,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 1 {uid: 1, details: {"primaryKey":"mouse"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"catto":1}}, stop reason: "created batch containing only task with id 1 of type `indexCreation` that cannot be batched with any other task.", }
 2 {uid: 2, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 2 of type `indexCreation` that cannot be batched with any other task.", }
 3 {uid: 3, details: {"primaryKey":"bone"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"indexCreation":1},"indexUids":{"doggo":1}}, stop reason: "created batch containing only task with id 3 of type `indexCreation` that cannot be batched with any other task.", }
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, status: enqueued, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 ----------------------------------------------------------------------
 ### Status:
@@ -37,7 +37,7 @@ catto [1,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: failed, error: ResponseError { code: 200, message: "Planned failure for tests.", error_code: "internal", error_type: "internal", error_link: "https://docs.meilisearch.com/errors#internal" }, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 ----------------------------------------------------------------------
@@ -40,7 +40,7 @@ doggo [2,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"failed":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@@ -6,7 +6,7 @@ source: crates/index-scheduler/src/scheduler/test_failure.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 15, 2) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
+0 {uid: 0, batch_uid: 0, status: succeeded, details: { from: (1, 12, 0), to: (1, 16, 0) }, kind: UpgradeDatabase { from: (1, 12, 0) }}
 1 {uid: 1, status: enqueued, details: { primary_key: Some("mouse") }, kind: IndexCreation { index_uid: "catto", primary_key: Some("mouse") }}
 2 {uid: 2, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
 3 {uid: 3, status: enqueued, details: { primary_key: Some("bone") }, kind: IndexCreation { index_uid: "doggo", primary_key: Some("bone") }}
@@ -43,7 +43,7 @@ doggo [2,3,]
 [timestamp] [0,]
 ----------------------------------------------------------------------
 ### All Batches:
-0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.15.2"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
+0 {uid: 0, details: {"upgradeFrom":"v1.12.0","upgradeTo":"v1.16.0"}, stats: {"totalNbTasks":1,"status":{"succeeded":1},"types":{"upgradeDatabase":1},"indexUids":{}}, stop reason: "stopped after the last task of type `upgradeDatabase` because they cannot be batched with tasks of any other type.", }
 ----------------------------------------------------------------------
 ### Batch to tasks mapping:
 0 [0,]
@ -169,5 +169,5 @@ german = ["meilisearch-types/german"]
|
|||||||
turkish = ["meilisearch-types/turkish"]
|
turkish = ["meilisearch-types/turkish"]
|
||||||
|
|
||||||
[package.metadata.mini-dashboard]
|
[package.metadata.mini-dashboard]
|
||||||
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.19/build.zip"
|
assets-url = "https://github.com/meilisearch/mini-dashboard/releases/download/v0.2.20/build.zip"
|
||||||
sha1 = "7974430d5277c97f67cf6e95eec6faaac2788834"
|
sha1 = "82a7ddd7bf14bb5323c3d235d2b62892a98b6a59"
|
||||||
|
@@ -498,14 +498,20 @@ fn import_dump(
         keys.push(key);
     }
 
-    // 3. Import the runtime features and network
+    // 3. Import the `ChatCompletionSettings`s.
+    for result in dump_reader.chat_completions_settings()? {
+        let (name, settings) = result?;
+        index_scheduler.put_chat_settings(&name, &settings)?;
+    }
+
+    // 4. Import the runtime features and network
     let features = dump_reader.features()?.unwrap_or_default();
     index_scheduler.put_runtime_features(features)?;
 
     let network = dump_reader.network()?.cloned().unwrap_or_default();
     index_scheduler.put_network(network)?;
 
-    // 3.1 Use all cpus to process dump if `max_indexing_threads` not configured
+    // 4.1 Use all cpus to process dump if `max_indexing_threads` not configured
     let backup_config;
     let base_config = index_scheduler.indexer_config();
 
@@ -522,7 +528,7 @@ fn import_dump(
     // /!\ The tasks must be imported AFTER importing the indexes or else the scheduler might
     // try to process tasks while we're trying to import the indexes.
 
-    // 4. Import the indexes.
+    // 5. Import the indexes.
     for index_reader in dump_reader.indexes()? {
         let mut index_reader = index_reader?;
         let metadata = index_reader.metadata();
@@ -535,20 +541,20 @@ fn import_dump(
         let mut wtxn = index.write_txn()?;
 
         let mut builder = milli::update::Settings::new(&mut wtxn, &index, indexer_config);
-        // 4.1 Import the primary key if there is one.
+        // 5.1 Import the primary key if there is one.
         if let Some(ref primary_key) = metadata.primary_key {
             builder.set_primary_key(primary_key.to_string());
         }
 
-        // 4.2 Import the settings.
+        // 5.2 Import the settings.
         tracing::info!("Importing the settings.");
         let settings = index_reader.settings()?;
         apply_settings_to_builder(&settings, &mut builder);
         let embedder_stats: Arc<EmbedderStats> = Default::default();
         builder.execute(&|| false, &progress, embedder_stats.clone())?;
 
-        // 4.3 Import the documents.
-        // 4.3.1 We need to recreate the grenad+obkv format accepted by the index.
+        // 5.3 Import the documents.
+        // 5.3.1 We need to recreate the grenad+obkv format accepted by the index.
         tracing::info!("Importing the documents.");
         let file = tempfile::tempfile()?;
         let mut builder = DocumentsBatchBuilder::new(BufWriter::new(file));
@@ -559,7 +565,7 @@ fn import_dump(
         // This flush the content of the batch builder.
         let file = builder.into_inner()?.into_inner()?;
 
-        // 4.3.2 We feed it to the milli index.
+        // 5.3.2 We feed it to the milli index.
         let reader = BufReader::new(file);
         let reader = DocumentsBatchReader::from_reader(reader)?;
 
@@ -591,15 +597,15 @@ fn import_dump(
         index_scheduler.refresh_index_stats(&uid)?;
     }
 
-    // 5. Import the queue
+    // 6. Import the queue
     let mut index_scheduler_dump = index_scheduler.register_dumped_task()?;
-    // 5.1. Import the batches
+    // 6.1. Import the batches
    for ret in dump_reader.batches()? {
        let batch = ret?;
        index_scheduler_dump.register_dumped_batch(batch)?;
    }
 
-    // 5.2. Import the tasks
+    // 6.2. Import the tasks
    for ret in dump_reader.tasks()? {
        let (task, file) = ret?;
        index_scheduler_dump.register_dumped_task(task, file)?;
@@ -15,6 +15,33 @@ lazy_static! {
         "Meilisearch number of degraded search requests"
     ))
     .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_SEARCH_REQUESTS: IntCounterVec = register_int_counter_vec!(
+        opts!(
+            "meilisearch_chat_search_requests",
+            "Meilisearch number of search requests performed by the chat route itself"
+        ),
+        &["type"]
+    )
+    .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
+        opts!("meilisearch_chat_prompt_tokens_usage", "Meilisearch Chat Prompt Tokens Usage"),
+        &["workspace", "model"]
+    )
+    .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE: IntCounterVec =
+        register_int_counter_vec!(
+            opts!(
+                "meilisearch_chat_completion_tokens_usage",
+                "Meilisearch Chat Completion Tokens Usage"
+            ),
+            &["workspace", "model"]
+        )
+        .expect("Can't create a metric");
+    pub static ref MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE: IntCounterVec = register_int_counter_vec!(
+        opts!("meilisearch_chat_total_tokens_usage", "Meilisearch Chat Total Tokens Usage"),
+        &["workspace", "model"]
+    )
+    .expect("Can't create a metric");
     pub static ref MEILISEARCH_DB_SIZE_BYTES: IntGauge =
         register_int_gauge!(opts!("meilisearch_db_size_bytes", "Meilisearch DB Size In Bytes"))
         .expect("Can't create a metric");
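The four counters added above follow the usual `prometheus` crate pattern: register an `IntCounterVec` once with a fixed label set, then bump one labelled child per observation with `with_label_values`. Here is a minimal standalone sketch of that pattern, assuming only the `prometheus` crate; the metric name, label values, and model string are illustrative, not the ones Meilisearch exposes.

use prometheus::{opts, register_int_counter_vec, Encoder, TextEncoder};

fn main() {
    // One counter family, partitioned by `workspace` and `model` labels.
    let total_tokens = register_int_counter_vec!(
        opts!("demo_chat_total_tokens", "Total tokens used per workspace and model"),
        &["workspace", "model"]
    )
    .expect("Can't create a metric");

    // Every distinct label combination gets its own monotonically increasing counter.
    total_tokens.with_label_values(&["default", "gpt-4o-mini"]).inc_by(128);
    total_tokens.with_label_values(&["default", "gpt-4o-mini"]).inc_by(64);

    // Render the default registry in the Prometheus text exposition format,
    // which is what a /metrics route would return.
    let mut buffer = Vec::new();
    TextEncoder::new().encode(&prometheus::gather(), &mut buffer).unwrap();
    println!("{}", String::from_utf8(buffer).unwrap());
}

The chat route changes further down feed exactly this pattern: when a streamed response reports usage, one `inc_by` per token class is recorded, labelled by workspace and model.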
@@ -13,9 +13,9 @@ use async_openai::types::{
     ChatCompletionRequestDeveloperMessageContent, ChatCompletionRequestMessage,
     ChatCompletionRequestSystemMessage, ChatCompletionRequestSystemMessageContent,
     ChatCompletionRequestToolMessage, ChatCompletionRequestToolMessageContent,
-    ChatCompletionStreamResponseDelta, ChatCompletionToolArgs, ChatCompletionToolType,
-    CreateChatCompletionRequest, CreateChatCompletionStreamResponse, FinishReason, FunctionCall,
-    FunctionCallStream, FunctionObjectArgs,
+    ChatCompletionStreamOptions, ChatCompletionStreamResponseDelta, ChatCompletionToolArgs,
+    ChatCompletionToolType, CreateChatCompletionRequest, CreateChatCompletionStreamResponse,
+    FinishReason, FunctionCall, FunctionCallStream, FunctionObjectArgs,
 };
 use async_openai::Client;
 use bumpalo::Bump;
|
|||||||
use crate::error::MeilisearchHttpError;
|
use crate::error::MeilisearchHttpError;
|
||||||
use crate::extractors::authentication::policies::ActionPolicy;
|
use crate::extractors::authentication::policies::ActionPolicy;
|
||||||
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
|
use crate::extractors::authentication::{extract_token_from_request, GuardedData, Policy as _};
|
||||||
use crate::metrics::MEILISEARCH_DEGRADED_SEARCH_REQUESTS;
|
use crate::metrics::{
|
||||||
|
MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE, MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE,
|
||||||
|
MEILISEARCH_CHAT_SEARCH_REQUESTS, MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE,
|
||||||
|
MEILISEARCH_DEGRADED_SEARCH_REQUESTS,
|
||||||
|
};
|
||||||
use crate::routes::chats::utils::SseEventSender;
|
use crate::routes::chats::utils::SseEventSender;
|
||||||
use crate::routes::indexes::search::search_kind;
|
use crate::routes::indexes::search::search_kind;
|
||||||
use crate::search::{add_search_rules, prepare_search, search_from_kind, SearchQuery};
|
use crate::search::{add_search_rules, prepare_search, search_from_kind, SearchQuery};
|
||||||
@ -286,7 +290,7 @@ async fn process_search_request(
|
|||||||
let output = output?;
|
let output = output?;
|
||||||
let mut documents = Vec::new();
|
let mut documents = Vec::new();
|
||||||
if let Ok((ref rtxn, ref search_result)) = output {
|
if let Ok((ref rtxn, ref search_result)) = output {
|
||||||
// aggregate.succeed(search_result);
|
MEILISEARCH_CHAT_SEARCH_REQUESTS.with_label_values(&["internal"]).inc();
|
||||||
if search_result.degraded {
|
if search_result.degraded {
|
||||||
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
|
MEILISEARCH_DEGRADED_SEARCH_REQUESTS.inc();
|
||||||
}
|
}
|
||||||
@ -488,6 +492,7 @@ async fn streamed_chat(
|
|||||||
|
|
||||||
let (tx, rx) = tokio::sync::mpsc::channel(10);
|
let (tx, rx) = tokio::sync::mpsc::channel(10);
|
||||||
let tx = SseEventSender::new(tx);
|
let tx = SseEventSender::new(tx);
|
||||||
|
let workspace_uid = workspace_uid.to_string();
|
||||||
let _join_handle = Handle::current().spawn(async move {
|
let _join_handle = Handle::current().spawn(async move {
|
||||||
let client = Client::with_config(config.clone());
|
let client = Client::with_config(config.clone());
|
||||||
let mut global_tool_calls = HashMap::<u32, Call>::new();
|
let mut global_tool_calls = HashMap::<u32, Call>::new();
|
||||||
@ -497,6 +502,7 @@ async fn streamed_chat(
|
|||||||
let output = run_conversation(
|
let output = run_conversation(
|
||||||
&index_scheduler,
|
&index_scheduler,
|
||||||
&auth_ctrl,
|
&auth_ctrl,
|
||||||
|
&workspace_uid,
|
||||||
&search_queue,
|
&search_queue,
|
||||||
&auth_token,
|
&auth_token,
|
||||||
&client,
|
&client,
|
||||||
@ -534,6 +540,7 @@ async fn run_conversation<C: async_openai::config::Config>(
|
|||||||
Data<IndexScheduler>,
|
Data<IndexScheduler>,
|
||||||
>,
|
>,
|
||||||
auth_ctrl: &web::Data<AuthController>,
|
auth_ctrl: &web::Data<AuthController>,
|
||||||
|
workspace_uid: &str,
|
||||||
search_queue: &web::Data<SearchQueue>,
|
search_queue: &web::Data<SearchQueue>,
|
||||||
auth_token: &str,
|
auth_token: &str,
|
||||||
client: &Client<C>,
|
client: &Client<C>,
|
||||||
@ -543,13 +550,34 @@ async fn run_conversation<C: async_openai::config::Config>(
|
|||||||
global_tool_calls: &mut HashMap<u32, Call>,
|
global_tool_calls: &mut HashMap<u32, Call>,
|
||||||
function_support: FunctionSupport,
|
function_support: FunctionSupport,
|
||||||
) -> Result<ControlFlow<Option<FinishReason>, ()>, SendError<Event>> {
|
) -> Result<ControlFlow<Option<FinishReason>, ()>, SendError<Event>> {
|
||||||
|
use DbChatCompletionSource::*;
|
||||||
|
|
||||||
let mut finish_reason = None;
|
let mut finish_reason = None;
|
||||||
|
chat_completion.stream_options = match source {
|
||||||
|
OpenAi | AzureOpenAi => Some(ChatCompletionStreamOptions { include_usage: true }),
|
||||||
|
Mistral | VLlm => None,
|
||||||
|
};
|
||||||
|
|
||||||
// safety: unwrap: can only happens if `stream` was set to `false`
|
// safety: unwrap: can only happens if `stream` was set to `false`
|
||||||
let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
|
let mut response = client.chat().create_stream(chat_completion.clone()).await.unwrap();
|
||||||
while let Some(result) = response.next().await {
|
while let Some(result) = response.next().await {
|
||||||
match result {
|
match result {
|
||||||
Ok(resp) => {
|
Ok(resp) => {
|
||||||
let choice = &resp.choices[0];
|
if let Some(usage) = resp.usage.as_ref() {
|
||||||
|
MEILISEARCH_CHAT_PROMPT_TOKENS_USAGE
|
||||||
|
.with_label_values(&[workspace_uid, &chat_completion.model])
|
||||||
|
.inc_by(usage.prompt_tokens as u64);
|
||||||
|
MEILISEARCH_CHAT_COMPLETION_TOKENS_USAGE
|
||||||
|
.with_label_values(&[workspace_uid, &chat_completion.model])
|
||||||
|
.inc_by(usage.completion_tokens as u64);
|
||||||
|
MEILISEARCH_CHAT_TOTAL_TOKENS_USAGE
|
||||||
|
.with_label_values(&[workspace_uid, &chat_completion.model])
|
||||||
|
.inc_by(usage.total_tokens as u64);
|
||||||
|
}
|
||||||
|
let choice = match resp.choices.first() {
|
||||||
|
Some(choice) => choice,
|
||||||
|
None => break,
|
||||||
|
};
|
||||||
finish_reason = choice.finish_reason;
|
finish_reason = choice.finish_reason;
|
||||||
|
|
||||||
let ChatCompletionStreamResponseDelta { ref tool_calls, .. } = &choice.delta;
|
let ChatCompletionStreamResponseDelta { ref tool_calls, .. } = &choice.delta;
|
||||||
|
@@ -43,7 +43,7 @@ async fn version_too_old() {
     std::fs::write(db_path.join("VERSION"), "1.11.9999").unwrap();
     let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
     let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
-    snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.15.2");
+    snapshot!(err, @"Database version 1.11.9999 is too old for the experimental dumpless upgrade feature. Please generate a dump using the v1.11.9999 and import it in the v1.16.0");
 }
 
 #[actix_rt::test]
@@ -58,7 +58,7 @@ async fn version_requires_downgrade() {
     std::fs::write(db_path.join("VERSION"), format!("{major}.{minor}.{patch}")).unwrap();
     let options = Opt { experimental_dumpless_upgrade: true, ..default_settings };
     let err = Server::new_with_options(options).await.map(|_| ()).unwrap_err();
-    snapshot!(err, @"Database version 1.15.3 is higher than the Meilisearch version 1.15.2. Downgrade is not supported");
+    snapshot!(err, @"Database version 1.16.1 is higher than the Meilisearch version 1.16.0. Downgrade is not supported");
 }
 
 #[actix_rt::test]
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.2"
+    "upgradeTo": "v1.16.0"
   },
   "stats": {
     "totalNbTasks": 1,
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.2"
+    "upgradeTo": "v1.16.0"
   },
   "stats": {
     "totalNbTasks": 1,
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.2"
+    "upgradeTo": "v1.16.0"
   },
   "stats": {
     "totalNbTasks": 1,
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "canceledBy": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.2"
+    "upgradeTo": "v1.16.0"
   },
   "error": null,
   "duration": "[duration]",
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "canceledBy": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.2"
+    "upgradeTo": "v1.16.0"
   },
   "error": null,
   "duration": "[duration]",
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "canceledBy": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.2"
+    "upgradeTo": "v1.16.0"
   },
   "error": null,
   "duration": "[duration]",
@@ -8,7 +8,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "progress": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.2"
+    "upgradeTo": "v1.16.0"
   },
   "stats": {
     "totalNbTasks": 1,
@@ -12,7 +12,7 @@ source: crates/meilisearch/tests/upgrade/v1_12/v1_12_0.rs
   "canceledBy": null,
   "details": {
     "upgradeFrom": "v1.12.0",
-    "upgradeTo": "v1.15.2"
+    "upgradeTo": "v1.16.0"
   },
   "error": null,
   "duration": "[duration]",