Compare commits


1 commit

Author: Louis Dureuil
SHA1: 036251dad5
Message: Revert mimalloc to 0.1.37
Date: 2024-06-05 14:21:01 +02:00
13 changed files with 43 additions and 49 deletions

Cargo.lock (generated)

@@ -494,7 +494,7 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
[[package]]
name = "benchmarks"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"anyhow",
"bytes",
@@ -639,7 +639,7 @@ dependencies = [
[[package]]
name = "build-info"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"anyhow",
"time",
@@ -1539,7 +1539,7 @@ dependencies = [
[[package]]
name = "dump"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"anyhow",
"big_s",
@@ -1787,7 +1787,7 @@ dependencies = [
[[package]]
name = "file-store"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"faux",
"tempfile",
@@ -1810,7 +1810,7 @@ dependencies = [
[[package]]
name = "filter-parser"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"insta",
"nom",
@@ -1830,7 +1830,7 @@ dependencies = [
[[package]]
name = "flatten-serde-json"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"criterion",
"serde_json",
@@ -1948,7 +1948,7 @@ dependencies = [
[[package]]
name = "fuzzers"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"arbitrary",
"clap",
@@ -2442,7 +2442,7 @@ checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d"
[[package]]
name = "index-scheduler"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"anyhow",
"big_s",
@@ -2638,7 +2638,7 @@ dependencies = [
[[package]]
name = "json-depth-checker"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"criterion",
"serde_json",
@@ -3275,7 +3275,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
[[package]]
name = "meili-snap"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"insta",
"md5",
@@ -3284,7 +3284,7 @@ dependencies = [
[[package]]
name = "meilisearch"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"actix-cors",
"actix-http",
@@ -3377,7 +3377,7 @@ dependencies = [
[[package]]
name = "meilisearch-auth"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"base64 0.21.7",
"enum-iterator",
@@ -3396,7 +3396,7 @@ dependencies = [
[[package]]
name = "meilisearch-types"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"actix-web",
"anyhow",
@@ -3426,7 +3426,7 @@ dependencies = [
[[package]]
name = "meilitool"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"anyhow",
"clap",
@@ -3465,7 +3465,7 @@ dependencies = [
[[package]]
name = "milli"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"arroy",
"big_s",
@@ -3906,7 +3906,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
[[package]]
name = "permissive-json-pointer"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"big_s",
"serde_json",
@@ -6074,7 +6074,7 @@ dependencies = [
[[package]]
name = "xtask"
version = "1.8.3"
version = "1.8.1"
dependencies = [
"anyhow",
"build-info",


@@ -22,7 +22,7 @@ members = [
]
[workspace.package]
version = "1.8.3"
version = "1.8.1"
authors = [
"Quentin de Quelen <quentin@dequelen.me>",
"Clément Renault <clement@meilisearch.com>",


@@ -14,7 +14,7 @@ license.workspace = true
anyhow = "1.0.79"
csv = "1.3.0"
milli = { path = "../milli" }
mimalloc = { version = "0.1.39", default-features = false }
mimalloc = { version = "0.1.37", default-features = false }
serde_json = { version = "1.0.111", features = ["preserve_order"] }
[dev-dependencies]

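Note: the crates touched above only pin the mimalloc version; the allocator itself is selected in Rust code. As a hedged illustration (not code taken from this diff), a binary typically opts in like this:

// Hypothetical example, not part of this change: with mimalloc listed in
// Cargo.toml, a binary registers it as the global allocator.
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

fn main() {
    // Every heap allocation below is now served by mimalloc
    // (0.1.37 after this revert).
    let parts = vec![String::from("allocated"), String::from("with mimalloc")];
    println!("{}", parts.join(" "));
}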

@@ -152,7 +152,6 @@ impl Settings<Unchecked> {
}
#[derive(Debug, Clone, Deserialize)]
-#[allow(dead_code)] // otherwise rustc complains that the fields go unused
#[cfg_attr(test, derive(serde::Serialize))]
#[serde(deny_unknown_fields)]
#[serde(rename_all = "camelCase")]


@@ -182,7 +182,6 @@ impl Settings<Unchecked> {
}
}
-#[allow(dead_code)] // otherwise rustc complains that the fields go unused
#[derive(Debug, Clone, Deserialize)]
#[cfg_attr(test, derive(serde::Serialize))]
#[serde(deny_unknown_fields)]


@@ -200,7 +200,6 @@ impl std::ops::Deref for IndexUid {
}
}
-#[allow(dead_code)] // otherwise rustc complains that the fields go unused
#[derive(Debug)]
#[cfg_attr(test, derive(serde::Serialize))]
#[cfg_attr(test, serde(rename_all = "camelCase"))]


@@ -56,7 +56,7 @@ jsonwebtoken = "9.2.0"
lazy_static = "1.4.0"
meilisearch-auth = { path = "../meilisearch-auth" }
meilisearch-types = { path = "../meilisearch-types" }
mimalloc = { version = "0.1.39", default-features = false }
mimalloc = { version = "0.1.37", default-features = false }
mime = "0.3.17"
num_cpus = "1.16.0"
obkv = "0.2.1"


@@ -40,9 +40,8 @@ pub struct Permit {
impl Drop for Permit {
fn drop(&mut self) {
-let sender = self.sender.clone();
// if the channel is closed then the whole instance is down
-std::mem::drop(tokio::spawn(async move { sender.send(()).await }));
+let _ = futures::executor::block_on(self.sender.send(()));
}
}

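Note: the two variants in this hunk differ in how the permit is returned on drop. The removed lines hand the send off to a spawned Tokio task, while the added line blocks the current thread until the send completes. A minimal, self-contained sketch of the same pattern, assuming a tokio mpsc channel of unit values rather than the exact Meilisearch types:

// Hedged sketch of the pattern above; not the actual Meilisearch code.
use tokio::sync::mpsc;

struct Permit {
    sender: mpsc::Sender<()>,
}

impl Drop for Permit {
    fn drop(&mut self) {
        // Removed-side variant: detach a task so drop() never blocks the
        // calling thread; requires running inside a Tokio runtime.
        let sender = self.sender.clone();
        std::mem::drop(tokio::spawn(async move { sender.send(()).await }));

        // Added-side variant: block the current thread until the send
        // completes; no runtime is required.
        // let _ = futures::executor::block_on(self.sender.send(()));
    }
}

#[tokio::main]
async fn main() {
    let (sender, mut receiver) = mpsc::channel(1);
    drop(Permit { sender });
    // The dropped permit sent a unit value back over the channel.
    assert!(receiver.recv().await.is_some());
}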

@@ -74,10 +74,10 @@ csv = "1.3.0"
candle-core = { version = "0.4.1" }
candle-transformers = { version = "0.4.1" }
candle-nn = { version = "0.4.1" }
-tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default-features = false, features = [
+tokenizers = { git = "https://github.com/huggingface/tokenizers.git", tag = "v0.15.2", version = "0.15.2", default_features = false, features = [
"onig",
] }
-hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default-features = false, features = [
+hf-hub = { git = "https://github.com/dureuill/hf-hub.git", branch = "rust_tls", default_features = false, features = [
"online",
] }
tiktoken-rs = "0.5.8"
@@ -89,7 +89,7 @@ ureq = { version = "2.9.6", features = ["json"] }
url = "2.5.0"
[dev-dependencies]
mimalloc = { version = "0.1.39", default-features = false }
mimalloc = { version = "0.1.37", default-features = false }
big_s = "1.0.2"
insta = "1.34.0"
maplit = "1.0.2"


@@ -22,7 +22,7 @@ pub enum SearchEvents {
RankingRuleStartIteration { ranking_rule_idx: usize, universe_len: u64 },
RankingRuleNextBucket { ranking_rule_idx: usize, universe_len: u64, bucket_len: u64 },
RankingRuleSkipBucket { ranking_rule_idx: usize, bucket_len: u64 },
-RankingRuleEndIteration { ranking_rule_idx: usize },
+RankingRuleEndIteration { ranking_rule_idx: usize, universe_len: u64 },
ExtendResults { new: Vec<u32> },
ProximityGraph { graph: RankingRuleGraph<ProximityGraph> },
ProximityPaths { paths: Vec<Vec<Interned<ProximityCondition>>> },
@@ -123,9 +123,12 @@ impl SearchLogger<QueryGraph> for VisualSearchLogger {
&mut self,
ranking_rule_idx: usize,
_ranking_rule: &dyn RankingRule<QueryGraph>,
-_universe: &RoaringBitmap,
+universe: &RoaringBitmap,
) {
-self.events.push(SearchEvents::RankingRuleEndIteration { ranking_rule_idx });
+self.events.push(SearchEvents::RankingRuleEndIteration {
+ranking_rule_idx,
+universe_len: universe.len(),
+});
self.location.pop();
}
fn add_to_results(&mut self, docids: &[u32]) {
@@ -323,7 +326,7 @@ impl<'ctx> DetailedLoggerFinish<'ctx> {
assert!(ranking_rule_idx == self.rr_action_counter.len() - 1);
self.write_skip_bucket(bucket_len)?;
}
-SearchEvents::RankingRuleEndIteration { ranking_rule_idx } => {
+SearchEvents::RankingRuleEndIteration { ranking_rule_idx, universe_len: _ } => {
assert!(ranking_rule_idx == self.rr_action_counter.len() - 1);
self.write_end_iteration()?;
}


@@ -11,7 +11,7 @@ mod extract_word_position_docids;
use std::fs::File;
use std::io::BufReader;
-use std::sync::{Arc, OnceLock};
+use std::sync::Arc;
use crossbeam_channel::Sender;
use rayon::prelude::*;
@@ -31,7 +31,7 @@ use self::extract_word_position_docids::extract_word_position_docids;
use super::helpers::{as_cloneable_grenad, CursorClonableMmap, GrenadParameters};
use super::{helpers, TypedChunk};
use crate::update::settings::InnerIndexSettingsDiff;
-use crate::{FieldId, Result, ThreadPoolNoAbort, ThreadPoolNoAbortBuilder};
+use crate::{FieldId, Result, ThreadPoolNoAbortBuilder};
/// Extract data for each databases from obkv documents in parallel.
/// Send data in grenad file over provided Sender.
@@ -213,18 +213,6 @@ fn run_extraction_task<FE, FS, M>(
})
}
-fn request_threads() -> &'static ThreadPoolNoAbort {
-static REQUEST_THREADS: OnceLock<ThreadPoolNoAbort> = OnceLock::new();
-REQUEST_THREADS.get_or_init(|| {
-ThreadPoolNoAbortBuilder::new()
-.num_threads(crate::vector::REQUEST_PARALLELISM)
-.thread_name(|index| format!("embedding-request-{index}"))
-.build()
-.unwrap()
-})
-}
/// Extract chunked data and send it into lmdb_writer_sx sender:
/// - documents
fn send_original_documents_data(
@@ -239,6 +227,11 @@ fn send_original_documents_data(
let documents_chunk_cloned = original_documents_chunk.clone();
let lmdb_writer_sx_cloned = lmdb_writer_sx.clone();
+let request_threads = ThreadPoolNoAbortBuilder::new()
+.num_threads(crate::vector::REQUEST_PARALLELISM)
+.thread_name(|index| format!("embedding-request-{index}"))
+.build()?;
if settings_diff.reindex_vectors() || !settings_diff.settings_update_only() {
let settings_diff = settings_diff.clone();
rayon::spawn(move || {
@@ -256,7 +249,7 @@ fn send_original_documents_data(
prompts,
indexer,
embedder.clone(),
-request_threads(),
+&request_threads,
) {
Ok(results) => Some(results),
Err(error) => {

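Note: the hunks above replace a process-wide, lazily initialised embedding-request thread pool (the deleted request_threads() function built around OnceLock, which has to unwrap() because a static has no caller to report errors to) with a pool built inside send_original_documents_data, where a build failure can be propagated with ?. A hedged sketch of the static pattern, using a plain rayon pool in place of Meilisearch's internal ThreadPoolNoAbort and a placeholder thread count:

// Illustrative only: rayon::ThreadPool stands in for ThreadPoolNoAbort and
// the thread count replaces crate::vector::REQUEST_PARALLELISM.
use std::sync::OnceLock;

fn request_threads() -> &'static rayon::ThreadPool {
    static REQUEST_THREADS: OnceLock<rayon::ThreadPool> = OnceLock::new();
    REQUEST_THREADS.get_or_init(|| {
        rayon::ThreadPoolBuilder::new()
            .num_threads(4)
            .thread_name(|index| format!("embedding-request-{index}"))
            .build()
            // A static pool has no caller to hand the error to, hence
            // unwrap(); the per-call pool in the added lines uses `?`.
            .unwrap()
    })
}

fn main() {
    // Repeated calls reuse the same pool for the lifetime of the process.
    let sum: u64 = request_threads().install(|| (1..=10u64).sum());
    println!("{sum}");
}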

@@ -48,6 +48,7 @@ pub struct Transform<'a, 'i> {
fields_ids_map: FieldsIdsMap,
indexer_settings: &'a IndexerConfig,
+pub autogenerate_docids: bool,
pub index_documents_method: IndexDocumentsMethod,
available_documents_ids: AvailableDocumentsIds,
@@ -101,7 +102,7 @@ impl<'a, 'i> Transform<'a, 'i> {
index: &'i Index,
indexer_settings: &'a IndexerConfig,
index_documents_method: IndexDocumentsMethod,
-_autogenerate_docids: bool,
+autogenerate_docids: bool,
) -> Result<Self> {
// We must choose the appropriate merge function for when two or more documents
// with the same user id must be merged or fully replaced in the same batch.
@@ -135,6 +136,7 @@ impl<'a, 'i> Transform<'a, 'i> {
index,
fields_ids_map: index.fields_ids_map(wtxn)?,
indexer_settings,
+autogenerate_docids,
available_documents_ids: AvailableDocumentsIds::from_documents_ids(&documents_ids),
original_sorter,
flattened_sorter,


@@ -21,7 +21,7 @@ reqwest = { version = "0.11.23", features = [
"stream",
"json",
"rustls-tls",
-], default-features = false }
+], default_features = false }
serde = { version = "1.0.195", features = ["derive"] }
serde_json = "1.0.111"
sha2 = "0.10.8"