mirror of https://github.com/meilisearch/meilisearch.git
synced 2025-07-19 04:50:37 +00:00

Compare commits (1 commit): v1.8.1-but ... tmp-spawn-

Author | SHA1 | Date
---|---|---
 | b3952e8b3d |

Cargo.lock (generated): 34 changed lines
@@ -494,7 +494,7 @@ checksum = "8c3c1a368f70d6cf7302d78f8f7093da241fb8e8807c05cc9e51a125895a6d5b"
 
 [[package]]
 name = "benchmarks"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "anyhow",
  "bytes",
@@ -639,7 +639,7 @@ dependencies = [
 
 [[package]]
 name = "build-info"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "anyhow",
  "time",
@@ -1539,7 +1539,7 @@ dependencies = [
 
 [[package]]
 name = "dump"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "anyhow",
  "big_s",
@@ -1787,7 +1787,7 @@ dependencies = [
 
 [[package]]
 name = "file-store"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "faux",
  "tempfile",
@@ -1810,7 +1810,7 @@ dependencies = [
 
 [[package]]
 name = "filter-parser"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "insta",
  "nom",
@@ -1830,7 +1830,7 @@ dependencies = [
 
 [[package]]
 name = "flatten-serde-json"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "criterion",
  "serde_json",
@@ -1948,7 +1948,7 @@ dependencies = [
 
 [[package]]
 name = "fuzzers"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "arbitrary",
  "clap",
@@ -2442,7 +2442,7 @@ checksum = "206ca75c9c03ba3d4ace2460e57b189f39f43de612c2f85836e65c929701bb2d"
 
 [[package]]
 name = "index-scheduler"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "anyhow",
  "big_s",
@@ -2638,7 +2638,7 @@ dependencies = [
 
 [[package]]
 name = "json-depth-checker"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "criterion",
  "serde_json",
@@ -3275,7 +3275,7 @@ checksum = "490cc448043f947bae3cbee9c203358d62dbee0db12107a74be5c30ccfd09771"
 
 [[package]]
 name = "meili-snap"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "insta",
  "md5",
@@ -3284,7 +3284,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "actix-cors",
  "actix-http",
@@ -3377,7 +3377,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch-auth"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "base64 0.21.7",
  "enum-iterator",
@@ -3396,7 +3396,7 @@ dependencies = [
 
 [[package]]
 name = "meilisearch-types"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "actix-web",
  "anyhow",
@@ -3426,7 +3426,7 @@ dependencies = [
 
 [[package]]
 name = "meilitool"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "anyhow",
  "clap",
@@ -3465,7 +3465,7 @@ dependencies = [
 
 [[package]]
 name = "milli"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "arroy",
  "big_s",
@@ -3906,7 +3906,7 @@ checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e"
 
 [[package]]
 name = "permissive-json-pointer"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "big_s",
  "serde_json",
@@ -6074,7 +6074,7 @@ dependencies = [
 
 [[package]]
 name = "xtask"
-version = "1.8.1"
+version = "1.8.0"
 dependencies = [
  "anyhow",
  "build-info",
@@ -22,7 +22,7 @@ members = [
 ]
 
 [workspace.package]
-version = "1.8.1"
+version = "1.8.0"
 authors = [
     "Quentin de Quelen <quentin@dequelen.me>",
     "Clément Renault <clement@meilisearch.com>",
@@ -117,69 +117,3 @@ async fn geo_bounding_box_with_string_and_number() {
         )
         .await;
 }
-
-#[actix_rt::test]
-async fn bug_4640() {
-    // https://github.com/meilisearch/meilisearch/issues/4640
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    let documents = DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.update_settings_filterable_attributes(json!(["_geo"])).await;
-    let (ret, _code) = index.update_settings_sortable_attributes(json!(["_geo"])).await;
-    index.wait_task(ret.uid()).await;
-
-    // Sort the document with the second one first
-    index
-        .search(
-            json!({
-                "sort": ["_geoPoint(45.4777599, 9.1967508):asc"],
-            }),
-            |response, code| {
-                assert_eq!(code, 200, "{}", response);
-                snapshot!(json_string!(response, { ".processingTimeMs" => "[time]" }), @r###"
-                {
-                  "hits": [
-                    {
-                      "id": 2,
-                      "name": "La Bella Italia",
-                      "address": "456 Elm Street, Townsville",
-                      "type": "Italian",
-                      "rating": 9,
-                      "_geo": {
-                        "lat": "45.4777599",
-                        "lng": "9.1967508"
-                      }
-                    },
-                    {
-                      "id": 1,
-                      "name": "Taco Truck",
-                      "address": "444 Salsa Street, Burritoville",
-                      "type": "Mexican",
-                      "rating": 9,
-                      "_geo": {
-                        "lat": 34.0522,
-                        "lng": -118.2437
-                      },
-                      "_geoDistance": 9714063
-                    },
-                    {
-                      "id": 3,
-                      "name": "Crêpe Truck",
-                      "address": "2 Billig Avenue, Rouenville",
-                      "type": "French",
-                      "rating": 10
-                    }
-                  ],
-                  "query": "",
-                  "processingTimeMs": "[time]",
-                  "limit": 20,
-                  "offset": 0,
-                  "estimatedTotalHits": 3
-                }
-                "###);
-            },
-        )
-        .await;
-}
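Note: for context on the deleted test, issue 4640 concerns documents whose `_geo` coordinates are JSON strings rather than numbers, and the snapshot above deliberately mixes both encodings. A minimal, hedged illustration of why the two forms need separate handling (plain serde_json, not meilisearch code):

    use serde_json::json;

    fn main() {
        // Both `_geo` encodings that appear in the snapshot above:
        let string_geo = json!({ "lat": "45.4777599", "lng": "9.1967508" });
        let number_geo = json!({ "lat": 34.0522, "lng": -118.2437 });

        // `as_f64` only succeeds for the numeric form; string coordinates
        // need an explicit parse, which is exactly the edge the test pins.
        assert_eq!(string_geo["lat"].as_f64(), None);
        assert_eq!(number_geo["lat"].as_f64(), Some(34.0522));
        assert_eq!(
            string_geo["lat"].as_str().and_then(|s| s.parse::<f64>().ok()),
            Some(45.4777599)
        );
    }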
@@ -45,6 +45,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     obkv_documents: grenad::Reader<R>,
     indexer: GrenadParameters,
     settings_diff: &InnerIndexSettingsDiff,
+    geo_fields_ids: Option<(FieldId, FieldId)>,
 ) -> Result<ExtractedFacetValues> {
     puffin::profile_function!();
 
@@ -126,18 +127,12 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
                     add_exists.insert(document);
                 }
 
-                let del_geo_support = settings_diff
-                    .old
-                    .geo_fields_ids
-                    .map_or(false, |(lat, lng)| field_id == lat || field_id == lng);
-                let add_geo_support = settings_diff
-                    .new
-                    .geo_fields_ids
-                    .map_or(false, |(lat, lng)| field_id == lat || field_id == lng);
+                let geo_support =
+                    geo_fields_ids.map_or(false, |(lat, lng)| field_id == lat || field_id == lng);
                 let del_filterable_values =
-                    del_value.map(|value| extract_facet_values(&value, del_geo_support));
+                    del_value.map(|value| extract_facet_values(&value, geo_support));
                 let add_filterable_values =
-                    add_value.map(|value| extract_facet_values(&value, add_geo_support));
+                    add_value.map(|value| extract_facet_values(&value, geo_support));
 
                 // Those closures are just here to simplify things a bit.
                 let mut insert_numbers_diff = |del_numbers, add_numbers| {
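Note: the `geo_support` check above, isolated as a sketch with `FieldId` narrowed to `u16`. A field takes the geo path only when it is one of the two resolved `_geo.lat`/`_geo.lng` ids:

    fn is_geo_field(geo_fields_ids: Option<(u16, u16)>, field_id: u16) -> bool {
        geo_fields_ids.map_or(false, |(lat, lng)| field_id == lat || field_id == lng)
    }

    fn main() {
        assert!(is_geo_field(Some((3, 4)), 4));  // matches the lng field id
        assert!(!is_geo_field(Some((3, 4)), 9)); // unrelated field
        assert!(!is_geo_field(None, 4));         // `_geo` is not faceted at all
    }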
@@ -8,7 +8,6 @@ use super::helpers::{create_writer, writer_into_reader, GrenadParameters};
 use crate::error::GeoError;
 use crate::update::del_add::{DelAdd, KvReaderDelAdd, KvWriterDelAdd};
 use crate::update::index_documents::extract_finite_float_from_value;
-use crate::update::settings::{InnerIndexSettings, InnerIndexSettingsDiff};
 use crate::{FieldId, InternalError, Result};
 
 /// Extracts the geographical coordinates contained in each document under the `_geo` field.
@@ -19,7 +18,7 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
     obkv_documents: grenad::Reader<R>,
     indexer: GrenadParameters,
     primary_key_id: FieldId,
-    settings_diff: &InnerIndexSettingsDiff,
+    (lat_fid, lng_fid): (FieldId, FieldId),
 ) -> Result<grenad::Reader<BufReader<File>>> {
     puffin::profile_function!();
 
@@ -41,27 +40,47 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
             serde_json::from_slice(document_id).unwrap()
         };
 
-        // extract old version
-        let del_lat_lng =
-            extract_lat_lng(&obkv, &settings_diff.old, DelAdd::Deletion, document_id)?;
-        // extract new version
-        let add_lat_lng =
-            extract_lat_lng(&obkv, &settings_diff.new, DelAdd::Addition, document_id)?;
-
-        if del_lat_lng != add_lat_lng {
-            let mut obkv = KvWriterDelAdd::memory();
-            if let Some([lat, lng]) = del_lat_lng {
-                #[allow(clippy::drop_non_drop)]
-                let bytes: [u8; 16] = concat_arrays![lat.to_ne_bytes(), lng.to_ne_bytes()];
-                obkv.insert(DelAdd::Deletion, bytes)?;
-            }
-            if let Some([lat, lng]) = add_lat_lng {
-                #[allow(clippy::drop_non_drop)]
-                let bytes: [u8; 16] = concat_arrays![lat.to_ne_bytes(), lng.to_ne_bytes()];
-                obkv.insert(DelAdd::Addition, bytes)?;
-            }
-            let bytes = obkv.into_inner()?;
-            writer.insert(docid_bytes, bytes)?;
+        // first we get the two fields
+        match (obkv.get(lat_fid), obkv.get(lng_fid)) {
+            (Some(lat), Some(lng)) => {
+                let deladd_lat_obkv = KvReaderDelAdd::new(lat);
+                let deladd_lng_obkv = KvReaderDelAdd::new(lng);
+
+                // then we extract the values
+                let del_lat_lng = deladd_lat_obkv
+                    .get(DelAdd::Deletion)
+                    .zip(deladd_lng_obkv.get(DelAdd::Deletion))
+                    .map(|(lat, lng)| extract_lat_lng(lat, lng, document_id))
+                    .transpose()?;
+                let add_lat_lng = deladd_lat_obkv
+                    .get(DelAdd::Addition)
+                    .zip(deladd_lng_obkv.get(DelAdd::Addition))
+                    .map(|(lat, lng)| extract_lat_lng(lat, lng, document_id))
+                    .transpose()?;
+
+                if del_lat_lng != add_lat_lng {
+                    let mut obkv = KvWriterDelAdd::memory();
+                    if let Some([lat, lng]) = del_lat_lng {
+                        #[allow(clippy::drop_non_drop)]
+                        let bytes: [u8; 16] = concat_arrays![lat.to_ne_bytes(), lng.to_ne_bytes()];
+                        obkv.insert(DelAdd::Deletion, bytes)?;
+                    }
+                    if let Some([lat, lng]) = add_lat_lng {
+                        #[allow(clippy::drop_non_drop)]
+                        let bytes: [u8; 16] = concat_arrays![lat.to_ne_bytes(), lng.to_ne_bytes()];
+                        obkv.insert(DelAdd::Addition, bytes)?;
+                    }
+                    let bytes = obkv.into_inner()?;
+                    writer.insert(docid_bytes, bytes)?;
+                }
+            }
+            (None, Some(_)) => {
+                return Err(GeoError::MissingLatitude { document_id: document_id() }.into())
+            }
+            (Some(_), None) => {
+                return Err(GeoError::MissingLongitude { document_id: document_id() }.into())
+            }
+            (None, None) => (),
         }
     }
 
@@ -69,37 +88,16 @@ pub fn extract_geo_points<R: io::Read + io::Seek>(
 }
 
 /// Extract the finite floats lat and lng from two bytes slices.
-fn extract_lat_lng(
-    document: &obkv::KvReader<FieldId>,
-    settings: &InnerIndexSettings,
-    deladd: DelAdd,
-    document_id: impl Fn() -> Value,
-) -> Result<Option<[f64; 2]>> {
-    match settings.geo_fields_ids {
-        Some((lat_fid, lng_fid)) => {
-            let lat = document.get(lat_fid).map(KvReaderDelAdd::new).and_then(|r| r.get(deladd));
-            let lng = document.get(lng_fid).map(KvReaderDelAdd::new).and_then(|r| r.get(deladd));
-            let (lat, lng) = match (lat, lng) {
-                (Some(lat), Some(lng)) => (lat, lng),
-                (Some(_), None) => {
-                    return Err(GeoError::MissingLatitude { document_id: document_id() }.into())
-                }
-                (None, Some(_)) => {
-                    return Err(GeoError::MissingLongitude { document_id: document_id() }.into())
-                }
-                (None, None) => return Ok(None),
-            };
-            let lat = extract_finite_float_from_value(
-                serde_json::from_slice(lat).map_err(InternalError::SerdeJson)?,
-            )
-            .map_err(|lat| GeoError::BadLatitude { document_id: document_id(), value: lat })?;
+fn extract_lat_lng(lat: &[u8], lng: &[u8], document_id: impl Fn() -> Value) -> Result<[f64; 2]> {
+    let lat = extract_finite_float_from_value(
+        serde_json::from_slice(lat).map_err(InternalError::SerdeJson)?,
+    )
+    .map_err(|lat| GeoError::BadLatitude { document_id: document_id(), value: lat })?;
 
-            let lng = extract_finite_float_from_value(
-                serde_json::from_slice(lng).map_err(InternalError::SerdeJson)?,
-            )
-            .map_err(|lng| GeoError::BadLongitude { document_id: document_id(), value: lng })?;
-            Ok(Some([lat, lng]))
-        }
-        None => Ok(None),
-    }
-}
+    let lng = extract_finite_float_from_value(
+        serde_json::from_slice(lng).map_err(InternalError::SerdeJson)?,
+    )
+    .map_err(|lng| GeoError::BadLongitude { document_id: document_id(), value: lng })?;
+
+    Ok([lat, lng])
+}
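Note: both sides of this diff keep the same on-disk encoding: the two finite floats are packed into a 16-byte value via `concat_arrays![lat.to_ne_bytes(), lng.to_ne_bytes()]`. A standalone sketch of that round trip in plain std (a stand-in for the `concat_arrays!` macro):

    fn encode_lat_lng([lat, lng]: [f64; 2]) -> [u8; 16] {
        // Same layout as concat_arrays![lat.to_ne_bytes(), lng.to_ne_bytes()]:
        // latitude in the first 8 bytes, longitude in the last 8.
        let mut bytes = [0u8; 16];
        bytes[..8].copy_from_slice(&lat.to_ne_bytes());
        bytes[8..].copy_from_slice(&lng.to_ne_bytes());
        bytes
    }

    fn decode_lat_lng(bytes: [u8; 16]) -> [f64; 2] {
        let lat = f64::from_ne_bytes(bytes[..8].try_into().unwrap());
        let lng = f64::from_ne_bytes(bytes[8..].try_into().unwrap());
        [lat, lng]
    }

    fn main() {
        let point = [45.4777599_f64, 9.1967508];
        // Byte-level round trip is exact, no float rounding involved.
        assert_eq!(decode_lat_lng(encode_lat_lng(point)), point);
    }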
@@ -43,6 +43,7 @@ pub(crate) fn data_from_obkv_documents(
     indexer: GrenadParameters,
     lmdb_writer_sx: Sender<Result<TypedChunk>>,
     primary_key_id: FieldId,
+    geo_fields_ids: Option<(FieldId, FieldId)>,
     settings_diff: Arc<InnerIndexSettingsDiff>,
     max_positions_per_attributes: Option<u32>,
 ) -> Result<()> {
@@ -71,6 +72,7 @@ pub(crate) fn data_from_obkv_documents(
                 indexer,
                 lmdb_writer_sx.clone(),
                 primary_key_id,
+                geo_fields_ids,
                 settings_diff.clone(),
                 max_positions_per_attributes,
             )
@@ -227,12 +229,15 @@ fn send_original_documents_data(
     let documents_chunk_cloned = original_documents_chunk.clone();
     let lmdb_writer_sx_cloned = lmdb_writer_sx.clone();
 
-    let request_threads = ThreadPoolNoAbortBuilder::new()
-        .num_threads(crate::vector::REQUEST_PARALLELISM)
-        .thread_name(|index| format!("embedding-request-{index}"))
-        .build()?;
+    let new_embedding_configs = settings_diff.new.embedding_configs.clone();
 
-    if settings_diff.reindex_vectors() || !settings_diff.settings_update_only() {
+    if (settings_diff.reindex_vectors() || !settings_diff.settings_update_only())
+        && new_embedding_configs.get_default().is_some()
+    {
+        let request_threads = ThreadPoolNoAbortBuilder::new()
+            .num_threads(crate::vector::REQUEST_PARALLELISM)
+            .thread_name(|index| format!("embedding-request-{index}"))
+            .build()?;
         let settings_diff = settings_diff.clone();
         rayon::spawn(move || {
             for (name, (embedder, prompt)) in settings_diff.new.embedding_configs.clone() {
@@ -298,6 +303,7 @@ fn send_and_extract_flattened_documents_data(
     indexer: GrenadParameters,
     lmdb_writer_sx: Sender<Result<TypedChunk>>,
     primary_key_id: FieldId,
+    geo_fields_ids: Option<(FieldId, FieldId)>,
     settings_diff: Arc<InnerIndexSettingsDiff>,
     max_positions_per_attributes: Option<u32>,
 ) -> Result<(
@@ -307,13 +313,12 @@ fn send_and_extract_flattened_documents_data(
     let flattened_documents_chunk =
         flattened_documents_chunk.and_then(|c| unsafe { as_cloneable_grenad(&c) })?;
 
-    if settings_diff.run_geo_indexing() {
+    if let Some(geo_fields_ids) = geo_fields_ids {
         let documents_chunk_cloned = flattened_documents_chunk.clone();
         let lmdb_writer_sx_cloned = lmdb_writer_sx.clone();
-        let settings_diff = settings_diff.clone();
         rayon::spawn(move || {
             let result =
-                extract_geo_points(documents_chunk_cloned, indexer, primary_key_id, &settings_diff);
+                extract_geo_points(documents_chunk_cloned, indexer, primary_key_id, geo_fields_ids);
             let _ = match result {
                 Ok(geo_points) => lmdb_writer_sx_cloned.send(Ok(TypedChunk::GeoPoints(geo_points))),
                 Err(error) => lmdb_writer_sx_cloned.send(Err(error)),
@@ -352,6 +357,7 @@ fn send_and_extract_flattened_documents_data(
                 flattened_documents_chunk.clone(),
                 indexer,
                 &settings_diff,
+                geo_fields_ids,
             )?;
 
             // send fid_docid_facet_numbers_chunk to DB writer
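Note: the `send_original_documents_data` change moves the request thread pool inside the conditional, so a run with no default embedder configured never spawns the `embedding-request-*` threads. A rough sketch of the pattern with plain rayon (`ThreadPoolNoAbortBuilder` is a meilisearch wrapper, and `4` stands in for `crate::vector::REQUEST_PARALLELISM`):

    fn embedding_pool_if_needed(has_default_embedder: bool) -> Option<rayon::ThreadPool> {
        if !has_default_embedder {
            return None; // nothing to embed: no threads are ever spawned
        }
        rayon::ThreadPoolBuilder::new()
            .num_threads(4) // stand-in for crate::vector::REQUEST_PARALLELISM
            .thread_name(|index| format!("embedding-request-{index}"))
            .build()
            .ok()
    }

    fn main() {
        // The cheap path pays nothing at all.
        assert!(embedding_pool_if_needed(false).is_none());
    }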
@@ -324,6 +324,28 @@ where
         // get the primary key field id
         let primary_key_id = settings_diff.new.fields_ids_map.id(&primary_key).unwrap();
 
+        // get the fid of the `_geo.lat` and `_geo.lng` fields.
+        let mut field_id_map = self.index.fields_ids_map(self.wtxn)?;
+
+        // self.index.fields_ids_map($a)? ==>> field_id_map
+        let geo_fields_ids = match field_id_map.id("_geo") {
+            Some(gfid) => {
+                let is_sortable = self.index.sortable_fields_ids(self.wtxn)?.contains(&gfid);
+                let is_filterable = self.index.filterable_fields_ids(self.wtxn)?.contains(&gfid);
+                // if `_geo` is faceted then we get the `lat` and `lng`
+                if is_sortable || is_filterable {
+                    let field_ids = field_id_map
+                        .insert("_geo.lat")
+                        .zip(field_id_map.insert("_geo.lng"))
+                        .ok_or(UserError::AttributeLimitReached)?;
+                    Some(field_ids)
+                } else {
+                    None
+                }
+            }
+            None => None,
+        };
+
         let pool_params = GrenadParameters {
             chunk_compression_type: self.indexer_config.chunk_compression_type,
             chunk_compression_level: self.indexer_config.chunk_compression_level,
@@ -390,6 +412,7 @@ where
             pool_params,
             lmdb_writer_sx.clone(),
             primary_key_id,
+            geo_fields_ids,
             settings_diff.clone(),
             max_positions_per_attributes,
         )
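Note: the `geo_fields_ids` resolution above hinges on `Option::zip` plus `ok_or`: both `_geo.lat` and `_geo.lng` must be insertable into the fields id map, otherwise the whole update fails with `AttributeLimitReached`. The combinator in isolation, with the id type simplified to `u16`:

    fn geo_field_ids(lat: Option<u16>, lng: Option<u16>) -> Result<(u16, u16), &'static str> {
        // zip: Some only when *both* ids exist; ok_or: turn the gap into an error.
        lat.zip(lng).ok_or("attribute limit reached")
    }

    fn main() {
        assert_eq!(geo_field_ids(Some(1), Some(2)), Ok((1, 2)));
        assert!(geo_field_ids(Some(1), None).is_err());
    }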
@@ -1161,11 +1161,6 @@ impl InnerIndexSettingsDiff {
     pub fn settings_update_only(&self) -> bool {
         self.settings_update_only
     }
-
-    pub fn run_geo_indexing(&self) -> bool {
-        self.old.geo_fields_ids != self.new.geo_fields_ids
-            || (!self.settings_update_only && self.new.geo_fields_ids.is_some())
-    }
 }
 
 #[derive(Clone)]
@@ -1182,7 +1177,6 @@ pub(crate) struct InnerIndexSettings {
     pub proximity_precision: ProximityPrecision,
     pub embedding_configs: EmbeddingConfigs,
     pub existing_fields: HashSet<String>,
-    pub geo_fields_ids: Option<(FieldId, FieldId)>,
 }
 
 impl InnerIndexSettings {
@@ -1191,7 +1185,7 @@ impl InnerIndexSettings {
         let stop_words = stop_words.map(|sw| sw.map_data(Vec::from).unwrap());
         let allowed_separators = index.allowed_separators(rtxn)?;
         let dictionary = index.dictionary(rtxn)?;
-        let mut fields_ids_map = index.fields_ids_map(rtxn)?;
+        let fields_ids_map = index.fields_ids_map(rtxn)?;
         let user_defined_searchable_fields = index.user_defined_searchable_fields(rtxn)?;
         let user_defined_searchable_fields =
             user_defined_searchable_fields.map(|sf| sf.into_iter().map(String::from).collect());
@@ -1206,24 +1200,6 @@ impl InnerIndexSettings {
             .into_iter()
             .filter_map(|(field, count)| (count != 0).then_some(field))
             .collect();
-        // index.fields_ids_map($a)? ==>> fields_ids_map
-        let geo_fields_ids = match fields_ids_map.id("_geo") {
-            Some(gfid) => {
-                let is_sortable = index.sortable_fields_ids(rtxn)?.contains(&gfid);
-                let is_filterable = index.filterable_fields_ids(rtxn)?.contains(&gfid);
-                // if `_geo` is faceted then we get the `lat` and `lng`
-                if is_sortable || is_filterable {
-                    let field_ids = fields_ids_map
-                        .insert("_geo.lat")
-                        .zip(fields_ids_map.insert("_geo.lng"))
-                        .ok_or(UserError::AttributeLimitReached)?;
-                    Some(field_ids)
-                } else {
-                    None
-                }
-            }
-            None => None,
-        };
 
         Ok(Self {
             stop_words,
@@ -1238,7 +1214,6 @@ impl InnerIndexSettings {
             proximity_precision,
            embedding_configs,
             existing_fields,
-            geo_fields_ids,
         })
     }
 
@@ -54,7 +54,7 @@
       "sha256": "27e25efd0b68b159b8b21350d9af76938710cb29ce0393fa71b41c4f3c630ffe"
     }
   },
-  "precommands": [
+  "commands": [
     {
       "route": "indexes/movies/settings",
       "method": "PATCH",
@@ -78,10 +78,8 @@
         ]
       }
     },
-      "synchronous": "WaitForTask"
-    }
-  ],
-  "commands": [
+      "synchronous": "DontWait"
+    },
     {
       "route": "indexes/movies/documents",
       "method": "POST",
@@ -11,7 +11,7 @@
       "sha256": "5b6e4cb660bc20327776e8a33ea197b43d9ec84856710ead1cc87ab24df77de1"
     }
   },
-  "precommands": [
+  "commands": [
     {
       "route": "indexes/movies/settings",
       "method": "PATCH",
@@ -30,10 +30,8 @@
         ]
       }
     },
-      "synchronous": "WaitForTask"
-    }
-  ],
-  "commands": [
+      "synchronous": "DontWait"
+    },
     {
       "route": "indexes/movies/documents",
       "method": "POST",
@@ -11,7 +11,7 @@
       "sha256": "d215e395e4240f12f03b8f1f68901eac82d9e7ded5b462cbf4a6b8efde76c6c6"
     }
   },
-  "precommands": [
+  "commands": [
    {
       "route": "experimental-features",
       "method": "PATCH",
@@ -55,9 +55,7 @@
       }
     },
-      "synchronous": "WaitForTask"
-    }
-  ],
-  "commands": [
+    },
     {
       "route": "indexes/movies/documents",
       "method": "POST",
@@ -11,7 +11,7 @@
       "sha256": "d215e395e4240f12f03b8f1f68901eac82d9e7ded5b462cbf4a6b8efde76c6c6"
     }
   },
-  "precommands": [
+  "commands": [
     {
       "route": "experimental-features",
       "method": "PATCH",
@@ -49,9 +49,7 @@
       "asset": "movies-100.json"
     },
-      "synchronous": "WaitForTask"
-    }
-  ],
-  "commands": [
+    },
     {
       "route": "indexes/movies/settings",
       "method": "PATCH",
@@ -11,7 +11,7 @@
       "sha256": "28c359a0956958af0ba204ec11bad3045a0864a10b4838914fea25a01724f84b"
     }
   },
-  "precommands": [
+  "commands": [
     {
       "route": "indexes/peoples/settings",
       "method": "PATCH",
@@ -59,9 +59,7 @@
       "asset": "150k-people.json"
     },
-      "synchronous": "WaitForTask"
-    }
-  ],
-  "commands": [
+    },
     {
       "route": "indexes/peoples/settings",
       "method": "PATCH",
@@ -11,7 +11,7 @@
       "sha256": "28c359a0956958af0ba204ec11bad3045a0864a10b4838914fea25a01724f84b"
     }
   },
-  "precommands": [
+  "commands": [
     {
       "route": "indexes/peoples/settings",
       "method": "PATCH",
@@ -61,9 +61,7 @@
       "asset": "150k-people.json"
     },
-      "synchronous": "WaitForTask"
-    }
-  ],
-  "commands": [
+    },
     {
       "route": "indexes/peoples/settings",
       "method": "PATCH",
@@ -11,7 +11,7 @@
       "sha256": "28c359a0956958af0ba204ec11bad3045a0864a10b4838914fea25a01724f84b"
     }
   },
-  "precommands": [
+  "commands": [
     {
       "route": "indexes/peoples/settings",
       "method": "PATCH",
@@ -61,9 +61,7 @@
       "asset": "150k-people.json"
     },
-      "synchronous": "WaitForTask"
-    }
-  ],
-  "commands": [
+    },
     {
       "route": "indexes/peoples/settings",
       "method": "PATCH",
@@ -11,7 +11,7 @@
       "sha256": "28c359a0956958af0ba204ec11bad3045a0864a10b4838914fea25a01724f84b"
     }
   },
-  "precommands": [
+  "commands": [
     {
       "route": "indexes/peoples/settings",
       "method": "PATCH",
@@ -62,18 +62,14 @@
       "asset": "150k-people.json"
     },
-      "synchronous": "WaitForTask"
-    }
-  ],
-  "commands": [
+    },
     {
       "route": "indexes/peoples/settings",
       "method": "PATCH",
       "body": {
         "inline": {
           "typoTolerance": {
-            "disableOnAttributes": [
-              "featured_job_organization_name"
-            ]
+            "disableOnAttributes": ["featured_job_organization_name"]
           }
         }
       },
@@ -97,22 +93,7 @@
       "body": {
         "inline": {
           "typoTolerance": {
-            "disableOnWords": [
-              "Ben",
-              "Elowitz",
-              "Kevin",
-              "Flaherty",
-              "Ron",
-              "Dustin",
-              "Owen",
-              "Chris",
-              "Mark",
-              "Matt",
-              "Peter",
-              "Van",
-              "Head",
-              "of"
-            ]
+            "disableOnWords": ["Ben","Elowitz","Kevin","Flaherty", "Ron", "Dustin", "Owen", "Chris", "Mark", "Matt", "Peter", "Van", "Head", "of"]
           }
         }
       },
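Note: these workload files track the schema change in `Workload` (see the last file in this diff): one side has a dedicated `precommands` array, the other folds everything into `commands`. A hedged sketch of the `precommands` side of the struct, field types simplified, showing why `#[serde(default)]` lets a file omit the array entirely:

    use serde::Deserialize;

    #[derive(Deserialize)]
    struct Command {
        #[allow(dead_code)]
        route: String,
        #[allow(dead_code)]
        synchronous: Option<String>, // "WaitForTask" | "DontWait" in the files above
    }

    #[derive(Deserialize)]
    struct Workload {
        #[serde(default)]
        precommands: Vec<Command>,
        commands: Vec<Command>,
    }

    fn main() -> Result<(), serde_json::Error> {
        // A file with no "precommands" key still deserializes cleanly:
        let raw = r#"{ "commands": [ { "route": "indexes/movies/documents" } ] }"#;
        let workload: Workload = serde_json::from_str(raw)?;
        assert!(workload.precommands.is_empty());
        assert_eq!(workload.commands.len(), 1);
        Ok(())
    }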
@@ -55,10 +55,6 @@ impl Client {
     pub fn delete(&self, route: &str) -> reqwest::RequestBuilder {
         self.request(reqwest::Method::DELETE, route)
     }
-
-    pub fn base_url(&self) -> Option<&str> {
-        self.base_url.as_deref()
-    }
 }
 
 #[derive(Debug, Clone, Copy, Deserialize)]
@@ -18,9 +18,12 @@ pub enum DashboardClient {
 }
 
 impl DashboardClient {
-    pub fn new(dashboard_url: String, api_key: Option<&str>) -> anyhow::Result<Self> {
-        let dashboard_client =
-            Client::new(Some(dashboard_url), api_key, Some(std::time::Duration::from_secs(60)))?;
+    pub fn new(dashboard_url: &str, api_key: Option<&str>) -> anyhow::Result<Self> {
+        let dashboard_client = Client::new(
+            Some(format!("{}/api/v1", dashboard_url)),
+            api_key,
+            Some(std::time::Duration::from_secs(60)),
+        )?;
 
         Ok(Self::Client(dashboard_client))
     }
@@ -33,7 +36,7 @@ impl DashboardClient {
         let Self::Client(dashboard_client) = self else { return Ok(()) };
 
         let response = dashboard_client
-            .put("/api/v1/machine")
+            .put("machine")
             .json(&json!({"hostname": env.hostname}))
             .send()
             .await
@@ -59,7 +62,7 @@ impl DashboardClient {
         let Self::Client(dashboard_client) = self else { return Ok(Uuid::now_v7()) };
 
         let response = dashboard_client
-            .put("/api/v1/invocation")
+            .put("invocation")
             .json(&json!({
                 "commit": {
                     "sha1": build_info.commit_sha1,
@@ -94,7 +97,7 @@ impl DashboardClient {
         let Self::Client(dashboard_client) = self else { return Ok(Uuid::now_v7()) };
 
         let response = dashboard_client
-            .put("/api/v1/workload")
+            .put("workload")
             .json(&json!({
                 "invocation_uuid": invocation_uuid,
                 "name": &workload.name,
@@ -121,7 +124,7 @@ impl DashboardClient {
         let Self::Client(dashboard_client) = self else { return Ok(()) };
 
         let response = dashboard_client
-            .put("/api/v1/run")
+            .put("run")
             .json(&json!({
                 "workload_uuid": workload_uuid,
                 "data": report
@@ -156,7 +159,7 @@ impl DashboardClient {
     pub async fn mark_as_failed(&self, invocation_uuid: Uuid, failure_reason: Option<String>) {
         if let DashboardClient::Client(client) = self {
             let response = client
-                .post("/api/v1/cancel-invocation")
+                .post("cancel-invocation")
                 .json(&json!({
                     "invocation_uuid": invocation_uuid,
                     "failure_reason": failure_reason,
@@ -183,28 +186,4 @@ impl DashboardClient {
 
         tracing::warn!(%invocation_uuid, "marked invocation as failed or canceled");
     }
-
-    /// Result URL in markdown
-    pub(crate) fn result_url(
-        &self,
-        workload_name: &str,
-        build_info: &build_info::BuildInfo,
-        baseline_branch: &str,
-    ) -> String {
-        let Self::Client(client) = self else { return Default::default() };
-        let Some(base_url) = client.base_url() else { return Default::default() };
-
-        let Some(commit_sha1) = build_info.commit_sha1 else { return Default::default() };
-
-        // https://bench.meilisearch.dev/view_spans?commit_sha1=500ddc76b549fb9f1af54b2dd6abfa15960381bb&workload_name=settings-add-remove-filters.json&target_branch=reduce-transform-disk-usage&baseline_branch=main
-        let mut url = format!(
-            "{base_url}/view_spans?commit_sha1={commit_sha1}&workload_name={workload_name}"
-        );
-
-        if let Some(target_branch) = build_info.branch {
-            url += &format!("&target_branch={target_branch}&baseline_branch={baseline_branch}");
-        }
-
-        format!("[{workload_name} compared with {baseline_branch}]({url})")
-    }
 }
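Note: the client change here is a base-URL refactor: instead of every call site spelling out `/api/v1/...`, the prefix is appended once when the `Client` is built, and routes become relative (`"machine"`, `"invocation"`, ...). Illustrative joining logic only, not the real client:

    fn url_for(base_url: &str, route: &str) -> String {
        // The "/api/v1" segment lives in one place instead of at every call site.
        format!("{}/api/v1/{}", base_url.trim_end_matches('/'), route)
    }

    fn main() {
        assert_eq!(
            url_for("https://bench.meilisearch.dev/", "machine"),
            "https://bench.meilisearch.dev/api/v1/machine"
        );
    }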
@@ -6,7 +6,6 @@ mod env_info;
 mod meili_process;
 mod workload;
 
-use std::io::LineWriter;
 use std::path::PathBuf;
 
 use anyhow::Context;
@@ -91,7 +90,6 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
 
     let subscriber = tracing_subscriber::registry().with(
         tracing_subscriber::fmt::layer()
-            .with_writer(|| LineWriter::new(std::io::stderr()))
             .with_span_events(FmtSpan::NEW | FmtSpan::CLOSE)
             .with_filter(filter),
     );
@@ -112,7 +110,7 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
     let dashboard_client = if args.no_dashboard {
         dashboard::DashboardClient::new_dry()
     } else {
-        dashboard::DashboardClient::new(args.dashboard_url.clone(), args.api_key.as_deref())?
+        dashboard::DashboardClient::new(&args.dashboard_url, args.api_key.as_deref())?
     };
 
     // reporting uses its own client because keeping the stream open to wait for entries
@@ -138,7 +136,7 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
     let commit_message = build_info.commit_msg.context("missing commit message")?.split('\n').next().unwrap();
     let max_workloads = args.workload_file.len();
     let reason: Option<&str> = args.reason.as_deref();
-    let invocation_uuid = dashboard_client.create_invocation(build_info.clone(), commit_message, env, max_workloads, reason).await?;
+    let invocation_uuid = dashboard_client.create_invocation( build_info, commit_message, env, max_workloads, reason).await?;
 
     tracing::info!(workload_count = args.workload_file.len(), "handling workload files");
 
@@ -146,7 +144,6 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
     let workload_runs = tokio::spawn(
         {
             let dashboard_client = dashboard_client.clone();
-            let mut dashboard_urls = Vec::new();
             async move {
                 for workload_file in args.workload_file.iter() {
                     let workload: Workload = serde_json::from_reader(
@@ -155,8 +152,6 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
                     )
                     .with_context(|| format!("error parsing {} as JSON", workload_file.display()))?;
 
-                    let workload_name = workload.name.clone();
-
                     workload::execute(
                         &assets_client,
                         &dashboard_client,
@@ -168,23 +163,8 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
                         &args,
                     )
                     .await?;
-
-                    let result_url = dashboard_client.result_url(&workload_name, &build_info, "main");
-
-                    if !result_url.is_empty() {
-                        dashboard_urls.push(result_url);
-                    }
-
-                    if let Some(branch) = build_info.branch {
-                        let result_url = dashboard_client.result_url(&workload_name, &build_info, branch);
-
-                        if !result_url.is_empty() {
-                            dashboard_urls.push(result_url);
-                        }
-                    }
                 }
-                Ok::<_, anyhow::Error>(dashboard_urls)
+                Ok::<(), anyhow::Error>(())
             }});
 
     // handle ctrl-c
@@ -196,19 +176,13 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
 
     // wait for the end of the main task, handle result
     match workload_runs.await {
-        Ok(Ok(urls)) => {
+        Ok(Ok(_)) => {
             tracing::info!("Success");
             println!("☀️ Benchmark invocation completed, please find the results for your workloads below:");
-            for url in urls {
-                println!("- {url}");
-            }
             Ok::<(), anyhow::Error>(())
         }
         Ok(Err(error)) => {
             tracing::error!(%invocation_uuid, error = %error, "invocation failed, attempting to report the failure to dashboard");
             dashboard_client.mark_as_failed(invocation_uuid, Some(error.to_string())).await;
             println!("☔️ Benchmark invocation failed...");
             println!("{error}");
             tracing::warn!(%invocation_uuid, "invocation marked as failed following error");
             Err(error)
         },
@@ -217,20 +191,10 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
         Ok(panic) => {
             tracing::error!("invocation panicked, attempting to report the failure to dashboard");
             dashboard_client.mark_as_failed( invocation_uuid, Some("Panicked".into())).await;
             println!("‼️ Benchmark invocation panicked 😱");
-            let msg = match panic.downcast_ref::<&'static str>() {
-                Some(s) => *s,
-                None => match panic.downcast_ref::<String>() {
-                    Some(s) => &s[..],
-                    None => "Box<dyn Any>",
-                },
-            };
-            println!("panicked at {msg}");
             std::panic::resume_unwind(panic)
         }
         Err(_) => {
             tracing::warn!("task was canceled");
             println!("🚫 Benchmark invocation was canceled");
             Ok(())
         }
     }
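Note: the deleted `msg` block above is the standard way to pretty-print a panic payload, which `tokio::spawn` hands back as `Box<dyn Any + Send>`. The same logic as a standalone, runnable helper (std only, nothing meilisearch-specific):

    use std::panic;

    // A panic payload is usually a &'static str or a String; anything
    // else falls back to the opaque "Box<dyn Any>" label.
    fn panic_message(payload: &(dyn std::any::Any + Send)) -> &str {
        match payload.downcast_ref::<&'static str>() {
            Some(s) => *s,
            None => match payload.downcast_ref::<String>() {
                Some(s) => &s[..],
                None => "Box<dyn Any>",
            },
        }
    }

    fn main() {
        let payload = panic::catch_unwind(|| panic!("boom")).unwrap_err();
        assert_eq!(panic_message(payload.as_ref()), "boom");
    }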
@@ -22,8 +22,6 @@ pub struct Workload {
     pub run_count: u16,
     pub extra_cli_args: Vec<String>,
     pub assets: BTreeMap<String, Asset>,
-    #[serde(default)]
-    pub precommands: Vec<super::command::Command>,
     pub commands: Vec<super::command::Command>,
 }
 
@@ -39,15 +37,6 @@ async fn run_commands(
     let report_folder = &args.report_folder;
     let workload_name = &workload.name;
 
-    for batch in workload
-        .precommands
-        .as_slice()
-        .split_inclusive(|command| !matches!(command.synchronous, SyncMode::DontWait))
-    {
-        super::command::run_batch(meili_client, batch, &workload.assets, &args.asset_folder)
-            .await?;
-    }
-
     std::fs::create_dir_all(report_folder)
         .with_context(|| format!("could not create report directory at {report_folder}"))?;
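Note: the removed loop batches precommands with `slice::split_inclusive`: each batch extends up to and including the first command that is not `DontWait`, so fire-and-forget commands are grouped with the next blocking one. The batching rule in isolation (minimal enum standing in for the real `SyncMode`):

    enum SyncMode {
        DontWait,
        WaitForTask,
    }

    fn main() {
        let commands = [
            SyncMode::DontWait,
            SyncMode::DontWait,
            SyncMode::WaitForTask,
            SyncMode::DontWait,
        ];
        let batches: Vec<&[SyncMode]> = commands
            .split_inclusive(|c| !matches!(c, SyncMode::DontWait))
            .collect();
        // Two batches: the first ends at the WaitForTask command,
        // the trailing DontWait forms its own (final) batch.
        assert_eq!(batches.len(), 2);
        assert_eq!(batches[0].len(), 3);
        assert_eq!(batches[1].len(), 1);
    }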