Mirror of https://github.com/meilisearch/meilisearch.git, synced 2025-12-23 12:57:17 +00:00

Compare commits: v1.12.8 ... k-kumar-01 (11 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 36b897858a | |
| | fce132a21b | |
| | 71834787ec | |
| | b004db37c7 | |
| | 0c04cd1d9f | |
| | 34254b42b6 | |
| | c0aa018c87 | |
| | b21d7aedf9 | |
| | 2f1a9105b9 | |
| | 27bb591331 | |
| | 94a1f5a8ea | |
.github/workflows/bench-pr.yml (vendored): 2 lines changed
@@ -55,7 +55,7 @@ jobs:
           reaction-type: "rocket"
           repo-token: ${{ env.GH_TOKEN }}
 
-      - uses: xt0rted/pull-request-comment-branch@v2
+      - uses: xt0rted/pull-request-comment-branch@v3
         id: comment-branch
         with:
           repo_token: ${{ env.GH_TOKEN }}
.github/workflows/benchmarks-pr.yml (vendored): 2 lines changed
@@ -56,7 +56,7 @@ jobs:
           reaction-type: "eyes"
           repo-token: ${{ env.GH_TOKEN }}
 
-      - uses: xt0rted/pull-request-comment-branch@v2
+      - uses: xt0rted/pull-request-comment-branch@v3
        id: comment-branch
        with:
          repo_token: ${{ env.GH_TOKEN }}
@@ -48,6 +48,27 @@ cargo xtask bench --no-dashboard -- workloads/my_workload_1.json workloads/my_wo
 
 For processing the results, look at [Looking at benchmark results/Without dashboard](#without-dashboard).
 
+#### Sending a workload by hand
+
+Sometimes you want to visualize the metrics of a workload that comes from a custom report.
+It is not quite easy to trick the benchboard into thinking that your report is legitimate, but here are the commands you can run to upload your Firefox report to a running benchboard.
+
+```bash
+# Name this hostname whatever you want
+echo '{ "hostname": "the-best-place" }' | xh PUT 'http://127.0.0.1:9001/api/v1/machine'
+
+# You'll receive a UUID from this command that we will call $invocation_uuid
+echo '{ "commit": { "sha1": "1234567", "commit_date": "2024-09-05 12:00:12.0 +00:00:00", "message": "A cool message" }, "machine_hostname": "the-best-place", "max_workloads": 1 }' | xh PUT 'http://127.0.0.1:9001/api/v1/invocation'
+
+# Just use the UUID from the previous command
+# and you'll receive another UUID that we will call $workload_uuid
+echo '{ "invocation_uuid": "$invocation_uuid", "name": "toto", "max_runs": 1 }' | xh PUT 'http://127.0.0.1:9001/api/v1/workload'
+
+# And now use your $workload_uuid and the content of your Firefox report,
+# but don't forget to convert your Firefox report from JSONLines into an object
+echo '{ "workload_uuid": "$workload_uuid", "data": $REPORT_JSON_DATA }' | xh PUT 'http://127.0.0.1:9001/api/v1/run'
+```
+
 ### In CI
 
 We have dedicated runners to run workloads on CI. Currently, there are three ways of running the CI:
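The last command above requires converting the JSONLines report into a single JSON value before embedding it in the `data` field. Purely as an illustration, here is a hypothetical Rust helper (not part of this change or the repository) that reads a report path and a `$workload_uuid` from the command line and prints a payload that can be piped into `xh PUT 'http://127.0.0.1:9001/api/v1/run'`. The docs say the report must become "an object"; the exact shape the benchboard expects is not shown here, so collecting the lines into an array is only an assumption.

```rust
// Hypothetical sketch: turn a JSONLines report into a /api/v1/run payload.
// Assumptions: the report path and workload UUID come from argv, and the
// report's lines can simply be gathered into one JSON array for `data`.
use std::{env, fs};

use serde_json::{json, Value};

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let mut args = env::args().skip(1);
    let workload_uuid = args.next().expect("usage: <workload_uuid> <report.jsonl>");
    let report_path = args.next().expect("usage: <workload_uuid> <report.jsonl>");

    // Parse every non-empty JSON line of the report.
    let spans: Vec<Value> = fs::read_to_string(&report_path)?
        .lines()
        .filter(|line| !line.trim().is_empty())
        .map(|line| serde_json::from_str::<Value>(line))
        .collect::<Result<_, _>>()?;

    // Print a body that can be piped into:
    //   xh PUT 'http://127.0.0.1:9001/api/v1/run'
    let body = json!({ "workload_uuid": workload_uuid, "data": spans });
    println!("{body}");
    Ok(())
}
```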
@@ -29,6 +29,8 @@ pub mod documents;
 pub mod facet_search;
 pub mod search;
 mod search_analytics;
+#[cfg(test)]
+mod search_test;
 pub mod settings;
 mod settings_analytics;
 pub mod similar;
@@ -198,7 +198,7 @@ impl TryFrom<SearchQueryGet> for SearchQuery {
 // TODO: TAMO: split on :asc, and :desc, instead of doing some weird things
 
 /// Transform the sort query parameter into something that matches the post expected format.
-fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
+pub fn fix_sort_query_parameters(sort_query: &str) -> Vec<String> {
     let mut sort_parameters = Vec::new();
     let mut merge = false;
     for current_sort in sort_query.trim_matches('"').split(',').map(|s| s.trim()) {
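For readers skimming the tests that are moved in the next hunk, here is a minimal standalone sketch of the merging behaviour that the doc comment describes: comma-split pieces belonging to a `_geoPoint(...)` expression are glued back together so the coordinates survive the split on `,`. This is an illustration written against the test expectations, not the code from `search.rs`; the function name and stopping condition are assumptions.

```rust
// Sketch only: re-joins the pieces of a `_geoPoint(lat, lng)` sort expression
// that were separated by splitting the whole sort query on ','.
fn fix_sort_query_parameters_sketch(sort_query: &str) -> Vec<String> {
    let mut sort_parameters = Vec::new();
    let mut merge = false;
    for current_sort in sort_query.trim_matches('"').split(',').map(|s| s.trim()) {
        if current_sort.starts_with("_geoPoint(") {
            // Start of a geo expression: keep gluing the following pieces onto it.
            sort_parameters.push(current_sort.to_string());
            merge = true;
        } else if merge && !sort_parameters.is_empty() {
            let last = sort_parameters.last_mut().unwrap();
            last.push(',');
            last.push_str(current_sort);
            // Assumed stopping condition: the ordering suffix closes the expression.
            if current_sort.ends_with("):asc") || current_sort.ends_with("):desc") {
                merge = false;
            }
        } else {
            sort_parameters.push(current_sort.to_string());
        }
    }
    sort_parameters
}

fn main() {
    // Mirrors one of the cases exercised by the moved tests.
    assert_eq!(
        fix_sort_query_parameters_sketch("doggo:asc , _geoPoint(12.45, 13.56):desc"),
        vec!["doggo:asc".to_string(), "_geoPoint(12.45,13.56):desc".to_string()]
    );
    println!("ok");
}
```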
@@ -356,30 +356,3 @@ pub fn search_kind(
         (_, None, Some(_)) => Err(MeilisearchHttpError::MissingSearchHybrid.into()),
     }
 }
-
-#[cfg(test)]
-mod test {
-    use super::*;
-
-    #[test]
-    fn test_fix_sort_query_parameters() {
-        let sort = fix_sort_query_parameters("_geoPoint(12, 13):asc");
-        assert_eq!(sort, vec!["_geoPoint(12,13):asc".to_string()]);
-        let sort = fix_sort_query_parameters("doggo:asc,_geoPoint(12.45,13.56):desc");
-        assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(12.45,13.56):desc".to_string(),]);
-        let sort = fix_sort_query_parameters(
-            "doggo:asc , _geoPoint(12.45, 13.56, 2590352):desc , catto:desc",
-        );
-        assert_eq!(
-            sort,
-            vec![
-                "doggo:asc".to_string(),
-                "_geoPoint(12.45,13.56,2590352):desc".to_string(),
-                "catto:desc".to_string(),
-            ]
-        );
-        let sort = fix_sort_query_parameters("doggo:asc , _geoPoint(1, 2), catto:desc");
-        // This is ugly but eh, I don't want to write a full parser just for this unused route
-        assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(1,2),catto:desc".to_string(),]);
-    }
-}
crates/meilisearch/src/routes/indexes/search_test.rs (new file): 22 lines
@@ -0,0 +1,22 @@
+use crate::routes::indexes::search::fix_sort_query_parameters;
+
+#[test]
+fn test_fix_sort_query_parameters() {
+    let sort = fix_sort_query_parameters("_geoPoint(12, 13):asc");
+    assert_eq!(sort, vec!["_geoPoint(12,13):asc".to_string()]);
+    let sort = fix_sort_query_parameters("doggo:asc,_geoPoint(12.45,13.56):desc");
+    assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(12.45,13.56):desc".to_string(),]);
+    let sort =
+        fix_sort_query_parameters("doggo:asc , _geoPoint(12.45, 13.56, 2590352):desc , catto:desc");
+    assert_eq!(
+        sort,
+        vec![
+            "doggo:asc".to_string(),
+            "_geoPoint(12.45,13.56,2590352):desc".to_string(),
+            "catto:desc".to_string(),
+        ]
+    );
+    let sort = fix_sort_query_parameters("doggo:asc , _geoPoint(1, 2), catto:desc");
+    // This is ugly but eh, I don't want to write a full parser just for this unused route
+    assert_eq!(sort, vec!["doggo:asc".to_string(), "_geoPoint(1,2),catto:desc".to_string(),]);
+}
@@ -30,6 +30,8 @@ use milli::{
 use regex::Regex;
 use serde::Serialize;
 use serde_json::{json, Value};
+#[cfg(test)]
+mod mod_test;
 
 use crate::error::MeilisearchHttpError;
 
@@ -1557,7 +1559,7 @@ pub fn perform_similar(
     Ok(result)
 }
 
-fn insert_geo_distance(sorts: &[String], document: &mut Document) {
+pub fn insert_geo_distance(sorts: &[String], document: &mut Document) {
     lazy_static::lazy_static! {
         static ref GEO_REGEX: Regex =
             Regex::new(r"_geoPoint\(\s*([[:digit:].\-]+)\s*,\s*([[:digit:].\-]+)\s*\)").unwrap();
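As a rough companion to the tests in the new `mod_test.rs` below, here is a hedged sketch of what a function with this signature could do: pick the first `_geoPoint(lat, lng)` sorter using the regex shown above, read `_geo.lat`/`_geo.lng` from the document (accepting numbers or numeric strings, as the tests do), and insert a rounded `_geoDistance` field. The `Document` alias, the `coord` and `haversine` helpers, the rounding, and the function name are assumptions for illustration; this is not the implementation in `search/mod.rs`.

```rust
// Sketch only; assumes a serde_json map for Document and a haversine distance.
use regex::Regex;
use serde_json::{json, Map, Value};

type Document = Map<String, Value>;

/// Accepts a coordinate given either as a number or as a numeric string.
fn coord(value: &Value) -> Option<f64> {
    match value {
        Value::Number(n) => n.as_f64(),
        Value::String(s) => s.parse().ok(),
        _ => None,
    }
}

/// Great-circle distance in meters between two (lat, lng) points.
fn haversine([lat1, lng1]: [f64; 2], [lat2, lng2]: [f64; 2]) -> f64 {
    let (lat1, lng1, lat2, lng2) =
        (lat1.to_radians(), lng1.to_radians(), lat2.to_radians(), lng2.to_radians());
    let a = ((lat2 - lat1) / 2.0).sin().powi(2)
        + lat1.cos() * lat2.cos() * ((lng2 - lng1) / 2.0).sin().powi(2);
    6_371_000.0 * 2.0 * a.sqrt().asin()
}

fn insert_geo_distance_sketch(sorts: &[String], document: &mut Document) {
    let geo_regex =
        Regex::new(r"_geoPoint\(\s*([[:digit:].\-]+)\s*,\s*([[:digit:].\-]+)\s*\)").unwrap();
    // Only the first `_geoPoint(lat, lng)` sorter is considered.
    let Some(captures) = sorts.iter().find_map(|s| geo_regex.captures(s)) else { return };
    let target: [f64; 2] = [captures[1].parse().unwrap(), captures[2].parse().unwrap()];
    let Some(geo) = document.get("_geo") else { return };
    let (Some(lat), Some(lng)) = (geo.get("lat").and_then(coord), geo.get("lng").and_then(coord))
    else {
        return;
    };
    document.insert("_geoDistance".to_string(), json!(haversine([lat, lng], target).round() as u64));
}

fn main() {
    // Mirrors the "coords as string" case from the tests below.
    let mut document: Document =
        serde_json::from_str(r#"{ "_geo": { "lat": "50", "lng": 3 } }"#).unwrap();
    insert_geo_distance_sketch(&["_geoPoint(50, 3):desc".to_string()], &mut document);
    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
    println!("ok");
}
```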
crates/meilisearch/src/search/mod_test.rs (new file): 114 lines
@@ -0,0 +1,114 @@
+use meilisearch_types::Document;
+use serde_json::json;
+
+use crate::search::insert_geo_distance;
+
+#[test]
+fn test_insert_geo_distance() {
+    let value: Document = serde_json::from_str(
+        r#"{
+          "_geo": {
+            "lat": 50.629973371633746,
+            "lng": 3.0569447399419567
+          },
+          "city": "Lille",
+          "id": "1"
+        }"#,
+    )
+    .unwrap();
+
+    let sorters = &["_geoPoint(50.629973371633746,3.0569447399419567):desc".to_string()];
+    let mut document = value.clone();
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+    let sorters = &["_geoPoint(50.629973371633746, 3.0569447399419567):asc".to_string()];
+    let mut document = value.clone();
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+    let sorters = &["_geoPoint( 50.629973371633746 , 3.0569447399419567 ):desc".to_string()];
+    let mut document = value.clone();
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+    let sorters = &[
+        "prix:asc",
+        "villeneuve:desc",
+        "_geoPoint(50.629973371633746, 3.0569447399419567):asc",
+        "ubu:asc",
+    ]
+    .map(|s| s.to_string());
+    let mut document = value.clone();
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+    // only the first geoPoint is used to compute the distance
+    let sorters = &[
+        "chien:desc",
+        "_geoPoint(50.629973371633746, 3.0569447399419567):asc",
+        "pangolin:desc",
+        "_geoPoint(100.0, -80.0):asc",
+        "chat:asc",
+    ]
+    .map(|s| s.to_string());
+    let mut document = value.clone();
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+    // there was no _geoPoint so nothing is inserted in the document
+    let sorters = &["chien:asc".to_string()];
+    let mut document = value;
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), None);
+}
+
+#[test]
+fn test_insert_geo_distance_with_coords_as_string() {
+    let value: Document = serde_json::from_str(
+        r#"{
+          "_geo": {
+            "lat": "50",
+            "lng": 3
+          }
+        }"#,
+    )
+    .unwrap();
+
+    let sorters = &["_geoPoint(50,3):desc".to_string()];
+    let mut document = value.clone();
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+    let value: Document = serde_json::from_str(
+        r#"{
+          "_geo": {
+            "lat": "50",
+            "lng": "3"
+          },
+          "id": "1"
+        }"#,
+    )
+    .unwrap();
+
+    let sorters = &["_geoPoint(50,3):desc".to_string()];
+    let mut document = value.clone();
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+
+    let value: Document = serde_json::from_str(
+        r#"{
+          "_geo": {
+            "lat": 50,
+            "lng": "3"
+          },
+          "id": "1"
+        }"#,
+    )
+    .unwrap();
+
+    let sorters = &["_geoPoint(50,3):desc".to_string()];
+    let mut document = value.clone();
+    insert_geo_distance(sorters, &mut document);
+    assert_eq!(document.get("_geoDistance"), Some(&json!(0)));
+}