Fix coordinates order in filters

Author: Mubelotix
Date: 2025-08-19 14:24:38 +02:00
parent 5df7c7756d
commit bfb6cdc16b
2 changed files with 48 additions and 44 deletions
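
Context for the change, sketched rather than quoted from the filter implementation: Meilisearch geo filters such as _geoBoundingBox and _geoPolygon take each point as [lat, lng], while geo_types::Coord stores x as longitude and y as latitude. The old code forwarded the array indices unchanged, so latitude and longitude ended up swapped in the polygon handed to cellulite; the diff below swaps the indices and updates the tests to pass their points in [lat, lng] order. The helper names in the following sketch are illustrative, not part of the codebase.

    // Illustrative sketch of the convention the commit enforces: a filter point
    // arrives as [lat, lng], while geo_types::Coord expects x = longitude and
    // y = latitude, so the indices have to be swapped.
    fn corner_to_coord(point: [f64; 2]) -> geo_types::Coord<f64> {
        let [lat, lng] = point;
        geo_types::Coord { x: lng, y: lat }
    }

    // Builds the bounding-box ring the same way the fixed code does:
    // top-right, top-left, bottom-left, bottom-right.
    fn bounding_box_polygon(top_right: [f64; 2], bottom_left: [f64; 2]) -> geo_types::Polygon<f64> {
        geo_types::Polygon::new(
            geo_types::LineString(vec![
                corner_to_coord(top_right),
                corner_to_coord([top_right[0], bottom_left[1]]),
                corner_to_coord(bottom_left),
                corner_to_coord([bottom_left[0], top_right[1]]),
            ]),
            Vec::new(),
        )
    }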

View File

@@ -14,7 +14,7 @@ async fn basic_add_settings_and_geojson_documents() {
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();
-let (response, _) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
@@ -92,7 +92,7 @@ async fn basic_add_settings_and_geojson_documents() {
}
"#);
-let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
@@ -160,11 +160,11 @@ async fn basic_add_geojson_documents_and_settings() {
}
"#);
-let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
"message": "Index `[uuid]`: Attribute `_geojson` is not filterable. This index does not have configured filterable attributes.\n14:15 _geoPolygon([0,0],[2,0],[2,2],[0,2])",
"message": "Index `[uuid]`: Attribute `_geojson` is not filterable. This index does not have configured filterable attributes.\n14:15 _geoPolygon([0,0],[0,2],[2,2],[2,0])",
"code": "invalid_search_filter",
"type": "invalid_request",
"link": "https://docs.meilisearch.com/errors#invalid_search_filter"
@@ -174,7 +174,7 @@ async fn basic_add_geojson_documents_and_settings() {
let (task, _status_code) =
index.update_settings(json!({"filterableAttributes": ["_geojson"]})).await;
server.wait_task(task.uid()).await.succeeded();
-let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
snapshot!(response,
@r#"
{
@@ -217,17 +217,17 @@ async fn add_and_remove_geojson() {
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0.9,0],[0.9,0.9],[0,0.9])").await;
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
-let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let (task, _) = index.delete_document(0).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0.9,0],[0.9,0.9],[0,0.9])").await;
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
-let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
// add it back
@@ -240,9 +240,9 @@ async fn add_and_remove_geojson() {
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0.9,0],[0.9,0.9],[0,0.9])").await;
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
-let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
}
@@ -262,9 +262,9 @@ async fn partial_update_geojson() {
let (task, _status_code) = index.add_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0.9,0],[0.9,0.9],[0,0.9])").await;
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
-let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let documents = json!([
@@ -276,12 +276,12 @@ async fn partial_update_geojson() {
let (task, _status_code) = index.update_documents(documents, None).await;
server.wait_task(task.uid()).await.succeeded();
let (response, _code) =
index.search_get("?filter=_geoPolygon([0,0],[0.9,0],[0.9,0.9],[0,0.9])").await;
index.search_get("?filter=_geoPolygon([0,0],[0,0.9],[0.9,0.9],[0.9,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
-let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[2,0],[2,2],[0,2])").await;
+let (response, _code) = index.search_get("?filter=_geoPolygon([0,0],[0,2],[2,2],[2,0])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 1);
let (response, _code) =
index.search_get("?filter=_geoPolygon([0.9,0.9],[2,0.9],[2,2],[0.9,2])").await;
index.search_get("?filter=_geoPolygon([0.9,0.9],[0.9,2],[2,2],[2,0.9])").await;
assert_eq!(response.get("hits").unwrap().as_array().unwrap().len(), 0);
}
@@ -291,7 +291,7 @@ async fn geo_bounding_box() {
// The bounding box is a polygon over middle Europe
let (response, code) =
index.search_get("?filter=_geoBoundingBox([21.43443989912143,50.53987503447863],[0.54979129195425,43.76393151539099])&attributesToRetrieve=name").await;
index.search_get("?filter=_geoBoundingBox([50.53987503447863,21.43443989912143],[43.76393151539099,0.54979129195425])&attributesToRetrieve=name").await;
snapshot!(code, @"200 OK");
snapshot!(response, @r#"
{
@@ -357,7 +357,6 @@ async fn geo_bounding_box() {
"#);
// Between Russia and Alaska
-// WARNING: This test doesn't pass, the countries are those I imagine being found but maybe there is also Canada or something
let (response, code) = index
.search_get("?filter=_geoBoundingBox([70,-148],[63,152])&attributesToRetrieve=name")
.await;
@@ -365,6 +364,9 @@
snapshot!(response, @r#"
{
"hits": [
+{
+"name": "Canada"
+},
{
"name": "Russia"
},
@@ -376,7 +378,7 @@
"processingTimeMs": "[duration]",
"limit": 20,
"offset": 0,
"estimatedTotalHits": 2
"estimatedTotalHits": 3
}
"#);
}
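
On the test side, every _geoPolygon and _geoBoundingBox filter now lists its points as [lat, lng]. For the polygon tests this also reverses the order in which the square's corners are visited, which keeps the ring counter-clockwise once each pair is mapped to (x = lng, y = lat). With the corrected order the Russia/Alaska bounding box also matches Canada, hence the updated snapshot above. A hypothetical helper that builds filter strings in this order could look like the sketch below; the real tests write the strings by hand.

    // Hypothetical helper, not part of the test suite: formats a _geoPolygon
    // filter from [lat, lng] points in the order the updated tests use.
    fn geo_polygon_filter(points: &[[f64; 2]]) -> String {
        let body = points
            .iter()
            .map(|p| format!("[{},{}]", p[0], p[1])) // p[0] = lat, p[1] = lng
            .collect::<Vec<_>>()
            .join(",");
        format!("_geoPolygon({body})")
    }

    // geo_polygon_filter(&[[0.0, 0.0], [0.0, 2.0], [2.0, 2.0], [2.0, 0.0]])
    // returns "_geoPolygon([0,0],[0,2],[2,2],[2,0])", the filter used above.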

View File

@@ -826,10 +826,10 @@ impl<'a> Filter<'a> {
if index.is_geojson_filtering_enabled(rtxn)? {
let polygon = geo_types::Polygon::new(
geo_types::LineString(vec![
-geo_types::Coord { x: top_right[0], y: top_right[1] },
-geo_types::Coord { x: bottom_left[0], y: top_right[1] },
-geo_types::Coord { x: bottom_left[0], y: bottom_left[1] },
-geo_types::Coord { x: top_right[0], y: bottom_left[1] },
+geo_types::Coord { x: top_right[1], y: top_right[0] },
+geo_types::Coord { x: bottom_left[1], y: top_right[0] },
+geo_types::Coord { x: bottom_left[1], y: bottom_left[0] },
+geo_types::Coord { x: top_right[1], y: bottom_left[0] },
]),
Vec::new(),
);
@@ -848,7 +848,9 @@ impl<'a> Filter<'a> {
(None, Some(r2)) => Ok(r2),
(None, None) => Err(top_right_point[0].as_external_error(
FilterError::AttributeNotFilterable {
-attribute: RESERVED_GEO_FIELD_NAME,
+attribute: &format!(
+"{RESERVED_GEO_FIELD_NAME}/{RESERVED_GEOJSON_FIELD_NAME}"
+),
filterable_patterns: filtered_matching_patterns(
filterable_attribute_rules,
&|features| features.is_filterable(),
@@ -870,20 +872,20 @@ impl<'a> Filter<'a> {
))?;
}
-let polygon = geo_types::Polygon::new(
-geo_types::LineString(
-points
-.iter()
-.map(|p| {
-Ok(geo_types::Coord {
-x: p[0].parse_finite_float()?,
-y: p[1].parse_finite_float()?,
-})
-})
-.collect::<Result<_, filter_parser::Error>>()?,
-),
-Vec::new(),
-);
+let mut coords = Vec::new();
+for [lat_token, lng_token] in points {
+let lat = lat_token.parse_finite_float()?;
+let lng = lng_token.parse_finite_float()?;
+if !(-90.0..=90.0).contains(&lat) {
+return Err(lat_token.as_external_error(BadGeoError::Lat(lat)))?;
+}
+if !(-180.0..=180.0).contains(&lng) {
+return Err(lng_token.as_external_error(BadGeoError::Lng(lng)))?;
+}
+coords.push(geo_types::Coord { x: lng, y: lat });
+}
+let polygon = geo_types::Polygon::new(geo_types::LineString(coords), Vec::new());
let result = index
.cellulite
.in_shape(rtxn, &polygon, &mut |_| ())
@@ -1038,10 +1040,10 @@ mod tests {
let filter = Filter::from_str("_geoBoundingBox([42, 150], [30, 10])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
-snapshot!(error.to_string(), @r###"
-Attribute `_geo` is not filterable. This index does not have configured filterable attributes.
+snapshot!(error.to_string(), @r"
+Attribute `_geo/_geojson` is not filterable. This index does not have configured filterable attributes.
18:20 _geoBoundingBox([42, 150], [30, 10])
-"###);
+");
let filter = Filter::from_str("dog = \"bernese mountain\"").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
@@ -1071,10 +1073,10 @@ mod tests {
let filter = Filter::from_str("_geoBoundingBox([42, 150], [30, 10])").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
-snapshot!(error.to_string(), @r###"
-Attribute `_geo` is not filterable. Available filterable attribute patterns are: `title`.
+snapshot!(error.to_string(), @r"
+Attribute `_geo/_geojson` is not filterable. Available filterable attribute patterns are: `title`.
18:20 _geoBoundingBox([42, 150], [30, 10])
-"###);
+");
let filter = Filter::from_str("name = 12").unwrap().unwrap();
let error = filter.evaluate(&rtxn, &index).unwrap_err();
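
Beyond the axis swap, the rewritten _geoPolygon branch above validates each parsed coordinate before building the polygon, rejecting latitudes outside [-90, 90] and longitudes outside [-180, 180] via BadGeoError::Lat and BadGeoError::Lng. A standalone sketch of the same range checks, using an illustrative error type in place of the real BadGeoError:

    // Sketch only: GeoRangeError stands in for the real error type.
    #[derive(Debug)]
    enum GeoRangeError {
        Lat(f64),
        Lng(f64),
    }

    fn validate_point(lat: f64, lng: f64) -> Result<(), GeoRangeError> {
        if !(-90.0..=90.0).contains(&lat) {
            return Err(GeoRangeError::Lat(lat));
        }
        if !(-180.0..=180.0).contains(&lng) {
            return Err(GeoRangeError::Lng(lng));
        }
        Ok(())
    }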