Mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-12-05 20:25:42 +00:00)

Compare commits (6 commits): prototype-… ... prototype-…
| Author | SHA1 | Date |
|---|---|---|
|  | d1dc0f5998 |  |
|  | 4dae440405 |  |
|  | 82f05964bb |  |
|  | 6106adddda |  |
|  | 98b3774720 |  |
|  | 791007aa11 |  |
`.github/uffizzi/Dockerfile` (vendored): 3 changed lines
@@ -7,8 +7,7 @@ WORKDIR /meilisearch
 
 ARG COMMIT_SHA
 ARG COMMIT_DATE
-ARG GIT_TAG
-ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
+ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE}
 ENV RUSTFLAGS="-C target-feature=-crt-static"
 
 COPY . .
`.github/workflows/publish-docker-images.yml` (vendored): 1 changed line
@@ -92,7 +92,6 @@ jobs:
           build-args: |
             COMMIT_SHA=${{ github.sha }}
             COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
-            GIT_TAG=$(printf "%q" ${{ github.ref_name }})
 
       # /!\ Don't touch this without checking with Cloud team
       - name: Send CI information to Cloud team
@@ -121,13 +121,14 @@ The full Meilisearch release process is described in [this guide](https://github
 Depending on the developed feature, you might need to provide a prototyped version of Meilisearch to make it easier to test by the users.
 
 The prototype name must follow this convention: `prototype-X-Y` where
-- `X` is the feature name formatted in `kebab-case`. It should not end with a single number.
+- `X` is the feature name formatted in `kebab-case`. It should not end with a number segment.
 - `Y` is the version of the prototype, starting from `0`.
 
-✅ Example: `prototype-auto-resize-0`. </br>
-❌ Bad example: `auto-resize-0`: lacks the `prototype` prefix. </br>
-❌ Bad example: `prototype-auto-resize`: lacks the version suffix. </br>
-❌ Bad example: `prototype-auto-resize-0-0`: feature name ends with a single number.
+✅ Example: `prototype-auto-resize-0`.
+
+❌ Bad example: `auto-resize-0`: lacks the `prototype` prefix.
+❌ Bad example: `prototype-auto-resize`: lacks the version suffix.
+❌ Bad example: `prototype-auto-resize-0-0`: feature name ends with a number segment.
 
 Steps to create a prototype:
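As a side note on the convention above, here is a minimal, self-contained sketch of the naming rule in Rust. The helper `is_valid_prototype_name` is hypothetical and not part of the repository; only the rule it encodes comes from the CONTRIBUTING.md text in this hunk.

```rust
/// Hypothetical helper illustrating the `prototype-X-Y` convention described above.
fn is_valid_prototype_name(name: &str) -> bool {
    let Some(rest) = name.strip_prefix("prototype-") else {
        return false; // lacks the `prototype` prefix, e.g. `auto-resize-0`
    };
    let mut parts = rest.rsplitn(2, '-');
    let version = parts.next().unwrap_or("");
    let feature = parts.next().unwrap_or("");
    // `Y` must be a number, e.g. the `0` in `prototype-auto-resize-0`
    let version_ok = !version.is_empty() && version.chars().all(|c| c.is_ascii_digit());
    // `X` must be non-empty and must not end with a number segment
    // (this rules out `prototype-auto-resize-0-0`)
    let last_segment = feature.rsplit('-').next().unwrap_or("");
    let feature_ok = !feature.is_empty() && !last_segment.chars().all(|c| c.is_ascii_digit());
    version_ok && feature_ok
}

fn main() {
    assert!(is_valid_prototype_name("prototype-auto-resize-0"));
    assert!(!is_valid_prototype_name("auto-resize-0"));
    assert!(!is_valid_prototype_name("prototype-auto-resize"));
    assert!(!is_valid_prototype_name("prototype-auto-resize-0-0"));
}
```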
@@ -7,8 +7,7 @@ WORKDIR /meilisearch
 
 ARG COMMIT_SHA
 ARG COMMIT_DATE
-ARG GIT_TAG
-ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
+ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE}
 ENV RUSTFLAGS="-C target-feature=-crt-static"
 
 COPY . .
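Both Dockerfile variants only make the commit metadata available as environment variables at build time. The sketch below shows how a binary could read such compile-time variables, assuming they are visible to rustc during the build; it is illustrative only and not code from this diff.

```rust
/// Illustrative only: reading build metadata exported as environment variables
/// at compile time. `option_env!` yields `None` when the variable was not set,
/// e.g. for a local build outside the Dockerfile.
pub fn build_info() -> (&'static str, &'static str) {
    let sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
    let timestamp = option_env!("VERGEN_GIT_COMMIT_TIMESTAMP").unwrap_or("unknown");
    (sha, timestamp)
}

fn main() {
    let (sha, timestamp) = build_info();
    println!("commit: {sha}, commit date: {timestamp}");
}
```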
@@ -1,9 +1,6 @@
 use vergen::{vergen, Config, SemverKind};
 
 fn main() {
-    // Note: any code that needs VERGEN_ environment variables should take care to define them manually in the Dockerfile and pass them
-    // in the corresponding GitHub workflow (publish_docker.yml).
-    // This is due to the Dockerfile building the binary outside of the git directory.
     let mut config = Config::default();
     // allow using non-annotated tags
     *config.git_mut().semver_kind_mut() = SemverKind::Lightweight;
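The hunk shows only the top of the build script. A minimal sketch of what a complete vergen 7-style `build.rs` along these lines could look like; the `vergen(config)` call and the warning on failure are assumptions, not part of the diff.

```rust
// Sketch only: the lines shown in the hunk plus an assumed vergen 7.x invocation.
use vergen::{vergen, Config, SemverKind};

fn main() {
    let mut config = Config::default();
    // allow using non-annotated tags
    *config.git_mut().semver_kind_mut() = SemverKind::Lightweight;

    // Assumed: emit the VERGEN_* variables as `cargo:rustc-env` instructions and
    // warn instead of failing when git metadata is unavailable (e.g. in Docker).
    if let Err(err) = vergen(config) {
        println!("cargo:warning=vergen failed: {err}");
    }
}
```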
@@ -755,8 +755,7 @@ impl MultiSearchAggregator {
 
         let user_agents = extract_user_agents(request).into_iter().collect();
 
-        let distinct_indexes: HashSet<_> =
-            query.iter().map(|query| query.index_uid.as_str()).collect();
+        let distinct_indexes: HashSet<_> = query.iter().map(|query| &query.index_uid).collect();
 
         Self {
             timestamp,
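A small, self-contained illustration of the one-line change above; the `QueryWithIndex` type and `distinct_index_count` function are made up for the example. Once `index_uid` is a plain `String`, references can be collected directly instead of going through `.as_str()`.

```rust
use std::collections::HashSet;

// Stand-in for SearchQueryWithIndex; only the field relevant to the hunk.
struct QueryWithIndex {
    index_uid: String,
}

fn distinct_index_count(query: &[QueryWithIndex]) -> usize {
    // Same shape as the added line: no `.as_str()` needed once the field is a String.
    let distinct_indexes: HashSet<_> = query.iter().map(|query| &query.index_uid).collect();
    distinct_indexes.len()
}

fn main() {
    let queries = vec![
        QueryWithIndex { index_uid: "test".into() },
        QueryWithIndex { index_uid: "nested".into() },
        QueryWithIndex { index_uid: "test".into() },
    ];
    assert_eq!(distinct_index_count(&queries), 2);
}
```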
@@ -5,7 +5,6 @@ use log::debug;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::keys::actions;
-use serde::Serialize;
 
 use crate::analytics::{Analytics, MultiSearchAggregator};
 use crate::extractors::authentication::policies::ActionPolicy;
@@ -20,24 +19,13 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
     cfg.service(web::resource("").route(web::post().to(SeqHandler(search_with_post))));
 }
 
-#[derive(Serialize)]
-struct SearchResults {
-    results: Vec<SearchResultWithIndex>,
-}
-
-#[derive(Debug, deserr::DeserializeFromValue)]
-#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
-pub struct SearchQueries {
-    queries: Vec<SearchQueryWithIndex>,
-}
-
 pub async fn search_with_post(
     index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
-    params: ValidatedJson<SearchQueries, DeserrJsonError>,
+    params: ValidatedJson<Vec<SearchQueryWithIndex>, DeserrJsonError>,
     req: HttpRequest,
     analytics: web::Data<dyn Analytics>,
 ) -> Result<HttpResponse, ResponseError> {
-    let queries = params.into_inner().queries;
+    let queries = params.into_inner();
 
     let mut multi_aggregate = MultiSearchAggregator::from_queries(&queries, &req);
 
@@ -60,10 +48,7 @@ pub async fn search_with_post(
         let search_result =
             tokio::task::spawn_blocking(move || perform_search(&index, query)).await?;
 
-        search_results.push(SearchResultWithIndex {
-            index_uid: index_uid.into_inner(),
-            result: search_result?,
-        });
+        search_results.push(SearchResultWithIndex { index_uid, result: search_result? });
     }
     Ok(search_results)
 }
@@ -79,5 +64,5 @@ pub async fn search_with_post(
 
     debug!("returns: {:?}", search_results);
 
-    Ok(HttpResponse::Ok().json(SearchResults { results: search_results }))
+    Ok(HttpResponse::Ok().json(search_results))
 }
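Taken together, these handler hunks change the endpoint's payload shape: a bare JSON array of queries goes in and a bare array of per-index results comes out, instead of the previous `{"queries": [...]}` and `{"results": [...]}` wrappers. Below is a hedged sketch of the two request bodies, reusing the values from the tests further down.

```rust
use serde_json::json;

fn main() {
    // Before: queries wrapped in an object; the response wrapped in `{"results": [...]}`.
    let old_body = json!({ "queries": [
        { "indexUid": "test", "q": "glass" },
        { "indexUid": "test", "q": "captain" }
    ]});

    // After: a bare array of queries in, a bare array of per-index results out.
    let new_body = json!([
        { "indexUid": "test", "q": "glass" },
        { "indexUid": "test", "q": "captain" }
    ]);

    println!("{old_body}\n{new_body}");
}
```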
@@ -8,7 +8,6 @@ use either::Either;
 use meilisearch_auth::IndexSearchRules;
 use meilisearch_types::deserr::DeserrJsonError;
 use meilisearch_types::error::deserr_codes::*;
-use meilisearch_types::index_uid::IndexUid;
 use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS;
 use meilisearch_types::{milli, Document};
 use milli::tokenizer::TokenizerBuilder;
@@ -83,8 +82,7 @@ impl SearchQuery {
 #[derive(Debug, deserr::DeserializeFromValue)]
 #[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
 pub struct SearchQueryWithIndex {
-    #[deserr(error = DeserrJsonError<InvalidIndexUid>, missing_field_error = DeserrJsonError::missing_index_uid)]
-    pub index_uid: IndexUid,
+    pub index_uid: String,
     #[deserr(default, error = DeserrJsonError<InvalidSearchQ>)]
     pub q: Option<String>,
     #[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
@@ -122,7 +120,7 @@ pub struct SearchQueryWithIndex {
 }
 
 impl SearchQueryWithIndex {
-    pub fn into_index_query(self) -> (IndexUid, SearchQuery) {
+    pub fn into_index_query(self) -> (String, SearchQuery) {
         let SearchQueryWithIndex {
             index_uid,
             q,
@@ -8,13 +8,9 @@ use crate::common::Server;
 async fn search_empty_list() {
     let server = Server::new().await;
 
-    let (response, code) = server.search(json!({"queries": []})).await;
+    let (response, code) = server.search(json!([])).await;
     snapshot!(code, @"200 OK");
-    snapshot!(json_string!(response), @r###"
-    {
-      "results": []
-    }
-    "###);
+    snapshot!(json_string!(response), @"[]");
 }
 
 #[actix_rt::test]
@@ -25,23 +21,7 @@ async fn search_json_object() {
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
     {
-      "message": "Missing field `queries`",
-      "code": "bad_request",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#bad_request"
-    }
-    "###);
-}
-
-#[actix_rt::test]
-async fn search_json_array() {
-    let server = Server::new().await;
-
-    let (response, code) = server.search(json!([])).await;
-    snapshot!(code, @"400 Bad Request");
-    snapshot!(json_string!(response), @r###"
-    {
-      "message": "Invalid value type: expected an object, but found an array: `[]`",
+      "message": "Invalid value type: expected an array, but found an object: `{}`",
       "code": "bad_request",
       "type": "invalid_request",
       "link": "https://docs.meilisearch.com/errors#bad_request"
@@ -59,13 +39,13 @@ async fn simple_search_single_index() {
     index.wait_task(0).await;
 
     let (response, code) = server
-        .search(json!({"queries": [
+        .search(json!([
         {"indexUid" : "test", "q": "glass"},
         {"indexUid": "test", "q": "captain"},
-        ]}))
+        ]))
         .await;
     snapshot!(code, @"200 OK");
-    insta::assert_json_snapshot!(response["results"], { "[].processingTimeMs" => "[time]" }, @r###"
+    insta::assert_json_snapshot!(response, { "[].processingTimeMs" => "[time]" }, @r###"
     [
       {
         "indexUid": "test",
@@ -99,56 +79,6 @@ async fn simple_search_single_index() {
     "###);
 }
 
-#[actix_rt::test]
-async fn simple_search_missing_index_uid() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    let documents = DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(0).await;
-
-    let (response, code) = server
-        .search(json!({"queries": [
-        {"q": "glass"},
-        ]}))
-        .await;
-    snapshot!(code, @"400 Bad Request");
-    insta::assert_json_snapshot!(response, @r###"
-    {
-      "message": "Missing field `indexUid` inside `.queries[0]`",
-      "code": "missing_index_uid",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#missing_index_uid"
-    }
-    "###);
-}
-
-#[actix_rt::test]
-async fn simple_search_illegal_index_uid() {
-    let server = Server::new().await;
-    let index = server.index("test");
-
-    let documents = DOCUMENTS.clone();
-    index.add_documents(documents, None).await;
-    index.wait_task(0).await;
-
-    let (response, code) = server
-        .search(json!({"queries": [
-        {"indexUid": "hé", "q": "glass"},
-        ]}))
-        .await;
-    snapshot!(code, @"400 Bad Request");
-    insta::assert_json_snapshot!(response, @r###"
-    {
-      "message": "Invalid value at `.queries[0].indexUid`: `hé` is not a valid index uid. Index uid can be an integer or a string containing only alphanumeric characters, hyphens (-) and underscores (_).",
-      "code": "invalid_index_uid",
-      "type": "invalid_request",
-      "link": "https://docs.meilisearch.com/errors#invalid_index_uid"
-    }
-    "###);
-}
-
 #[actix_rt::test]
 async fn simple_search_two_indexes() {
     let server = Server::new().await;
@@ -164,13 +94,13 @@ async fn simple_search_two_indexes() {
     index.wait_task(1).await;
 
     let (response, code) = server
-        .search(json!({"queries": [
+        .search(json!([
         {"indexUid" : "test", "q": "glass"},
        {"indexUid": "nested", "q": "pesti"},
-        ]}))
+        ]))
         .await;
     snapshot!(code, @"200 OK");
-    insta::assert_json_snapshot!(response["results"], { "[].processingTimeMs" => "[time]" }, @r###"
+    insta::assert_json_snapshot!(response, { "[].processingTimeMs" => "[time]" }, @r###"
     [
       {
         "indexUid": "test",
@@ -241,10 +171,10 @@ async fn search_one_index_doesnt_exist() {
     index.wait_task(0).await;
 
     let (response, code) = server
-        .search(json!({"queries": [
+        .search(json!([
         {"indexUid" : "test", "q": "glass"},
         {"indexUid": "nested", "q": "pesti"},
-        ]}))
+        ]))
         .await;
     snapshot!(code, @"404 Not Found");
     snapshot!(json_string!(response), @r###"
@@ -262,10 +192,10 @@ async fn search_multiple_indexes_dont_exist() {
     let server = Server::new().await;
 
     let (response, code) = server
-        .search(json!({"queries": [
+        .search(json!([
         {"indexUid" : "test", "q": "glass"},
         {"indexUid": "nested", "q": "pesti"},
-        ]}))
+        ]))
         .await;
     snapshot!(code, @"404 Not Found");
     snapshot!(json_string!(response), @r###"
@@ -294,10 +224,10 @@ async fn search_one_query_error() {
     index.wait_task(1).await;
 
     let (response, code) = server
-        .search(json!({"queries": [
+        .search(json!([
         {"indexUid" : "test", "q": "glass", "facets": ["title"]},
         {"indexUid": "nested", "q": "pesti"},
-        ]}))
+        ]))
         .await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"
@@ -326,10 +256,10 @@ async fn search_multiple_query_errors() {
     index.wait_task(1).await;
 
     let (response, code) = server
-        .search(json!({"queries": [
+        .search(json!([
         {"indexUid" : "test", "q": "glass", "facets": ["title"]},
         {"indexUid": "nested", "q": "pesti", "facets": ["doggos"]},
-        ]}))
+        ]))
         .await;
     snapshot!(code, @"400 Bad Request");
     snapshot!(json_string!(response), @r###"