mirror of
https://github.com/meilisearch/meilisearch.git
synced 2025-12-21 20:06:58 +00:00
Compare commits
12 Commits
yoeight/un
...
openapi-co
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
6463486740 | ||
|
|
ea82738ee2 | ||
|
|
89989dd587 | ||
|
|
97905583d9 | ||
|
|
96270fb7ae | ||
|
|
5fd6d8d4b3 | ||
|
|
aaa2075c48 | ||
|
|
54000203f5 | ||
|
|
9db2b16eed | ||
|
|
2ba3fafcc3 | ||
|
|
14db3dbcc4 | ||
|
|
a61ef955fc |
50
.github/workflows/check-openapi-file.yml
vendored
Normal file
50
.github/workflows/check-openapi-file.yml
vendored
Normal file
@@ -0,0 +1,50 @@
|
||||
name: Check OpenAPI file
|
||||
|
||||
on:
|
||||
workflow_dispatch:
|
||||
pull_request:
|
||||
merge_group:
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
env:
|
||||
CARGO_TERM_COLOR: always
|
||||
RUST_BACKTRACE: 1
|
||||
RUSTFLAGS: "-D warnings"
|
||||
|
||||
jobs:
|
||||
check-openapi:
|
||||
name: Check OpenAPI specification
|
||||
runs-on: ubuntu-22.04
|
||||
steps:
|
||||
- uses: actions/checkout@v5
|
||||
|
||||
- name: Setup Rust
|
||||
uses: dtolnay/rust-toolchain@1.91.1
|
||||
|
||||
- name: Cache dependencies
|
||||
uses: Swatinem/rust-cache@v2.8.0
|
||||
|
||||
- name: Setup Node.js
|
||||
uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '20'
|
||||
|
||||
- name: Install OpenAPI tools
|
||||
run: npm install -g @apidevtools/swagger-cli @stoplight/spectral-cli
|
||||
|
||||
- name: Generate OpenAPI specification
|
||||
run: cargo run --release -p openapi-generator -- -o /tmp/openapi.json
|
||||
|
||||
- name: Check all routes have summaries
|
||||
run: cargo run --release -p openapi-generator -- --check-summaries
|
||||
|
||||
# Validates that the OpenAPI file is syntactically correct and conforms to the OpenAPI specification
|
||||
- name: Validate OpenAPI schema
|
||||
run: swagger-cli validate /tmp/openapi.json
|
||||
|
||||
# Lints the OpenAPI file for best practices (descriptions, examples, naming conventions, etc.)
|
||||
# Ruleset is defined in crates/openapi-generator/.spectral.yaml
|
||||
- name: Lint OpenAPI specification
|
||||
run: spectral lint /tmp/openapi.json --ruleset crates/openapi-generator/.spectral.yaml
|
||||
25
.github/workflows/publish-release-assets.yml
vendored
25
.github/workflows/publish-release-assets.yml
vendored
@@ -7,6 +7,9 @@ on:
|
||||
release:
|
||||
types: [published]
|
||||
|
||||
permissions:
|
||||
contents: read
|
||||
|
||||
jobs:
|
||||
check-version:
|
||||
name: Check the version validity
|
||||
@@ -89,8 +92,8 @@ jobs:
|
||||
asset_name: meilisearch-${{ matrix.edition-suffix }}${{ matrix.asset_name }}
|
||||
tag: ${{ github.ref }}
|
||||
|
||||
publish-openapi-file:
|
||||
name: Publish OpenAPI file
|
||||
publish-openapi-files:
|
||||
name: Publish OpenAPI files
|
||||
needs: check-version
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
@@ -101,16 +104,26 @@ jobs:
|
||||
with:
|
||||
toolchain: stable
|
||||
override: true
|
||||
- name: Generate OpenAPI file
|
||||
- name: Generate OpenAPI files
|
||||
run: |
|
||||
cd crates/openapi-generator
|
||||
cargo run --release -- --pretty --output ../../meilisearch.json
|
||||
- name: Upload OpenAPI to Release
|
||||
cargo run --release -- --pretty --debug --output ../../meilisearch-openapi.json
|
||||
cargo run --release -- --pretty --debug --with-mintlify-code-samples --output ../../meilisearch-openapi-mintlify.json
|
||||
- name: Upload OpenAPI file to Release
|
||||
# No need to upload for dry run (cron or workflow_dispatch)
|
||||
if: github.event_name == 'release'
|
||||
uses: svenstaro/upload-release-action@2.11.2
|
||||
with:
|
||||
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
file: ./meilisearch.json
|
||||
file: ./meilisearch-openapi.json
|
||||
asset_name: meilisearch-openapi.json
|
||||
tag: ${{ github.ref }}
|
||||
- name: Upload Mintlify OpenAPI file to Release
|
||||
# No need to upload for dry run (cron or workflow_dispatch)
|
||||
if: github.event_name == 'release'
|
||||
uses: svenstaro/upload-release-action@2.11.2
|
||||
with:
|
||||
repo_token: ${{ secrets.MEILI_BOT_GH_PAT }}
|
||||
file: ./meilisearch-openapi-mintlify.json
|
||||
asset_name: meilisearch-openapi-mintlify.json
|
||||
tag: ${{ github.ref }}
|
||||
|
||||
2
.github/workflows/test-suite.yml
vendored
2
.github/workflows/test-suite.yml
vendored
@@ -15,7 +15,7 @@ env:
|
||||
|
||||
jobs:
|
||||
test-linux:
|
||||
name: Tests on Ubuntu
|
||||
name: Tests on ${{ matrix.runner }} ${{ matrix.features }}
|
||||
runs-on: ${{ matrix.runner }}
|
||||
strategy:
|
||||
matrix:
|
||||
|
||||
3
.gitignore
vendored
3
.gitignore
vendored
@@ -29,3 +29,6 @@ crates/meilisearch/db.snapshot
|
||||
|
||||
# Fuzzcheck data for the facet indexing fuzz test
|
||||
crates/milli/fuzz/update::facet::incremental::fuzz::fuzz/
|
||||
|
||||
# OpenAPI generator
|
||||
**/meilisearch-openapi.json
|
||||
|
||||
@@ -117,7 +117,7 @@ With swagger:
|
||||
With the internal crate:
|
||||
```bash
|
||||
cd crates/openapi-generator
|
||||
cargo run --release -- --pretty --output meilisearch.json
|
||||
cargo run --release -- --pretty
|
||||
```
|
||||
|
||||
### Logging
|
||||
|
||||
983
Cargo.lock
generated
983
Cargo.lock
generated
File diff suppressed because it is too large
Load Diff
@@ -27,7 +27,7 @@ pub(crate) struct FeatureData {
|
||||
network: Arc<RwLock<Network>>,
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, Default)]
|
||||
#[derive(Debug, Clone, Copy)]
|
||||
pub struct RoFeatures {
|
||||
runtime: RuntimeTogglableFeatures,
|
||||
}
|
||||
|
||||
@@ -46,7 +46,7 @@ pub struct CreateApiKey {
|
||||
#[deserr(default, error = DeserrJsonError<InvalidApiKeyName>)]
|
||||
pub name: Option<String>,
|
||||
/// A uuid v4 to identify the API Key. If not specified, it's generated by Meilisearch.
|
||||
#[schema(value_type = Uuid, example = json!(null))]
|
||||
#[schema(value_type = Option<String>, example = "01b4bc42-eb33-4041-b481-254d00cce834")]
|
||||
#[deserr(default = Uuid::new_v4(), error = DeserrJsonError<InvalidApiKeyUid>, try_from(&String) = Uuid::from_str -> uuid::Error)]
|
||||
pub uid: KeyId,
|
||||
/// A list of actions permitted for the key. `["*"]` for all actions. The `*` character can be used as a wildcard when located at the last position. e.g. `documents.*` to authorize access on all documents endpoints.
|
||||
|
||||
@@ -302,7 +302,7 @@ pub struct Settings<T> {
|
||||
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidSettingsLocalizedAttributes>)]
|
||||
#[schema(value_type = Option<Vec<LocalizedAttributesRuleView>>, example = json!(50))]
|
||||
#[schema(value_type = Option<Vec<LocalizedAttributesRuleView>>, example = json!(null))]
|
||||
pub localized_attributes: Setting<Vec<LocalizedAttributesRuleView>>,
|
||||
|
||||
#[serde(default, skip_serializing_if = "Setting::is_not_set")]
|
||||
|
||||
@@ -21,8 +21,8 @@ pub struct TaskView {
|
||||
/// The unique sequential identifier of the task.
|
||||
#[schema(value_type = u32, example = 4312)]
|
||||
pub uid: TaskId,
|
||||
/// The unique identifier of the index where this task is operated.
|
||||
#[schema(value_type = Option<u32>, example = json!("movies"))]
|
||||
/// The unique identifier of the batch where this task is grouped.
|
||||
#[schema(value_type = Option<u32>, example = json!(12))]
|
||||
pub batch_uid: Option<BatchId>,
|
||||
#[serde(default)]
|
||||
pub index_uid: Option<String>,
|
||||
|
||||
@@ -190,6 +190,7 @@ pub enum KindWithContent {
|
||||
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, ToSchema)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
pub struct IndexSwap {
|
||||
#[schema(value_type = Vec<String>, example = json!(["indexA", "indexB"]))]
|
||||
pub indexes: (String, String),
|
||||
#[serde(default)]
|
||||
pub rename: bool,
|
||||
@@ -607,7 +608,7 @@ impl std::error::Error for ParseTaskStatusError {}
|
||||
ToSchema,
|
||||
)]
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[schema(rename_all = "camelCase", example = json!(enum_iterator::all::<Kind>().collect::<Vec<_>>()))]
|
||||
#[schema(rename_all = "camelCase", example = "documentAdditionOrUpdate")]
|
||||
pub enum Kind {
|
||||
DocumentAdditionOrUpdate,
|
||||
DocumentEdition,
|
||||
|
||||
@@ -1,19 +1,18 @@
|
||||
mod error;
|
||||
|
||||
use actix_http::Payload;
|
||||
use actix_web::http::header::AUTHORIZATION;
|
||||
use actix_web::web::Data;
|
||||
use actix_web::{FromRequest, HttpRequest};
|
||||
pub use error::AuthenticationError;
|
||||
use futures::future::err;
|
||||
use futures::Future;
|
||||
use futures_util::future::ok;
|
||||
use meilisearch_auth::{AuthController, AuthFilter};
|
||||
use meilisearch_types::error::{Code, ResponseError};
|
||||
use std::marker::PhantomData;
|
||||
use std::ops::Deref;
|
||||
use std::pin::Pin;
|
||||
|
||||
use actix_web::http::header::AUTHORIZATION;
|
||||
use actix_web::web::Data;
|
||||
use actix_web::FromRequest;
|
||||
pub use error::AuthenticationError;
|
||||
use futures::future::err;
|
||||
use futures::Future;
|
||||
use meilisearch_auth::{AuthController, AuthFilter};
|
||||
use meilisearch_types::error::{Code, ResponseError};
|
||||
|
||||
use self::policies::AuthError;
|
||||
|
||||
pub struct GuardedData<P, D> {
|
||||
@@ -115,70 +114,6 @@ impl<P: Policy + 'static, D: 'static + Clone> FromRequest for GuardedData<P, D>
|
||||
}
|
||||
}
|
||||
|
||||
pub struct OptionallyGuardedData<P, D> {
|
||||
data: D,
|
||||
filters: AuthFilter,
|
||||
_marker: PhantomData<P>,
|
||||
}
|
||||
|
||||
impl<P, D> OptionallyGuardedData<P, D> {
|
||||
pub fn filters(&self) -> &AuthFilter {
|
||||
&self.filters
|
||||
}
|
||||
}
|
||||
|
||||
impl<P, D> Deref for OptionallyGuardedData<P, D> {
|
||||
type Target = D;
|
||||
|
||||
fn deref(&self) -> &Self::Target {
|
||||
&self.data
|
||||
}
|
||||
}
|
||||
|
||||
impl<P: Policy + 'static, D: 'static + Clone> FromRequest for OptionallyGuardedData<P, D> {
|
||||
type Error = ResponseError;
|
||||
type Future = Pin<Box<dyn Future<Output = Result<Self, Self::Error>>>>;
|
||||
|
||||
fn from_request(req: &HttpRequest, _payload: &mut Payload) -> Self::Future {
|
||||
let data = if let Some(d) = req.app_data::<D>().cloned() {
|
||||
d
|
||||
} else {
|
||||
return Box::pin(err(AuthenticationError::IrretrievableState.into()));
|
||||
};
|
||||
|
||||
match req.app_data::<Data<AuthController>>().cloned() {
|
||||
Some(auth) => match extract_token_from_request(req) {
|
||||
Ok(Some(token)) => {
|
||||
let token = token.to_owned();
|
||||
|
||||
let index = req.match_info().get("index_uid").map(String::from);
|
||||
// TODO: find a less hardcoded way?
|
||||
Box::pin(async move {
|
||||
let guarded =
|
||||
GuardedData::<P, D>::auth_bearer(auth, token, index, Some(data))
|
||||
.await?;
|
||||
|
||||
Ok(OptionallyGuardedData {
|
||||
data: guarded.data,
|
||||
filters: guarded.filters,
|
||||
_marker: PhantomData,
|
||||
})
|
||||
})
|
||||
}
|
||||
|
||||
Ok(None) => Box::pin(ok(OptionallyGuardedData {
|
||||
data,
|
||||
filters: AuthFilter::default(),
|
||||
_marker: PhantomData,
|
||||
})),
|
||||
Err(e) => Box::pin(err(e.into())),
|
||||
},
|
||||
|
||||
None => Box::pin(err(AuthenticationError::IrretrievableState.into())),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn extract_token_from_request(
|
||||
req: &actix_web::HttpRequest,
|
||||
) -> Result<Option<&str>, AuthenticationError> {
|
||||
|
||||
@@ -46,10 +46,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
(status = OK, description = "Return the batch", body = BatchView, content_type = "application/json", example = json!(
|
||||
{
|
||||
"uid": 1,
|
||||
"details": {
|
||||
"receivedDocuments": 1,
|
||||
"indexedDocuments": 1
|
||||
},
|
||||
"details": {},
|
||||
"progress": null,
|
||||
"stats": {
|
||||
"totalNbTasks": 1,
|
||||
|
||||
@@ -54,7 +54,7 @@ crate::empty_analytics!(DumpAnalytics, "Dump Created");
|
||||
"taskUid": 0,
|
||||
"indexUid": null,
|
||||
"status": "enqueued",
|
||||
"type": "DumpCreation",
|
||||
"type": "dumpCreation",
|
||||
"enqueuedAt": "2021-01-01T09:39:00.000000Z"
|
||||
}
|
||||
)),
|
||||
|
||||
@@ -41,6 +41,10 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
cfg.service(web::resource("").route(web::post().to(export)));
|
||||
}
|
||||
|
||||
/// Export to a remote Meilisearch
|
||||
///
|
||||
/// Triggers an export process to a remote Meilisearch instance. This allows you to send
|
||||
/// documents and settings from the current instance to another Meilisearch server.
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "",
|
||||
|
||||
@@ -32,6 +32,8 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
}
|
||||
|
||||
/// Compact an index
|
||||
///
|
||||
/// Triggers a compaction process on the specified index. Compaction reorganizes the index database to make it smaller and more efficient.
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "{indexUid}/compact",
|
||||
|
||||
@@ -696,7 +696,7 @@ pub struct UpdateDocumentsQuery {
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidIndexCustomMetadata>)]
|
||||
pub custom_metadata: Option<String>,
|
||||
|
||||
#[param(example = "true")]
|
||||
#[param(example = true)]
|
||||
#[deserr(default, try_from(&String) = from_string_skip_creation -> DeserrQueryParamError<InvalidSkipCreation>, error = DeserrQueryParamError<InvalidSkipCreation>)]
|
||||
/// Only update documents if they already exist.
|
||||
pub skip_creation: Option<bool>,
|
||||
|
||||
@@ -51,7 +51,7 @@ mod similar_analytics;
|
||||
(path = "/", api = settings::SettingsApi),
|
||||
(path = "/", api = compact::CompactApi),
|
||||
),
|
||||
paths(list_indexes, create_index, get_index, update_index, delete_index, get_index_stats, compact::compact),
|
||||
paths(list_indexes, create_index, get_index, update_index, delete_index, get_index_stats),
|
||||
tags(
|
||||
(
|
||||
name = "Indexes",
|
||||
|
||||
@@ -77,11 +77,11 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
|
||||
"release_date": 1418256000
|
||||
}
|
||||
],
|
||||
"id": "143",
|
||||
"offset": 0,
|
||||
"limit": 2,
|
||||
"estimatedTotalHits": 976,
|
||||
"processingTimeMs": 35,
|
||||
"query": "american "
|
||||
"processingTimeMs": 35
|
||||
}
|
||||
)),
|
||||
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
|
||||
@@ -159,11 +159,11 @@ pub async fn similar_get(
|
||||
"release_date": 1418256000
|
||||
}
|
||||
],
|
||||
"id": "143",
|
||||
"offset": 0,
|
||||
"limit": 2,
|
||||
"estimatedTotalHits": 976,
|
||||
"processingTimeMs": 35,
|
||||
"query": "american "
|
||||
"processingTimeMs": 35
|
||||
}
|
||||
)),
|
||||
(status = 404, description = "Index not found", body = ResponseError, content_type = "application/json", example = json!(
|
||||
|
||||
@@ -11,7 +11,7 @@ use time::OffsetDateTime;
|
||||
use utoipa::OpenApi;
|
||||
|
||||
use crate::extractors::authentication::policies::ActionPolicy;
|
||||
use crate::extractors::authentication::OptionallyGuardedData;
|
||||
use crate::extractors::authentication::{AuthenticationError, GuardedData};
|
||||
use crate::routes::create_all_stats;
|
||||
use crate::search_queue::SearchQueue;
|
||||
|
||||
@@ -123,15 +123,20 @@ meilisearch_used_db_size_bytes 409600
|
||||
)
|
||||
)]
|
||||
pub async fn get_metrics(
|
||||
index_scheduler: OptionallyGuardedData<
|
||||
ActionPolicy<{ actions::METRICS_GET }>,
|
||||
Data<IndexScheduler>,
|
||||
>,
|
||||
index_scheduler: GuardedData<ActionPolicy<{ actions::METRICS_GET }>, Data<IndexScheduler>>,
|
||||
auth_controller: Data<AuthController>,
|
||||
search_queue: web::Data<SearchQueue>,
|
||||
) -> Result<HttpResponse, ResponseError> {
|
||||
index_scheduler.features().check_metrics()?;
|
||||
let auth_filters = index_scheduler.filters();
|
||||
if !auth_filters.all_indexes_authorized() {
|
||||
let mut error = ResponseError::from(AuthenticationError::InvalidToken);
|
||||
error
|
||||
.message
|
||||
.push_str(" The API key for the `/metrics` route must allow access to all indexes.");
|
||||
return Err(error);
|
||||
}
|
||||
|
||||
let response = create_all_stats((*index_scheduler).clone(), auth_controller, auth_filters)?;
|
||||
|
||||
crate::metrics::MEILISEARCH_DB_SIZE_BYTES.set(response.database_size as i64);
|
||||
@@ -143,12 +148,10 @@ pub async fn get_metrics(
|
||||
crate::metrics::MEILISEARCH_SEARCHES_WAITING_TO_BE_PROCESSED
|
||||
.set(search_queue.searches_waiting() as i64);
|
||||
|
||||
if auth_filters.all_indexes_authorized() {
|
||||
for (index, value) in response.indexes.iter() {
|
||||
crate::metrics::MEILISEARCH_INDEX_DOCS_COUNT
|
||||
.with_label_values(&[index])
|
||||
.set(value.number_of_documents as i64);
|
||||
}
|
||||
for (index, value) in response.indexes.iter() {
|
||||
crate::metrics::MEILISEARCH_INDEX_DOCS_COUNT
|
||||
.with_label_values(&[index])
|
||||
.set(value.number_of_documents as i64);
|
||||
}
|
||||
|
||||
for (kind, value) in index_scheduler.get_stats()? {
|
||||
|
||||
@@ -99,6 +99,8 @@ mod webhooks;
|
||||
paths(get_health, get_version, get_stats),
|
||||
tags(
|
||||
(name = "Stats", description = "Stats gives extended information and metrics about indexes and the Meilisearch database."),
|
||||
(name = "Health", description = "The health check endpoint enables you to periodically test the health of your Meilisearch instance."),
|
||||
(name = "Version", description = "Returns the version of the running Meilisearch instance."),
|
||||
),
|
||||
modifiers(&OpenApiAuth),
|
||||
servers((
|
||||
|
||||
@@ -99,11 +99,7 @@ async fn get_network(
|
||||
#[serde(rename_all = "camelCase")]
|
||||
#[schema(rename_all = "camelCase")]
|
||||
pub struct Remote {
|
||||
#[schema(value_type = Option<String>, example = json!({
|
||||
"ms-0": Remote { url: Setting::Set("http://localhost:7700".into()), search_api_key: Setting::Reset, write_api_key: Setting::Reset },
|
||||
"ms-1": Remote { url: Setting::Set("http://localhost:7701".into()), search_api_key: Setting::Set("foo".into()), write_api_key: Setting::Set("bar".into()) },
|
||||
"ms-2": Remote { url: Setting::Set("http://localhost:7702".into()), search_api_key: Setting::Set("bar".into()), write_api_key: Setting::Set("foo".into()) },
|
||||
}))]
|
||||
#[schema(value_type = Option<String>, example = "http://localhost:7700")]
|
||||
#[deserr(default, error = DeserrJsonError<InvalidNetworkUrl>)]
|
||||
#[serde(default)]
|
||||
pub url: Setting<String>,
|
||||
|
||||
@@ -78,48 +78,48 @@ pub struct TasksFilterQuery {
|
||||
|
||||
/// Permits to filter tasks by their uid. By default, when the uids query parameter is not set, all task uids are returned. It's possible to specify several uids by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598]))]
|
||||
pub uids: OptionStarOrList<u32>,
|
||||
/// Permits to filter tasks using the uid of the task that canceled them. It's possible to specify several task uids by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([374, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([374]))]
|
||||
pub canceled_by: OptionStarOrList<u32>,
|
||||
/// Permits to filter tasks by their related type. By default, when `types` query parameter is not set, all task types are returned. It's possible to specify several types by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
|
||||
#[param(required = false, value_type = Option<Vec<String>>, example = json!([Kind::DocumentAdditionOrUpdate, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<String>>, example = json!([Kind::DocumentAdditionOrUpdate]))]
|
||||
pub types: OptionStarOrList<Kind>,
|
||||
/// Permits to filter tasks by their status. By default, when `statuses` query parameter is not set, all task statuses are returned. It's possible to specify several statuses by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
|
||||
#[param(required = false, value_type = Option<Vec<Status>>, example = json!([Status::Succeeded, Status::Failed, Status::Canceled, Status::Enqueued, Status::Processing, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<Status>>, example = json!([Status::Succeeded, Status::Failed, Status::Canceled, Status::Enqueued, Status::Processing]))]
|
||||
pub statuses: OptionStarOrList<Status>,
|
||||
/// Permits to filter tasks by their related index. By default, when `indexUids` query parameter is not set, the tasks of all the indexes are returned. It is possible to specify several indexes by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
|
||||
#[param(required = false, value_type = Option<Vec<String>>, example = json!(["movies", "theater", "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<String>>, example = json!(["movies", "theater"]))]
|
||||
pub index_uids: OptionStarOrList<IndexUid>,
|
||||
|
||||
/// Permits to filter tasks based on their enqueuedAt time. Matches tasks enqueued after the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their enqueuedAt time. Matches tasks enqueued before the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their startedAt time. Matches tasks started after the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub after_started_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their startedAt time. Matches tasks started before the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub before_started_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their finishedAt time. Matches tasks finished after the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub after_finished_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their finishedAt time. Matches tasks finished before the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub before_finished_at: OptionStarOr<OffsetDateTime>,
|
||||
}
|
||||
|
||||
@@ -173,52 +173,52 @@ impl TaskDeletionOrCancelationQuery {
|
||||
pub struct TaskDeletionOrCancelationQuery {
|
||||
/// Permits to filter tasks by their uid. By default, when the `uids` query parameter is not set, all task uids are returned. It's possible to specify several uids by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskUids>)]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598]))]
|
||||
pub uids: OptionStarOrList<u32>,
|
||||
/// Lets you filter tasks by their `batchUid`.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidBatchUids>)]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([231, 423, 598]))]
|
||||
pub batch_uids: OptionStarOrList<BatchId>,
|
||||
/// Permits to filter tasks using the uid of the task that canceled them. It's possible to specify several task uids by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskCanceledBy>)]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([374, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<u32>>, example = json!([374]))]
|
||||
pub canceled_by: OptionStarOrList<u32>,
|
||||
/// Permits to filter tasks by their related type. By default, when `types` query parameter is not set, all task types are returned. It's possible to specify several types by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskTypes>)]
|
||||
#[param(required = false, value_type = Option<Vec<Kind>>, example = json!([Kind::DocumentDeletion, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<Kind>>, example = json!([Kind::DocumentDeletion]))]
|
||||
pub types: OptionStarOrList<Kind>,
|
||||
/// Permits to filter tasks by their status. By default, when `statuses` query parameter is not set, all task statuses are returned. It's possible to specify several statuses by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskStatuses>)]
|
||||
#[param(required = false, value_type = Option<Vec<Status>>, example = json!([Status::Succeeded, Status::Failed, Status::Canceled, "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<Status>>, example = json!([Status::Succeeded, Status::Failed, Status::Canceled]))]
|
||||
pub statuses: OptionStarOrList<Status>,
|
||||
/// Permits to filter tasks by their related index. By default, when `indexUids` query parameter is not set, the tasks of all the indexes are returned. It is possible to specify several indexes by separating them with the `,` character.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidIndexUid>)]
|
||||
#[param(required = false, value_type = Option<Vec<String>>, example = json!(["movies", "theater", "*"]))]
|
||||
#[param(required = false, value_type = Option<Vec<String>>, example = json!(["movies", "theater"]))]
|
||||
pub index_uids: OptionStarOrList<IndexUid>,
|
||||
|
||||
/// Permits to filter tasks based on their enqueuedAt time. Matches tasks enqueued after the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub after_enqueued_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their enqueuedAt time. Matches tasks enqueued before the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeEnqueuedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub before_enqueued_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their startedAt time. Matches tasks started after the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub after_started_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their startedAt time. Matches tasks started before the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeStartedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub before_started_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their finishedAt time. Matches tasks finished after the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskAfterFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_after -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub after_finished_at: OptionStarOr<OffsetDateTime>,
|
||||
/// Permits to filter tasks based on their finishedAt time. Matches tasks finished before the given date. Supports RFC 3339 date format.
|
||||
#[deserr(default, error = DeserrQueryParamError<InvalidTaskBeforeFinishedAt>, try_from(OptionStarOr<String>) = deserialize_date_before -> InvalidTaskDateError)]
|
||||
#[param(required = false, value_type = Option<String>, example = json!(["2024-08-08T16:37:09.971Z", "*"]))]
|
||||
#[param(required = false, value_type = Option<String>, example = "2024-08-08T16:37:09.971Z")]
|
||||
pub before_finished_at: OptionStarOr<OffsetDateTime>,
|
||||
}
|
||||
|
||||
@@ -519,15 +519,9 @@ pub struct AllTasks {
|
||||
"uid": 144,
|
||||
"indexUid": "mieli",
|
||||
"status": "succeeded",
|
||||
"type": "settingsUpdate",
|
||||
"type": "indexCreation",
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"settings": {
|
||||
"filterableAttributes": [
|
||||
"play_count"
|
||||
]
|
||||
}
|
||||
},
|
||||
"details": null,
|
||||
"error": null,
|
||||
"duration": "PT0.009330S",
|
||||
"enqueuedAt": "2024-08-08T09:01:13.348471Z",
|
||||
@@ -583,19 +577,16 @@ async fn get_tasks(
|
||||
path = "/{taskUid}",
|
||||
tag = "Tasks",
|
||||
security(("Bearer" = ["tasks.get", "tasks.*", "*"])),
|
||||
params(("taskUid", format = UInt32, example = 0, description = "The task identifier", nullable = false)),
|
||||
params(("taskUid", format = UInt32, example = "0", description = "The task identifier", nullable = false)),
|
||||
responses(
|
||||
(status = 200, description = "Task successfully retrieved", body = TaskView, content_type = "application/json", example = json!(
|
||||
{
|
||||
"uid": 1,
|
||||
"indexUid": "movies",
|
||||
"status": "succeeded",
|
||||
"type": "documentAdditionOrUpdate",
|
||||
"type": "indexCreation",
|
||||
"canceledBy": null,
|
||||
"details": {
|
||||
"receivedDocuments": 79000,
|
||||
"indexedDocuments": 79000
|
||||
},
|
||||
"details": null,
|
||||
"error": null,
|
||||
"duration": "PT1S",
|
||||
"enqueuedAt": "2021-01-01T09:39:00.000000Z",
|
||||
@@ -654,7 +645,7 @@ async fn get_task(
|
||||
path = "/{taskUid}/documents",
|
||||
tag = "Tasks",
|
||||
security(("Bearer" = ["tasks.get", "tasks.*", "*"])),
|
||||
params(("taskUid", format = UInt32, example = 0, description = "The task identifier", nullable = false)),
|
||||
params(("taskUid", format = UInt32, example = "0", description = "The task identifier", nullable = false)),
|
||||
responses(
|
||||
(status = 200, description = "The content of the task update", body = serde_json::Value, content_type = "application/x-ndjson"),
|
||||
(status = 401, description = "The authorization header is missing", body = ResponseError, content_type = "application/json", example = json!(
|
||||
|
||||
@@ -111,6 +111,9 @@ pub(super) struct WebhookResults {
|
||||
results: Vec<WebhookWithMetadataRedactedAuthorization>,
|
||||
}
|
||||
|
||||
/// List webhooks
|
||||
///
|
||||
/// Get the list of all registered webhooks.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "",
|
||||
@@ -296,6 +299,9 @@ fn check_changed(uuid: Uuid, webhook: &Webhook) -> Result<(), WebhooksError> {
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Get a webhook
|
||||
///
|
||||
/// Get a single webhook by its UUID.
|
||||
#[utoipa::path(
|
||||
get,
|
||||
path = "/{uuid}",
|
||||
@@ -331,6 +337,9 @@ async fn get_webhook(
|
||||
Ok(HttpResponse::Ok().json(webhook))
|
||||
}
|
||||
|
||||
/// Create a webhook
|
||||
///
|
||||
/// Create a new webhook to receive task notifications.
|
||||
#[utoipa::path(
|
||||
post,
|
||||
path = "",
|
||||
@@ -389,6 +398,9 @@ async fn post_webhook(
|
||||
Ok(HttpResponse::Created().json(response))
|
||||
}
|
||||
|
||||
/// Update a webhook
|
||||
///
|
||||
/// Update an existing webhook's URL or headers.
|
||||
#[utoipa::path(
|
||||
patch,
|
||||
path = "/{uuid}",
|
||||
@@ -441,6 +453,9 @@ async fn patch_webhook(
|
||||
Ok(HttpResponse::Ok().json(response))
|
||||
}
|
||||
|
||||
/// Delete a webhook
|
||||
///
|
||||
/// Delete an existing webhook by its UUID.
|
||||
#[utoipa::path(
|
||||
delete,
|
||||
path = "/{uuid}",
|
||||
|
||||
@@ -91,7 +91,7 @@ rhai = { version = "1.23.6", features = [
|
||||
"sync",
|
||||
] }
|
||||
arroy = "0.6.4-nested-rtxns"
|
||||
hannoy = { version = "0.1.0-nested-rtxns", features = ["arroy"] }
|
||||
hannoy = { version = "0.1.2-nested-rtxns", features = ["arroy"] }
|
||||
rand = "0.8.5"
|
||||
tracing = "0.1.41"
|
||||
ureq = { version = "2.12.1", features = ["json"] }
|
||||
|
||||
8
crates/openapi-generator/.spectral.yaml
Normal file
8
crates/openapi-generator/.spectral.yaml
Normal file
@@ -0,0 +1,8 @@
|
||||
extends: ["spectral:oas"]
|
||||
|
||||
rules:
|
||||
# Disable the security scopes warning
|
||||
# Meilisearch uses Bearer authentication with API key permissions that work like scopes,
|
||||
# but OpenAPI 3.0 doesn't support scopes with HTTP Bearer authentication (only OAuth2).
|
||||
# The security annotations document required permissions but are not OAuth2 scopes.
|
||||
oas3-operation-security-defined: off
|
||||
@@ -10,3 +10,5 @@ serde_json = "1.0"
|
||||
clap = { version = "4.5.52", features = ["derive"] }
|
||||
anyhow = "1.0.100"
|
||||
utoipa = "5.4.0"
|
||||
reqwest = { version = "0.12", features = ["blocking"] }
|
||||
regex = "1.10"
|
||||
|
||||
@@ -1,21 +1,65 @@
|
||||
use std::borrow::Cow;
|
||||
use std::collections::hash_map::Entry;
|
||||
use std::collections::HashMap;
|
||||
use std::path::PathBuf;
|
||||
|
||||
use anyhow::Result;
|
||||
use anyhow::{Context, Result};
|
||||
use clap::Parser;
|
||||
use meilisearch::routes::MeilisearchApi;
|
||||
use serde_json::{json, Value};
|
||||
use utoipa::OpenApi;
|
||||
|
||||
/// HTTP methods supported in OpenAPI specifications.
/// Used both for attaching code samples and for the `--check-summaries` pass.
const HTTP_METHODS: &[&str] = &["get", "post", "put", "patch", "delete"];

/// Type alias for the mapping from OpenAPI keys to their code samples.
type CodeSamplesMap = HashMap<String, Vec<CodeSample>>;

/// Type alias for the mapping from OpenAPI keys to sample IDs.
type KeyMapping = HashMap<String, String>;

/// Language used in the documentation repository (contains the key mapping)
const DOCS_LANG: &str = "cURL";

/// Mapping of repository URLs to language names.
/// The "cURL" entry is special: it contains the key mapping used to resolve sample IDs for all SDKs.
/// Each entry is `(raw file URL, display language name)`.
const CODE_SAMPLES: &[(&str, &str)] = &[
    ("https://raw.githubusercontent.com/meilisearch/documentation/refs/heads/main/.code-samples.meilisearch.yaml", "cURL"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-dotnet/refs/heads/main/.code-samples.meilisearch.yaml", "C#"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-dart/refs/heads/main/.code-samples.meilisearch.yaml", "Dart"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-go/refs/heads/main/.code-samples.meilisearch.yaml", "Go"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-java/refs/heads/main/.code-samples.meilisearch.yaml", "Java"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-js/refs/heads/main/.code-samples.meilisearch.yaml", "JS"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-php/refs/heads/main/.code-samples.meilisearch.yaml", "PHP"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-python/refs/heads/main/.code-samples.meilisearch.yaml", "Python"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-ruby/refs/heads/main/.code-samples.meilisearch.yaml", "Ruby"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-rust/refs/heads/main/.code-samples.meilisearch.yaml", "Rust"),
    ("https://raw.githubusercontent.com/meilisearch/meilisearch-swift/refs/heads/main/.code-samples.meilisearch.yaml", "Swift"),
];
|
||||
|
||||
#[derive(Parser)]
#[command(name = "openapi-generator")]
#[command(about = "Generate OpenAPI specification for Meilisearch")]
struct Cli {
    /// Output file path (default: meilisearch-openapi.json)
    #[arg(short, long, value_name = "FILE")]
    output: Option<PathBuf>,

    /// Pretty print the JSON output
    #[arg(short, long)]
    pretty: bool,

    /// Include Mintlify code samples from SDK repositories
    /// (fetches the `.code-samples.meilisearch.yaml` file of every SDK over the network)
    #[arg(long)]
    with_mintlify_code_samples: bool,

    /// Debug mode: display the mapping table and code samples
    #[arg(long)]
    debug: bool,

    /// Check that all routes have a summary (useful for CI)
    #[arg(long)]
    check_summaries: bool,
}
|
||||
|
||||
fn main() -> Result<()> {
|
||||
@@ -24,14 +68,31 @@ fn main() -> Result<()> {
|
||||
// Generate the OpenAPI specification
|
||||
let openapi = MeilisearchApi::openapi();
|
||||
|
||||
// Convert to serde_json::Value for modification
|
||||
let mut openapi_value: Value = serde_json::to_value(&openapi)?;
|
||||
|
||||
// Fetch and add code samples if enabled
|
||||
if cli.with_mintlify_code_samples {
|
||||
let code_samples = fetch_all_code_samples(cli.debug)?;
|
||||
add_code_samples_to_openapi(&mut openapi_value, &code_samples, cli.debug)?;
|
||||
}
|
||||
|
||||
// Clean up null descriptions in tags
|
||||
clean_null_descriptions(&mut openapi_value);
|
||||
|
||||
// Check that all routes have summaries if requested
|
||||
if cli.check_summaries {
|
||||
check_all_routes_have_summaries(&openapi_value)?;
|
||||
}
|
||||
|
||||
// Determine output path
|
||||
let output_path = cli.output.unwrap_or_else(|| PathBuf::from("meilisearch.json"));
|
||||
let output_path = cli.output.unwrap_or_else(|| PathBuf::from("meilisearch-openapi.json"));
|
||||
|
||||
// Serialize to JSON
|
||||
let json = if cli.pretty {
|
||||
serde_json::to_string_pretty(&openapi)?
|
||||
serde_json::to_string_pretty(&openapi_value)?
|
||||
} else {
|
||||
serde_json::to_string(&openapi)?
|
||||
serde_json::to_string(&openapi_value)?
|
||||
};
|
||||
|
||||
// Write to file
|
||||
@@ -41,3 +102,651 @@ fn main() -> Result<()> {
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Code sample for a specific language.
#[derive(Debug, Clone, PartialEq, Eq)]
struct CodeSample {
    /// Display name of the language (e.g. "cURL", "Rust", "JS").
    lang: String,
    /// Dedented source text of the sample.
    source: String,
}
|
||||
|
||||
/// Fetches and parses code samples from all SDK repositories.
|
||||
///
|
||||
/// Returns a map from OpenAPI key (e.g., `"get_indexes"`) to a list of code samples
|
||||
/// for different languages.
|
||||
fn fetch_all_code_samples(debug: bool) -> Result<CodeSamplesMap> {
|
||||
// First, fetch the documentation file to get the OpenAPI key -> code sample ID mapping
|
||||
let (docs_url, _) = CODE_SAMPLES
|
||||
.iter()
|
||||
.find(|(_, lang)| *lang == DOCS_LANG)
|
||||
.context("Documentation source not found in CODE_SAMPLES")?;
|
||||
|
||||
let docs_content = reqwest::blocking::get(*docs_url)
|
||||
.context("Failed to fetch documentation code samples")?
|
||||
.text()
|
||||
.context("Failed to read documentation code samples response")?;
|
||||
|
||||
// Build mapping from OpenAPI key to code sample ID (only first match per key)
|
||||
let openapi_key_to_sample_id = build_openapi_key_mapping(&docs_content);
|
||||
|
||||
// Build final result
|
||||
let mut all_samples: CodeSamplesMap = HashMap::new();
|
||||
|
||||
// Loop through all CODE_SAMPLES files
|
||||
for (url, lang) in CODE_SAMPLES {
|
||||
// Fetch content (reuse docs_content for documentation)
|
||||
let content: Cow<'_, str> = if *lang == DOCS_LANG {
|
||||
Cow::Borrowed(&docs_content)
|
||||
} else {
|
||||
match reqwest::blocking::get(*url).and_then(|r| r.text()) {
|
||||
Ok(text) => Cow::Owned(text),
|
||||
Err(e) => {
|
||||
eprintln!("Warning: Failed to fetch code samples for {}: {}", lang, e);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
};
|
||||
|
||||
// Parse all code samples from this file
|
||||
let sample_id_to_code = parse_code_samples_from_file(&content);
|
||||
|
||||
// Add to result using the mapping
|
||||
for (openapi_key, sample_id) in &openapi_key_to_sample_id {
|
||||
if let Some(source) = sample_id_to_code.get(sample_id) {
|
||||
all_samples
|
||||
.entry(openapi_key.clone())
|
||||
.or_default()
|
||||
.push(CodeSample { lang: lang.to_string(), source: source.clone() });
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Debug mode: display mapping table and code samples
|
||||
if debug {
|
||||
println!("\n=== OpenAPI Key to Sample ID Mapping ===\n");
|
||||
let mut keys: Vec<_> = openapi_key_to_sample_id.keys().collect();
|
||||
keys.sort();
|
||||
for key in keys {
|
||||
println!(" {} -> {}", key, openapi_key_to_sample_id[key]);
|
||||
}
|
||||
|
||||
println!("\n=== Code Samples ===\n");
|
||||
let mut sample_keys: Vec<_> = all_samples.keys().collect();
|
||||
sample_keys.sort();
|
||||
for key in sample_keys {
|
||||
let samples = &all_samples[key];
|
||||
let langs: Vec<_> = samples.iter().map(|s| s.lang.as_str()).collect();
|
||||
println!(" {} -> {}", key, langs.join(", "));
|
||||
}
|
||||
println!();
|
||||
}
|
||||
|
||||
Ok(all_samples)
|
||||
}
|
||||
|
||||
/// Builds a mapping from OpenAPI key to code sample ID from the documentation file.
|
||||
///
|
||||
/// The OpenAPI key is found on a line starting with `# ` (hash + space), containing a single word
|
||||
/// that starts with an HTTP method followed by an underscore (e.g., `# get_indexes`).
|
||||
/// The code sample ID is the first word of the next line.
|
||||
/// Only keeps the first code sample ID per OpenAPI key.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// ```yaml
|
||||
/// # get_indexes
|
||||
/// get_indexes_1: |-
|
||||
/// curl \
|
||||
/// -X GET 'MEILISEARCH_URL/indexes'
|
||||
/// get_indexes_2: |-
|
||||
/// curl \
|
||||
/// -X GET 'MEILISEARCH_URL/indexes?limit=5'
|
||||
/// # post_indexes
|
||||
/// create_indexes_1: |-
|
||||
/// curl \
|
||||
/// -X POST 'MEILISEARCH_URL/indexes'
|
||||
/// ```
|
||||
///
|
||||
/// This produces: `{"get_indexes": "get_indexes_1", "post_indexes": "create_indexes_1"}`
|
||||
fn build_openapi_key_mapping(content: &str) -> KeyMapping {
|
||||
let mut mapping = KeyMapping::new();
|
||||
let lines: Vec<_> = content.lines().collect();
|
||||
|
||||
for window in lines.windows(2) {
|
||||
let [line, next_line] = window else { continue };
|
||||
|
||||
// Check if line starts with "# " and extract the word
|
||||
let Some(word) = line.strip_prefix("# ").map(str::trim) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
// Must be a single word (no spaces) starting with an HTTP method prefix
|
||||
if word.contains(' ') || !is_http_method_prefixed(word) {
|
||||
continue;
|
||||
}
|
||||
|
||||
// Extract sample ID from next line (first word before `:`)
|
||||
let sample_id = next_line.split(':').next().map(str::trim).filter(|s| !s.is_empty());
|
||||
|
||||
// Only insert if key doesn't exist (keeps first match)
|
||||
if let (Entry::Vacant(entry), Some(id)) = (mapping.entry(word.to_string()), sample_id) {
|
||||
entry.insert(id.to_string());
|
||||
}
|
||||
}
|
||||
|
||||
mapping
|
||||
}
|
||||
|
||||
/// Checks if a word starts with an HTTP method followed by an underscore.
|
||||
fn is_http_method_prefixed(word: &str) -> bool {
|
||||
HTTP_METHODS
|
||||
.iter()
|
||||
.any(|&method| word.strip_prefix(method).is_some_and(|rest| rest.starts_with('_')))
|
||||
}
|
||||
|
||||
/// Parses all code samples from a YAML-like file.
///
/// A code sample starts on a line containing `: |-`; its ID is the text before
/// the `:`. The sample body runs until:
/// - the next `: |-` header,
/// - OR a `#` at column 0 (an *indented* `#` belongs to the sample),
/// - OR the end of the file.
///
/// The body is dedented by the indentation of its first non-empty line,
/// interior blank lines are kept, and trailing whitespace is trimmed.
///
/// # Example
///
/// ```yaml
/// get_indexes_1: |-
///   client.getIndexes()
///   # I write something
/// # COMMENT TO IGNORE
/// get_indexes_2: |-
///   client.getIndexes({ limit: 3 })
/// ```
///
/// This produces:
/// - `get_indexes_1` → `"client.getIndexes()\n# I write something"`
/// - `get_indexes_2` → `"client.getIndexes({ limit: 3 })"`
fn parse_code_samples_from_file(content: &str) -> HashMap<String, String> {
    /// Joins the accumulated lines and stores them under `id` (if any).
    fn flush(store: &mut HashMap<String, String>, id: Option<String>, body: &[String]) {
        if let Some(id) = id {
            store.insert(id, body.join("\n").trim_end().to_string());
        }
    }

    let mut parsed: HashMap<String, String> = HashMap::new();
    let mut active_id: Option<String> = None;
    let mut body: Vec<String> = Vec::new();
    let mut block_indent: Option<usize> = None;

    for raw in content.lines() {
        // New `id: |-` header — finish the previous sample first.
        if raw.contains(": |-") {
            flush(&mut parsed, active_id.take(), &body);
            body.clear();
            block_indent = None;
            active_id = raw.split(':').next().map(|id| id.trim().to_string());
            continue;
        }

        // A comment at column 0 terminates the current sample.
        if raw.starts_with('#') {
            flush(&mut parsed, active_id.take(), &body);
            body.clear();
            block_indent = None;
            continue;
        }

        // Lines outside any sample are ignored.
        if active_id.is_none() {
            continue;
        }

        // Keep interior blank lines, but drop leading ones.
        if raw.trim().is_empty() {
            if !body.is_empty() {
                body.push(String::new());
            }
            continue;
        }

        // Dedent by the indentation of the first non-empty body line.
        let indent = raw.len() - raw.trim_start().len();
        let base = *block_indent.get_or_insert(indent);
        body.push(raw.get(base..).unwrap_or_else(|| raw.trim_start()).to_string());
    }

    // Don't forget the last sample.
    flush(&mut parsed, active_id, &body);

    parsed
}
|
||||
|
||||
/// Converts an OpenAPI path and HTTP method to a code sample key.
|
||||
///
|
||||
/// # Example
|
||||
///
|
||||
/// - Path: `/indexes/{index_uid}/documents/{document_id}`
|
||||
/// - Method: `GET`
|
||||
/// - Result: `get_indexes_indexUid_documents_documentId`
|
||||
fn path_to_key(path: &str, method: &str) -> String {
|
||||
let method_lower = method.to_lowercase();
|
||||
|
||||
// Remove leading slash and convert path
|
||||
let path_part = path
|
||||
.trim_start_matches('/')
|
||||
.split('/')
|
||||
.map(|segment| {
|
||||
if segment.starts_with('{') && segment.ends_with('}') {
|
||||
// Convert {param_name} to camelCase
|
||||
let param = &segment[1..segment.len() - 1];
|
||||
to_camel_case(param)
|
||||
} else {
|
||||
// Keep path segments as-is, but replace hyphens with underscores
|
||||
segment.replace('-', "_")
|
||||
}
|
||||
})
|
||||
.collect::<Vec<_>>()
|
||||
.join("_");
|
||||
|
||||
if path_part.is_empty() {
|
||||
method_lower
|
||||
} else {
|
||||
format!("{}_{}", method_lower, path_part)
|
||||
}
|
||||
}
|
||||
|
||||
/// Converts a `snake_case` string to `camelCase`.
///
/// The first character is lowercased, each underscore is dropped, and the
/// character that follows an underscore is uppercased. Only ASCII case
/// conversion is applied.
///
/// # Example
///
/// ```
/// assert_eq!(to_camel_case("index_uid"), "indexUid");
/// ```
fn to_camel_case(s: &str) -> String {
    let mut out = String::with_capacity(s.len());
    let mut upper_next = false;

    for (pos, ch) in s.char_indices() {
        if ch == '_' {
            // Drop the underscore; uppercase whatever comes next.
            upper_next = true;
        } else if upper_next {
            out.push(ch.to_ascii_uppercase());
            upper_next = false;
        } else if pos == 0 {
            out.push(ch.to_ascii_lowercase());
        } else {
            out.push(ch);
        }
    }

    out
}
|
||||
|
||||
/// Adds code samples to the OpenAPI specification as `x-codeSamples` extensions.
|
||||
fn add_code_samples_to_openapi(
|
||||
openapi: &mut Value,
|
||||
code_samples: &CodeSamplesMap,
|
||||
debug: bool,
|
||||
) -> Result<()> {
|
||||
let paths = openapi
|
||||
.get_mut("paths")
|
||||
.and_then(|p| p.as_object_mut())
|
||||
.context("OpenAPI spec missing 'paths' object")?;
|
||||
|
||||
let mut routes_with_samples: Vec<String> = Vec::new();
|
||||
let mut routes_without_samples: Vec<String> = Vec::new();
|
||||
|
||||
// Collect all routes first for sorted debug output
|
||||
let mut all_routes: Vec<(String, String, String)> = Vec::new(); // (path, method, key)
|
||||
|
||||
for (path, path_item) in paths.iter_mut() {
|
||||
let Some(path_item) = path_item.as_object_mut() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for method in HTTP_METHODS {
|
||||
let Some(operation) = path_item.get_mut(*method) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let key = path_to_key(path, method);
|
||||
all_routes.push((path.clone(), method.to_string(), key.clone()));
|
||||
|
||||
if let Some(samples) = code_samples.get(&key) {
|
||||
routes_with_samples.push(key);
|
||||
|
||||
// Create x-codeSamples array according to Redocly spec
|
||||
// Sort by language name for consistent output
|
||||
let mut sorted_samples = samples.clone();
|
||||
sorted_samples.sort_by(|a, b| a.lang.cmp(&b.lang));
|
||||
|
||||
let code_sample_array: Vec<Value> = sorted_samples
|
||||
.iter()
|
||||
.map(|sample| {
|
||||
json!({
|
||||
"lang": sample.lang,
|
||||
"source": sample.source
|
||||
})
|
||||
})
|
||||
.collect();
|
||||
|
||||
if let Some(op) = operation.as_object_mut() {
|
||||
op.insert("x-codeSamples".to_string(), json!(code_sample_array));
|
||||
}
|
||||
} else {
|
||||
routes_without_samples.push(key);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Debug output
|
||||
if debug {
|
||||
routes_without_samples.sort();
|
||||
|
||||
if !routes_without_samples.is_empty() {
|
||||
println!("=== Routes without code samples ===\n");
|
||||
for key in &routes_without_samples {
|
||||
println!(" {}", key);
|
||||
}
|
||||
}
|
||||
|
||||
let total = all_routes.len();
|
||||
let with_samples = routes_with_samples.len();
|
||||
let without_samples = routes_without_samples.len();
|
||||
let percentage = if total > 0 { (with_samples as f64 / total as f64) * 100.0 } else { 0.0 };
|
||||
|
||||
println!("\n=== Summary ===\n");
|
||||
println!(" Total routes: {}", total);
|
||||
println!(" With code samples: {} ({:.1}%)", with_samples, percentage);
|
||||
println!(" Missing code samples: {} ({:.1}%)\n", without_samples, 100.0 - percentage);
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Cleans up null descriptions in tags to make Mintlify work.
|
||||
///
|
||||
/// Removes any `"description"` fields with null values (both JSON `null` and `"null"` string)
|
||||
/// from the tags array and all nested objects.
|
||||
fn clean_null_descriptions(openapi: &mut Value) {
|
||||
if let Some(tags) = openapi.get_mut("tags").and_then(|t| t.as_array_mut()) {
|
||||
for tag in tags.iter_mut() {
|
||||
remove_null_descriptions_recursive(tag);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Recursively removes all `"description"` fields that are `null` or the `"null"` string.
|
||||
fn remove_null_descriptions_recursive(value: &mut Value) {
|
||||
if let Some(obj) = value.as_object_mut() {
|
||||
// Check and remove description if it's null or "null" string
|
||||
if let Some(desc) = obj.get("description") {
|
||||
if desc.is_null() || (desc.is_string() && desc.as_str() == Some("null")) {
|
||||
obj.remove("description");
|
||||
}
|
||||
}
|
||||
|
||||
// Recursively process all nested objects
|
||||
for (_, v) in obj.iter_mut() {
|
||||
remove_null_descriptions_recursive(v);
|
||||
}
|
||||
} else if let Some(arr) = value.as_array_mut() {
|
||||
// Recursively process arrays
|
||||
for item in arr.iter_mut() {
|
||||
remove_null_descriptions_recursive(item);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Checks that all routes have a summary field.
|
||||
///
|
||||
/// Returns an error if any route is missing a summary.
|
||||
fn check_all_routes_have_summaries(openapi: &Value) -> Result<()> {
|
||||
let paths = openapi
|
||||
.get("paths")
|
||||
.and_then(|p| p.as_object())
|
||||
.context("OpenAPI spec missing 'paths' object")?;
|
||||
|
||||
let mut missing_summaries: Vec<String> = Vec::new();
|
||||
|
||||
for (path, path_item) in paths.iter() {
|
||||
let Some(path_item) = path_item.as_object() else {
|
||||
continue;
|
||||
};
|
||||
|
||||
for method in HTTP_METHODS {
|
||||
let Some(operation) = path_item.get(*method) else {
|
||||
continue;
|
||||
};
|
||||
|
||||
let has_summary =
|
||||
operation.get("summary").and_then(|s| s.as_str()).is_some_and(|s| !s.is_empty());
|
||||
|
||||
if !has_summary {
|
||||
missing_summaries.push(format!("{} {}", method.to_uppercase(), path));
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if missing_summaries.is_empty() {
|
||||
println!("All routes have summaries.");
|
||||
Ok(())
|
||||
} else {
|
||||
missing_summaries.sort();
|
||||
eprintln!("The following routes are missing a summary:");
|
||||
for route in &missing_summaries {
|
||||
eprintln!(" - {}", route);
|
||||
}
|
||||
eprintln!("\nTo fix this, add a doc-comment (///) above the route handler function.");
|
||||
eprintln!("The first line becomes the summary, subsequent lines become the description.");
|
||||
eprintln!("\nExample:");
|
||||
eprintln!(" /// List webhooks");
|
||||
eprintln!(" ///");
|
||||
eprintln!(" /// Get the list of all registered webhooks.");
|
||||
eprintln!(" #[utoipa::path(...)]");
|
||||
eprintln!(" async fn get_webhooks(...) {{ ... }}");
|
||||
anyhow::bail!("{} route(s) missing summary", missing_summaries.len());
|
||||
}
|
||||
}
|
||||
|
||||
#[cfg(test)]
mod tests {
    use super::*;

    // NOTE: raw-string YAML fixtures below rely on exact indentation — the
    // parser dedents by the first body line's indent and treats only a
    // column-0 `#` as a sample terminator.

    #[test]
    fn test_path_to_key() {
        assert_eq!(path_to_key("/indexes", "GET"), "get_indexes");
        assert_eq!(path_to_key("/indexes/{index_uid}", "GET"), "get_indexes_indexUid");
        assert_eq!(
            path_to_key("/indexes/{index_uid}/documents", "POST"),
            "post_indexes_indexUid_documents"
        );
        assert_eq!(
            path_to_key("/indexes/{index_uid}/documents/{document_id}", "GET"),
            "get_indexes_indexUid_documents_documentId"
        );
        assert_eq!(
            path_to_key("/indexes/{index_uid}/settings/stop-words", "GET"),
            "get_indexes_indexUid_settings_stop_words"
        );
    }

    #[test]
    fn test_to_camel_case() {
        assert_eq!(to_camel_case("index_uid"), "indexUid");
        assert_eq!(to_camel_case("document_id"), "documentId");
        assert_eq!(to_camel_case("task_uid"), "taskUid");
    }

    #[test]
    fn test_build_openapi_key_mapping() {
        let yaml = r#"
# get_indexes
get_indexes_1: |-
  curl \
  -X GET 'MEILISEARCH_URL/indexes'
get_indexes_2: |-
  curl \
  -X GET 'MEILISEARCH_URL/indexes?limit=5'
# post_indexes
create_indexes_1: |-
  curl \
  -X POST 'MEILISEARCH_URL/indexes'
# get_version
get_version_1: |-
  curl \
  -X GET 'MEILISEARCH_URL/version'
# COMMENT WITHOUT KEY - SHOULD BE IGNORED
## COMMENT WITHOUT KEY - SHOULD BE IGNORED
unrelated_sample_without_comment: |-
  curl \
  -X GET 'MEILISEARCH_URL/something'
"#;
        let mapping = build_openapi_key_mapping(yaml);

        // Should have 3 OpenAPI keys
        assert_eq!(mapping.len(), 3);
        assert!(mapping.contains_key("get_indexes"));
        assert!(mapping.contains_key("post_indexes"));
        assert!(mapping.contains_key("get_version"));

        // Only keeps the first code sample ID per OpenAPI key
        assert_eq!(mapping["get_indexes"], "get_indexes_1");
        assert_eq!(mapping["post_indexes"], "create_indexes_1");
        assert_eq!(mapping["get_version"], "get_version_1");

        // Comments with multiple words or ## should be ignored and not create keys
        assert!(!mapping.contains_key("COMMENT"));
        assert!(!mapping.contains_key("##"));
    }

    #[test]
    fn test_parse_code_samples_from_file() {
        let yaml = r#"
get_indexes_1: |-
  client.getIndexes()
  # I write something
# COMMENT TO IGNORE
get_indexes_2: |-
  client.getIndexes({ limit: 3 })
update_document: |-
  // Code with blank line

  updateDoc(doc)
  // End

delete_document_1: |-
  client.deleteDocument(1)
no_newline_at_end: |-
  client.update({ id: 1 })
key_with_empty_sample: |-
# This should produce an empty string for the sample
complex_block: |-
  // Some code
    Indented line
    # Indented comment
  Last line
"#;
        let samples = parse_code_samples_from_file(yaml);

        assert_eq!(samples.len(), 7);
        assert!(samples.contains_key("get_indexes_1"));
        assert!(samples.contains_key("get_indexes_2"));
        assert!(samples.contains_key("update_document"));
        assert!(samples.contains_key("delete_document_1"));
        assert!(samples.contains_key("no_newline_at_end"));
        assert!(samples.contains_key("key_with_empty_sample"));
        assert!(samples.contains_key("complex_block"));

        // get_indexes_1 includes indented comment
        assert_eq!(samples["get_indexes_1"], "client.getIndexes()\n# I write something");

        // get_indexes_2 is a single line
        assert_eq!(samples["get_indexes_2"], "client.getIndexes({ limit: 3 })");

        // update_document contains a blank line and some code
        assert_eq!(samples["update_document"], "// Code with blank line\n\nupdateDoc(doc)\n// End");

        // delete_document_1
        assert_eq!(samples["delete_document_1"], "client.deleteDocument(1)");

        // no_newline_at_end, explicitly just one line
        assert_eq!(samples["no_newline_at_end"], "client.update({ id: 1 })");

        // key_with_empty_sample should be empty string
        assert_eq!(samples["key_with_empty_sample"], "");

        // complex_block preserves indentation and comments
        assert_eq!(
            samples["complex_block"],
            "// Some code\n  Indented line\n  # Indented comment\nLast line"
        );
    }

    #[test]
    fn test_clean_null_descriptions() {
        let mut openapi = json!({
            "tags": [
                {
                    "name": "Test1",
                    "description": "null"
                },
                {
                    "name": "Test2",
                    "description": null
                },
                {
                    "name": "Test3",
                    "description": "Valid description"
                },
                {
                    "name": "Test4",
                    "description": "null",
                    "externalDocs": {
                        "url": "https://example.com",
                        "description": null
                    }
                },
                {
                    "name": "Test5",
                    "externalDocs": {
                        "url": "https://example.com",
                        "description": "null"
                    }
                }
            ]
        });

        clean_null_descriptions(&mut openapi);

        let tags = openapi["tags"].as_array().unwrap();

        // Test1: description "null" should be removed
        assert!(!tags[0].as_object().unwrap().contains_key("description"));

        // Test2: description null should be removed
        assert!(!tags[1].as_object().unwrap().contains_key("description"));

        // Test3: valid description should remain
        assert_eq!(tags[2]["description"], "Valid description");

        // Test4: both tag description and externalDocs description should be removed
        assert!(!tags[3].as_object().unwrap().contains_key("description"));
        assert!(!tags[3]["externalDocs"].as_object().unwrap().contains_key("description"));
        assert_eq!(tags[3]["externalDocs"]["url"], "https://example.com");

        // Test5: externalDocs description "null" should be removed
        assert!(!tags[4]["externalDocs"].as_object().unwrap().contains_key("description"));
        assert_eq!(tags[4]["externalDocs"]["url"], "https://example.com");
    }
}
|
||||
|
||||
Reference in New Issue
Block a user