Add response updating logic

Mubelotix
2025-08-26 10:59:12 +02:00
parent b2a72b0363
commit 4290901dea
10 changed files with 233 additions and 85 deletions

View File

@@ -99,7 +99,7 @@ async fn wait_for_health(
     asset_folder: &str,
 ) -> anyhow::Result<()> {
     for i in 0..100 {
-        let res = run_command(client.clone(), health_command(), assets, asset_folder).await;
+        let res = run_command(client, &health_command(), assets, asset_folder, false).await;
         if res.is_ok() {
             // check that this is actually the current Meilisearch instance that answered us
             if let Some(exit_code) =

View File

@@ -6,7 +6,7 @@ mod workload;
 use crate::common::args::CommonArgs;
 use crate::common::logs::setup_logs;
 use crate::common::workload::Workload;
-use std::path::PathBuf;
+use std::{path::PathBuf, sync::Arc};
 use anyhow::{bail, Context};
 use clap::Parser;
@@ -89,11 +89,11 @@ pub fn run(args: BenchDeriveArgs) -> anyhow::Result<()> {
         None,
     )?;
-    let meili_client = Client::new(
+    let meili_client = Arc::new(Client::new(
         Some("http://127.0.0.1:7700".into()),
         args.common.master_key.as_deref(),
         Some(std::time::Duration::from_secs(args.common.tasks_queue_timeout_secs)),
-    )?;
+    )?);
     // enter runtime

View File

@@ -2,10 +2,11 @@ use std::collections::BTreeMap;
 use std::fs::File;
 use std::io::{Seek as _, Write as _};
 use std::path::Path;
+use std::sync::Arc;
 use anyhow::{bail, Context as _};
 use futures_util::TryStreamExt as _;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 use serde_json::json;
 use tokio::task::JoinHandle;
 use uuid::Uuid;
@@ -19,7 +20,7 @@ use crate::common::command::{run_commands, Command};
 /// A bench workload.
 /// Not to be confused with [a test workload](crate::test::workload::Workload).
-#[derive(Deserialize)]
+#[derive(Serialize, Deserialize)]
 pub struct BenchWorkload {
     pub name: String,
     pub run_count: u16,
@@ -35,7 +36,7 @@ pub struct BenchWorkload {
 async fn run_workload_commands(
     dashboard_client: &DashboardClient,
     logs_client: &Client,
-    meili_client: &Client,
+    meili_client: &Arc<Client>,
     workload_uuid: Uuid,
     workload: &BenchWorkload,
     args: &BenchDeriveArgs,
@@ -43,9 +44,10 @@ async fn run_workload_commands(
 ) -> anyhow::Result<JoinHandle<anyhow::Result<File>>> {
     let report_folder = &args.report_folder;
     let workload_name = &workload.name;
+    let assets = Arc::new(workload.assets.clone());
+    let asset_folder = args.common.asset_folder.clone().leak();
-    run_commands(meili_client, &workload.precommands, &workload.assets, &args.common.asset_folder)
-        .await?;
+    run_commands(meili_client, &workload.precommands, &assets, asset_folder, false).await?;
     std::fs::create_dir_all(report_folder)
         .with_context(|| format!("could not create report directory at {report_folder}"))?;
@@ -55,8 +57,7 @@ async fn run_workload_commands(
     let report_handle = start_report(logs_client, trace_filename, &workload.target).await?;
-    run_commands(meili_client, &workload.commands, &workload.assets, &args.common.asset_folder)
-        .await?;
+    run_commands(meili_client, &workload.commands, &assets, asset_folder, false).await?;
     let processor =
         stop_report(dashboard_client, logs_client, workload_uuid, report_filename, report_handle)
@@ -71,7 +72,7 @@ pub async fn execute(
     assets_client: &Client,
     dashboard_client: &DashboardClient,
     logs_client: &Client,
-    meili_client: &Client,
+    meili_client: &Arc<Client>,
     invocation_uuid: Uuid,
     master_key: Option<&str>,
     workload: BenchWorkload,
@@ -119,7 +120,7 @@ pub async fn execute(
 async fn execute_run(
     dashboard_client: &DashboardClient,
     logs_client: &Client,
-    meili_client: &Client,
+    meili_client: &Arc<Client>,
     workload_uuid: Uuid,
     master_key: Option<&str>,
     workload: &BenchWorkload,
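Across this commit, the Meilisearch client and the asset map are wrapped in `Arc` and the asset folder is leaked into a `&'static str` because commands are now executed on spawned tokio tasks, whose futures must own (or hold `'static` references to) everything they capture. A minimal sketch of that ownership pattern, using hypothetical names rather than the crate's own types:

```rust
use std::sync::Arc;

// Hypothetical stand-in for the xtask Client; only here to make the sketch self-contained.
#[derive(Debug)]
struct Client {
    base_url: String,
}

#[tokio::main]
async fn main() {
    // A shared, cheaply clonable handle instead of a borrowed `&Client`.
    let client = Arc::new(Client { base_url: "http://127.0.0.1:7700".into() });
    // Leaking the String yields a `&'static str` that spawned tasks may keep freely.
    let asset_folder: &'static str = String::from("./assets").leak();

    let mut tasks = Vec::new();
    for i in 0..3 {
        let client = Arc::clone(&client);
        // The future owns its captures, satisfying tokio::spawn's `'static` bound.
        tasks.push(tokio::spawn(async move {
            println!("task {i} would hit {} with assets from {asset_folder}", client.base_url);
        }));
    }
    for task in tasks {
        task.await.expect("task panicked");
    }
}
```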

View File

@@ -3,21 +3,22 @@ use std::io::{Read as _, Seek as _, Write as _};
 use anyhow::{bail, Context};
 use futures_util::TryStreamExt as _;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 use sha2::Digest;
 use super::client::Client;
-#[derive(Deserialize, Clone)]
+#[derive(Serialize, Deserialize, Clone)]
 pub struct Asset {
     pub local_location: Option<String>,
     pub remote_location: Option<String>,
-    #[serde(default)]
+    #[serde(default, skip_serializing_if = "AssetFormat::is_default")]
     pub format: AssetFormat,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
     pub sha256: Option<String>,
 }
-#[derive(Deserialize, Default, Copy, Clone)]
+#[derive(Serialize, Deserialize, Default, Copy, Clone)]
 pub enum AssetFormat {
     #[default]
     Auto,
@@ -27,6 +28,10 @@ pub enum AssetFormat {
 }
 impl AssetFormat {
+    fn is_default(&self) -> bool {
+        matches!(self, AssetFormat::Auto)
+    }
     pub fn to_content_type(self, filename: &str) -> &'static str {
         match self {
             AssetFormat::Auto => Self::auto_detect(filename).to_content_type(filename),
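The new `skip_serializing_if` attributes keep rewritten workload files minimal: `format` is omitted when it is still `Auto`, and `sha256` is omitted when absent. A small illustration of that behavior with simplified stand-alone types (not the crate's own struct, which also carries `remote_location`):

```rust
use serde::{Deserialize, Serialize};

#[derive(Serialize, Deserialize, Default, Copy, Clone)]
enum AssetFormat {
    #[default]
    Auto,
    Json,
}

impl AssetFormat {
    fn is_default(&self) -> bool {
        matches!(self, AssetFormat::Auto)
    }
}

#[derive(Serialize, Deserialize, Clone)]
struct Asset {
    local_location: Option<String>,
    #[serde(default, skip_serializing_if = "AssetFormat::is_default")]
    format: AssetFormat,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    sha256: Option<String>,
}

fn main() {
    let asset = Asset {
        local_location: Some("movies.json".into()),
        format: AssetFormat::Auto,
        sha256: None,
    };
    // The default format and the missing checksum are not written back to the file.
    assert_eq!(
        serde_json::to_string(&asset).unwrap(),
        r#"{"local_location":"movies.json"}"#
    );
}
```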

View File

@@ -1,5 +1,5 @@
 use anyhow::Context;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 #[derive(Debug, Clone)]
 pub struct Client {
@@ -61,7 +61,7 @@ impl Client {
     }
 }
-#[derive(Debug, Clone, Copy, Deserialize)]
+#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
 #[serde(rename_all = "SCREAMING_SNAKE_CASE")]
 pub enum Method {
     Get,

View File

@@ -1,24 +1,32 @@
 use std::collections::BTreeMap;
 use std::fmt::Display;
 use std::io::Read as _;
+use std::sync::Arc;
 use anyhow::{bail, Context as _};
-use serde::Deserialize;
+use reqwest::StatusCode;
+use serde::{Deserialize, Serialize};
+use serde_json::Value;
 use crate::common::assets::{fetch_asset, Asset};
 use crate::common::client::{Client, Method};
-#[derive(Clone, Deserialize)]
+#[derive(Serialize, Deserialize, Clone)]
+#[serde(rename_all = "camelCase")]
 pub struct Command {
     pub route: String,
     pub method: Method,
     #[serde(default)]
     pub body: Body,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub expected_status: Option<u16>,
+    #[serde(default, skip_serializing_if = "Option::is_none")]
+    pub expected_response: Option<serde_json::Value>,
     #[serde(default)]
     synchronous: SyncMode,
 }
-#[derive(Default, Clone, Deserialize)]
+#[derive(Default, Clone, Serialize, Deserialize)]
 #[serde(untagged)]
 pub enum Body {
     Inline {
@@ -63,7 +71,7 @@ impl Display for Command {
     }
 }
-#[derive(Default, Debug, Clone, Copy, Deserialize)]
+#[derive(Default, Debug, Clone, Copy, Serialize, Deserialize)]
 enum SyncMode {
     DontWait,
     #[default]
@@ -72,41 +80,42 @@ enum SyncMode {
 }
 async fn run_batch(
-    client: &Client,
-    batch: &[Command],
-    assets: &BTreeMap<String, Asset>,
-    asset_folder: &str,
-) -> anyhow::Result<()> {
-    let [.., last] = batch else { return Ok(()) };
+    client: &Arc<Client>,
+    batch: Vec<Command>,
+    assets: &Arc<BTreeMap<String, Asset>>,
+    asset_folder: &'static str,
+    return_response: bool,
+) -> anyhow::Result<Vec<(Value, StatusCode)>> {
+    let [.., last] = batch.as_slice() else { return Ok(Vec::new()) };
     let sync = last.synchronous;
+    let batch_len = batch.len();
-    let mut tasks = tokio::task::JoinSet::new();
-    for command in batch {
-        // FIXME: you probably don't want to copy assets everytime here
-        tasks.spawn({
-            let client = client.clone();
-            let command = command.clone();
-            let assets = assets.clone();
-            let asset_folder = asset_folder.to_owned();
-            async move { run(client, command, &assets, &asset_folder).await }
-        });
+    let mut tasks = Vec::with_capacity(batch.len());
+    for batch in batch {
+        let client2 = Arc::clone(&client);
+        let assets2 = Arc::clone(&assets);
+        tasks.push(tokio::spawn(async move {
+            run(&client2, &batch, &assets2, asset_folder, return_response).await
+        }));
     }
-    while let Some(result) = tasks.join_next().await {
-        result
-            .context("panicked while executing command")?
-            .context("error while executing command")?;
+    let mut outputs = Vec::with_capacity(if return_response { batch_len } else { 0 });
+    for task in tasks {
+        let output = task.await.context("task panicked")??;
+        if let Some(output) = output {
+            if return_response {
+                outputs.push(output);
+            }
+        }
     }
     match sync {
         SyncMode::DontWait => {}
         SyncMode::WaitForResponse => {}
-        SyncMode::WaitForTask => wait_for_tasks(client).await?,
+        SyncMode::WaitForTask => wait_for_tasks(&client).await?,
     }
-    Ok(())
+    Ok(outputs)
 }
 async fn wait_for_tasks(client: &Client) -> anyhow::Result<()> {
@@ -150,13 +159,16 @@ async fn wait_for_tasks(client: &Client) -> anyhow::Result<()> {
 #[tracing::instrument(skip(client, command, assets, asset_folder), fields(command = %command))]
 pub async fn run(
-    client: Client,
-    mut command: Command,
+    client: &Client,
+    command: &Command,
     assets: &BTreeMap<String, Asset>,
     asset_folder: &str,
-) -> anyhow::Result<()> {
+    return_value: bool,
+) -> anyhow::Result<Option<(Value, StatusCode)>> {
     // memtake the body here to leave an empty body in its place, so that command is not partially moved-out
-    let body = std::mem::take(&mut command.body)
+    let body = command
+        .body
+        .clone()
         .get(assets, asset_folder)
         .with_context(|| format!("while getting body for command {command}"))?;
@@ -172,7 +184,17 @@ pub async fn run(
         request.send().await.with_context(|| format!("error sending command: {}", command))?;
     let code = response.status();
-    if code.is_client_error() {
+    if let Some(expected_status) = command.expected_status {
+        if code.as_u16() != expected_status {
+            let response = response
+                .text()
+                .await
+                .context("could not read response body as text")
+                .context("reading response body when checking expected status")?;
+            bail!("unexpected status code: got {}, expected {expected_status}, response body: '{response}'", code.as_u16());
+        }
+    } else if code.is_client_error() {
         tracing::error!(%command, %code, "error in workload file");
         let response: serde_json::Value = response
             .json()
@@ -190,22 +212,44 @@ pub async fn run(
         bail!("server error: server responded with error code {code} and '{response}'")
     }
-    Ok(())
+    if return_value {
+        let response: serde_json::Value = response
+            .json()
+            .await
+            .context("could not deserialize response as JSON")
+            .context("parsing response when recording expected response")?;
+        return Ok(Some((response, code)));
+    } else if let Some(expected_response) = &command.expected_response {
+        let response: serde_json::Value = response
+            .json()
+            .await
+            .context("could not deserialize response as JSON")
+            .context("parsing response when checking expected response")?;
+        if &response != expected_response {
+            bail!("unexpected response: got '{response}', expected '{expected_response}'");
+        }
+    }
+    Ok(None)
 }
 pub async fn run_commands(
-    client: &Client,
+    client: &Arc<Client>,
     commands: &[Command],
-    assets: &BTreeMap<String, Asset>,
-    asset_folder: &str,
-) -> anyhow::Result<()> {
+    assets: &Arc<BTreeMap<String, Asset>>,
+    asset_folder: &'static str,
+    return_response: bool,
+) -> anyhow::Result<Vec<(Value, StatusCode)>> {
+    let mut responses = Vec::new();
     for batch in
         commands.split_inclusive(|command| !matches!(command.synchronous, SyncMode::DontWait))
     {
-        run_batch(client, batch, assets, asset_folder).await?;
+        let mut new_responses =
+            run_batch(client, batch.to_vec(), assets, asset_folder, return_response).await?;
+        responses.append(&mut new_responses);
     }
-    Ok(())
+    Ok(responses)
 }
 pub fn health_command() -> Command {
@@ -214,5 +258,7 @@ pub fn health_command() -> Command {
         method: crate::common::client::Method::Get,
         body: Default::default(),
         synchronous: SyncMode::WaitForResponse,
+        expected_status: None,
+        expected_response: None,
     }
 }
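With `#[serde(rename_all = "camelCase")]` on `Command`, the new optional fields show up in workload JSON as `expectedStatus` and `expectedResponse`, and both are omitted when unset. A hedged sketch of how such a command could parse, using a simplified mirror of the struct rather than the crate's own type:

```rust
use serde::{Deserialize, Serialize};

// Simplified mirror of the fields added in this commit; the real struct also
// carries `method`, `body`, and `synchronous`.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
struct CommandSketch {
    route: String,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    expected_status: Option<u16>,
    #[serde(default, skip_serializing_if = "Option::is_none")]
    expected_response: Option<serde_json::Value>,
}

fn main() {
    let raw = r#"{
        "route": "indexes/movies/search",
        "expectedStatus": 200,
        "expectedResponse": { "hits": [] }
    }"#;
    let cmd: CommandSketch = serde_json::from_str(raw).unwrap();
    assert_eq!(cmd.expected_status, Some(200));

    // When both fields are omitted, the command behaves as before:
    // client and server error statuses still fail the run, and the body is not compared.
    let bare: CommandSketch = serde_json::from_str(r#"{ "route": "health" }"#).unwrap();
    assert!(bare.expected_status.is_none() && bare.expected_response.is_none());
}
```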

View File

@@ -1,8 +1,8 @@
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
 use crate::{bench::BenchWorkload, test::TestWorkload};
-#[derive(Deserialize)]
+#[derive(Serialize, Deserialize)]
 #[serde(tag = "type")]
 #[serde(rename_all = "camelCase")]
 pub enum Workload {

View File

@@ -1,4 +1,4 @@
-use std::time::Duration;
+use std::{sync::Arc, time::Duration};
 use crate::common::{args::CommonArgs, client::Client, logs::setup_logs, workload::Workload};
 use anyhow::{bail, Context};
@@ -15,6 +15,14 @@ pub struct TestDeriveArgs {
     /// Common arguments shared with other commands
     #[command(flatten)]
     common: CommonArgs,
+    /// Enables workloads to be rewritten in place to update expected responses.
+    #[arg(short, long, default_value_t = false)]
+    pub update_responses: bool,
+    /// Enables workloads to be rewritten in place to add missing expected responses.
+    #[arg(short, long, default_value_t = false)]
+    pub add_missing_responses: bool,
 }
 pub fn run(args: TestDeriveArgs) -> anyhow::Result<()> {
@@ -30,18 +38,19 @@ async fn run_inner(args: TestDeriveArgs) -> anyhow::Result<()> {
     setup_logs(&args.common.log_filter)?;
     // setup clients
-    let assets_client = Client::new(
+    let assets_client = Arc::new(Client::new(
         None,
         args.common.assets_key.as_deref(),
         Some(Duration::from_secs(3600)), // 1h
-    )?;
+    )?);
-    let meili_client = Client::new(
+    let meili_client = Arc::new(Client::new(
         Some("http://127.0.0.1:7700".into()),
         args.common.master_key.as_deref(),
         Some(Duration::from_secs(args.common.tasks_queue_timeout_secs)),
-    )?;
+    )?);
+    let asset_folder = args.common.asset_folder.clone().leak();
     for workload_file in &args.common.workload_file {
         let workload: Workload = serde_json::from_reader(
             std::fs::File::open(workload_file)
@@ -49,16 +58,18 @@ async fn run_inner(args: TestDeriveArgs) -> anyhow::Result<()> {
         )
         .with_context(|| format!("error parsing {} as JSON", workload_file.display()))?;
-        let Workload::Test(mut workload) = workload else {
+        let Workload::Test(workload) = workload else {
             bail!("workload file {} is not a test workload", workload_file.display());
         };
-        match workload.run(&args, &assets_client, &meili_client).await {
+        let name = workload.name.clone();
+        match workload.run(&args, &assets_client, &meili_client, asset_folder).await {
             Ok(_) => {
-                println!("✅ Workload {} completed successfully", workload.name,);
+                println!("✅ Workload {name} completed successfully");
             }
             Err(error) => {
-                println!("❌ Workload {} failed: {error}", workload.name,);
+                println!("❌ Workload {name} failed: {error}");
+                println!("Is this intentional? If so, rerun with --update-responses to update the workload files.");
                 return Err(error);
             }
         }

View File

@@ -1,12 +1,14 @@
+use anyhow::Context;
 use cargo_metadata::semver::Version;
-use serde::Deserialize;
+use serde::{Deserialize, Serialize};
-use std::collections::BTreeMap;
+use std::{collections::BTreeMap, sync::Arc};
 use crate::{
     common::{
         assets::{fetch_assets, Asset},
         client::Client,
         command::{run_commands, Command},
+        workload::Workload,
     },
     test::{versions::expand_assets_with_versions, TestDeriveArgs},
 };
@@ -33,21 +35,33 @@ impl<'a> Deserialize<'a> for VersionOrLatest {
     }
 }
-#[derive(Deserialize)]
+impl Serialize for VersionOrLatest {
+    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        match self {
+            VersionOrLatest::Version(v) => serializer.serialize_str(&v.to_string()),
+            VersionOrLatest::Latest => serializer.serialize_str("latest"),
+        }
+    }
+}
+#[derive(Serialize, Deserialize)]
 #[serde(untagged)]
 pub enum CommandOrUpgrade {
     Command(Command),
     Upgrade { upgrade: VersionOrLatest },
 }
-enum CommandOrUpgradeVec {
-    Commands(Vec<Command>),
+enum CommandOrUpgradeVec<'a> {
+    Commands(Vec<&'a mut Command>),
     Upgrade(VersionOrLatest),
 }
 /// A test workload.
 /// Not to be confused with [a bench workload](crate::bench::workload::Workload).
-#[derive(Deserialize)]
+#[derive(Serialize, Deserialize)]
 #[serde(rename_all = "camelCase")]
 pub struct TestWorkload {
     pub name: String,
@@ -58,18 +72,19 @@ pub struct TestWorkload {
 impl TestWorkload {
     pub async fn run(
-        &mut self,
+        mut self,
         args: &TestDeriveArgs,
         assets_client: &Client,
-        meili_client: &Client,
+        meili_client: &Arc<Client>,
+        asset_folder: &'static str,
     ) -> anyhow::Result<()> {
         // Group commands between upgrades
         let mut commands_or_upgrade = Vec::new();
         let mut current_commands = Vec::new();
         let mut all_versions = vec![self.initial_version.clone()];
-        for command_or_upgrade in &self.commands {
+        for command_or_upgrade in &mut self.commands {
             match command_or_upgrade {
-                CommandOrUpgrade::Command(command) => current_commands.push(command.clone()),
+                CommandOrUpgrade::Command(command) => current_commands.push(command),
                 CommandOrUpgrade::Upgrade { upgrade } => {
                     if !current_commands.is_empty() {
                         commands_or_upgrade.push(CommandOrUpgradeVec::Commands(current_commands));
@@ -82,16 +97,40 @@ impl TestWorkload {
                 }
             }
         }
+        if !current_commands.is_empty() {
+            commands_or_upgrade.push(CommandOrUpgradeVec::Commands(current_commands));
+        }
         // Fetch assets
         expand_assets_with_versions(&mut self.assets, &all_versions).await?;
         fetch_assets(assets_client, &self.assets, &args.common.asset_folder).await?;
+        let assets = Arc::new(self.assets.clone());
+        let return_responses = dbg!(args.add_missing_responses || args.update_responses);
         for command_or_upgrade in commands_or_upgrade {
             match command_or_upgrade {
                 CommandOrUpgradeVec::Commands(commands) => {
-                    run_commands(meili_client, &commands, &self.assets, &args.common.asset_folder)
-                        .await?;
+                    let cloned: Vec<_> = commands.iter().map(|c| (*c).clone()).collect();
+                    let responses = run_commands(
+                        meili_client,
+                        &cloned,
+                        &assets,
+                        asset_folder,
+                        return_responses,
+                    )
+                    .await?;
+                    if return_responses {
+                        assert_eq!(responses.len(), cloned.len());
+                        for (command, (response, status)) in commands.into_iter().zip(responses) {
+                            if args.update_responses
+                                || (dbg!(args.add_missing_responses)
+                                    && dbg!(command.expected_response.is_none()))
+                            {
+                                command.expected_response = Some(response);
+                                command.expected_status = Some(status.as_u16());
+                            }
+                        }
+                    }
                 }
                 CommandOrUpgradeVec::Upgrade(version) => {
                     todo!()
@@ -99,6 +138,23 @@ impl TestWorkload {
             }
         }
+        // Write back the workload if needed
+        if return_responses {
+            // Filter out the assets we added for the versions
+            self.assets.retain(|_, asset| {
+                asset.local_location.as_ref().is_none_or(|a| !a.starts_with("meilisearch-"))
+            });
+            let workload = Workload::Test(self);
+            let file = std::fs::File::create(&args.common.workload_file[0]).with_context(|| {
+                format!("could not open {}", args.common.workload_file[0].display())
+            })?;
+            serde_json::to_writer_pretty(file, &workload).with_context(|| {
+                format!("could not write to {}", args.common.workload_file[0].display())
+            })?;
+            tracing::info!("Updated workload file {}", args.common.workload_file[0].display());
+        }
         Ok(())
     }
 }
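The write-back branch rewrites the first workload file with the responses recorded during the run. Stripped of the `dbg!` scaffolding, the per-command decision in the loop above reduces to "always overwrite with `--update-responses`, only fill gaps with `--add-missing-responses`". A condensed sketch of that rule as a hypothetical free function (same logic, not code from the commit):

```rust
/// Decide whether a recorded (status, response) pair should be written into a command.
/// `update_responses` overwrites everything; `add_missing_responses` only fills commands
/// that do not carry an expected response yet.
fn should_record(update_responses: bool, add_missing_responses: bool, has_expected: bool) -> bool {
    update_responses || (add_missing_responses && !has_expected)
}

fn main() {
    // --update-responses rewrites even commands that already pin a response.
    assert!(should_record(true, false, true));
    // --add-missing-responses leaves existing expectations untouched...
    assert!(!should_record(false, true, true));
    // ...but records a response for commands that had none.
    assert!(should_record(false, true, false));
    // Without either flag nothing is recorded and the file is not rewritten.
    assert!(!should_record(false, false, false));
}
```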

View File

@@ -1,6 +1,6 @@
 {
+  "name": "movies",
   "type": "test",
-  "name": "movies",
   "initialVersion": "1.12.0",
   "assets": {
     "movies.json": {
@@ -9,5 +9,34 @@
       "sha256": "5b6e4cb660bc20327776e8a33ea197b43d9ec84856710ead1cc87ab24df77de1"
     }
   },
-  "commands": []
+  "commands": [
+    {
+      "route": "indexes/movies/settings",
+      "method": "PATCH",
+      "body": {
+        "inline": {
+          "filterableAttributes": [
+            "genres",
+            "release_date"
+          ],
+          "searchableAttributes": [
+            "title",
+            "overview"
+          ],
+          "sortableAttributes": [
+            "release_date"
+          ]
+        }
+      },
+      "synchronous": "DontWait"
+    },
+    {
+      "route": "indexes/movies/documents",
+      "method": "POST",
+      "body": {
+        "asset": "movies.json"
+      },
+      "synchronous": "WaitForTask"
+    }
+  ]
 }