mirror of
				https://github.com/meilisearch/meilisearch.git
				synced 2025-10-31 16:06:31 +00:00 
			
		
		
		
	chore: get rid of chrono in favor of time
Chrono has been unmaintained for a few months now and there is a CVE on it. Make clippy happy. Bump milli.
This commit is contained in:
		
							
								
								
									
										72
									
								
								Cargo.lock
									
									
									
										generated
									
									
									
								
							
							
						
						
									
										72
									
								
								Cargo.lock
									
									
									
										generated
									
									
									
								
							| @@ -300,6 +300,12 @@ version = "1.5.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "c5d78ce20460b82d3fa150275ed9d55e21064fc7951177baacf86a145c4a4b1f" | ||||
|  | ||||
| [[package]] | ||||
| name = "arrayvec" | ||||
| version = "0.5.2" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "23b62fc65de8e4e7f52534fb52b0f3ed04746ae267519eef2a83941e8085068b" | ||||
|  | ||||
| [[package]] | ||||
| name = "as-slice" | ||||
| version = "0.1.5" | ||||
| @@ -647,7 +653,6 @@ dependencies = [ | ||||
|  "libc", | ||||
|  "num-integer", | ||||
|  "num-traits", | ||||
|  "serde", | ||||
|  "time 0.1.44", | ||||
|  "winapi", | ||||
| ] | ||||
| @@ -989,9 +994,9 @@ dependencies = [ | ||||
| [[package]] | ||||
| name = "filter-parser" | ||||
| version = "0.1.0" | ||||
| source = "git+https://github.com/meilisearch/milli.git?tag=v0.22.1#ea15ad6c34492b32eb7ac06e69de02b6dc70a707" | ||||
| source = "git+https://github.com/meilisearch/milli.git?tag=v0.22.2#f2984f66e64838d51f5cce412693fa411ee3f2d4" | ||||
| dependencies = [ | ||||
|  "nom", | ||||
|  "nom 7.1.0", | ||||
|  "nom_locate", | ||||
| ] | ||||
|  | ||||
| @@ -1479,6 +1484,15 @@ version = "2.3.1" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "68f2d64f2edebec4ce84ad108148e67e1064789bee435edc5b60ad398714a3a9" | ||||
|  | ||||
| [[package]] | ||||
| name = "iso8601-duration" | ||||
| version = "0.1.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "60b51dd97fa24074214b9eb14da518957573f4dec3189112610ae1ccec9ac464" | ||||
| dependencies = [ | ||||
|  "nom 5.1.2", | ||||
| ] | ||||
|  | ||||
| [[package]] | ||||
| name = "itertools" | ||||
| version = "0.10.3" | ||||
| @@ -1568,6 +1582,19 @@ dependencies = [ | ||||
|  "fst", | ||||
| ] | ||||
|  | ||||
| [[package]] | ||||
| name = "lexical-core" | ||||
| version = "0.7.6" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "6607c62aa161d23d17a9072cc5da0be67cdfc89d3afb1e8d9c842bebc2525ffe" | ||||
| dependencies = [ | ||||
|  "arrayvec", | ||||
|  "bitflags", | ||||
|  "cfg-if 1.0.0", | ||||
|  "ryu", | ||||
|  "static_assertions", | ||||
| ] | ||||
|  | ||||
| [[package]] | ||||
| name = "libc" | ||||
| version = "0.2.114" | ||||
| @@ -1688,7 +1715,6 @@ checksum = "a3e378b66a060d48947b590737b30a1be76706c8dd7b8ba0f2fe3989c68a853f" | ||||
| name = "meilisearch-auth" | ||||
| version = "0.25.0" | ||||
| dependencies = [ | ||||
|  "chrono", | ||||
|  "enum-iterator", | ||||
|  "heed", | ||||
|  "meilisearch-error", | ||||
| @@ -1697,6 +1723,7 @@ dependencies = [ | ||||
|  "serde_json", | ||||
|  "sha2", | ||||
|  "thiserror", | ||||
|  "time 0.3.7", | ||||
| ] | ||||
|  | ||||
| [[package]] | ||||
| @@ -1727,7 +1754,6 @@ dependencies = [ | ||||
|  "byte-unit", | ||||
|  "bytes", | ||||
|  "cargo_toml", | ||||
|  "chrono", | ||||
|  "clap", | ||||
|  "crossbeam-channel", | ||||
|  "either", | ||||
| @@ -1740,6 +1766,7 @@ dependencies = [ | ||||
|  "hex", | ||||
|  "http", | ||||
|  "indexmap", | ||||
|  "iso8601-duration", | ||||
|  "itertools", | ||||
|  "jsonwebtoken", | ||||
|  "log", | ||||
| @@ -1775,6 +1802,7 @@ dependencies = [ | ||||
|  "tempfile", | ||||
|  "thiserror", | ||||
|  "tikv-jemallocator", | ||||
|  "time 0.3.7", | ||||
|  "tokio", | ||||
|  "tokio-stream", | ||||
|  "urlencoding", | ||||
| @@ -1796,7 +1824,6 @@ dependencies = [ | ||||
|  "atomic_refcell", | ||||
|  "byte-unit", | ||||
|  "bytes", | ||||
|  "chrono", | ||||
|  "clap", | ||||
|  "crossbeam-channel", | ||||
|  "csv", | ||||
| @@ -1840,6 +1867,7 @@ dependencies = [ | ||||
|  "tar", | ||||
|  "tempfile", | ||||
|  "thiserror", | ||||
|  "time 0.3.7", | ||||
|  "tokio", | ||||
|  "uuid", | ||||
|  "walkdir", | ||||
| @@ -1888,14 +1916,13 @@ dependencies = [ | ||||
|  | ||||
| [[package]] | ||||
| name = "milli" | ||||
| version = "0.22.1" | ||||
| source = "git+https://github.com/meilisearch/milli.git?tag=v0.22.1#ea15ad6c34492b32eb7ac06e69de02b6dc70a707" | ||||
| version = "0.23.0" | ||||
| source = "git+https://github.com/meilisearch/milli.git?tag=v0.22.2#f2984f66e64838d51f5cce412693fa411ee3f2d4" | ||||
| dependencies = [ | ||||
|  "bimap", | ||||
|  "bincode", | ||||
|  "bstr", | ||||
|  "byteorder", | ||||
|  "chrono", | ||||
|  "concat-arrays", | ||||
|  "crossbeam-channel", | ||||
|  "csv", | ||||
| @@ -1927,6 +1954,7 @@ dependencies = [ | ||||
|  "smallstr", | ||||
|  "smallvec", | ||||
|  "tempfile", | ||||
|  "time 0.3.7", | ||||
|  "uuid", | ||||
| ] | ||||
|  | ||||
| @@ -2029,6 +2057,17 @@ name = "nelson" | ||||
| version = "0.1.0" | ||||
| source = "git+https://github.com/MarinPostma/nelson.git?rev=675f13885548fb415ead8fbb447e9e6d9314000a#675f13885548fb415ead8fbb447e9e6d9314000a" | ||||
|  | ||||
| [[package]] | ||||
| name = "nom" | ||||
| version = "5.1.2" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "ffb4262d26ed83a1c0a33a38fe2bb15797329c85770da05e6b828ddb782627af" | ||||
| dependencies = [ | ||||
|  "lexical-core", | ||||
|  "memchr", | ||||
|  "version_check", | ||||
| ] | ||||
|  | ||||
| [[package]] | ||||
| name = "nom" | ||||
| version = "7.1.0" | ||||
| @@ -2048,7 +2087,7 @@ checksum = "37794436ca3029a3089e0b95d42da1f0b565ad271e4d3bb4bad0c7bb70b10605" | ||||
| dependencies = [ | ||||
|  "bytecount", | ||||
|  "memchr", | ||||
|  "nom", | ||||
|  "nom 7.1.0", | ||||
| ] | ||||
|  | ||||
| [[package]] | ||||
| @@ -2780,16 +2819,16 @@ dependencies = [ | ||||
|  | ||||
| [[package]] | ||||
| name = "segment" | ||||
| version = "0.1.2" | ||||
| version = "0.2.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "9bdcc286fff0e7c5ccd46c06a301c7a8a848b06acedc6983707bd311eb358002" | ||||
| checksum = "5c14967a911a216177366bac6dfa1209b597e311a32360431c63526e27b814fb" | ||||
| dependencies = [ | ||||
|  "async-trait", | ||||
|  "chrono", | ||||
|  "reqwest", | ||||
|  "serde", | ||||
|  "serde_json", | ||||
|  "thiserror", | ||||
|  "time 0.3.7", | ||||
| ] | ||||
|  | ||||
| [[package]] | ||||
| @@ -2976,6 +3015,12 @@ dependencies = [ | ||||
|  "path-slash", | ||||
| ] | ||||
|  | ||||
| [[package]] | ||||
| name = "static_assertions" | ||||
| version = "1.1.0" | ||||
| source = "registry+https://github.com/rust-lang/crates.io-index" | ||||
| checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f" | ||||
|  | ||||
| [[package]] | ||||
| name = "strsim" | ||||
| version = "0.10.0" | ||||
| @@ -3147,6 +3192,7 @@ dependencies = [ | ||||
|  "itoa 1.0.1", | ||||
|  "libc", | ||||
|  "num_threads", | ||||
|  "serde", | ||||
|  "time-macros", | ||||
| ] | ||||
|  | ||||
|   | ||||
| @@ -7,9 +7,9 @@ edition = "2021" | ||||
| enum-iterator = "0.7.0" | ||||
| heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" } | ||||
| sha2 = "0.9.6" | ||||
| chrono = { version = "0.4.19", features = ["serde"] } | ||||
| meilisearch-error = { path = "../meilisearch-error" } | ||||
| serde_json = { version = "1.0.67", features = ["preserve_order"] } | ||||
| time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } | ||||
| rand = "0.8.4" | ||||
| serde = { version = "1.0.130", features = ["derive"] } | ||||
| thiserror = "1.0.28" | ||||
|   | ||||
| @@ -10,13 +10,13 @@ pub type Result<T> = std::result::Result<T, AuthControllerError>; | ||||
| pub enum AuthControllerError { | ||||
|     #[error("`{0}` field is mandatory.")] | ||||
|     MissingParameter(&'static str), | ||||
|     #[error("actions field value `{0}` is invalid. It should be an array of string representing action names.")] | ||||
|     #[error("`actions` field value `{0}` is invalid. It should be an array of string representing action names.")] | ||||
|     InvalidApiKeyActions(Value), | ||||
|     #[error("indexes field value `{0}` is invalid. It should be an array of string representing index names.")] | ||||
|     #[error("`indexes` field value `{0}` is invalid. It should be an array of string representing index names.")] | ||||
|     InvalidApiKeyIndexes(Value), | ||||
|     #[error("expiresAt field value `{0}` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'.")] | ||||
|     #[error("`expiresAt` field value `{0}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.")] | ||||
|     InvalidApiKeyExpiresAt(Value), | ||||
|     #[error("description field value `{0}` is invalid. It should be a string or specified as a null value.")] | ||||
|     #[error("`description` field value `{0}` is invalid. It should be a string or specified as a null value.")] | ||||
|     InvalidApiKeyDescription(Value), | ||||
|     #[error("API key `{0}` not found.")] | ||||
|     ApiKeyNotFound(String), | ||||
|   | ||||
| @@ -1,10 +1,12 @@ | ||||
| use crate::action::Action; | ||||
| use crate::error::{AuthControllerError, Result}; | ||||
| use crate::store::{KeyId, KEY_ID_LENGTH}; | ||||
| use chrono::{DateTime, NaiveDate, NaiveDateTime, Utc}; | ||||
| use rand::Rng; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use serde_json::{from_value, Value}; | ||||
| use time::format_description::well_known::Rfc3339; | ||||
| use time::macros::{format_description, time}; | ||||
| use time::{Date, OffsetDateTime, PrimitiveDateTime}; | ||||
|  | ||||
| #[derive(Debug, Deserialize, Serialize)] | ||||
| pub struct Key { | ||||
| @@ -13,9 +15,12 @@ pub struct Key { | ||||
|     pub id: KeyId, | ||||
|     pub actions: Vec<Action>, | ||||
|     pub indexes: Vec<String>, | ||||
|     pub expires_at: Option<DateTime<Utc>>, | ||||
|     pub created_at: DateTime<Utc>, | ||||
|     pub updated_at: DateTime<Utc>, | ||||
|     #[serde(with = "time::serde::rfc3339::option")] | ||||
|     pub expires_at: Option<OffsetDateTime>, | ||||
|     #[serde(with = "time::serde::rfc3339")] | ||||
|     pub created_at: OffsetDateTime, | ||||
|     #[serde(with = "time::serde::rfc3339")] | ||||
|     pub updated_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| impl Key { | ||||
| @@ -52,8 +57,8 @@ impl Key { | ||||
|             .map(parse_expiration_date) | ||||
|             .ok_or(AuthControllerError::MissingParameter("expiresAt"))??; | ||||
|  | ||||
|         let created_at = Utc::now(); | ||||
|         let updated_at = Utc::now(); | ||||
|         let created_at = OffsetDateTime::now_utc(); | ||||
|         let updated_at = created_at; | ||||
|  | ||||
|         Ok(Self { | ||||
|             description, | ||||
| @@ -89,24 +94,26 @@ impl Key { | ||||
|             self.expires_at = parse_expiration_date(exp)?; | ||||
|         } | ||||
|  | ||||
|         self.updated_at = Utc::now(); | ||||
|         self.updated_at = OffsetDateTime::now_utc(); | ||||
|  | ||||
|         Ok(()) | ||||
|     } | ||||
|  | ||||
|     pub(crate) fn default_admin() -> Self { | ||||
|         let now = OffsetDateTime::now_utc(); | ||||
|         Self { | ||||
|             description: Some("Default Admin API Key (Use it for all other operations. Caution! Do not use it on a public frontend)".to_string()), | ||||
|             id: generate_id(), | ||||
|             actions: vec![Action::All], | ||||
|             indexes: vec!["*".to_string()], | ||||
|             expires_at: None, | ||||
|             created_at: Utc::now(), | ||||
|             updated_at: Utc::now(), | ||||
|             created_at: now, | ||||
|             updated_at: now, | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     pub(crate) fn default_search() -> Self { | ||||
|         let now = OffsetDateTime::now_utc(); | ||||
|         Self { | ||||
|             description: Some( | ||||
|                 "Default Search API Key (Use it to search from the frontend)".to_string(), | ||||
| @@ -115,8 +122,8 @@ impl Key { | ||||
|             actions: vec![Action::Search], | ||||
|             indexes: vec!["*".to_string()], | ||||
|             expires_at: None, | ||||
|             created_at: Utc::now(), | ||||
|             updated_at: Utc::now(), | ||||
|             created_at: now, | ||||
|             updated_at: now, | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -134,22 +141,34 @@ fn generate_id() -> [u8; KEY_ID_LENGTH] { | ||||
|     bytes | ||||
| } | ||||
|  | ||||
| fn parse_expiration_date(value: &Value) -> Result<Option<DateTime<Utc>>> { | ||||
| fn parse_expiration_date(value: &Value) -> Result<Option<OffsetDateTime>> { | ||||
|     match value { | ||||
|         Value::String(string) => DateTime::parse_from_rfc3339(string) | ||||
|             .map(|d| d.into()) | ||||
|         Value::String(string) => OffsetDateTime::parse(string, &Rfc3339) | ||||
|             .or_else(|_| { | ||||
|                 NaiveDateTime::parse_from_str(string, "%Y-%m-%dT%H:%M:%S") | ||||
|                     .map(|naive| DateTime::from_utc(naive, Utc)) | ||||
|                 PrimitiveDateTime::parse( | ||||
|                     string, | ||||
|                     format_description!( | ||||
|                         "[year repr:full base:calendar]-[month repr:numerical]-[day]T[hour]:[minute]:[second]" | ||||
|                     ), | ||||
|                 ).map(|datetime| datetime.assume_utc()) | ||||
|             }) | ||||
|             .or_else(|_| { | ||||
|                 NaiveDate::parse_from_str(string, "%Y-%m-%d") | ||||
|                     .map(|naive| DateTime::from_utc(naive.and_hms(0, 0, 0), Utc)) | ||||
|                 PrimitiveDateTime::parse( | ||||
|                     string, | ||||
|                     format_description!( | ||||
|                         "[year repr:full base:calendar]-[month repr:numerical]-[day] [hour]:[minute]:[second]" | ||||
|                     ), | ||||
|                 ).map(|datetime| datetime.assume_utc()) | ||||
|             }) | ||||
|             .or_else(|_| { | ||||
|                     Date::parse(string, format_description!( | ||||
|                         "[year repr:full base:calendar]-[month repr:numerical]-[day]" | ||||
|                     )).map(|date| PrimitiveDateTime::new(date, time!(00:00)).assume_utc()) | ||||
|             }) | ||||
|             .map_err(|_| AuthControllerError::InvalidApiKeyExpiresAt(value.clone())) | ||||
|             // check if the key is already expired. | ||||
|             .and_then(|d| { | ||||
|                 if d > Utc::now() { | ||||
|                 if d > OffsetDateTime::now_utc() { | ||||
|                     Ok(d) | ||||
|                 } else { | ||||
|                     Err(AuthControllerError::InvalidApiKeyExpiresAt(value.clone())) | ||||
|   | ||||
| @@ -9,10 +9,10 @@ use std::path::Path; | ||||
| use std::str::from_utf8; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use chrono::Utc; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use serde_json::Value; | ||||
| use sha2::{Digest, Sha256}; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| pub use action::{actions, Action}; | ||||
| use error::{AuthControllerError, Result}; | ||||
| @@ -148,7 +148,7 @@ impl AuthController { | ||||
|                 None => self.store.prefix_first_expiration_date(key, action)?, | ||||
|             }) { | ||||
|             // check expiration date. | ||||
|             Some(Some(exp)) => Ok(Utc::now() < exp), | ||||
|             Some(Some(exp)) => Ok(OffsetDateTime::now_utc() < exp), | ||||
|             // no expiration date. | ||||
|             Some(None) => Ok(true), | ||||
|             // action or index forbidden. | ||||
|   | ||||
| @@ -8,9 +8,9 @@ use std::path::Path; | ||||
| use std::str; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use chrono::{DateTime, Utc}; | ||||
| use heed::types::{ByteSlice, DecodeIgnore, SerdeJson}; | ||||
| use heed::{Database, Env, EnvOpenOptions, RwTxn}; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| use super::error::Result; | ||||
| use super::{Action, Key}; | ||||
| @@ -27,7 +27,7 @@ pub type KeyId = [u8; KEY_ID_LENGTH]; | ||||
| pub struct HeedAuthStore { | ||||
|     env: Arc<Env>, | ||||
|     keys: Database<ByteSlice, SerdeJson<Key>>, | ||||
|     action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<DateTime<Utc>>>>, | ||||
|     action_keyid_index_expiration: Database<KeyIdActionCodec, SerdeJson<Option<OffsetDateTime>>>, | ||||
|     should_close_on_drop: bool, | ||||
| } | ||||
|  | ||||
| @@ -146,7 +146,7 @@ impl HeedAuthStore { | ||||
|         key: &[u8], | ||||
|         action: Action, | ||||
|         index: Option<&[u8]>, | ||||
|     ) -> Result<Option<Option<DateTime<Utc>>>> { | ||||
|     ) -> Result<Option<Option<OffsetDateTime>>> { | ||||
|         let rtxn = self.env.read_txn()?; | ||||
|         match self.get_key_id(key) { | ||||
|             Some(id) => { | ||||
| @@ -161,7 +161,7 @@ impl HeedAuthStore { | ||||
|         &self, | ||||
|         key: &[u8], | ||||
|         action: Action, | ||||
|     ) -> Result<Option<Option<DateTime<Utc>>>> { | ||||
|     ) -> Result<Option<Option<OffsetDateTime>>> { | ||||
|         let rtxn = self.env.read_txn()?; | ||||
|         match self.get_key_id(key) { | ||||
|             Some(id) => { | ||||
|   | ||||
| @@ -32,7 +32,6 @@ async-trait = "0.1.51" | ||||
| bstr = "0.2.17" | ||||
| byte-unit = { version = "4.0.12", default-features = false, features = ["std", "serde"] } | ||||
| bytes = "1.1.0" | ||||
| chrono = { version = "0.4.19", features = ["serde"] } | ||||
| crossbeam-channel = "0.5.1" | ||||
| either = "1.6.1" | ||||
| env_logger = "0.9.0" | ||||
| @@ -43,6 +42,7 @@ futures-util = "0.3.17" | ||||
| heed = { git = "https://github.com/Kerollmops/heed", tag = "v0.12.1" } | ||||
| http = "0.2.4" | ||||
| indexmap = { version = "1.7.0", features = ["serde-1"] } | ||||
| iso8601-duration = "0.1.0" | ||||
| itertools = "0.10.1" | ||||
| jsonwebtoken = "7" | ||||
| log = "0.4.14" | ||||
| @@ -61,7 +61,7 @@ rayon = "1.5.1" | ||||
| regex = "1.5.4" | ||||
| rustls = "0.20.2" | ||||
| rustls-pemfile = "0.2" | ||||
| segment = { version = "0.1.2", optional = true } | ||||
| segment = { version = "0.2.0", optional = true } | ||||
| serde = { version = "1.0.130", features = ["derive"] } | ||||
| serde_json = { version = "1.0.67", features = ["preserve_order"] } | ||||
| sha2 = "0.9.6" | ||||
| @@ -73,6 +73,7 @@ sysinfo = "0.20.2" | ||||
| tar = "0.4.37" | ||||
| tempfile = "3.2.0" | ||||
| thiserror = "1.0.28" | ||||
| time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } | ||||
| tokio = { version = "1.11.0", features = ["full"] } | ||||
| tokio-stream = "0.1.7" | ||||
| uuid = { version = "0.8.2", features = ["serde"] } | ||||
|   | ||||
| @@ -6,7 +6,6 @@ use std::time::{Duration, Instant}; | ||||
|  | ||||
| use actix_web::http::header::USER_AGENT; | ||||
| use actix_web::HttpRequest; | ||||
| use chrono::{DateTime, Utc}; | ||||
| use http::header::CONTENT_TYPE; | ||||
| use meilisearch_auth::SearchRules; | ||||
| use meilisearch_lib::index::{SearchQuery, SearchResult}; | ||||
| @@ -18,6 +17,7 @@ use segment::message::{Identify, Track, User}; | ||||
| use segment::{AutoBatcher, Batcher, HttpClient}; | ||||
| use serde_json::{json, Value}; | ||||
| use sysinfo::{DiskExt, System, SystemExt}; | ||||
| use time::OffsetDateTime; | ||||
| use tokio::select; | ||||
| use tokio::sync::mpsc::{self, Receiver, Sender}; | ||||
| use uuid::Uuid; | ||||
| @@ -323,7 +323,7 @@ impl Segment { | ||||
|  | ||||
| #[derive(Default)] | ||||
| pub struct SearchAggregator { | ||||
|     timestamp: Option<DateTime<Utc>>, | ||||
|     timestamp: Option<OffsetDateTime>, | ||||
|  | ||||
|     // context | ||||
|     user_agents: HashSet<String>, | ||||
| @@ -360,7 +360,7 @@ pub struct SearchAggregator { | ||||
| impl SearchAggregator { | ||||
|     pub fn from_query(query: &SearchQuery, request: &HttpRequest) -> Self { | ||||
|         let mut ret = Self::default(); | ||||
|         ret.timestamp = Some(chrono::offset::Utc::now()); | ||||
|         ret.timestamp = Some(OffsetDateTime::now_utc()); | ||||
|  | ||||
|         ret.total_received = 1; | ||||
|         ret.user_agents = extract_user_agents(request).into_iter().collect(); | ||||
| @@ -504,7 +504,7 @@ impl SearchAggregator { | ||||
|  | ||||
| #[derive(Default)] | ||||
| pub struct DocumentsAggregator { | ||||
|     timestamp: Option<DateTime<Utc>>, | ||||
|     timestamp: Option<OffsetDateTime>, | ||||
|  | ||||
|     // set to true when at least one request was received | ||||
|     updated: bool, | ||||
| @@ -524,7 +524,7 @@ impl DocumentsAggregator { | ||||
|         request: &HttpRequest, | ||||
|     ) -> Self { | ||||
|         let mut ret = Self::default(); | ||||
|         ret.timestamp = Some(chrono::offset::Utc::now()); | ||||
|         ret.timestamp = Some(OffsetDateTime::now_utc()); | ||||
|  | ||||
|         ret.updated = true; | ||||
|         ret.user_agents = extract_user_agents(request).into_iter().collect(); | ||||
|   | ||||
| @@ -94,10 +94,10 @@ pub trait Policy { | ||||
| } | ||||
|  | ||||
| pub mod policies { | ||||
|     use chrono::Utc; | ||||
|     use jsonwebtoken::{dangerous_insecure_decode, decode, Algorithm, DecodingKey, Validation}; | ||||
|     use once_cell::sync::Lazy; | ||||
|     use serde::{Deserialize, Serialize}; | ||||
|     use time::OffsetDateTime; | ||||
|  | ||||
|     use crate::extractors::authentication::Policy; | ||||
|     use meilisearch_auth::{Action, AuthController, AuthFilter, SearchRules}; | ||||
| @@ -183,7 +183,7 @@ pub mod policies { | ||||
|  | ||||
|             // Check if token is expired. | ||||
|             if let Some(exp) = exp { | ||||
|                 if Utc::now().timestamp() > exp { | ||||
|                 if OffsetDateTime::now_utc().unix_timestamp() > exp { | ||||
|                     return None; | ||||
|                 } | ||||
|             } | ||||
|   | ||||
| @@ -1,11 +1,11 @@ | ||||
| use std::str; | ||||
|  | ||||
| use actix_web::{web, HttpRequest, HttpResponse}; | ||||
| use chrono::SecondsFormat; | ||||
|  | ||||
| use meilisearch_auth::{Action, AuthController, Key}; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use serde_json::Value; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| use crate::extractors::authentication::{policies::*, GuardedData}; | ||||
| use meilisearch_error::ResponseError; | ||||
| @@ -92,9 +92,12 @@ struct KeyView { | ||||
|     key: String, | ||||
|     actions: Vec<Action>, | ||||
|     indexes: Vec<String>, | ||||
|     expires_at: Option<String>, | ||||
|     created_at: String, | ||||
|     updated_at: String, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::option::serialize")] | ||||
|     expires_at: Option<OffsetDateTime>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     created_at: OffsetDateTime, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     updated_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| impl KeyView { | ||||
| @@ -107,11 +110,9 @@ impl KeyView { | ||||
|             key: generated_key, | ||||
|             actions: key.actions, | ||||
|             indexes: key.indexes, | ||||
|             expires_at: key | ||||
|                 .expires_at | ||||
|                 .map(|dt| dt.to_rfc3339_opts(SecondsFormat::Secs, true)), | ||||
|             created_at: key.created_at.to_rfc3339_opts(SecondsFormat::Secs, true), | ||||
|             updated_at: key.updated_at.to_rfc3339_opts(SecondsFormat::Secs, true), | ||||
|             expires_at: key.expires_at, | ||||
|             created_at: key.created_at, | ||||
|             updated_at: key.updated_at, | ||||
|         } | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -1,11 +1,11 @@ | ||||
| use actix_web::{web, HttpRequest, HttpResponse}; | ||||
| use chrono::{DateTime, Utc}; | ||||
| use log::debug; | ||||
| use meilisearch_error::ResponseError; | ||||
| use meilisearch_lib::index_controller::Update; | ||||
| use meilisearch_lib::MeiliSearch; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use serde_json::json; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| use crate::analytics::Analytics; | ||||
| use crate::extractors::authentication::{policies::*, GuardedData}; | ||||
| @@ -95,9 +95,12 @@ pub struct UpdateIndexRequest { | ||||
| pub struct UpdateIndexResponse { | ||||
|     name: String, | ||||
|     uid: String, | ||||
|     created_at: DateTime<Utc>, | ||||
|     updated_at: DateTime<Utc>, | ||||
|     primary_key: Option<String>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     created_at: OffsetDateTime, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     updated_at: OffsetDateTime, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     primary_key: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| pub async fn get_index( | ||||
|   | ||||
| @@ -1,10 +1,10 @@ | ||||
| use actix_web::{web, HttpRequest, HttpResponse}; | ||||
| use chrono::{DateTime, Utc}; | ||||
| use log::debug; | ||||
| use meilisearch_error::ResponseError; | ||||
| use meilisearch_lib::MeiliSearch; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use serde_json::json; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| use crate::analytics::Analytics; | ||||
| use crate::extractors::authentication::{policies::*, GuardedData}; | ||||
| @@ -20,9 +20,12 @@ pub fn configure(cfg: &mut web::ServiceConfig) { | ||||
| pub struct UpdateIndexResponse { | ||||
|     name: String, | ||||
|     uid: String, | ||||
|     created_at: DateTime<Utc>, | ||||
|     updated_at: DateTime<Utc>, | ||||
|     primary_key: Option<String>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     created_at: OffsetDateTime, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     updated_at: OffsetDateTime, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     primary_key: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| #[derive(Deserialize)] | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| use actix_web::{web, HttpResponse}; | ||||
| use chrono::{DateTime, Utc}; | ||||
| use log::debug; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| use meilisearch_error::ResponseError; | ||||
| use meilisearch_lib::index::{Settings, Unchecked}; | ||||
| @@ -54,8 +54,10 @@ pub struct ProcessedUpdateResult { | ||||
|     #[serde(rename = "type")] | ||||
|     pub update_type: UpdateType, | ||||
|     pub duration: f64, // in seconds | ||||
|     pub enqueued_at: DateTime<Utc>, | ||||
|     pub processed_at: DateTime<Utc>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     pub enqueued_at: OffsetDateTime, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     pub processed_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Clone, Serialize, Deserialize)] | ||||
| @@ -66,8 +68,10 @@ pub struct FailedUpdateResult { | ||||
|     pub update_type: UpdateType, | ||||
|     pub error: ResponseError, | ||||
|     pub duration: f64, // in seconds | ||||
|     pub enqueued_at: DateTime<Utc>, | ||||
|     pub processed_at: DateTime<Utc>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     pub enqueued_at: OffsetDateTime, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     pub processed_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Clone, Serialize, Deserialize)] | ||||
| @@ -76,9 +80,13 @@ pub struct EnqueuedUpdateResult { | ||||
|     pub update_id: u64, | ||||
|     #[serde(rename = "type")] | ||||
|     pub update_type: UpdateType, | ||||
|     pub enqueued_at: DateTime<Utc>, | ||||
|     #[serde(skip_serializing_if = "Option::is_none")] | ||||
|     pub started_processing_at: Option<DateTime<Utc>>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     pub enqueued_at: OffsetDateTime, | ||||
|     #[serde( | ||||
|         skip_serializing_if = "Option::is_none", | ||||
|         serialize_with = "time::serde::rfc3339::option::serialize" | ||||
|     )] | ||||
|     pub started_processing_at: Option<OffsetDateTime>, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Clone, Serialize, Deserialize)] | ||||
|   | ||||
| @@ -1,4 +1,6 @@ | ||||
| use chrono::{DateTime, Duration, Utc}; | ||||
| use std::fmt::Write; | ||||
| use std::write; | ||||
|  | ||||
| use meilisearch_error::ResponseError; | ||||
| use meilisearch_lib::index::{Settings, Unchecked}; | ||||
| use meilisearch_lib::milli::update::IndexDocumentsMethod; | ||||
| @@ -7,6 +9,7 @@ use meilisearch_lib::tasks::task::{ | ||||
|     DocumentDeletion, Task, TaskContent, TaskEvent, TaskId, TaskResult, | ||||
| }; | ||||
| use serde::{Serialize, Serializer}; | ||||
| use time::{Duration, OffsetDateTime}; | ||||
|  | ||||
| use crate::AUTOBATCHING_ENABLED; | ||||
|  | ||||
| @@ -79,14 +82,52 @@ enum TaskDetails { | ||||
|     ClearAll { deleted_documents: Option<u64> }, | ||||
| } | ||||
|  | ||||
| /// Serialize a `time::Duration` as a best effort ISO 8601 while waiting for | ||||
| /// https://github.com/time-rs/time/issues/378. | ||||
| /// This code is a port of the old code of time that was removed in 0.2. | ||||
| fn serialize_duration<S: Serializer>( | ||||
|     duration: &Option<Duration>, | ||||
|     serializer: S, | ||||
| ) -> Result<S::Ok, S::Error> { | ||||
|     match duration { | ||||
|         Some(duration) => { | ||||
|             let duration_str = duration.to_string(); | ||||
|             serializer.serialize_str(&duration_str) | ||||
|             // technically speaking, negative duration is not valid ISO 8601 | ||||
|             if duration.is_negative() { | ||||
|                 return serializer.serialize_none(); | ||||
|             } | ||||
|  | ||||
|             const SECS_PER_DAY: i64 = Duration::DAY.whole_seconds(); | ||||
|             let secs = duration.whole_seconds(); | ||||
|             let days = secs / SECS_PER_DAY; | ||||
|             let secs = secs - days * SECS_PER_DAY; | ||||
|             let hasdate = days != 0; | ||||
|             let nanos = duration.subsec_nanoseconds(); | ||||
|             let hastime = (secs != 0 || nanos != 0) || !hasdate; | ||||
|  | ||||
|             // all the following unwrap can't fail | ||||
|             let mut res = String::new(); | ||||
|             write!(&mut res, "P").unwrap(); | ||||
|  | ||||
|             if hasdate { | ||||
|                 write!(&mut res, "{}D", days).unwrap(); | ||||
|             } | ||||
|  | ||||
|             const NANOS_PER_MILLI: i32 = Duration::MILLISECOND.subsec_nanoseconds(); | ||||
|             const NANOS_PER_MICRO: i32 = Duration::MICROSECOND.subsec_nanoseconds(); | ||||
|  | ||||
|             if hastime { | ||||
|                 if nanos == 0 { | ||||
|                     write!(&mut res, "T{}S", secs).unwrap(); | ||||
|                 } else if nanos % NANOS_PER_MILLI == 0 { | ||||
|                     write!(&mut res, "T{}.{:03}S", secs, nanos / NANOS_PER_MILLI).unwrap(); | ||||
|                 } else if nanos % NANOS_PER_MICRO == 0 { | ||||
|                     write!(&mut res, "T{}.{:06}S", secs, nanos / NANOS_PER_MICRO).unwrap(); | ||||
|                 } else { | ||||
|                     write!(&mut res, "T{}.{:09}S", secs, nanos).unwrap(); | ||||
|                 } | ||||
|             } | ||||
|  | ||||
|             serializer.serialize_str(&res) | ||||
|         } | ||||
|         None => serializer.serialize_none(), | ||||
|     } | ||||
| @@ -106,9 +147,12 @@ pub struct TaskView { | ||||
|     error: Option<ResponseError>, | ||||
|     #[serde(serialize_with = "serialize_duration")] | ||||
|     duration: Option<Duration>, | ||||
|     enqueued_at: DateTime<Utc>, | ||||
|     started_at: Option<DateTime<Utc>>, | ||||
|     finished_at: Option<DateTime<Utc>>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     enqueued_at: OffsetDateTime, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::option::serialize")] | ||||
|     started_at: Option<OffsetDateTime>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::option::serialize")] | ||||
|     finished_at: Option<OffsetDateTime>, | ||||
|     #[serde(skip_serializing_if = "Option::is_none")] | ||||
|     batch_uid: Option<Option<BatchId>>, | ||||
| } | ||||
| @@ -302,7 +346,8 @@ pub struct SummarizedTaskView { | ||||
|     status: TaskStatus, | ||||
|     #[serde(rename = "type")] | ||||
|     task_type: TaskType, | ||||
|     enqueued_at: DateTime<Utc>, | ||||
|     #[serde(serialize_with = "time::serde::rfc3339::serialize")] | ||||
|     enqueued_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| impl From<Task> for SummarizedTaskView { | ||||
|   | ||||
| @@ -257,7 +257,7 @@ async fn error_add_api_key_missing_parameter() { | ||||
|         "message": "`indexes` field is mandatory.", | ||||
|         "code": "missing_parameter", | ||||
|         "type": "invalid_request", | ||||
|         "link":"https://docs.meilisearch.com/errors#missing_parameter" | ||||
|         "link": "https://docs.meilisearch.com/errors#missing_parameter" | ||||
|     }); | ||||
|  | ||||
|     assert_eq!(response, expected_response); | ||||
| @@ -275,7 +275,7 @@ async fn error_add_api_key_missing_parameter() { | ||||
|         "message": "`actions` field is mandatory.", | ||||
|         "code": "missing_parameter", | ||||
|         "type": "invalid_request", | ||||
|         "link":"https://docs.meilisearch.com/errors#missing_parameter" | ||||
|         "link": "https://docs.meilisearch.com/errors#missing_parameter" | ||||
|     }); | ||||
|  | ||||
|     assert_eq!(response, expected_response); | ||||
| @@ -293,7 +293,7 @@ async fn error_add_api_key_missing_parameter() { | ||||
|         "message": "`expiresAt` field is mandatory.", | ||||
|         "code": "missing_parameter", | ||||
|         "type": "invalid_request", | ||||
|         "link":"https://docs.meilisearch.com/errors#missing_parameter" | ||||
|         "link": "https://docs.meilisearch.com/errors#missing_parameter" | ||||
|     }); | ||||
|  | ||||
|     assert_eq!(response, expected_response); | ||||
| @@ -316,7 +316,7 @@ async fn error_add_api_key_invalid_parameters_description() { | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": r#"description field value `{"name":"products"}` is invalid. It should be a string or specified as a null value."#, | ||||
|         "message": r#"`description` field value `{"name":"products"}` is invalid. It should be a string or specified as a null value."#, | ||||
|         "code": "invalid_api_key_description", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_description" | ||||
| @@ -342,7 +342,7 @@ async fn error_add_api_key_invalid_parameters_indexes() { | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": r#"indexes field value `{"name":"products"}` is invalid. It should be an array of string representing index names."#, | ||||
|         "message": r#"`indexes` field value `{"name":"products"}` is invalid. It should be an array of string representing index names."#, | ||||
|         "code": "invalid_api_key_indexes", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" | ||||
| @@ -366,7 +366,7 @@ async fn error_add_api_key_invalid_parameters_actions() { | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": r#"actions field value `{"name":"products"}` is invalid. It should be an array of string representing action names."#, | ||||
|         "message": r#"`actions` field value `{"name":"products"}` is invalid. It should be an array of string representing action names."#, | ||||
|         "code": "invalid_api_key_actions", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions" | ||||
| @@ -386,7 +386,7 @@ async fn error_add_api_key_invalid_parameters_actions() { | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": r#"actions field value `["doc.add"]` is invalid. It should be an array of string representing action names."#, | ||||
|         "message": r#"`actions` field value `["doc.add"]` is invalid. It should be an array of string representing action names."#, | ||||
|         "code": "invalid_api_key_actions", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions" | ||||
| @@ -412,7 +412,7 @@ async fn error_add_api_key_invalid_parameters_expires_at() { | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": r#"expiresAt field value `{"name":"products"}` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'."#, | ||||
|         "message": r#"`expiresAt` field value `{"name":"products"}` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'."#, | ||||
|         "code": "invalid_api_key_expires_at", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at" | ||||
| @@ -438,7 +438,7 @@ async fn error_add_api_key_invalid_parameters_expires_at_in_the_past() { | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": r#"expiresAt field value `"2010-11-13T00:00:00Z"` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'."#, | ||||
|         "message": r#"`expiresAt` field value `"2010-11-13T00:00:00Z"` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'."#, | ||||
|         "code": "invalid_api_key_expires_at", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at" | ||||
| @@ -1213,7 +1213,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() { | ||||
|     let (response, code) = server.patch_api_key(&key, content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": "description field value `13` is invalid. It should be a string or specified as a null value.", | ||||
|         "message": "`description` field value `13` is invalid. It should be a string or specified as a null value.", | ||||
|         "code": "invalid_api_key_description", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_description" | ||||
| @@ -1230,7 +1230,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() { | ||||
|     let (response, code) = server.patch_api_key(&key, content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": "indexes field value `13` is invalid. It should be an array of string representing index names.", | ||||
|         "message": "`indexes` field value `13` is invalid. It should be an array of string representing index names.", | ||||
|         "code": "invalid_api_key_indexes", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_indexes" | ||||
| @@ -1246,7 +1246,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() { | ||||
|     let (response, code) = server.patch_api_key(&key, content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": "actions field value `13` is invalid. It should be an array of string representing action names.", | ||||
|         "message": "`actions` field value `13` is invalid. It should be an array of string representing action names.", | ||||
|         "code": "invalid_api_key_actions", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_actions" | ||||
| @@ -1262,7 +1262,7 @@ async fn error_patch_api_key_indexes_invalid_parameters() { | ||||
|     let (response, code) = server.patch_api_key(&key, content).await; | ||||
|  | ||||
|     let expected_response = json!({ | ||||
|         "message": "expiresAt field value `13` is invalid. It should be in ISO-8601 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DDTHH:MM:SS'.", | ||||
|         "message": "`expiresAt` field value `13` is invalid. It should follow the RFC 3339 format to represents a date or datetime in the future or specified as a null value. e.g. 'YYYY-MM-DD' or 'YYYY-MM-DD HH:MM:SS'.", | ||||
|         "code": "invalid_api_key_expires_at", | ||||
|         "type": "invalid_request", | ||||
|         "link": "https://docs.meilisearch.com/errors#invalid_api_key_expires_at" | ||||
|   | ||||
| @@ -1,9 +1,10 @@ | ||||
| use crate::common::Server; | ||||
| use chrono::{Duration, Utc}; | ||||
| use ::time::format_description::well_known::Rfc3339; | ||||
| use maplit::{hashmap, hashset}; | ||||
| use once_cell::sync::Lazy; | ||||
| use serde_json::{json, Value}; | ||||
| use std::collections::{HashMap, HashSet}; | ||||
| use time::{Duration, OffsetDateTime}; | ||||
|  | ||||
| pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'static str>>> = | ||||
|     Lazy::new(|| { | ||||
| @@ -76,7 +77,7 @@ async fn error_access_expired_key() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["products"], | ||||
|         "actions": ALL_ACTIONS.clone(), | ||||
|         "expiresAt": (Utc::now() + Duration::seconds(1)), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|  | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
| @@ -106,7 +107,7 @@ async fn error_access_unauthorized_index() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["sales"], | ||||
|         "actions": ALL_ACTIONS.clone(), | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|  | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
| @@ -137,7 +138,7 @@ async fn error_access_unauthorized_action() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["products"], | ||||
|         "actions": [], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|  | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
| @@ -174,7 +175,7 @@ async fn access_authorized_restricted_index() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["products"], | ||||
|         "actions": [], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|  | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
| @@ -213,7 +214,7 @@ async fn access_authorized_no_index_restriction() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["*"], | ||||
|         "actions": [], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|  | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
| @@ -263,7 +264,7 @@ async fn access_authorized_stats_restricted_index() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["products"], | ||||
|         "actions": ["stats.get"], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|     assert_eq!(code, 201); | ||||
| @@ -303,7 +304,7 @@ async fn access_authorized_stats_no_index_restriction() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["*"], | ||||
|         "actions": ["stats.get"], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|     assert_eq!(code, 201); | ||||
| @@ -343,7 +344,7 @@ async fn list_authorized_indexes_restricted_index() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["products"], | ||||
|         "actions": ["indexes.get"], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|     assert_eq!(code, 201); | ||||
| @@ -384,7 +385,7 @@ async fn list_authorized_indexes_no_index_restriction() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["*"], | ||||
|         "actions": ["indexes.get"], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|     assert_eq!(code, 201); | ||||
| @@ -424,7 +425,7 @@ async fn list_authorized_tasks_restricted_index() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["products"], | ||||
|         "actions": ["tasks.get"], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|     assert_eq!(code, 201); | ||||
| @@ -464,7 +465,7 @@ async fn list_authorized_tasks_no_index_restriction() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["*"], | ||||
|         "actions": ["tasks.get"], | ||||
|         "expiresAt": Utc::now() + Duration::hours(1), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
|     assert_eq!(code, 201); | ||||
|   | ||||
| @@ -1,9 +1,10 @@ | ||||
| use crate::common::Server; | ||||
| use chrono::{Duration, Utc}; | ||||
| use ::time::format_description::well_known::Rfc3339; | ||||
| use maplit::hashmap; | ||||
| use once_cell::sync::Lazy; | ||||
| use serde_json::{json, Value}; | ||||
| use std::collections::HashMap; | ||||
| use time::{Duration, OffsetDateTime}; | ||||
|  | ||||
| use super::authorization::{ALL_ACTIONS, AUTHORIZATIONS}; | ||||
|  | ||||
| @@ -63,22 +64,22 @@ static ACCEPTED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| { | ||||
|         json!({ | ||||
|             "indexes": ["*"], | ||||
|             "actions": ["*"], | ||||
|             "expiresAt": Utc::now() + Duration::days(1) | ||||
|             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() | ||||
|         }), | ||||
|         json!({ | ||||
|             "indexes": ["*"], | ||||
|             "actions": ["search"], | ||||
|             "expiresAt": Utc::now() + Duration::days(1) | ||||
|             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() | ||||
|         }), | ||||
|         json!({ | ||||
|             "indexes": ["sales"], | ||||
|             "actions": ["*"], | ||||
|             "expiresAt": Utc::now() + Duration::days(1) | ||||
|             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() | ||||
|         }), | ||||
|         json!({ | ||||
|             "indexes": ["sales"], | ||||
|             "actions": ["search"], | ||||
|             "expiresAt": Utc::now() + Duration::days(1) | ||||
|             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() | ||||
|         }), | ||||
|     ] | ||||
| }); | ||||
| @@ -89,23 +90,23 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| { | ||||
|         json!({ | ||||
|             "indexes": ["*"], | ||||
|             "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(), | ||||
|             "expiresAt": Utc::now() + Duration::days(1) | ||||
|             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() | ||||
|         }), | ||||
|         json!({ | ||||
|             "indexes": ["sales"], | ||||
|             "actions": ALL_ACTIONS.iter().cloned().filter(|a| *a != "search" && *a != "*").collect::<Vec<_>>(), | ||||
|             "expiresAt": Utc::now() + Duration::days(1) | ||||
|             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() | ||||
|         }), | ||||
|         // bad index | ||||
|         json!({ | ||||
|             "indexes": ["products"], | ||||
|             "actions": ["*"], | ||||
|             "expiresAt": Utc::now() + Duration::days(1) | ||||
|             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() | ||||
|         }), | ||||
|         json!({ | ||||
|             "indexes": ["products"], | ||||
|             "actions": ["search"], | ||||
|             "expiresAt": Utc::now() + Duration::days(1) | ||||
|             "expiresAt": (OffsetDateTime::now_utc() + Duration::days(1)).format(&Rfc3339).unwrap() | ||||
|         }), | ||||
|     ] | ||||
| }); | ||||
| @@ -204,19 +205,19 @@ async fn search_authorized_simple_token() { | ||||
|     let tenant_tokens = vec![ | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": {}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!(["*"]), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": {}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!(["sales"]), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": {}}), | ||||
| @@ -253,19 +254,19 @@ async fn search_authorized_filter_token() { | ||||
|     let tenant_tokens = vec![ | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": {"filter": "color = blue"}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": {"filter": "color = blue"}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": {"filter": ["color = blue"]}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": {"filter": ["color = blue"]}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         // filter on sales should override filters on * | ||||
|         hashmap! { | ||||
| @@ -273,28 +274,28 @@ async fn search_authorized_filter_token() { | ||||
|                 "*": {"filter": "color = green"}, | ||||
|                 "sales": {"filter": "color = blue"} | ||||
|             }), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({ | ||||
|                 "*": {}, | ||||
|                 "sales": {"filter": "color = blue"} | ||||
|             }), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({ | ||||
|                 "*": {"filter": "color = green"}, | ||||
|                 "sales": {"filter": ["color = blue"]} | ||||
|             }), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({ | ||||
|                 "*": {}, | ||||
|                 "sales": {"filter": ["color = blue"]} | ||||
|             }), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|     ]; | ||||
|  | ||||
| @@ -307,19 +308,19 @@ async fn filter_search_authorized_filter_token() { | ||||
|     let tenant_tokens = vec![ | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": {"filter": "color = blue"}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": {"filter": "color = blue"}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": {"filter": ["color = blue"]}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": {"filter": ["color = blue"]}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         // filter on sales should override filters on * | ||||
|         hashmap! { | ||||
| @@ -327,28 +328,28 @@ async fn filter_search_authorized_filter_token() { | ||||
|                 "*": {"filter": "color = green"}, | ||||
|                 "sales": {"filter": "color = blue"} | ||||
|             }), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({ | ||||
|                 "*": {}, | ||||
|                 "sales": {"filter": "color = blue"} | ||||
|             }), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({ | ||||
|                 "*": {"filter": "color = green"}, | ||||
|                 "sales": {"filter": ["color = blue"]} | ||||
|             }), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({ | ||||
|                 "*": {}, | ||||
|                 "sales": {"filter": ["color = blue"]} | ||||
|             }), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|     ]; | ||||
|  | ||||
| @@ -361,27 +362,27 @@ async fn error_search_token_forbidden_parent_key() { | ||||
|     let tenant_tokens = vec![ | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": {}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": Value::Null}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!(["*"]), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": {}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": Value::Null}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!(["sales"]), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|     ]; | ||||
|  | ||||
| @@ -395,11 +396,11 @@ async fn error_search_forbidden_token() { | ||||
|         // bad index | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"products": {}}), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!(["products"]), | ||||
|             "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"products": {}}), | ||||
| @@ -416,27 +417,27 @@ async fn error_search_forbidden_token() { | ||||
|         // expired token | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": {}}), | ||||
|             "exp" => json!((Utc::now() - Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"*": Value::Null}), | ||||
|             "exp" => json!((Utc::now() - Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!(["*"]), | ||||
|             "exp" => json!((Utc::now() - Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": {}}), | ||||
|             "exp" => json!((Utc::now() - Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!({"sales": Value::Null}), | ||||
|             "exp" => json!((Utc::now() - Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|         hashmap! { | ||||
|             "searchRules" => json!(["sales"]), | ||||
|             "exp" => json!((Utc::now() - Duration::hours(1)).timestamp()) | ||||
|             "exp" => json!((OffsetDateTime::now_utc() - Duration::hours(1)).unix_timestamp()) | ||||
|         }, | ||||
|     ]; | ||||
|  | ||||
| @@ -452,7 +453,7 @@ async fn error_access_forbidden_routes() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["*"], | ||||
|         "actions": ["*"], | ||||
|         "expiresAt": (Utc::now() + Duration::hours(1)), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|  | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
| @@ -463,7 +464,7 @@ async fn error_access_forbidden_routes() { | ||||
|  | ||||
|     let tenant_token = hashmap! { | ||||
|         "searchRules" => json!(["*"]), | ||||
|         "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|         "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|     }; | ||||
|     let web_token = generate_tenant_token(&key, tenant_token); | ||||
|     server.use_api_key(&web_token); | ||||
| @@ -487,7 +488,7 @@ async fn error_access_expired_parent_key() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["*"], | ||||
|         "actions": ["*"], | ||||
|         "expiresAt": (Utc::now() + Duration::seconds(1)), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::seconds(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|  | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
| @@ -498,7 +499,7 @@ async fn error_access_expired_parent_key() { | ||||
|  | ||||
|     let tenant_token = hashmap! { | ||||
|         "searchRules" => json!(["*"]), | ||||
|         "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|         "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|     }; | ||||
|     let web_token = generate_tenant_token(&key, tenant_token); | ||||
|     server.use_api_key(&web_token); | ||||
| @@ -529,7 +530,7 @@ async fn error_access_modified_token() { | ||||
|     let content = json!({ | ||||
|         "indexes": ["*"], | ||||
|         "actions": ["*"], | ||||
|         "expiresAt": (Utc::now() + Duration::hours(1)), | ||||
|         "expiresAt": (OffsetDateTime::now_utc() + Duration::hours(1)).format(&Rfc3339).unwrap(), | ||||
|     }); | ||||
|  | ||||
|     let (response, code) = server.add_api_key(content).await; | ||||
| @@ -540,7 +541,7 @@ async fn error_access_modified_token() { | ||||
|  | ||||
|     let tenant_token = hashmap! { | ||||
|         "searchRules" => json!(["products"]), | ||||
|         "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|         "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|     }; | ||||
|     let web_token = generate_tenant_token(&key, tenant_token); | ||||
|     server.use_api_key(&web_token); | ||||
| @@ -554,7 +555,7 @@ async fn error_access_modified_token() { | ||||
|  | ||||
|     let tenant_token = hashmap! { | ||||
|         "searchRules" => json!(["*"]), | ||||
|         "exp" => json!((Utc::now() + Duration::hours(1)).timestamp()) | ||||
|         "exp" => json!((OffsetDateTime::now_utc() + Duration::hours(1)).unix_timestamp()) | ||||
|     }; | ||||
|  | ||||
|     let alt = generate_tenant_token(&key, tenant_token); | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| use crate::common::{GetAllDocumentsOptions, Server}; | ||||
| use actix_web::test; | ||||
| use chrono::DateTime; | ||||
| use meilisearch_http::{analytics, create_app}; | ||||
| use serde_json::{json, Value}; | ||||
| use time::{format_description::well_known::Rfc3339, OffsetDateTime}; | ||||
|  | ||||
| /// This is the basic usage of our API and every other tests uses the content-type application/json | ||||
| #[actix_rt::test] | ||||
| @@ -568,9 +568,9 @@ async fn add_documents_no_index_creation() { | ||||
|     assert_eq!(response["details"]["indexedDocuments"], 1); | ||||
|  | ||||
|     let processed_at = | ||||
|         DateTime::parse_from_rfc3339(response["finishedAt"].as_str().unwrap()).unwrap(); | ||||
|         OffsetDateTime::parse(response["finishedAt"].as_str().unwrap(), &Rfc3339).unwrap(); | ||||
|     let enqueued_at = | ||||
|         DateTime::parse_from_rfc3339(response["enqueuedAt"].as_str().unwrap()).unwrap(); | ||||
|         OffsetDateTime::parse(response["enqueuedAt"].as_str().unwrap(), &Rfc3339).unwrap(); | ||||
|     assert!(processed_at > enqueued_at); | ||||
|  | ||||
|     // index was created, and primary key was inferred. | ||||
|   | ||||
| @@ -1,6 +1,6 @@ | ||||
| use crate::common::Server; | ||||
| use chrono::DateTime; | ||||
| use serde_json::json; | ||||
| use time::{format_description::well_known::Rfc3339, OffsetDateTime}; | ||||
|  | ||||
| #[actix_rt::test] | ||||
| async fn update_primary_key() { | ||||
| @@ -25,8 +25,10 @@ async fn update_primary_key() { | ||||
|     assert!(response.get("createdAt").is_some()); | ||||
|     assert!(response.get("updatedAt").is_some()); | ||||
|  | ||||
|     let created_at = DateTime::parse_from_rfc3339(response["createdAt"].as_str().unwrap()).unwrap(); | ||||
|     let updated_at = DateTime::parse_from_rfc3339(response["updatedAt"].as_str().unwrap()).unwrap(); | ||||
|     let created_at = | ||||
|         OffsetDateTime::parse(response["createdAt"].as_str().unwrap(), &Rfc3339).unwrap(); | ||||
|     let updated_at = | ||||
|         OffsetDateTime::parse(response["updatedAt"].as_str().unwrap(), &Rfc3339).unwrap(); | ||||
|     assert!(created_at < updated_at); | ||||
|  | ||||
|     assert_eq!(response["primaryKey"], "primary"); | ||||
|   | ||||
| @@ -1,6 +1,7 @@ | ||||
| use crate::common::Server; | ||||
| use chrono::{DateTime, Utc}; | ||||
| use serde_json::json; | ||||
| use time::format_description::well_known::Rfc3339; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| #[actix_rt::test] | ||||
| async fn error_get_task_unexisting_index() { | ||||
| @@ -98,7 +99,8 @@ macro_rules! assert_valid_summarized_task { | ||||
|         assert_eq!($response["status"], "enqueued"); | ||||
|         assert_eq!($response["type"], $task_type); | ||||
|         let date = $response["enqueuedAt"].as_str().expect("missing date"); | ||||
|         date.parse::<DateTime<Utc>>().unwrap(); | ||||
|  | ||||
|         OffsetDateTime::parse(date, &Rfc3339).unwrap(); | ||||
|     }}; | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -12,7 +12,6 @@ async-stream = "0.3.2" | ||||
| async-trait = "0.1.51" | ||||
| byte-unit = { version = "4.0.12", default-features = false, features = ["std"] } | ||||
| bytes = "1.1.0" | ||||
| chrono = { version = "0.4.19", features = ["serde"] } | ||||
| csv = "1.1.6" | ||||
| crossbeam-channel = "0.5.1" | ||||
| either = "1.6.1" | ||||
| @@ -28,7 +27,7 @@ lazy_static = "1.4.0" | ||||
| log = "0.4.14" | ||||
| meilisearch-error = { path = "../meilisearch-error" } | ||||
| meilisearch-auth = { path = "../meilisearch-auth" } | ||||
| milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.22.1" } | ||||
| milli = { git = "https://github.com/meilisearch/milli.git", tag = "v0.22.2" } | ||||
| mime = "0.3.16" | ||||
| num_cpus = "1.13.0" | ||||
| once_cell = "1.8.0" | ||||
| @@ -45,6 +44,7 @@ clap = { version = "3.0", features = ["derive", "env"] } | ||||
| tar = "0.4.37" | ||||
| tempfile = "3.2.0" | ||||
| thiserror = "1.0.28" | ||||
| time = { version = "0.3.7", features = ["serde-well-known", "formatting", "parsing", "macros"] } | ||||
| tokio = { version = "1.11.0", features = ["full"] } | ||||
| uuid = { version = "0.8.2", features = ["serde"] } | ||||
| walkdir = "2.3.2" | ||||
|   | ||||
| @@ -5,12 +5,12 @@ use std::ops::Deref; | ||||
| use std::path::Path; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use chrono::{DateTime, Utc}; | ||||
| use heed::{EnvOpenOptions, RoTxn}; | ||||
| use milli::update::{IndexerConfig, Setting}; | ||||
| use milli::{obkv_to_json, FieldDistribution, FieldId}; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use serde_json::{Map, Value}; | ||||
| use time::OffsetDateTime; | ||||
| use uuid::Uuid; | ||||
|  | ||||
| use crate::EnvSizer; | ||||
| @@ -24,8 +24,10 @@ pub type Document = Map<String, Value>; | ||||
| #[derive(Debug, Serialize, Deserialize, Clone)] | ||||
| #[serde(rename_all = "camelCase")] | ||||
| pub struct IndexMeta { | ||||
|     pub created_at: DateTime<Utc>, | ||||
|     pub updated_at: DateTime<Utc>, | ||||
|     #[serde(with = "time::serde::rfc3339")] | ||||
|     pub created_at: OffsetDateTime, | ||||
|     #[serde(with = "time::serde::rfc3339")] | ||||
|     pub updated_at: OffsetDateTime, | ||||
|     pub primary_key: Option<String>, | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -3,9 +3,10 @@ use std::path::{Path, PathBuf}; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use async_stream::stream; | ||||
| use chrono::Utc; | ||||
| use futures::{lock::Mutex, stream::StreamExt}; | ||||
| use log::{error, trace}; | ||||
| use time::macros::format_description; | ||||
| use time::OffsetDateTime; | ||||
| use tokio::sync::{mpsc, oneshot, RwLock}; | ||||
|  | ||||
| use super::error::{DumpActorError, Result}; | ||||
| @@ -29,7 +30,9 @@ pub struct DumpActor { | ||||
|  | ||||
| /// Generate uid from creation date | ||||
| fn generate_uid() -> String { | ||||
|     Utc::now().format("%Y%m%d-%H%M%S%3f").to_string() | ||||
|     OffsetDateTime::now_utc() | ||||
|         .format(format_description!("[year][month][day]-[hour][minute][second][subsecond digits:3]")) | ||||
|         .unwrap() | ||||
| } | ||||
|  | ||||
| impl DumpActor { | ||||
|   | ||||
| @@ -1,8 +1,8 @@ | ||||
| use anyhow::bail; | ||||
| use chrono::{DateTime, Utc}; | ||||
| use meilisearch_error::Code; | ||||
| use milli::update::IndexDocumentsMethod; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use time::OffsetDateTime; | ||||
| use uuid::Uuid; | ||||
|  | ||||
| use crate::index::{Settings, Unchecked}; | ||||
| @@ -51,7 +51,7 @@ pub enum UpdateMeta { | ||||
| pub struct Enqueued { | ||||
|     pub update_id: u64, | ||||
|     pub meta: UpdateMeta, | ||||
|     pub enqueued_at: DateTime<Utc>, | ||||
|     pub enqueued_at: OffsetDateTime, | ||||
|     pub content: Option<Uuid>, | ||||
| } | ||||
|  | ||||
| @@ -59,7 +59,7 @@ pub struct Enqueued { | ||||
| #[serde(rename_all = "camelCase")] | ||||
| pub struct Processed { | ||||
|     pub success: UpdateResult, | ||||
|     pub processed_at: DateTime<Utc>, | ||||
|     pub processed_at: OffsetDateTime, | ||||
|     #[serde(flatten)] | ||||
|     pub from: Processing, | ||||
| } | ||||
| @@ -69,7 +69,7 @@ pub struct Processed { | ||||
| pub struct Processing { | ||||
|     #[serde(flatten)] | ||||
|     pub from: Enqueued, | ||||
|     pub started_processing_at: DateTime<Utc>, | ||||
|     pub started_processing_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Serialize, Deserialize, Clone)] | ||||
| @@ -77,7 +77,7 @@ pub struct Processing { | ||||
| pub struct Aborted { | ||||
|     #[serde(flatten)] | ||||
|     pub from: Enqueued, | ||||
|     pub aborted_at: DateTime<Utc>, | ||||
|     pub aborted_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Serialize, Deserialize)] | ||||
| @@ -86,7 +86,7 @@ pub struct Failed { | ||||
|     #[serde(flatten)] | ||||
|     pub from: Processing, | ||||
|     pub error: ResponseError, | ||||
|     pub failed_at: DateTime<Utc>, | ||||
|     pub failed_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| #[derive(Debug, Serialize, Deserialize)] | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| use chrono::{DateTime, Utc}; | ||||
| use meilisearch_error::{Code, ResponseError}; | ||||
| use milli::update::IndexDocumentsMethod; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use time::OffsetDateTime; | ||||
| use uuid::Uuid; | ||||
|  | ||||
| use crate::index::{Settings, Unchecked}; | ||||
| @@ -107,7 +107,7 @@ pub enum UpdateMeta { | ||||
| pub struct Enqueued { | ||||
|     pub update_id: u64, | ||||
|     pub meta: Update, | ||||
|     pub enqueued_at: DateTime<Utc>, | ||||
|     pub enqueued_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| impl Enqueued { | ||||
| @@ -122,7 +122,7 @@ impl Enqueued { | ||||
| #[serde(rename_all = "camelCase")] | ||||
| pub struct Processed { | ||||
|     pub success: v2::UpdateResult, | ||||
|     pub processed_at: DateTime<Utc>, | ||||
|     pub processed_at: OffsetDateTime, | ||||
|     #[serde(flatten)] | ||||
|     pub from: Processing, | ||||
| } | ||||
| @@ -144,7 +144,7 @@ impl Processed { | ||||
| pub struct Processing { | ||||
|     #[serde(flatten)] | ||||
|     pub from: Enqueued, | ||||
|     pub started_processing_at: DateTime<Utc>, | ||||
|     pub started_processing_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| impl Processing { | ||||
| @@ -163,7 +163,7 @@ pub struct Failed { | ||||
|     pub from: Processing, | ||||
|     pub msg: String, | ||||
|     pub code: Code, | ||||
|     pub failed_at: DateTime<Utc>, | ||||
|     pub failed_at: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| impl Failed { | ||||
|   | ||||
| @@ -3,9 +3,9 @@ use std::path::{Path, PathBuf}; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use anyhow::bail; | ||||
| use chrono::{DateTime, Utc}; | ||||
| use log::{info, trace}; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| pub use actor::DumpActor; | ||||
| pub use handle_impl::*; | ||||
| @@ -40,7 +40,7 @@ pub struct Metadata { | ||||
|     db_version: String, | ||||
|     index_db_size: usize, | ||||
|     update_db_size: usize, | ||||
|     dump_date: DateTime<Utc>, | ||||
|     dump_date: OffsetDateTime, | ||||
| } | ||||
|  | ||||
| impl Metadata { | ||||
| @@ -49,7 +49,7 @@ impl Metadata { | ||||
|             db_version: env!("CARGO_PKG_VERSION").to_string(), | ||||
|             index_db_size, | ||||
|             update_db_size, | ||||
|             dump_date: Utc::now(), | ||||
|             dump_date: OffsetDateTime::now_utc(), | ||||
|         } | ||||
|     } | ||||
| } | ||||
| @@ -144,7 +144,7 @@ impl MetadataVersion { | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     pub fn dump_date(&self) -> Option<&DateTime<Utc>> { | ||||
|     pub fn dump_date(&self) -> Option<&OffsetDateTime> { | ||||
|         match self { | ||||
|             MetadataVersion::V1(_) => None, | ||||
|             MetadataVersion::V2(meta) | MetadataVersion::V3(meta) | MetadataVersion::V4(meta) => { | ||||
| @@ -169,9 +169,13 @@ pub struct DumpInfo { | ||||
|     pub status: DumpStatus, | ||||
|     #[serde(skip_serializing_if = "Option::is_none")] | ||||
|     pub error: Option<String>, | ||||
|     started_at: DateTime<Utc>, | ||||
|     #[serde(skip_serializing_if = "Option::is_none")] | ||||
|     finished_at: Option<DateTime<Utc>>, | ||||
|     #[serde(with = "time::serde::rfc3339")] | ||||
|     started_at: OffsetDateTime, | ||||
|     #[serde( | ||||
|         skip_serializing_if = "Option::is_none", | ||||
|         with = "time::serde::rfc3339::option" | ||||
|     )] | ||||
|     finished_at: Option<OffsetDateTime>, | ||||
| } | ||||
|  | ||||
| impl DumpInfo { | ||||
| @@ -180,19 +184,19 @@ impl DumpInfo { | ||||
|             uid, | ||||
|             status, | ||||
|             error: None, | ||||
|             started_at: Utc::now(), | ||||
|             started_at: OffsetDateTime::now_utc(), | ||||
|             finished_at: None, | ||||
|         } | ||||
|     } | ||||
|  | ||||
|     pub fn with_error(&mut self, error: String) { | ||||
|         self.status = DumpStatus::Failed; | ||||
|         self.finished_at = Some(Utc::now()); | ||||
|         self.finished_at = Some(OffsetDateTime::now_utc()); | ||||
|         self.error = Some(error); | ||||
|     } | ||||
|  | ||||
|     pub fn done(&mut self) { | ||||
|         self.finished_at = Some(Utc::now()); | ||||
|         self.finished_at = Some(OffsetDateTime::now_utc()); | ||||
|         self.status = DumpStatus::Done; | ||||
|     } | ||||
|  | ||||
|   | ||||
| @@ -8,11 +8,11 @@ use std::time::Duration; | ||||
|  | ||||
| use actix_web::error::PayloadError; | ||||
| use bytes::Bytes; | ||||
| use chrono::{DateTime, Utc}; | ||||
| use futures::Stream; | ||||
| use futures::StreamExt; | ||||
| use milli::update::IndexDocumentsMethod; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use time::OffsetDateTime; | ||||
| use tokio::sync::{mpsc, RwLock}; | ||||
| use tokio::task::spawn_blocking; | ||||
| use tokio::time::sleep; | ||||
| @@ -107,7 +107,7 @@ impl fmt::Display for DocumentAdditionFormat { | ||||
| #[serde(rename_all = "camelCase")] | ||||
| pub struct Stats { | ||||
|     pub database_size: u64, | ||||
|     pub last_update: Option<DateTime<Utc>>, | ||||
|     pub last_update: Option<OffsetDateTime>, | ||||
|     pub indexes: BTreeMap<String, IndexStats>, | ||||
| } | ||||
|  | ||||
| @@ -579,7 +579,7 @@ where | ||||
|     } | ||||
|  | ||||
|     pub async fn get_all_stats(&self, search_rules: &SearchRules) -> Result<Stats> { | ||||
|         let mut last_task: Option<DateTime<_>> = None; | ||||
|         let mut last_task: Option<OffsetDateTime> = None; | ||||
|         let mut indexes = BTreeMap::new(); | ||||
|         let mut database_size = 0; | ||||
|         let processing_tasks = self.scheduler.read().await.get_processing_tasks().await?; | ||||
|   | ||||
| @@ -6,7 +6,6 @@ use std::convert::{TryFrom, TryInto}; | ||||
| use std::path::Path; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use chrono::Utc; | ||||
| use error::{IndexResolverError, Result}; | ||||
| use heed::Env; | ||||
| use index_store::{IndexStore, MapIndexStore}; | ||||
| @@ -14,6 +13,7 @@ use meilisearch_error::ResponseError; | ||||
| use meta_store::{HeedMetaStore, IndexMetaStore}; | ||||
| use milli::update::{DocumentDeletionResult, IndexerConfig}; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use time::OffsetDateTime; | ||||
| use tokio::sync::oneshot; | ||||
| use tokio::task::spawn_blocking; | ||||
| use uuid::Uuid; | ||||
| @@ -115,18 +115,19 @@ where | ||||
|             self.process_document_addition_batch(batch).await | ||||
|         } else { | ||||
|             if let Some(task) = batch.tasks.first_mut() { | ||||
|                 task.events.push(TaskEvent::Processing(Utc::now())); | ||||
|                 task.events | ||||
|                     .push(TaskEvent::Processing(OffsetDateTime::now_utc())); | ||||
|  | ||||
|                 match self.process_task(task).await { | ||||
|                     Ok(success) => { | ||||
|                         task.events.push(TaskEvent::Succeded { | ||||
|                             result: success, | ||||
|                             timestamp: Utc::now(), | ||||
|                             timestamp: OffsetDateTime::now_utc(), | ||||
|                         }); | ||||
|                     } | ||||
|                     Err(err) => task.events.push(TaskEvent::Failed { | ||||
|                         error: err.into(), | ||||
|                         timestamp: Utc::now(), | ||||
|                         timestamp: OffsetDateTime::now_utc(), | ||||
|                     }), | ||||
|                 } | ||||
|             } | ||||
| @@ -225,7 +226,7 @@ where | ||||
|  | ||||
|                 // If the index doesn't exist and we are not allowed to create it with the first | ||||
|                 // task, we must fails the whole batch. | ||||
|                 let now = Utc::now(); | ||||
|                 let now = OffsetDateTime::now_utc(); | ||||
|                 let index = match index { | ||||
|                     Ok(index) => index, | ||||
|                     Err(e) => { | ||||
| @@ -253,17 +254,17 @@ where | ||||
|  | ||||
|                 let event = match result { | ||||
|                     Ok(Ok(result)) => TaskEvent::Succeded { | ||||
|                         timestamp: Utc::now(), | ||||
|                         timestamp: OffsetDateTime::now_utc(), | ||||
|                         result: TaskResult::DocumentAddition { | ||||
|                             indexed_documents: result.indexed_documents, | ||||
|                         }, | ||||
|                     }, | ||||
|                     Ok(Err(e)) => TaskEvent::Failed { | ||||
|                         timestamp: Utc::now(), | ||||
|                         timestamp: OffsetDateTime::now_utc(), | ||||
|                         error: e.into(), | ||||
|                     }, | ||||
|                     Err(e) => TaskEvent::Failed { | ||||
|                         timestamp: Utc::now(), | ||||
|                         timestamp: OffsetDateTime::now_utc(), | ||||
|                         error: IndexResolverError::from(e).into(), | ||||
|                     }, | ||||
|                 }; | ||||
| @@ -524,7 +525,7 @@ mod test { | ||||
|                         }; | ||||
|                         if primary_key.is_some() { | ||||
|                             mocker.when::<String, IndexResult<IndexMeta>>("update_primary_key") | ||||
|                                 .then(move |_| Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None })); | ||||
|                                 .then(move |_| Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None })); | ||||
|                         } | ||||
|                         mocker.when::<(IndexDocumentsMethod, Option<String>, UpdateFileStore, IntoIter<Uuid>), IndexResult<DocumentAdditionResult>>("update_documents") | ||||
|                                 .then(move |(_, _, _, _)| result()); | ||||
| @@ -569,7 +570,7 @@ mod test { | ||||
|                     | TaskContent::IndexCreation { primary_key } => { | ||||
|                         if primary_key.is_some() { | ||||
|                             let result = move || if !index_op_fails { | ||||
|                                 Ok(IndexMeta{ created_at: Utc::now(), updated_at: Utc::now(), primary_key: None }) | ||||
|                                 Ok(IndexMeta{ created_at: OffsetDateTime::now_utc(), updated_at: OffsetDateTime::now_utc(), primary_key: None }) | ||||
|                             } else { | ||||
|                                 // return this error because it's easy to generate... | ||||
|                                 Err(IndexError::DocumentNotFound("a doc".into())) | ||||
| @@ -640,7 +641,7 @@ mod test { | ||||
|                 let update_file_store = UpdateFileStore::mock(mocker); | ||||
|                 let index_resolver = IndexResolver::new(uuid_store, index_store, update_file_store); | ||||
|  | ||||
|                 let batch = Batch { id: 1, created_at: Utc::now(), tasks: vec![task.clone()] }; | ||||
|                 let batch = Batch { id: 1, created_at: OffsetDateTime::now_utc(), tasks: vec![task.clone()] }; | ||||
|                 let result = index_resolver.process_batch(batch).await; | ||||
|  | ||||
|                 // Test for some expected output scenarios: | ||||
|   | ||||
| @@ -1,4 +1,4 @@ | ||||
| use chrono::{DateTime, Utc}; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| use super::task::Task; | ||||
|  | ||||
| @@ -7,7 +7,7 @@ pub type BatchId = u64; | ||||
| #[derive(Debug)] | ||||
| pub struct Batch { | ||||
|     pub id: BatchId, | ||||
|     pub created_at: DateTime<Utc>, | ||||
|     pub created_at: OffsetDateTime, | ||||
|     pub tasks: Vec<Task>, | ||||
| } | ||||
|  | ||||
|   | ||||
| @@ -6,8 +6,8 @@ use std::sync::Arc; | ||||
| use std::time::Duration; | ||||
|  | ||||
| use atomic_refcell::AtomicRefCell; | ||||
| use chrono::Utc; | ||||
| use milli::update::IndexDocumentsMethod; | ||||
| use time::OffsetDateTime; | ||||
| use tokio::sync::{watch, RwLock}; | ||||
|  | ||||
| use crate::options::SchedulerConfig; | ||||
| @@ -357,7 +357,7 @@ impl Scheduler { | ||||
|             tasks.iter_mut().for_each(|t| { | ||||
|                 t.events.push(TaskEvent::Batched { | ||||
|                     batch_id: id, | ||||
|                     timestamp: Utc::now(), | ||||
|                     timestamp: OffsetDateTime::now_utc(), | ||||
|                 }) | ||||
|             }); | ||||
|  | ||||
| @@ -365,7 +365,7 @@ impl Scheduler { | ||||
|  | ||||
|             let batch = Batch { | ||||
|                 id, | ||||
|                 created_at: Utc::now(), | ||||
|                 created_at: OffsetDateTime::now_utc(), | ||||
|                 tasks, | ||||
|             }; | ||||
|  | ||||
|   | ||||
| @@ -1,9 +1,9 @@ | ||||
| use std::path::PathBuf; | ||||
|  | ||||
| use chrono::{DateTime, Utc}; | ||||
| use meilisearch_error::ResponseError; | ||||
| use milli::update::{DocumentAdditionResult, IndexDocumentsMethod}; | ||||
| use serde::{Deserialize, Serialize}; | ||||
| use time::OffsetDateTime; | ||||
| use tokio::sync::oneshot; | ||||
| use uuid::Uuid; | ||||
|  | ||||
| @@ -36,22 +36,22 @@ impl From<DocumentAdditionResult> for TaskResult { | ||||
| #[derive(Clone, Debug, Serialize, Deserialize, PartialEq, Eq)] | ||||
| #[cfg_attr(test, derive(proptest_derive::Arbitrary))] | ||||
| pub enum TaskEvent { | ||||
|     Created(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] DateTime<Utc>), | ||||
|     Created(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] OffsetDateTime), | ||||
|     Batched { | ||||
|         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] | ||||
|         timestamp: DateTime<Utc>, | ||||
|         timestamp: OffsetDateTime, | ||||
|         batch_id: BatchId, | ||||
|     }, | ||||
|     Processing(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] DateTime<Utc>), | ||||
|     Processing(#[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] OffsetDateTime), | ||||
|     Succeded { | ||||
|         result: TaskResult, | ||||
|         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] | ||||
|         timestamp: DateTime<Utc>, | ||||
|         timestamp: OffsetDateTime, | ||||
|     }, | ||||
|     Failed { | ||||
|         error: ResponseError, | ||||
|         #[cfg_attr(test, proptest(strategy = "test::datetime_strategy()"))] | ||||
|         timestamp: DateTime<Utc>, | ||||
|         timestamp: OffsetDateTime, | ||||
|     }, | ||||
| } | ||||
|  | ||||
| @@ -165,7 +165,7 @@ mod test { | ||||
|         ] | ||||
|     } | ||||
|  | ||||
|     pub(super) fn datetime_strategy() -> impl Strategy<Value = DateTime<Utc>> { | ||||
|         Just(Utc::now()) | ||||
|     pub(super) fn datetime_strategy() -> impl Strategy<Value = OffsetDateTime> { | ||||
|         Just(OffsetDateTime::now_utc()) | ||||
|     } | ||||
| } | ||||
|   | ||||
| @@ -5,9 +5,9 @@ use std::io::{BufWriter, Write}; | ||||
| use std::path::Path; | ||||
| use std::sync::Arc; | ||||
|  | ||||
| use chrono::Utc; | ||||
| use heed::{Env, RwTxn}; | ||||
| use log::debug; | ||||
| use time::OffsetDateTime; | ||||
|  | ||||
| use super::error::TaskError; | ||||
| use super::task::{Task, TaskContent, TaskId}; | ||||
| @@ -72,7 +72,7 @@ impl TaskStore { | ||||
|         let task = tokio::task::spawn_blocking(move || -> Result<Task> { | ||||
|             let mut txn = store.wtxn()?; | ||||
|             let next_task_id = store.next_task_id(&mut txn)?; | ||||
|             let created_at = TaskEvent::Created(Utc::now()); | ||||
|             let created_at = TaskEvent::Created(OffsetDateTime::now_utc()); | ||||
|             let task = Task { | ||||
|                 id: next_task_id, | ||||
|                 index_uid, | ||||
|   | ||||
| @@ -1,7 +1,7 @@ | ||||
| use std::sync::Arc; | ||||
| use std::time::Duration; | ||||
|  | ||||
| use chrono::Utc; | ||||
| use time::OffsetDateTime; | ||||
| use tokio::sync::{watch, RwLock}; | ||||
| use tokio::time::interval_at; | ||||
|  | ||||
| @@ -63,7 +63,8 @@ where | ||||
|         match pending { | ||||
|             Pending::Batch(mut batch) => { | ||||
|                 for task in &mut batch.tasks { | ||||
|                     task.events.push(TaskEvent::Processing(Utc::now())); | ||||
|                     task.events | ||||
|                         .push(TaskEvent::Processing(OffsetDateTime::now_utc())); | ||||
|                 } | ||||
|  | ||||
|                 batch.tasks = { | ||||
|   | ||||
		Reference in New Issue
	
	Block a user