mirror of https://github.com/meilisearch/meilisearch.git (synced 2025-10-30 23:46:28 +00:00)
	fix tests
# Conflicts:
#	crates/index-scheduler/src/batch.rs
#	crates/index-scheduler/src/snapshots/lib.rs/fail_in_process_batch_for_document_deletion/after_removing_the_documents.snap
#	crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fifth_task_succeeds.snap
#	crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_bad_primary_key/fourth_task_fails.snap
#	crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/second_task_fails.snap
#	crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key/third_task_fails.snap
#	crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_multiple_primary_key_batch_wrong_key/second_and_third_tasks_fails.snap
#	crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/all_other_tasks_succeeds.snap
#	crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/second_task_fails.snap
#	crates/index-scheduler/src/snapshots/lib.rs/test_document_addition_with_set_and_null_primary_key_inference_works/third_task_succeeds.snap
# Conflicts:
#	crates/index-scheduler/src/batch.rs
#	crates/meilisearch/src/search/mod.rs
#	crates/meilisearch/tests/vector/mod.rs
# Conflicts:
#	crates/index-scheduler/src/batch.rs
@@ -29,7 +29,6 @@ use bumpalo::collections::CollectIn;
 use bumpalo::Bump;
 use dump::IndexMetadata;
 use meilisearch_types::batches::BatchId;
-use meilisearch_types::error::Code;
 use meilisearch_types::heed::{RoTxn, RwTxn};
 use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader, PrimaryKey};
 use meilisearch_types::milli::heed::CompactionOption;
@@ -689,7 +688,9 @@ impl IndexScheduler {
                     let index = self.index_mapper.index(&rtxn, name)?;
                     let dst = temp_snapshot_dir.path().join("indexes").join(uuid.to_string());
                     fs::create_dir_all(&dst)?;
-                    index.copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled)?;
+                    index
+                        .copy_to_file(dst.join("data.mdb"), CompactionOption::Enabled)
+                        .map_err(|e| Error::from_milli(e, Some(name.to_string())))?;
                 }

                 drop(rtxn);
@@ -791,16 +792,19 @@ impl IndexScheduler {
                             let content_file = self.file_store.get_update(content_file)?;

                             let reader = DocumentsBatchReader::from_reader(content_file)
-                                .map_err(milli::Error::from)?;
+                                .map_err(|e| Error::from_milli(e.into(), None))?;

                             let (mut cursor, documents_batch_index) =
                                 reader.into_cursor_and_fields_index();

-                            while let Some(doc) =
-                                cursor.next_document().map_err(milli::Error::from)?
+                            while let Some(doc) = cursor
+                                .next_document()
+                                .map_err(|e| Error::from_milli(e.into(), None))?
                             {
-                                dump_content_file
-                                    .push_document(&obkv_to_object(doc, &documents_batch_index)?)?;
+                                dump_content_file.push_document(
+                                    &obkv_to_object(doc, &documents_batch_index)
+                                        .map_err(|e| Error::from_milli(e, None))?,
+                                )?;
                             }
                             dump_content_file.flush()?;
                         }
@@ -814,27 +818,41 @@ impl IndexScheduler {
                     let metadata = IndexMetadata {
                         uid: uid.to_owned(),
                         primary_key: index.primary_key(&rtxn)?.map(String::from),
-                        created_at: index.created_at(&rtxn)?,
-                        updated_at: index.updated_at(&rtxn)?,
+                        created_at: index
+                            .created_at(&rtxn)
+                            .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?,
+                        updated_at: index
+                            .updated_at(&rtxn)
+                            .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?,
                     };
                     let mut index_dumper = dump.create_index(uid, &metadata)?;

                     let fields_ids_map = index.fields_ids_map(&rtxn)?;
                     let all_fields: Vec<_> = fields_ids_map.iter().map(|(id, _)| id).collect();
-                    let embedding_configs = index.embedding_configs(&rtxn)?;
+                    let embedding_configs = index
+                        .embedding_configs(&rtxn)
+                        .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;

+                    let documents = index
+                        .all_documents(&rtxn)
+                        .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
                     // 3.1. Dump the documents
-                    for ret in index.all_documents(&rtxn)? {
+                    for ret in documents {
                         if self.must_stop_processing.get() {
                             return Err(Error::AbortedTask);
                         }

-                        let (id, doc) = ret?;
+                        let (id, doc) =
+                            ret.map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;

-                        let mut document = milli::obkv_to_json(&all_fields, &fields_ids_map, doc)?;
+                        let mut document =
+                            milli::obkv_to_json(&all_fields, &fields_ids_map, doc)
+                                .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;

                         'inject_vectors: {
-                            let embeddings = index.embeddings(&rtxn, id)?;
+                            let embeddings = index
+                                .embeddings(&rtxn, id)
+                                .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;

                             if embeddings.is_empty() {
                                 break 'inject_vectors;
@@ -845,7 +863,7 @@ impl IndexScheduler {
                                 .or_insert(serde_json::Value::Object(Default::default()));

                             let serde_json::Value::Object(vectors) = vectors else {
-                                return Err(milli::Error::UserError(
+                                let user_err = milli::Error::UserError(
                                     milli::UserError::InvalidVectorsMapType {
                                         document_id: {
                                             if let Ok(Some(Ok(index))) = index
@@ -859,8 +877,9 @@ impl IndexScheduler {
                                         },
                                         value: vectors.clone(),
                                     },
-                                )
-                                .into());
+                                );
+                                return Err(Error::from_milli(user_err, Some(uid.to_string())));
                             };

                             for (embedder_name, embeddings) in embeddings {
@@ -890,7 +909,8 @@ impl IndexScheduler {
                         index,
                         &rtxn,
                         meilisearch_types::settings::SecretPolicy::RevealSecrets,
-                    )?;
+                    )
+                    .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?;
                     index_dumper.settings(&settings)?;
                     Ok(())
                 })?;
@@ -946,7 +966,8 @@ impl IndexScheduler {
                 // the entire batch.
                 let res = || -> Result<()> {
                     let index_rtxn = index.read_txn()?;
-                    let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)?;
+                    let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)
+                        .map_err(|e| Error::from_milli(e, Some(index_uid.to_string())))?;
                     let mut wtxn = self.env.write_txn()?;
                     self.index_mapper.store_stats_of(&mut wtxn, &index_uid, &stats)?;
                     wtxn.commit()?;
@@ -988,10 +1009,12 @@ impl IndexScheduler {
                     );
                     builder.set_primary_key(primary_key);
                     let must_stop_processing = self.must_stop_processing.clone();
-                    builder.execute(
-                        |indexing_step| tracing::debug!(update = ?indexing_step),
-                        || must_stop_processing.get(),
-                    )?;
+                    builder
+                        .execute(
+                            |indexing_step| tracing::debug!(update = ?indexing_step),
+                            || must_stop_processing.get(),
+                        )
+                        .map_err(|e| Error::from_milli(e, Some(index_uid.to_string())))?;
                     index_wtxn.commit()?;
                 }

@@ -1008,7 +1031,8 @@ impl IndexScheduler {
                 let res = || -> Result<()> {
                     let mut wtxn = self.env.write_txn()?;
                     let index_rtxn = index.read_txn()?;
-                    let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)?;
+                    let stats = crate::index_mapper::IndexStats::new(&index, &index_rtxn)
+                        .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;
                     self.index_mapper.store_stats_of(&mut wtxn, &index_uid, &stats)?;
                     wtxn.commit()?;
                     Ok(())
@@ -1031,7 +1055,9 @@ impl IndexScheduler {
                 let number_of_documents = || -> Result<u64> {
                     let index = self.index_mapper.index(&wtxn, &index_uid)?;
                     let index_rtxn = index.read_txn()?;
-                    Ok(index.number_of_documents(&index_rtxn)?)
+                    index
+                        .number_of_documents(&index_rtxn)
+                        .map_err(|e| Error::from_milli(e, Some(index_uid.to_string())))
                 }()
                 .unwrap_or_default();

@@ -1188,8 +1214,10 @@ impl IndexScheduler {
         };

         match operation {
-            IndexOperation::DocumentClear { mut tasks, .. } => {
-                let count = milli::update::ClearDocuments::new(index_wtxn, index).execute()?;
+            IndexOperation::DocumentClear { index_uid, mut tasks } => {
+                let count = milli::update::ClearDocuments::new(index_wtxn, index)
+                    .execute()
+                    .map_err(|e| Error::from_milli(e, Some(index_uid)))?;

                 let mut first_clear_found = false;
                 for task in &mut tasks {
@@ -1209,7 +1237,7 @@ impl IndexScheduler {
                 Ok(tasks)
             }
             IndexOperation::DocumentOperation {
-                index_uid: _,
+                index_uid,
                 primary_key,
                 method,
                 operations,
@@ -1235,13 +1263,17 @@ impl IndexScheduler {

                 let mut content_files_iter = content_files.iter();
                 let mut indexer = indexer::DocumentOperation::new(method);
-                let embedders = index.embedding_configs(index_wtxn)?;
-                let embedders = self.embedders(embedders)?;
+                let embedders = index
+                    .embedding_configs(index_wtxn)
+                    .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;
+                let embedders = self.embedders(index_uid.clone(), embedders)?;
                 for operation in operations {
                     match operation {
                         DocumentOperation::Add(_content_uuid) => {
                             let mmap = content_files_iter.next().unwrap();
-                            indexer.add_documents(mmap)?;
+                            indexer
+                                .add_documents(mmap)
+                                .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;
                         }
                         DocumentOperation::Delete(document_ids) => {
                             let document_ids: bumpalo::collections::vec::Vec<_> = document_ids
@@ -1266,15 +1298,17 @@ impl IndexScheduler {
                     }
                 };

-                let (document_changes, operation_stats, primary_key) = indexer.into_changes(
-                    &indexer_alloc,
-                    index,
-                    &rtxn,
-                    primary_key.as_deref(),
-                    &mut new_fields_ids_map,
-                    &|| must_stop_processing.get(),
-                    &send_progress,
-                )?;
+                let (document_changes, operation_stats, primary_key) = indexer
+                    .into_changes(
+                        &indexer_alloc,
+                        index,
+                        &rtxn,
+                        primary_key.as_deref(),
+                        &mut new_fields_ids_map,
+                        &|| must_stop_processing.get(),
+                        &send_progress,
+                    )
+                    .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;

                 let mut addition = 0;
                 for (stats, task) in operation_stats.into_iter().zip(&mut tasks) {
@@ -1321,14 +1355,15 @@ impl IndexScheduler {
                         embedders,
                         &|| must_stop_processing.get(),
                         &send_progress,
-                    )?;
+                    )
+                    .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?;

                     tracing::info!(indexing_result = ?addition, processed_in = ?started_processing_at.elapsed(), "document indexing done");
                 }

                 Ok(tasks)
             }
-            IndexOperation::DocumentEdition { mut task, .. } => {
+            IndexOperation::DocumentEdition { index_uid, mut task } => {
                 let (filter, code) = if let KindWithContent::DocumentEdition {
                     filter_expr,
                     context: _,
@@ -1342,16 +1377,11 @@ impl IndexScheduler {
                 };

                 let candidates = match filter.as_ref().map(Filter::from_json) {
-                    Some(Ok(Some(filter))) => {
-                        filter.evaluate(index_wtxn, index).map_err(|err| match err {
-                            milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
-                                Error::from(err).with_custom_error_code(Code::InvalidDocumentFilter)
-                            }
-                            e => e.into(),
-                        })?
-                    }
+                    Some(Ok(Some(filter))) => filter
+                        .evaluate(index_wtxn, index)
+                        .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?,
                     None | Some(Ok(None)) => index.documents_ids(index_wtxn)?,
-                    Some(Err(e)) => return Err(e.into()),
+                    Some(Err(e)) => return Err(Error::from_milli(e, Some(index_uid.clone()))),
                 };

                 let (original_filter, context, function) = if let Some(Details::DocumentEdition {
@@ -1386,8 +1416,9 @@ impl IndexScheduler {
                 // candidates not empty => index not empty => a primary key is set
                 let primary_key = index.primary_key(&rtxn)?.unwrap();

-                let primary_key = PrimaryKey::new_or_insert(primary_key, &mut new_fields_ids_map)
-                    .map_err(milli::Error::from)?;
+                let primary_key =
+                    PrimaryKey::new_or_insert(primary_key, &mut new_fields_ids_map)
+                        .map_err(|err| Error::from_milli(err.into(), Some(index_uid.clone())))?;

                 let result_count = Ok((candidates.len(), candidates.len())) as Result<_>;

@@ -1406,11 +1437,17 @@ impl IndexScheduler {
                     };

                     let indexer = UpdateByFunction::new(candidates, context.clone(), code.clone());
-                    let document_changes =
-                        pool.install(|| indexer.into_changes(&primary_key)).unwrap()?;
-
-                    let embedders = index.embedding_configs(index_wtxn)?;
-                    let embedders = self.embedders(embedders)?;
+                    let document_changes = pool
+                        .install(|| {
+                            indexer
+                                .into_changes(&primary_key)
+                                .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))
+                        })
+                        .unwrap()?;
+                    let embedders = index
+                        .embedding_configs(index_wtxn)
+                        .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
+                    let embedders = self.embedders(index_uid.clone(), embedders)?;

                     indexer::index(
                         index_wtxn,
@@ -1424,7 +1461,8 @@ impl IndexScheduler {
                         embedders,
                         &|| must_stop_processing.get(),
                         &send_progress,
-                    )?;
+                    )
+                    .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;

                     // tracing::info!(indexing_result = ?addition, processed_in = ?started_processing_at.elapsed(), "document indexing done");
                 }
@@ -1455,7 +1493,7 @@ impl IndexScheduler {

                 Ok(vec![task])
             }
-            IndexOperation::DocumentDeletion { mut tasks, index_uid: _ } => {
+            IndexOperation::DocumentDeletion { mut tasks, index_uid } => {
                 let mut to_delete = RoaringBitmap::new();
                 let external_documents_ids = index.external_documents_ids();

@@ -1476,35 +1514,23 @@ impl IndexScheduler {
                                 deleted_documents: Some(will_be_removed),
                             });
                         }
-                        KindWithContent::DocumentDeletionByFilter { index_uid: _, filter_expr } => {
+                        KindWithContent::DocumentDeletionByFilter { index_uid, filter_expr } => {
                             let before = to_delete.len();
                             let filter = match Filter::from_json(filter_expr) {
                                 Ok(filter) => filter,
                                 Err(err) => {
                                     // theorically, this should be catched by deserr before reaching the index-scheduler and cannot happens
                                     task.status = Status::Failed;
-                                    task.error = match err {
-                                        milli::Error::UserError(
-                                            milli::UserError::InvalidFilterExpression { .. },
-                                        ) => Some(
-                                            Error::from(err)
-                                                .with_custom_error_code(Code::InvalidDocumentFilter)
-                                                .into(),
-                                        ),
-                                        e => Some(e.into()),
-                                    };
+                                    task.error = Some(
+                                        Error::from_milli(err, Some(index_uid.clone())).into(),
+                                    );
                                     None
                                 }
                             };
                             if let Some(filter) = filter {
-                                let candidates =
-                                    filter.evaluate(index_wtxn, index).map_err(|err| match err {
-                                        milli::Error::UserError(
-                                            milli::UserError::InvalidFilter(_),
-                                        ) => Error::from(err)
-                                            .with_custom_error_code(Code::InvalidDocumentFilter),
-                                        e => e.into(),
-                                    });
+                                let candidates = filter
+                                    .evaluate(index_wtxn, index)
+                                    .map_err(|err| Error::from_milli(err, Some(index_uid.clone())));
                                 match candidates {
                                     Ok(candidates) => to_delete |= candidates,
                                     Err(err) => {
@@ -1540,8 +1566,9 @@ impl IndexScheduler {
                 // to_delete not empty => index not empty => primary key set
                 let primary_key = index.primary_key(&rtxn)?.unwrap();

-                let primary_key = PrimaryKey::new_or_insert(primary_key, &mut new_fields_ids_map)
-                    .map_err(milli::Error::from)?;
+                let primary_key =
+                    PrimaryKey::new_or_insert(primary_key, &mut new_fields_ids_map)
+                        .map_err(|err| Error::from_milli(err.into(), Some(index_uid.clone())))?;

                 if !tasks.iter().all(|res| res.error.is_some()) {
                     let local_pool;
@@ -1560,8 +1587,10 @@ impl IndexScheduler {
                     let mut indexer = indexer::DocumentDeletion::new();
                     indexer.delete_documents_by_docids(to_delete);
                     let document_changes = indexer.into_changes(&indexer_alloc, primary_key);
-                    let embedders = index.embedding_configs(index_wtxn)?;
-                    let embedders = self.embedders(embedders)?;
+                    let embedders = index
+                        .embedding_configs(index_wtxn)
+                        .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;
+                    let embedders = self.embedders(index_uid.clone(), embedders)?;

                     indexer::index(
                         index_wtxn,
@@ -1575,14 +1604,15 @@ impl IndexScheduler {
                         embedders,
                         &|| must_stop_processing.get(),
                         &send_progress,
-                    )?;
+                    )
+                    .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;

                     // tracing::info!(indexing_result = ?addition, processed_in = ?started_processing_at.elapsed(), "document indexing done");
                 }

                 Ok(tasks)
             }
-            IndexOperation::Settings { index_uid: _, settings, mut tasks } => {
+            IndexOperation::Settings { index_uid, settings, mut tasks } => {
                 let indexer_config = self.index_mapper.indexer_config();
                 let mut builder = milli::update::Settings::new(index_wtxn, index, indexer_config);

@@ -1596,10 +1626,12 @@ impl IndexScheduler {
                     task.status = Status::Succeeded;
                 }

-                builder.execute(
-                    |indexing_step| tracing::debug!(update = ?indexing_step),
-                    || must_stop_processing.get(),
-                )?;
+                builder
+                    .execute(
+                        |indexing_step| tracing::debug!(update = ?indexing_step),
+                        || must_stop_processing.get(),
+                    )
+                    .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?;

                 Ok(tasks)
             }
@@ -1,13 +1,12 @@
 use std::fmt::Display;

+use crate::TaskId;
 use meilisearch_types::batches::BatchId;
 use meilisearch_types::error::{Code, ErrorCode};
 use meilisearch_types::tasks::{Kind, Status};
 use meilisearch_types::{heed, milli};
 use thiserror::Error;

-use crate::TaskId;
-
 #[derive(Copy, Clone, Debug, PartialEq, Eq)]
 pub enum DateField {
     BeforeEnqueuedAt,
@@ -122,11 +121,11 @@ pub enum Error {
     Dump(#[from] dump::Error),
     #[error(transparent)]
     Heed(#[from] heed::Error),
-    #[error("{}", match .index_name {
-        Some(name) if !name.is_empty() => format!("Index `{}`: {error}", name),
+    #[error("{}", match .index_uid {
+        Some(uid) if !uid.is_empty() => format!("Index `{}`: {error}", uid),
         _ => format!("{error}")
     })]
-    Milli { error: milli::Error, index_name: Option<String> },
+    Milli { error: milli::Error, index_uid: Option<String> },
     #[error("An unexpected crash occurred when processing the task.")]
     ProcessBatchPanicked,
     #[error(transparent)]
@@ -213,8 +212,18 @@ impl Error {
         Self::WithCustomErrorCode(code, Box::new(self))
     }

-    pub fn from_milli(error: milli::Error, index_name: Option<String>) -> Self {
-        Self::Milli { error, index_name }
+    pub fn from_milli(err: milli::Error, index_uid: Option<String>) -> Self {
+        match err {
+            milli::Error::UserError(milli::UserError::InvalidFilter(_)) => {
+                Self::Milli { error: err, index_uid }
+                    .with_custom_error_code(Code::InvalidDocumentFilter)
+            }
+            milli::Error::UserError(milli::UserError::InvalidFilterExpression { .. }) => {
+                Self::Milli { error: err, index_uid }
+                    .with_custom_error_code(Code::InvalidDocumentFilter)
+            }
+            _ => Self::Milli { error: err, index_uid },
+        }
     }
 }
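For reference, the hunks above make the scheduler's Milli variant carry the uid of the index that produced the error, so user-facing messages read "Index `doggos`: ...", and from_milli additionally routes milli's invalid-filter errors to the invalid_document_filter error code. Below is a minimal, self-contained sketch of just the uid-prefixing part of that pattern; the MilliError stand-in and the main function are illustrative only, not the real milli or meilisearch types, and the error-code routing is omitted.

use std::fmt;

// Stand-in for `milli::Error` (assumption: only Display matters for this sketch).
#[derive(Debug)]
struct MilliError(String);

impl fmt::Display for MilliError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

#[derive(Debug)]
enum Error {
    // Mirrors the new `Milli { error, index_uid }` variant from the diff.
    Milli { error: MilliError, index_uid: Option<String> },
}

impl Error {
    // Mirrors `Error::from_milli`: attach the originating index uid when known.
    fn from_milli(error: MilliError, index_uid: Option<String>) -> Self {
        Self::Milli { error, index_uid }
    }
}

impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Same formatting rule as the `#[error(...)]` attribute in the hunk:
            // prefix the message with the index uid when one is present.
            Error::Milli { error, index_uid } => match index_uid {
                Some(uid) if !uid.is_empty() => write!(f, "Index `{uid}`: {error}"),
                _ => write!(f, "{error}"),
            },
        }
    }
}

fn main() {
    let err = Error::from_milli(
        MilliError("Attribute `id` is not filterable.".to_string()),
        Some("doggos".to_string()),
    );
    // Prints: Index `doggos`: Attribute `id` is not filterable.
    println!("{err}");
}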
@@ -8,9 +8,8 @@ use time::OffsetDateTime;
 use uuid::Uuid;

 use super::IndexStatus::{self, Available, BeingDeleted, Closing, Missing};
+use crate::clamp_to_page_size;
 use crate::lru::{InsertionOutcome, LruMap};
-use crate::{clamp_to_page_size};

 /// Keep an internally consistent view of the open indexes in memory.
 ///
 /// This view is made of an LRU cache that will evict the least frequently used indexes when new indexes are opened.
@@ -3,19 +3,19 @@ use std::sync::{Arc, RwLock};
 use std::time::Duration;
 use std::{fs, thread};

+use self::index_map::IndexMap;
+use self::IndexStatus::{Available, BeingDeleted, Closing, Missing};
+use crate::uuid_codec::UuidCodec;
+use crate::{Error, Result};
 use meilisearch_types::heed::types::{SerdeJson, Str};
 use meilisearch_types::heed::{Database, Env, RoTxn, RwTxn};
+use meilisearch_types::milli;
 use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::{FieldDistribution, Index};
 use serde::{Deserialize, Serialize};
 use time::OffsetDateTime;
 use tracing::error;
 use uuid::Uuid;
-use meilisearch_types::milli;
-use self::index_map::IndexMap;
-use self::IndexStatus::{Available, BeingDeleted, Closing, Missing};
-use crate::uuid_codec::UuidCodec;
-use crate::{Error, Result};

 mod index_map;

@@ -183,13 +183,18 @@ impl IndexMapper {
                 // Error if the UUIDv4 somehow already exists in the map, since it should be fresh.
                 // This is very unlikely to happen in practice.
                 // TODO: it would be better to lazily create the index. But we need an Index::open function for milli.
-                let index = self.index_map.write().unwrap().create(
-                    &uuid,
-                    &index_path,
-                    date,
-                    self.enable_mdb_writemap,
-                    self.index_base_map_size,
-                ).map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?;
+                let index = self
+                    .index_map
+                    .write()
+                    .unwrap()
+                    .create(
+                        &uuid,
+                        &index_path,
+                        date,
+                        self.enable_mdb_writemap,
+                        self.index_base_map_size,
+                    )
+                    .map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?;

                 wtxn.commit()?;

@@ -357,7 +362,8 @@ impl IndexMapper {
                     };
                     let index_path = self.base_path.join(uuid.to_string());
                     // take the lock to reopen the environment.
-                    reopen.reopen(&mut self.index_map.write().unwrap(), &index_path)
+                    reopen
+                        .reopen(&mut self.index_map.write().unwrap(), &index_path)
                         .map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?;
                     continue;
                 }
@@ -373,13 +379,15 @@ impl IndexMapper {
                         Missing => {
                             let index_path = self.base_path.join(uuid.to_string());

-                            break index_map.create(
-                                &uuid,
-                                &index_path,
-                                None,
-                                self.enable_mdb_writemap,
-                                self.index_base_map_size,
-                            ).map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?;
+                            break index_map
+                                .create(
+                                    &uuid,
+                                    &index_path,
+                                    None,
+                                    self.enable_mdb_writemap,
+                                    self.index_base_map_size,
+                                )
+                                .map_err(|e| Error::from_milli(e, Some(uuid.to_string())))?;
                         }
                         Available(index) => break index,
                         Closing(_) => {
@@ -460,7 +468,8 @@ impl IndexMapper {
             None => {
                 let index = self.index(rtxn, index_uid)?;
                 let index_rtxn = index.read_txn()?;
-                IndexStats::new(&index, &index_rtxn).map_err(|e| Error::from_milli(e, Some(uuid.to_string())))
+                IndexStats::new(&index, &index_rtxn)
+                    .map_err(|e| Error::from_milli(e, Some(uuid.to_string())))
             }
         }
     }
@@ -1678,8 +1678,9 @@ impl IndexScheduler {
                 tracing::info!("A batch of tasks was successfully completed with {success} successful tasks and {failure} failed tasks.");
             }
             // If we have an abortion error we must stop the tick here and re-schedule tasks.
-            Err(Error::Milli{
-               error: milli::Error::InternalError(milli::InternalError::AbortedIndexation), ..
+            Err(Error::Milli {
+                error: milli::Error::InternalError(milli::InternalError::AbortedIndexation),
+                ..
             })
             | Err(Error::AbortedTask) => {
                 #[cfg(test)]
@@ -1700,7 +1701,8 @@ impl IndexScheduler {
             // 3. resize it
             // 4. re-schedule tasks
             Err(Error::Milli {
-                error: milli::Error::UserError(milli::UserError::MaxDatabaseSizeReached), ..
+                error: milli::Error::UserError(milli::UserError::MaxDatabaseSizeReached),
+                ..
             }) if index_uid.is_some() => {
                 // fixme: add index_uid to match to avoid the unwrap
                 let index_uid = index_uid.unwrap();
@@ -1954,11 +1956,12 @@ impl IndexScheduler {
                      config: milli::vector::EmbeddingConfig { embedder_options, prompt, quantized },
                      ..
                  }| {
-                    let prompt =
-                        Arc::new(prompt.try_into()
+                    let prompt = Arc::new(
+                        prompt
+                            .try_into()
                             .map_err(meilisearch_types::milli::Error::from)
-                            .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?
-                        );
+                            .map_err(|err| Error::from_milli(err, Some(index_uid.clone())))?,
+                    );
                     // optimistically return existing embedder
                     {
                         let embedders = self.embedders.read().unwrap();
@@ -1974,8 +1977,9 @@ impl IndexScheduler {
                     let embedder = Arc::new(
                         Embedder::new(embedder_options.clone())
                             .map_err(meilisearch_types::milli::vector::Error::from)
-                            .map_err(meilisearch_types::milli::Error::from)
-                            .map_err(|e| Error::from_milli(e, Some(index_uid.clone())))?,
+                            .map_err(|err| {
+                                Error::from_milli(err.into(), Some(index_uid.clone()))
+                            })?,
                     );
                     {
                         let mut embedders = self.embedders.write().unwrap();
@@ -6176,7 +6180,7 @@ mod tests {
             insta::assert_json_snapshot!(simple_hf_config.embedder_options);
             let simple_hf_name = name.clone();

-            let configs = index_scheduler.embedders(configs).unwrap();
+            let configs = index_scheduler.embedders("doggos".to_string(), configs).unwrap();
             let (hf_embedder, _, _) = configs.get(&simple_hf_name).unwrap();
             let beagle_embed =
                 hf_embedder.embed_one(S("Intel the beagle best doggo"), None).unwrap();
@@ -9,8 +9,8 @@ source: crates/index-scheduler/src/lib.rs
 0 {uid: 0, batch_uid: 0, status: succeeded, details: { settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> } }, kind: SettingsUpdate { index_uid: "doggos", new_settings: Settings { displayed_attributes: WildcardSetting(NotSet), searchable_attributes: WildcardSetting(NotSet), filterable_attributes: Set({"catto"}), sortable_attributes: NotSet, ranking_rules: NotSet, stop_words: NotSet, non_separator_tokens: NotSet, separator_tokens: NotSet, dictionary: NotSet, synonyms: NotSet, distinct_attribute: NotSet, proximity_precision: NotSet, typo_tolerance: NotSet, faceting: NotSet, pagination: NotSet, embedders: NotSet, search_cutoff_ms: NotSet, localized_attributes: NotSet, facet_search: NotSet, prefix_search: NotSet, _kind: PhantomData<meilisearch_types::settings::Unchecked> }, is_deletion: false, allow_index_creation: true }}
 1 {uid: 1, batch_uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
 2 {uid: 2, batch_uid: 2, status: succeeded, details: { received_document_ids: 1, deleted_documents: Some(1) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1"] }}
-3 {uid: 3, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Invalid type for filter subexpression: expected: String, Array, found: true.", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: true, deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: Bool(true) }}
-4 {uid: 4, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Attribute `id` is not filterable. Available filterable attributes are: `catto`.\n1:3 id = 2", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: "id = 2", deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("id = 2") }}
+3 {uid: 3, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Invalid type for filter subexpression: expected: String, Array, found: true.", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: true, deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: Bool(true) }}
+4 {uid: 4, batch_uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos`: Attribute `id` is not filterable. Available filterable attributes are: `catto`.\n1:3 id = 2", error_code: "invalid_document_filter", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid_document_filter" }, details: { original_filter: "id = 2", deleted_documents: Some(0) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("id = 2") }}
 5 {uid: 5, batch_uid: 2, status: succeeded, details: { original_filter: "catto EXISTS", deleted_documents: Some(1) }, kind: DocumentDeletionByFilter { index_uid: "doggos", filter_expr: String("catto EXISTS") }}
 ----------------------------------------------------------------------
 ### Status:
| @@ -4,10 +4,10 @@ use byte_unit::{Byte, UnitType}; | |||||||
| use meilisearch_types::document_formats::{DocumentFormatError, PayloadType}; | use meilisearch_types::document_formats::{DocumentFormatError, PayloadType}; | ||||||
| use meilisearch_types::error::{Code, ErrorCode, ResponseError}; | use meilisearch_types::error::{Code, ErrorCode, ResponseError}; | ||||||
| use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError}; | use meilisearch_types::index_uid::{IndexUid, IndexUidFormatError}; | ||||||
|  | use meilisearch_types::milli; | ||||||
| use meilisearch_types::milli::OrderBy; | use meilisearch_types::milli::OrderBy; | ||||||
| use serde_json::Value; | use serde_json::Value; | ||||||
| use tokio::task::JoinError; | use tokio::task::JoinError; | ||||||
| use meilisearch_types::milli; |  | ||||||
|  |  | ||||||
| #[derive(Debug, thiserror::Error)] | #[derive(Debug, thiserror::Error)] | ||||||
| pub enum MeilisearchHttpError { | pub enum MeilisearchHttpError { | ||||||
| @@ -67,7 +67,7 @@ pub enum MeilisearchHttpError { | |||||||
|         Some(name) if !name.is_empty() => format!("Index `{}`: {error}", name), |         Some(name) if !name.is_empty() => format!("Index `{}`: {error}", name), | ||||||
|         _ => format!("{error}") |         _ => format!("{error}") | ||||||
|     })] |     })] | ||||||
|     Milli { error: meilisearch_types::milli::Error, index_name: Option<String> }, |     Milli { error: milli::Error, index_name: Option<String> }, | ||||||
|     #[error(transparent)] |     #[error(transparent)] | ||||||
|     Payload(#[from] PayloadError), |     Payload(#[from] PayloadError), | ||||||
|     #[error(transparent)] |     #[error(transparent)] | ||||||
| @@ -105,7 +105,7 @@ impl ErrorCode for MeilisearchHttpError { | |||||||
|             MeilisearchHttpError::SerdeJson(_) => Code::Internal, |             MeilisearchHttpError::SerdeJson(_) => Code::Internal, | ||||||
|             MeilisearchHttpError::HeedError(_) => Code::Internal, |             MeilisearchHttpError::HeedError(_) => Code::Internal, | ||||||
|             MeilisearchHttpError::IndexScheduler(e) => e.error_code(), |             MeilisearchHttpError::IndexScheduler(e) => e.error_code(), | ||||||
|             MeilisearchHttpError::Milli{error, ..} => error.error_code(), |             MeilisearchHttpError::Milli { error, .. } => error.error_code(), | ||||||
|             MeilisearchHttpError::Payload(e) => e.error_code(), |             MeilisearchHttpError::Payload(e) => e.error_code(), | ||||||
|             MeilisearchHttpError::FileStore(_) => Code::Internal, |             MeilisearchHttpError::FileStore(_) => Code::Internal, | ||||||
|             MeilisearchHttpError::DocumentFormat(e) => e.error_code(), |             MeilisearchHttpError::DocumentFormat(e) => e.error_code(), | ||||||
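Note: the error.rs hunk above is the core of this change, a `Milli` variant that carries the originating index name and prefixes it onto the message. Below is a minimal standalone sketch of that pattern; `MilliError`, `HttpError`, and `from_milli` are illustrative stand-ins, not the actual meilisearch-types API.

```rust
use std::fmt;

// Stand-in for `milli::Error`; the real type lives in `meilisearch_types::milli`.
#[derive(Debug)]
struct MilliError(String);

impl fmt::Display for MilliError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}

// Stand-in for `MeilisearchHttpError`, reduced to the one variant this diff touches.
#[derive(Debug)]
enum HttpError {
    Milli { error: MilliError, index_name: Option<String> },
}

impl HttpError {
    // Mirrors the `from_milli(error, Some(index_uid))` constructor used at call sites.
    fn from_milli(error: MilliError, index_name: Option<String>) -> Self {
        Self::Milli { error, index_name }
    }
}

impl fmt::Display for HttpError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            // Same rule as the `#[error(...)]` attribute in the hunk: only prefix
            // "Index `<name>`: " when a non-empty index name is attached.
            HttpError::Milli { error, index_name: Some(name) } if !name.is_empty() => {
                write!(f, "Index `{name}`: {error}")
            }
            HttpError::Milli { error, .. } => write!(f, "{error}"),
        }
    }
}

fn main() {
    let inner = MilliError("Attribute `doggo` is not filterable.".into());
    let err = HttpError::from_milli(inner, Some("doggos".into()));
    assert_eq!(err.to_string(), "Index `doggos`: Attribute `doggo` is not filterable.");
}
```

Call sites then only need `.map_err(|e| HttpError::from_milli(e, Some(uid.to_string())))`, which is the shape repeated across the route handlers in the following hunks.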
|   | |||||||
| @@ -185,7 +185,8 @@ pub async fn search( | |||||||
|  |  | ||||||
|     let index = index_scheduler.index(&index_uid)?; |     let index = index_scheduler.index(&index_uid)?; | ||||||
|     let features = index_scheduler.features(); |     let features = index_scheduler.features(); | ||||||
|     let search_kind = search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index, features)?; |     let search_kind = | ||||||
|  |         search_kind(&search_query, &index_scheduler, index_uid.to_string(), &index, features)?; | ||||||
|     let permit = search_queue.try_get_search_permit().await?; |     let permit = search_queue.try_get_search_permit().await?; | ||||||
|     let search_result = tokio::task::spawn_blocking(move || { |     let search_result = tokio::task::spawn_blocking(move || { | ||||||
|         perform_facet_search( |         perform_facet_search( | ||||||
|   | |||||||
| @@ -107,7 +107,10 @@ pub async fn list_indexes( | |||||||
|             if !filters.is_index_authorized(uid) { |             if !filters.is_index_authorized(uid) { | ||||||
|                 return Ok(None); |                 return Ok(None); | ||||||
|             } |             } | ||||||
|             Ok(Some(IndexView::new(uid.to_string(), index).map_err(|e| Error::from_milli(e, Some(uid.to_string())))?)) |             Ok(Some( | ||||||
|  |                 IndexView::new(uid.to_string(), index) | ||||||
|  |                     .map_err(|e| Error::from_milli(e, Some(uid.to_string())))?, | ||||||
|  |             )) | ||||||
|         })?; |         })?; | ||||||
|     // Won't cause to open all indexes because IndexView doesn't keep the `Index` opened. |     // Won't cause to open all indexes because IndexView doesn't keep the `Index` opened. | ||||||
|     let indexes: Vec<IndexView> = indexes.into_iter().flatten().collect(); |     let indexes: Vec<IndexView> = indexes.into_iter().flatten().collect(); | ||||||
|   | |||||||
| @@ -243,11 +243,19 @@ pub async fn search_with_url_query( | |||||||
|     let index = index_scheduler.index(&index_uid)?; |     let index = index_scheduler.index(&index_uid)?; | ||||||
|     let features = index_scheduler.features(); |     let features = index_scheduler.features(); | ||||||
|  |  | ||||||
|     let search_kind = search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; |     let search_kind = | ||||||
|  |         search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; | ||||||
|     let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?; |     let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features)?; | ||||||
|     let permit = search_queue.try_get_search_permit().await?; |     let permit = search_queue.try_get_search_permit().await?; | ||||||
|     let search_result = tokio::task::spawn_blocking(move || { |     let search_result = tokio::task::spawn_blocking(move || { | ||||||
|         perform_search(index_uid.to_string(), &index, query, search_kind, retrieve_vector, index_scheduler.features()) |         perform_search( | ||||||
|  |             index_uid.to_string(), | ||||||
|  |             &index, | ||||||
|  |             query, | ||||||
|  |             search_kind, | ||||||
|  |             retrieve_vector, | ||||||
|  |             index_scheduler.features(), | ||||||
|  |         ) | ||||||
|     }) |     }) | ||||||
|     .await; |     .await; | ||||||
|     permit.drop().await; |     permit.drop().await; | ||||||
| @@ -287,12 +295,20 @@ pub async fn search_with_post( | |||||||
|  |  | ||||||
|     let features = index_scheduler.features(); |     let features = index_scheduler.features(); | ||||||
|  |  | ||||||
|     let search_kind = search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; |     let search_kind = | ||||||
|  |         search_kind(&query, index_scheduler.get_ref(), index_uid.to_string(), &index, features)?; | ||||||
|     let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?; |     let retrieve_vectors = RetrieveVectors::new(query.retrieve_vectors, features)?; | ||||||
|  |  | ||||||
|     let permit = search_queue.try_get_search_permit().await?; |     let permit = search_queue.try_get_search_permit().await?; | ||||||
|     let search_result = tokio::task::spawn_blocking(move || { |     let search_result = tokio::task::spawn_blocking(move || { | ||||||
|         perform_search(index_uid.to_string(), &index, query, search_kind, retrieve_vectors, index_scheduler.features()) |         perform_search( | ||||||
|  |             index_uid.to_string(), | ||||||
|  |             &index, | ||||||
|  |             query, | ||||||
|  |             search_kind, | ||||||
|  |             retrieve_vectors, | ||||||
|  |             index_scheduler.features(), | ||||||
|  |         ) | ||||||
|     }) |     }) | ||||||
|     .await; |     .await; | ||||||
|     permit.drop().await; |     permit.drop().await; | ||||||
|   | |||||||
| @@ -103,8 +103,13 @@ async fn similar( | |||||||
|  |  | ||||||
|     let index = index_scheduler.index(&index_uid)?; |     let index = index_scheduler.index(&index_uid)?; | ||||||
|  |  | ||||||
|     let (embedder_name, embedder, quantized) = |     let (embedder_name, embedder, quantized) = SearchKind::embedder( | ||||||
|         SearchKind::embedder(&index_scheduler, index_uid.to_string(), &index, &query.embedder, None)?; |         &index_scheduler, | ||||||
|  |         index_uid.to_string(), | ||||||
|  |         &index, | ||||||
|  |         &query.embedder, | ||||||
|  |         None, | ||||||
|  |     )?; | ||||||
|  |  | ||||||
|     tokio::task::spawn_blocking(move || { |     tokio::task::spawn_blocking(move || { | ||||||
|         perform_similar( |         perform_similar( | ||||||
|   | |||||||
| @@ -127,14 +127,26 @@ pub async fn multi_search_with_post( | |||||||
|  |  | ||||||
|                     let index_uid_str = index_uid.to_string(); |                     let index_uid_str = index_uid.to_string(); | ||||||
|  |  | ||||||
|                     let search_kind = |                     let search_kind = search_kind( | ||||||
|                         search_kind(&query, index_scheduler.get_ref(), index_uid_str.clone(), &index, features) |                         &query, | ||||||
|                             .with_index(query_index)?; |                         index_scheduler.get_ref(), | ||||||
|  |                         index_uid_str.clone(), | ||||||
|  |                         &index, | ||||||
|  |                         features, | ||||||
|  |                     ) | ||||||
|  |                     .with_index(query_index)?; | ||||||
|                     let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features) |                     let retrieve_vector = RetrieveVectors::new(query.retrieve_vectors, features) | ||||||
|                         .with_index(query_index)?; |                         .with_index(query_index)?; | ||||||
|  |  | ||||||
|                     let search_result = tokio::task::spawn_blocking(move || { |                     let search_result = tokio::task::spawn_blocking(move || { | ||||||
|                         perform_search(index_uid_str.clone(), &index, query, search_kind, retrieve_vector, features) |                         perform_search( | ||||||
|  |                             index_uid_str.clone(), | ||||||
|  |                             &index, | ||||||
|  |                             query, | ||||||
|  |                             search_kind, | ||||||
|  |                             retrieve_vector, | ||||||
|  |                             features, | ||||||
|  |                         ) | ||||||
|                     }) |                     }) | ||||||
|                     .await |                     .await | ||||||
|                     .with_index(query_index)?; |                     .with_index(query_index)?; | ||||||
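The search, similar, and multi-search hunks above mostly thread an extra `index_uid: String` argument through `search_kind`, `SearchKind::embedder`, and `perform_search`. A rough standalone sketch of why the parameter is added (`AttributedError` and `resolve_embedder` are invented names for illustration): the helper performs the fallible work, so it attaches the uid at the point the error is created instead of every caller re-wrapping it.

```rust
// Illustrative only: a fallible helper that receives the index uid so any error
// it produces already carries the index it relates to.
#[derive(Debug, PartialEq)]
struct AttributedError {
    index_uid: Option<String>,
    message: String,
}

fn resolve_embedder(index_uid: String, embedder_name: &str) -> Result<String, AttributedError> {
    let configured = ["default", "small"];
    if configured.contains(&embedder_name) {
        Ok(format!("using embedder `{embedder_name}`"))
    } else {
        // The uid is attached here, once, rather than by each route handler.
        Err(AttributedError {
            index_uid: Some(index_uid),
            message: format!("Cannot find embedder with name `{embedder_name}`."),
        })
    }
}

fn main() {
    assert!(resolve_embedder("doggos".to_string(), "default").is_ok());

    let err = resolve_embedder("doggos".to_string(), "missing").unwrap_err();
    assert_eq!(err.index_uid.as_deref(), Some("doggos"));
    println!("Index `{}`: {}", err.index_uid.unwrap(), err.message);
}
```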
|   | |||||||
| @@ -560,7 +560,8 @@ pub fn perform_federated_search( | |||||||
|             // use an immediately invoked lambda to capture the result without returning from the function |             // use an immediately invoked lambda to capture the result without returning from the function | ||||||
|  |  | ||||||
|             let res: Result<(), ResponseError> = (|| { |             let res: Result<(), ResponseError> = (|| { | ||||||
|                 let search_kind = search_kind(&query, index_scheduler, index_uid.to_string(), &index, features)?; |                 let search_kind = | ||||||
|  |                     search_kind(&query, index_scheduler, index_uid.to_string(), &index, features)?; | ||||||
|  |  | ||||||
|                 let canonicalization_kind = match (&search_kind, &query.q) { |                 let canonicalization_kind = match (&search_kind, &query.q) { | ||||||
|                     (SearchKind::SemanticOnly { .. }, _) => { |                     (SearchKind::SemanticOnly { .. }, _) => { | ||||||
| @@ -636,7 +637,8 @@ pub fn perform_federated_search( | |||||||
|                 search.offset(0); |                 search.offset(0); | ||||||
|                 search.limit(required_hit_count); |                 search.limit(required_hit_count); | ||||||
|  |  | ||||||
|                 let (result, _semantic_hit_count) = super::search_from_kind(index_uid.to_string(), search_kind, search)?; |                 let (result, _semantic_hit_count) = | ||||||
|  |                     super::search_from_kind(index_uid.to_string(), search_kind, search)?; | ||||||
|                 let format = AttributesFormat { |                 let format = AttributesFormat { | ||||||
|                     attributes_to_retrieve: query.attributes_to_retrieve, |                     attributes_to_retrieve: query.attributes_to_retrieve, | ||||||
|                     retrieve_vectors, |                     retrieve_vectors, | ||||||
| @@ -670,8 +672,10 @@ pub fn perform_federated_search( | |||||||
|  |  | ||||||
|                 let formatter_builder = HitMaker::formatter_builder(matching_words, tokenizer); |                 let formatter_builder = HitMaker::formatter_builder(matching_words, tokenizer); | ||||||
|  |  | ||||||
|                 let hit_maker = HitMaker::new(&index, &rtxn, format, formatter_builder) |                 let hit_maker = | ||||||
|                     .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; |                     HitMaker::new(&index, &rtxn, format, formatter_builder).map_err(|e| { | ||||||
|  |                         MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())) | ||||||
|  |                     })?; | ||||||
|  |  | ||||||
|                 results_by_query.push(SearchResultByQuery { |                 results_by_query.push(SearchResultByQuery { | ||||||
|                     federation_options, |                     federation_options, | ||||||
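The federated-search hunk keeps the existing "immediately invoked lambda" pattern noted in its context line while reflowing the calls inside it. For readers unfamiliar with the idiom, here is a tiny self-contained sketch (the parsing example is invented): `?` exits only the closure, so the enclosing function can inspect the captured `Result` without returning early.

```rust
use std::num::ParseIntError;

fn parse_both(a: &str, b: &str) -> (Option<i32>, Option<i32>) {
    // Immediately invoked closure: `?` bails out of the closure only,
    // leaving the outer function in control of how the error is handled.
    let res: Result<(i32, i32), ParseIntError> = (|| {
        let a = a.parse::<i32>()?;
        let b = b.parse::<i32>()?;
        Ok((a, b))
    })();

    match res {
        Ok((a, b)) => (Some(a), Some(b)),
        Err(_) => (None, None),
    }
}

fn main() {
    assert_eq!(parse_both("1", "2"), (Some(1), Some(2)));
    assert_eq!(parse_both("1", "oops"), (None, None));
}
```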
|   | |||||||
| @@ -19,7 +19,9 @@ use meilisearch_types::locales::Locale; | |||||||
| use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy}; | use meilisearch_types::milli::score_details::{ScoreDetails, ScoringStrategy}; | ||||||
| use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors; | use meilisearch_types::milli::vector::parsed_vectors::ExplicitVectors; | ||||||
| use meilisearch_types::milli::vector::Embedder; | use meilisearch_types::milli::vector::Embedder; | ||||||
| use meilisearch_types::milli::{FacetValueHit, OrderBy, SearchForFacetValues, TimeBudget}; | use meilisearch_types::milli::{ | ||||||
|  |     FacetValueHit, InternalError, OrderBy, SearchForFacetValues, TimeBudget, | ||||||
|  | }; | ||||||
| use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; | use meilisearch_types::settings::DEFAULT_PAGINATION_MAX_TOTAL_HITS; | ||||||
| use meilisearch_types::{milli, Document}; | use meilisearch_types::{milli, Document}; | ||||||
| use milli::tokenizer::{Language, TokenizerBuilder}; | use milli::tokenizer::{Language, TokenizerBuilder}; | ||||||
| @@ -281,35 +283,38 @@ pub enum SearchKind { | |||||||
| impl SearchKind { | impl SearchKind { | ||||||
|     pub(crate) fn semantic( |     pub(crate) fn semantic( | ||||||
|         index_scheduler: &index_scheduler::IndexScheduler, |         index_scheduler: &index_scheduler::IndexScheduler, | ||||||
|  |         index_uid: String, | ||||||
|         index: &Index, |         index: &Index, | ||||||
|         embedder_name: &str, |         embedder_name: &str, | ||||||
|         vector_len: Option<usize>, |         vector_len: Option<usize>, | ||||||
|     ) -> Result<Self, ResponseError> { |     ) -> Result<Self, ResponseError> { | ||||||
|         let (embedder_name, embedder, quantized) = |         let (embedder_name, embedder, quantized) = | ||||||
|             Self::embedder(index_scheduler, index, embedder_name, vector_len)?; |             Self::embedder(index_scheduler, index_uid, index, embedder_name, vector_len)?; | ||||||
|         Ok(Self::SemanticOnly { embedder_name, embedder, quantized }) |         Ok(Self::SemanticOnly { embedder_name, embedder, quantized }) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     pub(crate) fn hybrid( |     pub(crate) fn hybrid( | ||||||
|         index_scheduler: &index_scheduler::IndexScheduler, |         index_scheduler: &index_scheduler::IndexScheduler, | ||||||
|  |         index_uid: String, | ||||||
|         index: &Index, |         index: &Index, | ||||||
|         embedder_name: &str, |         embedder_name: &str, | ||||||
|         semantic_ratio: f32, |         semantic_ratio: f32, | ||||||
|         vector_len: Option<usize>, |         vector_len: Option<usize>, | ||||||
|     ) -> Result<Self, ResponseError> { |     ) -> Result<Self, ResponseError> { | ||||||
|         let (embedder_name, embedder, quantized) = |         let (embedder_name, embedder, quantized) = | ||||||
|             Self::embedder(index_scheduler, index, embedder_name, vector_len)?; |             Self::embedder(index_scheduler, index_uid, index, embedder_name, vector_len)?; | ||||||
|         Ok(Self::Hybrid { embedder_name, embedder, quantized, semantic_ratio }) |         Ok(Self::Hybrid { embedder_name, embedder, quantized, semantic_ratio }) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     pub(crate) fn embedder( |     pub(crate) fn embedder( | ||||||
|         index_scheduler: &index_scheduler::IndexScheduler, |         index_scheduler: &index_scheduler::IndexScheduler, | ||||||
|  |         index_uid: String, | ||||||
|         index: &Index, |         index: &Index, | ||||||
|         embedder_name: &str, |         embedder_name: &str, | ||||||
|         vector_len: Option<usize>, |         vector_len: Option<usize>, | ||||||
|     ) -> Result<(String, Arc<Embedder>, bool), ResponseError> { |     ) -> Result<(String, Arc<Embedder>, bool), ResponseError> { | ||||||
|         let embedder_configs = index.embedding_configs(&index.read_txn()?)?; |         let embedder_configs = index.embedding_configs(&index.read_txn()?)?; | ||||||
|         let embedders = index_scheduler.embedders(embedder_configs)?; |         let embedders = index_scheduler.embedders(index_uid, embedder_configs)?; | ||||||
|  |  | ||||||
|         let (embedder, _, quantized) = embedders |         let (embedder, _, quantized) = embedders | ||||||
|             .get(embedder_name) |             .get(embedder_name) | ||||||
| @@ -890,6 +895,7 @@ fn prepare_search<'t>( | |||||||
| } | } | ||||||
|  |  | ||||||
| pub fn perform_search( | pub fn perform_search( | ||||||
|  |     index_uid: String, | ||||||
|     index: &Index, |     index: &Index, | ||||||
|     query: SearchQuery, |     query: SearchQuery, | ||||||
|     search_kind: SearchKind, |     search_kind: SearchKind, | ||||||
| @@ -916,7 +922,7 @@ pub fn perform_search( | |||||||
|             used_negative_operator, |             used_negative_operator, | ||||||
|         }, |         }, | ||||||
|         semantic_hit_count, |         semantic_hit_count, | ||||||
|     ) = search_from_kind(search_kind, search)?; |     ) = search_from_kind(index_uid, search_kind, search)?; | ||||||
|  |  | ||||||
|     let SearchQuery { |     let SearchQuery { | ||||||
|         q, |         q, | ||||||
| @@ -1069,17 +1075,27 @@ fn compute_facet_distribution_stats<S: AsRef<str>>( | |||||||
| } | } | ||||||
|  |  | ||||||
| pub fn search_from_kind( | pub fn search_from_kind( | ||||||
|  |     index_uid: String, | ||||||
|     search_kind: SearchKind, |     search_kind: SearchKind, | ||||||
|     search: milli::Search<'_>, |     search: milli::Search<'_>, | ||||||
| ) -> Result<(milli::SearchResult, Option<u32>), MeilisearchHttpError> { | ) -> Result<(milli::SearchResult, Option<u32>), MeilisearchHttpError> { | ||||||
|     let (milli_result, semantic_hit_count) = match &search_kind { |     let (milli_result, semantic_hit_count) = match &search_kind { | ||||||
|         SearchKind::KeywordOnly => (search.execute()?, None), |         SearchKind::KeywordOnly => { | ||||||
|  |             let results = search | ||||||
|  |                 .execute() | ||||||
|  |                 .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; | ||||||
|  |             (results, None) | ||||||
|  |         } | ||||||
|         SearchKind::SemanticOnly { .. } => { |         SearchKind::SemanticOnly { .. } => { | ||||||
|             let results = search.execute()?; |             let results = search | ||||||
|  |                 .execute() | ||||||
|  |                 .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid.to_string())))?; | ||||||
|             let semantic_hit_count = results.document_scores.len() as u32; |             let semantic_hit_count = results.document_scores.len() as u32; | ||||||
|             (results, Some(semantic_hit_count)) |             (results, Some(semantic_hit_count)) | ||||||
|         } |         } | ||||||
|         SearchKind::Hybrid { semantic_ratio, .. } => search.execute_hybrid(*semantic_ratio)?, |         SearchKind::Hybrid { semantic_ratio, .. } => search | ||||||
|  |             .execute_hybrid(*semantic_ratio) | ||||||
|  |             .map_err(|e| MeilisearchHttpError::from_milli(e, Some(index_uid)))?, | ||||||
|     }; |     }; | ||||||
|     Ok((milli_result, semantic_hit_count)) |     Ok((milli_result, semantic_hit_count)) | ||||||
| } | } | ||||||
| @@ -1181,7 +1197,7 @@ impl<'a> HitMaker<'a> { | |||||||
|         rtxn: &'a RoTxn<'a>, |         rtxn: &'a RoTxn<'a>, | ||||||
|         format: AttributesFormat, |         format: AttributesFormat, | ||||||
|         mut formatter_builder: MatcherBuilder<'a>, |         mut formatter_builder: MatcherBuilder<'a>, | ||||||
|     ) -> Result<Self, MeilisearchHttpError> { |     ) -> milli::Result<Self> { | ||||||
|         formatter_builder.crop_marker(format.crop_marker); |         formatter_builder.crop_marker(format.crop_marker); | ||||||
|         formatter_builder.highlight_prefix(format.highlight_pre_tag); |         formatter_builder.highlight_prefix(format.highlight_pre_tag); | ||||||
|         formatter_builder.highlight_suffix(format.highlight_post_tag); |         formatter_builder.highlight_suffix(format.highlight_post_tag); | ||||||
| @@ -1276,11 +1292,7 @@ impl<'a> HitMaker<'a> { | |||||||
|         }) |         }) | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     pub fn make_hit( |     pub fn make_hit(&self, id: u32, score: &[ScoreDetails]) -> milli::Result<SearchHit> { | ||||||
|         &self, |  | ||||||
|         id: u32, |  | ||||||
|         score: &[ScoreDetails], |  | ||||||
|     ) -> Result<SearchHit, MeilisearchHttpError> { |  | ||||||
|         let (_, obkv) = |         let (_, obkv) = | ||||||
|             self.index.iter_documents(self.rtxn, std::iter::once(id))?.next().unwrap()?; |             self.index.iter_documents(self.rtxn, std::iter::once(id))?.next().unwrap()?; | ||||||
|  |  | ||||||
| @@ -1323,7 +1335,10 @@ impl<'a> HitMaker<'a> { | |||||||
|                     .is_some_and(|conf| conf.user_provided.contains(id)); |                     .is_some_and(|conf| conf.user_provided.contains(id)); | ||||||
|                 let embeddings = |                 let embeddings = | ||||||
|                     ExplicitVectors { embeddings: Some(vector.into()), regenerate: !user_provided }; |                     ExplicitVectors { embeddings: Some(vector.into()), regenerate: !user_provided }; | ||||||
|                 vectors.insert(name, serde_json::to_value(embeddings)?); |                 vectors.insert( | ||||||
|  |                     name, | ||||||
|  |                     serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?, | ||||||
|  |                 ); | ||||||
|             } |             } | ||||||
|             document.insert("_vectors".into(), vectors.into()); |             document.insert("_vectors".into(), vectors.into()); | ||||||
|         } |         } | ||||||
| @@ -1369,7 +1384,7 @@ fn make_hits<'a>( | |||||||
|     format: AttributesFormat, |     format: AttributesFormat, | ||||||
|     matching_words: milli::MatchingWords, |     matching_words: milli::MatchingWords, | ||||||
|     documents_ids_scores: impl Iterator<Item = (u32, &'a Vec<ScoreDetails>)> + 'a, |     documents_ids_scores: impl Iterator<Item = (u32, &'a Vec<ScoreDetails>)> + 'a, | ||||||
| ) -> Result<Vec<SearchHit>, MeilisearchHttpError> { | ) -> milli::Result<Vec<SearchHit>> { | ||||||
|     let mut documents = Vec::new(); |     let mut documents = Vec::new(); | ||||||
|  |  | ||||||
|     let dictionary = index.dictionary(rtxn)?; |     let dictionary = index.dictionary(rtxn)?; | ||||||
| @@ -1697,12 +1712,12 @@ fn make_document( | |||||||
|     displayed_attributes: &BTreeSet<FieldId>, |     displayed_attributes: &BTreeSet<FieldId>, | ||||||
|     field_ids_map: &FieldsIdsMap, |     field_ids_map: &FieldsIdsMap, | ||||||
|     obkv: &obkv::KvReaderU16, |     obkv: &obkv::KvReaderU16, | ||||||
| ) -> Result<Document, MeilisearchHttpError> { | ) -> milli::Result<Document> { | ||||||
|     let mut document = serde_json::Map::new(); |     let mut document = serde_json::Map::new(); | ||||||
|  |  | ||||||
|     // recreate the original json |     // recreate the original json | ||||||
|     for (key, value) in obkv.iter() { |     for (key, value) in obkv.iter() { | ||||||
|         let value = serde_json::from_slice(value)?; |         let value = serde_json::from_slice(value).map_err(InternalError::SerdeJson)?; | ||||||
|         let key = field_ids_map.name(key).expect("Missing field name").to_string(); |         let key = field_ids_map.name(key).expect("Missing field name").to_string(); | ||||||
|  |  | ||||||
|         document.insert(key, value); |         document.insert(key, value); | ||||||
| @@ -1727,7 +1742,7 @@ fn format_fields( | |||||||
|     displayable_ids: &BTreeSet<FieldId>, |     displayable_ids: &BTreeSet<FieldId>, | ||||||
|     locales: Option<&[Language]>, |     locales: Option<&[Language]>, | ||||||
|     localized_attributes: &[LocalizedAttributesRule], |     localized_attributes: &[LocalizedAttributesRule], | ||||||
| ) -> Result<(Option<MatchesPosition>, Document), MeilisearchHttpError> { | ) -> milli::Result<(Option<MatchesPosition>, Document)> { | ||||||
|     let mut matches_position = compute_matches.then(BTreeMap::new); |     let mut matches_position = compute_matches.then(BTreeMap::new); | ||||||
|     let mut document = document.clone(); |     let mut document = document.clone(); | ||||||
|  |  | ||||||
| @@ -1905,7 +1920,7 @@ fn parse_filter_array(arr: &[Value]) -> Result<Option<Filter>, MeilisearchHttpEr | |||||||
|         } |         } | ||||||
|     } |     } | ||||||
|  |  | ||||||
|     Ok(Filter::from_array(ands)?) |     Filter::from_array(ands).map_err(|e| MeilisearchHttpError::from_milli(e, None)) | ||||||
| } | } | ||||||
|  |  | ||||||
| #[cfg(test)] | #[cfg(test)] | ||||||
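Several helpers in the search/mod.rs hunk above (`HitMaker::new`, `make_hit`, `make_hits`, `make_document`, `format_fields`) now return `milli::Result<T>` instead of an HTTP-level result, which is why `serde_json` failures are mapped explicitly through `InternalError::SerdeJson` rather than converted implicitly. A simplified sketch of that mapping with invented types (`serde_json` is the only real dependency used here):

```rust
use serde_json::Value;

// Invented stand-ins for the engine-side error types.
#[derive(Debug)]
enum InternalError {
    SerdeJson(serde_json::Error),
}

#[derive(Debug)]
enum EngineError {
    Internal(InternalError),
}

impl From<InternalError> for EngineError {
    fn from(e: InternalError) -> Self {
        EngineError::Internal(e)
    }
}

type EngineResult<T> = Result<T, EngineError>;

fn embeddings_to_value(embeddings: &[f32]) -> EngineResult<Value> {
    // `map_err(InternalError::SerdeJson)` mirrors the diff; `?` then lifts the
    // internal error into the engine-level error via the `From` impl above.
    let value = serde_json::to_value(embeddings).map_err(InternalError::SerdeJson)?;
    Ok(value)
}

fn main() {
    let v = embeddings_to_value(&[0.1, 0.2]).unwrap();
    println!("{v}");
}
```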
|   | |||||||
| @@ -1681,7 +1681,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "The `_geo` field in the document with the id: `\"11\"` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", |         "message": "Index `test`: The `_geo` field in the document with the id: `\"11\"` is not an object. Was expecting an object with the `_geo.lat` and `_geo.lng` fields but instead got `\"foobar\"`.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -1719,7 +1719,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", |         "message": "Index `test`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -1757,7 +1757,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", |         "message": "Index `test`: Could not find latitude nor longitude in the document with the id: `\"11\"`. Was expecting `_geo.lat` and `_geo.lng` fields.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -1795,7 +1795,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", |         "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -1833,7 +1833,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", |         "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -1871,7 +1871,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", |         "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -1909,7 +1909,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", |         "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -1947,7 +1947,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `false` and `true`.", |         "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `false` and `true`.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -1985,7 +1985,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", |         "message": "Index `test`: Could not find longitude in the document with the id: `\"11\"`. Was expecting a `_geo.lng` field.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -2023,7 +2023,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", |         "message": "Index `test`: Could not find latitude in the document with the id: `\"11\"`. Was expecting a `_geo.lat` field.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -2061,7 +2061,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", |         "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"11\"`. Was expecting finite numbers but instead got `\"doggo\"` and `\"doggo\"`.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -2099,7 +2099,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "The `_geo` field in the document with the id: `\"11\"` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", |         "message": "Index `test`: The `_geo` field in the document with the id: `\"11\"` contains the following unexpected fields: `{\"doggo\":\"are the best\"}`.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -2138,7 +2138,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not parse longitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", |         "message": "Index `test`: Could not parse longitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -2175,7 +2175,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not parse latitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", |         "message": "Index `test`: Could not parse latitude in the document with the id: `\"12\"`. Was expecting a finite number but instead got `null`.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -2212,7 +2212,7 @@ async fn add_documents_invalid_geo_field() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not parse latitude nor longitude in the document with the id: `\"13\"`. Was expecting finite numbers but instead got `null` and `null`.", |         "message": "Index `test`: Could not parse latitude nor longitude in the document with the id: `\"13\"`. Was expecting finite numbers but instead got `null` and `null`.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
| @@ -2279,7 +2279,7 @@ async fn add_invalid_geo_and_then_settings() { | |||||||
|         ] |         ] | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", |         "message": "Index `test`: Could not parse latitude in the document with the id: `\"11\"`. Was expecting a finite number but instead got `null`.", | ||||||
|         "code": "invalid_document_geo_field", |         "code": "invalid_document_geo_field", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" |         "link": "https://docs.meilisearch.com/errors#invalid_document_geo_field" | ||||||
|   | |||||||
| @@ -604,7 +604,7 @@ async fn delete_document_by_filter() { | |||||||
|         "originalFilter": "\"doggo = bernese\"" |         "originalFilter": "\"doggo = bernese\"" | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", |         "message": "Index `EMPTY_INDEX`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", | ||||||
|         "code": "invalid_document_filter", |         "code": "invalid_document_filter", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_filter" |         "link": "https://docs.meilisearch.com/errors#invalid_document_filter" | ||||||
| @@ -636,7 +636,7 @@ async fn delete_document_by_filter() { | |||||||
|         "originalFilter": "\"catto = jorts\"" |         "originalFilter": "\"catto = jorts\"" | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Attribute `catto` is not filterable. Available filterable attributes are: `id`, `title`.\n1:6 catto = jorts", |         "message": "Index `SHARED_DOCUMENTS`: Attribute `catto` is not filterable. Available filterable attributes are: `id`, `title`.\n1:6 catto = jorts", | ||||||
|         "code": "invalid_document_filter", |         "code": "invalid_document_filter", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_filter" |         "link": "https://docs.meilisearch.com/errors#invalid_document_filter" | ||||||
|   | |||||||
| @@ -95,7 +95,7 @@ async fn error_update_existing_primary_key() { | |||||||
|     let response = index.wait_task(2).await; |     let response = index.wait_task(2).await; | ||||||
|  |  | ||||||
|     let expected_response = json!({ |     let expected_response = json!({ | ||||||
|         "message": "Index already has a primary key: `id`.", |         "message": "Index `test`: Index already has a primary key: `id`.", | ||||||
|         "code": "index_primary_key_already_exists", |         "code": "index_primary_key_already_exists", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists" |         "link": "https://docs.meilisearch.com/errors#index_primary_key_already_exists" | ||||||
|   | |||||||
| @@ -711,7 +711,7 @@ async fn filter_invalid_attribute_array() { | |||||||
|     index.wait_task(task.uid()).await; |     index.wait_task(task.uid()).await; | ||||||
|  |  | ||||||
|     let expected_response = json!({ |     let expected_response = json!({ | ||||||
|         "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", |         "message": format!("Index `{}`: Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", index.uid), | ||||||
|         "code": "invalid_search_filter", |         "code": "invalid_search_filter", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_search_filter" |         "link": "https://docs.meilisearch.com/errors#invalid_search_filter" | ||||||
| @@ -733,7 +733,7 @@ async fn filter_invalid_attribute_string() { | |||||||
|     index.wait_task(task.uid()).await; |     index.wait_task(task.uid()).await; | ||||||
|  |  | ||||||
|     let expected_response = json!({ |     let expected_response = json!({ | ||||||
|         "message": "Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", |         "message": format!("Index `{}`: Attribute `many` is not filterable. Available filterable attributes are: `title`.\n1:5 many = Glass", index.uid), | ||||||
|         "code": "invalid_search_filter", |         "code": "invalid_search_filter", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_search_filter" |         "link": "https://docs.meilisearch.com/errors#invalid_search_filter" | ||||||
| @@ -940,7 +940,7 @@ async fn sort_unsortable_attribute() { | |||||||
|     index.wait_task(response.uid()).await.succeeded(); |     index.wait_task(response.uid()).await.succeeded(); | ||||||
|  |  | ||||||
|     let expected_response = json!({ |     let expected_response = json!({ | ||||||
|         "message": "Attribute `title` is not sortable. Available sortable attributes are: `id`.", |         "message": format!("Index `{}`: Attribute `title` is not sortable. Available sortable attributes are: `id`.", index.uid), | ||||||
|         "code": "invalid_search_sort", |         "code": "invalid_search_sort", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_search_sort" |         "link": "https://docs.meilisearch.com/errors#invalid_search_sort" | ||||||
| @@ -998,7 +998,7 @@ async fn sort_unset_ranking_rule() { | |||||||
|     index.wait_task(response.uid()).await.succeeded(); |     index.wait_task(response.uid()).await.succeeded(); | ||||||
|  |  | ||||||
|     let expected_response = json!({ |     let expected_response = json!({ | ||||||
|         "message": "You must specify where `sort` is listed in the rankingRules setting to use the sort parameter at search time.", |         "message": format!("Index `{}`: You must specify where `sort` is listed in the rankingRules setting to use the sort parameter at search time.", index.uid), | ||||||
|         "code": "invalid_search_sort", |         "code": "invalid_search_sort", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_search_sort" |         "link": "https://docs.meilisearch.com/errors#invalid_search_sort" | ||||||
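The search error tests above switch their expected bodies from hard-coded strings to `json!` values whose `message` is built with `format!(..., index.uid)`, so the assertion follows whichever index the test created. A small self-contained illustration of that assertion style (the uid, message, and code below are made up; `serde_json` is the only real crate used):

```rust
use serde_json::json;

fn main() {
    let index_uid = "test";

    // Pretend this body came back from the server.
    let response = json!({
        "message": format!("Index `{index_uid}`: Attribute `doggo` is not filterable."),
        "code": "invalid_search_filter",
    });

    // Expected value built the same way the updated tests build theirs:
    // the index uid is interpolated into the expected message.
    let expected = json!({
        "message": format!("Index `{}`: Attribute `doggo` is not filterable.", index_uid),
        "code": "invalid_search_filter",
    });

    assert_eq!(response, expected);
}
```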
| @@ -1024,19 +1024,18 @@ async fn search_on_unknown_field() { | |||||||
|         index.update_settings_searchable_attributes(json!(["id", "title"])).await; |         index.update_settings_searchable_attributes(json!(["id", "title"])).await; | ||||||
|     index.wait_task(response.uid()).await.succeeded(); |     index.wait_task(response.uid()).await.succeeded(); | ||||||
|  |  | ||||||
|  |     let expected_response = json!({ | ||||||
|  |         "message": format!("Index `{}`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", index.uid), | ||||||
|  |         "code": "invalid_search_attributes_to_search_on", | ||||||
|  |         "type": "invalid_request", | ||||||
|  |         "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" | ||||||
|  |     }); | ||||||
|     index |     index | ||||||
|         .search( |         .search( | ||||||
|             json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown"]}), |             json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown"]}), | ||||||
|             |response, code| { |             |response, code| { | ||||||
|                 snapshot!(code, @"400 Bad Request"); |                 assert_eq!(response, expected_response); | ||||||
|                 snapshot!(json_string!(response), @r###" |                 assert_eq!(code, 400); | ||||||
|                 { |  | ||||||
|                   "message": "Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", |  | ||||||
|                   "code": "invalid_search_attributes_to_search_on", |  | ||||||
|                   "type": "invalid_request", |  | ||||||
|                   "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" |  | ||||||
|                 } |  | ||||||
|                 "###); |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         .await; |         .await; | ||||||
| @@ -1050,19 +1049,18 @@ async fn search_on_unknown_field_plus_joker() { | |||||||
|         index.update_settings_searchable_attributes(json!(["id", "title"])).await; |         index.update_settings_searchable_attributes(json!(["id", "title"])).await; | ||||||
|     index.wait_task(response.uid()).await.succeeded(); |     index.wait_task(response.uid()).await.succeeded(); | ||||||
|  |  | ||||||
|  |     let expected_response = json!({ | ||||||
|  |         "message": format!("Index `{}`: Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", index.uid), | ||||||
|  |         "code": "invalid_search_attributes_to_search_on", | ||||||
|  |         "type": "invalid_request", | ||||||
|  |         "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" | ||||||
|  |     }); | ||||||
|     index |     index | ||||||
|         .search( |         .search( | ||||||
|             json!({"q": "Captain Marvel", "attributesToSearchOn": ["*", "unknown"]}), |             json!({"q": "Captain Marvel", "attributesToSearchOn": ["*", "unknown"]}), | ||||||
|             |response, code| { |             |response, code| { | ||||||
|                 snapshot!(code, @"400 Bad Request"); |                 assert_eq!(response, expected_response); | ||||||
|                 snapshot!(json_string!(response), @r###" |                 assert_eq!(code, 400); | ||||||
|                 { |  | ||||||
|                   "message": "Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", |  | ||||||
|                   "code": "invalid_search_attributes_to_search_on", |  | ||||||
|                   "type": "invalid_request", |  | ||||||
|                   "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" |  | ||||||
|                 } |  | ||||||
|                 "###); |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         .await; |         .await; | ||||||
| @@ -1071,15 +1069,8 @@ async fn search_on_unknown_field_plus_joker() { | |||||||
|         .search( |         .search( | ||||||
|             json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown", "*"]}), |             json!({"q": "Captain Marvel", "attributesToSearchOn": ["unknown", "*"]}), | ||||||
|             |response, code| { |             |response, code| { | ||||||
|                 snapshot!(code, @"400 Bad Request"); |                 assert_eq!(response, expected_response); | ||||||
|                 snapshot!(json_string!(response), @r###" |                 assert_eq!(code, 400); | ||||||
|                 { |  | ||||||
|                   "message": "Attribute `unknown` is not searchable. Available searchable attributes are: `id, title`.", |  | ||||||
|                   "code": "invalid_search_attributes_to_search_on", |  | ||||||
|                   "type": "invalid_request", |  | ||||||
|                   "link": "https://docs.meilisearch.com/errors#invalid_search_attributes_to_search_on" |  | ||||||
|                 } |  | ||||||
|                 "###); |  | ||||||
|             }, |             }, | ||||||
|         ) |         ) | ||||||
|         .await; |         .await; | ||||||
| @@ -1092,47 +1083,44 @@ async fn distinct_at_search_time() { | |||||||
|     let (task, _) = index.create(None).await; |     let (task, _) = index.create(None).await; | ||||||
|     index.wait_task(task.uid()).await.succeeded(); |     index.wait_task(task.uid()).await.succeeded(); | ||||||
|  |  | ||||||
|  |     let expected_response = json!({ | ||||||
|  |         "message": format!("Index `{}`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.", index.uid), | ||||||
|  |         "code": "invalid_search_distinct", | ||||||
|  |         "type": "invalid_request", | ||||||
|  |         "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" | ||||||
|  |     }); | ||||||
|     let (response, code) = |     let (response, code) = | ||||||
|         index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await; |         index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await; | ||||||
|     snapshot!(code, @"400 Bad Request"); |     assert_eq!(response, expected_response); | ||||||
|     snapshot!(response, @r###" |     assert_eq!(code, 400); | ||||||
|     { |  | ||||||
|       "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.", |  | ||||||
|       "code": "invalid_search_distinct", |  | ||||||
|       "type": "invalid_request", |  | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" |  | ||||||
|     } |  | ||||||
|     "###); |  | ||||||
|  |  | ||||||
|     let (task, _) = index.update_settings_filterable_attributes(json!(["color", "machin"])).await; |     let (task, _) = index.update_settings_filterable_attributes(json!(["color", "machin"])).await; | ||||||
|     index.wait_task(task.uid()).await; |     index.wait_task(task.uid()).await; | ||||||
|  |  | ||||||
|  |     let expected_response = json!({ | ||||||
|  |         "message": format!("Index `{}`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, machin`.", index.uid), | ||||||
|  |         "code": "invalid_search_distinct", | ||||||
|  |         "type": "invalid_request", | ||||||
|  |         "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" | ||||||
|  |     }); | ||||||
|     let (response, code) = |     let (response, code) = | ||||||
|         index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await; |         index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await; | ||||||
|     snapshot!(code, @"400 Bad Request"); |     assert_eq!(response, expected_response); | ||||||
|     snapshot!(response, @r###" |     assert_eq!(code, 400); | ||||||
|     { |  | ||||||
|       "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, machin`.", |  | ||||||
|       "code": "invalid_search_distinct", |  | ||||||
|       "type": "invalid_request", |  | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" |  | ||||||
|     } |  | ||||||
|     "###); |  | ||||||
|  |  | ||||||
|     let (task, _) = index.update_settings_displayed_attributes(json!(["color"])).await; |     let (task, _) = index.update_settings_displayed_attributes(json!(["color"])).await; | ||||||
|     index.wait_task(task.uid()).await; |     index.wait_task(task.uid()).await; | ||||||
|  |  | ||||||
|  |     let expected_response = json!({ | ||||||
|  |         "message": format!("Index `{}`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, <..hidden-attributes>`.", index.uid), | ||||||
|  |         "code": "invalid_search_distinct", | ||||||
|  |         "type": "invalid_request", | ||||||
|  |         "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" | ||||||
|  |     }); | ||||||
|     let (response, code) = |     let (response, code) = | ||||||
|         index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await; |         index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": "doggo.truc"})).await; | ||||||
|     snapshot!(code, @"400 Bad Request"); |     assert_eq!(response, expected_response); | ||||||
|     snapshot!(response, @r###" |     assert_eq!(code, 400); | ||||||
|     { |  | ||||||
|       "message": "Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. Available filterable attributes are: `color, <..hidden-attributes>`.", |  | ||||||
|       "code": "invalid_search_distinct", |  | ||||||
|       "type": "invalid_request", |  | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_distinct" |  | ||||||
|     } |  | ||||||
|     "###); |  | ||||||
|  |  | ||||||
|     let (response, code) = |     let (response, code) = | ||||||
|         index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": true})).await; |         index.search_post(json!({"page": 0, "hitsPerPage": 2, "distinct": true})).await; | ||||||
|   | |||||||
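The hunks above all follow the same updated pattern: the test builds the expected error body up front with `json!`, using `format!` so the new `Index `…`:` prefix can include the test's index uid, and then compares the raw `(response, code)` pair with `assert_eq!` rather than an inline snapshot. A minimal standalone sketch of that pattern follows; `index_uid`, `response`, and `code` are stand-ins for values the real test harness provides, and none of this is the repository's harness code.

```rust
// Illustrative sketch only: the expected_response / assert_eq! pattern used
// in the updated tests, reproduced outside the real meilisearch test harness.
use serde_json::json;

fn main() {
    // Stand-in for the uid the harness assigns to the test index.
    let index_uid = "test";

    // What the updated tests build before issuing the request: the message
    // now carries the index uid via format!, the rest of the body is fixed.
    let expected_response = json!({
        "message": format!(
            "Index `{}`: Attribute `doggo.truc` is not filterable and thus, cannot be used as distinct attribute. This index does not have configured filterable attributes.",
            index_uid
        ),
        "code": "invalid_search_distinct",
        "type": "invalid_request",
        "link": "https://docs.meilisearch.com/errors#invalid_search_distinct"
    });

    // Stand-ins for the (response, code) pair returned by the search call.
    let response = expected_response.clone();
    let code = 400;

    // The new assertions compare the whole JSON body and the status code directly.
    assert_eq!(response, expected_response);
    assert_eq!(code, 400);
}
```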
| @@ -1070,7 +1070,7 @@ async fn federation_one_query_error() { | |||||||
|     snapshot!(code, @"400 Bad Request"); |     snapshot!(code, @"400 Bad Request"); | ||||||
|     snapshot!(json_string!(response), @r###" |     snapshot!(json_string!(response), @r###" | ||||||
|     { |     { | ||||||
|       "message": "Inside `.queries[1]`: Attribute `title` is not filterable. This index does not have configured filterable attributes.\n1:6 title = toto", |       "message": "Inside `.queries[1]`: Index `nested`: Attribute `title` is not filterable. This index does not have configured filterable attributes.\n1:6 title = toto", | ||||||
|       "code": "invalid_search_filter", |       "code": "invalid_search_filter", | ||||||
|       "type": "invalid_request", |       "type": "invalid_request", | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_filter" |       "link": "https://docs.meilisearch.com/errors#invalid_search_filter" | ||||||
| @@ -1102,7 +1102,7 @@ async fn federation_one_query_sort_error() { | |||||||
|     snapshot!(code, @"400 Bad Request"); |     snapshot!(code, @"400 Bad Request"); | ||||||
|     snapshot!(json_string!(response), @r###" |     snapshot!(json_string!(response), @r###" | ||||||
|     { |     { | ||||||
|       "message": "Inside `.queries[1]`: Attribute `doggos` is not sortable. This index does not have configured sortable attributes.", |       "message": "Inside `.queries[1]`: Index `nested`: Attribute `doggos` is not sortable. This index does not have configured sortable attributes.", | ||||||
|       "code": "invalid_search_sort", |       "code": "invalid_search_sort", | ||||||
|       "type": "invalid_request", |       "type": "invalid_request", | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_sort" |       "link": "https://docs.meilisearch.com/errors#invalid_search_sort" | ||||||
| @@ -1166,7 +1166,7 @@ async fn federation_multiple_query_errors() { | |||||||
|     snapshot!(code, @"400 Bad Request"); |     snapshot!(code, @"400 Bad Request"); | ||||||
|     snapshot!(json_string!(response), @r###" |     snapshot!(json_string!(response), @r###" | ||||||
|     { |     { | ||||||
|       "message": "Inside `.queries[0]`: Attribute `title` is not filterable. This index does not have configured filterable attributes.\n1:6 title = toto", |       "message": "Inside `.queries[0]`: Index `test`: Attribute `title` is not filterable. This index does not have configured filterable attributes.\n1:6 title = toto", | ||||||
|       "code": "invalid_search_filter", |       "code": "invalid_search_filter", | ||||||
|       "type": "invalid_request", |       "type": "invalid_request", | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_filter" |       "link": "https://docs.meilisearch.com/errors#invalid_search_filter" | ||||||
| @@ -1198,7 +1198,7 @@ async fn federation_multiple_query_sort_errors() { | |||||||
|     snapshot!(code, @"400 Bad Request"); |     snapshot!(code, @"400 Bad Request"); | ||||||
|     snapshot!(json_string!(response), @r###" |     snapshot!(json_string!(response), @r###" | ||||||
|     { |     { | ||||||
|       "message": "Inside `.queries[0]`: Attribute `title` is not sortable. This index does not have configured sortable attributes.", |       "message": "Inside `.queries[0]`: Index `test`: Attribute `title` is not sortable. This index does not have configured sortable attributes.", | ||||||
|       "code": "invalid_search_sort", |       "code": "invalid_search_sort", | ||||||
|       "type": "invalid_request", |       "type": "invalid_request", | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_sort" |       "link": "https://docs.meilisearch.com/errors#invalid_search_sort" | ||||||
| @@ -1231,7 +1231,7 @@ async fn federation_multiple_query_errors_interleaved() { | |||||||
|     snapshot!(code, @"400 Bad Request"); |     snapshot!(code, @"400 Bad Request"); | ||||||
|     snapshot!(json_string!(response), @r###" |     snapshot!(json_string!(response), @r###" | ||||||
|     { |     { | ||||||
|       "message": "Inside `.queries[1]`: Attribute `doggos` is not filterable. This index does not have configured filterable attributes.\n1:7 doggos IN [intel, kefir]", |       "message": "Inside `.queries[1]`: Index `nested`: Attribute `doggos` is not filterable. This index does not have configured filterable attributes.\n1:7 doggos IN [intel, kefir]", | ||||||
|       "code": "invalid_search_filter", |       "code": "invalid_search_filter", | ||||||
|       "type": "invalid_request", |       "type": "invalid_request", | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_filter" |       "link": "https://docs.meilisearch.com/errors#invalid_search_filter" | ||||||
| @@ -1264,7 +1264,7 @@ async fn federation_multiple_query_sort_errors_interleaved() { | |||||||
|     snapshot!(code, @"400 Bad Request"); |     snapshot!(code, @"400 Bad Request"); | ||||||
|     snapshot!(json_string!(response), @r###" |     snapshot!(json_string!(response), @r###" | ||||||
|     { |     { | ||||||
|       "message": "Inside `.queries[1]`: Attribute `doggos` is not sortable. This index does not have configured sortable attributes.", |       "message": "Inside `.queries[1]`: Index `nested`: Attribute `doggos` is not sortable. This index does not have configured sortable attributes.", | ||||||
|       "code": "invalid_search_sort", |       "code": "invalid_search_sort", | ||||||
|       "type": "invalid_request", |       "type": "invalid_request", | ||||||
|       "link": "https://docs.meilisearch.com/errors#invalid_search_sort" |       "link": "https://docs.meilisearch.com/errors#invalid_search_sort" | ||||||
|   | |||||||
| @@ -448,7 +448,7 @@ async fn test_summarized_delete_documents_by_filter() { | |||||||
|         "originalFilter": "\"doggo = bernese\"" |         "originalFilter": "\"doggo = bernese\"" | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", |         "message": "Index `test`: Attribute `doggo` is not filterable. This index does not have configured filterable attributes.\n1:6 doggo = bernese", | ||||||
|         "code": "invalid_document_filter", |         "code": "invalid_document_filter", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_document_filter" |         "link": "https://docs.meilisearch.com/errors#invalid_document_filter" | ||||||
|   | |||||||
| @@ -318,7 +318,7 @@ async fn try_to_disable_binary_quantization() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "`.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.", |         "message": "Index `doggo`: `.embedders.manual.binaryQuantized`: Cannot disable the binary quantization.\n - Note: Binary quantization is a lossy operation that cannot be reverted.\n - Hint: Add a new embedder that is non-quantized and regenerate the vectors.", | ||||||
|         "code": "invalid_settings_embedders", |         "code": "invalid_settings_embedders", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_settings_embedders" |         "link": "https://docs.meilisearch.com/errors#invalid_settings_embedders" | ||||||
|   | |||||||
| @@ -250,7 +250,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Missing field `._vectors.manual.regenerate`\n  - note: `._vectors.manual` must be an array of floats, an array of arrays of floats, or an object with field `regenerate`", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Missing field `._vectors.manual.regenerate`\n  - note: `._vectors.manual` must be an array of floats, an array of arrays of floats, or an object with field `regenerate`", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -280,7 +280,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Missing field `._vectors.manual.regenerate`\n  - note: `._vectors.manual` must be an array of floats, an array of arrays of floats, or an object with field `regenerate`", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Missing field `._vectors.manual.regenerate`\n  - note: `._vectors.manual` must be an array of floats, an array of arrays of floats, or an object with field `regenerate`", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -311,7 +311,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Could not parse `._vectors.manual.regenerate`: invalid type: string \"yes please\", expected a boolean at line 1 column 26", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Could not parse `._vectors.manual.regenerate`: invalid type: string \"yes please\", expected a boolean at line 1 column 26", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -340,7 +340,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings`: expected null or an array, but found a boolean: `true`", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings`: expected null or an array, but found a boolean: `true`", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -369,7 +369,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[0]`: expected a number or an array, but found a boolean: `true`", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[0]`: expected a number or an array, but found a boolean: `true`", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -398,7 +398,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[0][0]`: expected a number, but found a boolean: `true`", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[0][0]`: expected a number, but found a boolean: `true`", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -440,7 +440,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[1]`: expected a number, but found an array: `[0.2,0.3]`", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[1]`: expected a number, but found an array: `[0.2,0.3]`", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -469,7 +469,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[1]`: expected an array, but found a number: `0.3`", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[1]`: expected an array, but found a number: `0.3`", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -498,7 +498,7 @@ async fn user_provided_embeddings_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[0][1]`: expected a number, but found a boolean: `true`", |         "message": "Index `doggo`: Bad embedder configuration in the document with id: `0`. Invalid value type at `._vectors.manual.embeddings[0][1]`: expected a number, but found a boolean: `true`", | ||||||
|         "code": "invalid_vectors_type", |         "code": "invalid_vectors_type", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" |         "link": "https://docs.meilisearch.com/errors#invalid_vectors_type" | ||||||
| @@ -539,7 +539,7 @@ async fn user_provided_vectors_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "While embedding documents for embedder `manual`: no vectors provided for document `40` and at least 4 other document(s)\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: opt-out for a document with `_vectors.manual: null`", |         "message": "Index `doggo`: While embedding documents for embedder `manual`: no vectors provided for document `40` and at least 4 other document(s)\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: opt-out for a document with `_vectors.manual: null`", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -569,7 +569,7 @@ async fn user_provided_vectors_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "While embedding documents for embedder `manual`: no vectors provided for document `42`\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: try replacing `_vector` by `_vectors` in 1 document(s).", |         "message": "Index `doggo`: While embedding documents for embedder `manual`: no vectors provided for document `42`\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: try replacing `_vector` by `_vectors` in 1 document(s).", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -599,7 +599,7 @@ async fn user_provided_vectors_error() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "While embedding documents for embedder `manual`: no vectors provided for document `42`\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: try replacing `_vectors.manaul` by `_vectors.manual` in 1 document(s).", |         "message": "Index `doggo`: While embedding documents for embedder `manual`: no vectors provided for document `42`\n- Note: `manual` has `source: userProvided`, so documents must provide embeddings as an array in `_vectors.manual`.\n- Hint: try replacing `_vectors.manaul` by `_vectors.manual` in 1 document(s).", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
|   | |||||||
| @@ -713,7 +713,7 @@ async fn bad_api_key() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "While embedding documents for embedder `default`: user error: could not authenticate against OpenAI server\n  - server replied with `{\"error\":{\"message\":\"Incorrect API key provided: Bearer doggo. You can find your API key at https://platform.openai.com/account/api-keys.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\":\"invalid_api_key\"}}`\n  - Hint: Check the `apiKey` parameter in the embedder configuration, and the `MEILI_OPENAI_API_KEY` and `OPENAI_API_KEY` environment variables", |         "message": "Index `doggo`: While embedding documents for embedder `default`: user error: could not authenticate against OpenAI server\n  - server replied with `{\"error\":{\"message\":\"Incorrect API key provided: Bearer doggo. You can find your API key at https://platform.openai.com/account/api-keys.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\":\"invalid_api_key\"}}`\n  - Hint: Check the `apiKey` parameter in the embedder configuration, and the `MEILI_OPENAI_API_KEY` and `OPENAI_API_KEY` environment variables", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -757,7 +757,7 @@ async fn bad_api_key() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "While embedding documents for embedder `default`: user error: could not authenticate against OpenAI server\n  - server replied with `{\"error\":{\"message\":\"You didn't provide an API key. You need to provide your API key in an Authorization header using Bearer auth (i.e. Authorization: Bearer YOUR_KEY), or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\":null}}`\n  - Hint: Check the `apiKey` parameter in the embedder configuration, and the `MEILI_OPENAI_API_KEY` and `OPENAI_API_KEY` environment variables", |         "message": "Index `doggo`: While embedding documents for embedder `default`: user error: could not authenticate against OpenAI server\n  - server replied with `{\"error\":{\"message\":\"You didn't provide an API key. You need to provide your API key in an Authorization header using Bearer auth (i.e. Authorization: Bearer YOUR_KEY), or as the password field (with blank username) if you're accessing the API from your browser and are prompted for a username and password. You can obtain an API key from https://platform.openai.com/account/api-keys.\",\"type\":\"invalid_request_error\",\"param\":null,\"code\":null}}`\n  - Hint: Check the `apiKey` parameter in the embedder configuration, and the `MEILI_OPENAI_API_KEY` and `OPENAI_API_KEY` environment variables", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
|   | |||||||
| @@ -985,7 +985,7 @@ async fn bad_settings() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response`, while extracting a single \"{{embedding}}\", expected `response` to be an array of numbers, but failed to parse server response:\n  - invalid type: map, expected a sequence", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response`, while extracting a single \"{{embedding}}\", expected `response` to be an array of numbers, but failed to parse server response:\n  - invalid type: map, expected a sequence", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1025,7 +1025,7 @@ async fn bad_settings() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "While embedding documents for embedder `rest`: runtime error: was expecting embeddings of dimension `2`, got embeddings of dimensions `3`", |         "message": "Index `doggo`: While embedding documents for embedder `rest`: runtime error: was expecting embeddings of dimension `2`, got embeddings of dimensions `3`", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1178,7 +1178,7 @@ async fn server_returns_bad_request() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with user error: sent a bad request to embedding server\n  - Hint: check that the `request` in the embedder configuration matches the remote server's API\n  - server replied with `{\"error\":\"Invalid request: invalid type: string \\\"test\\\", expected struct MultipleRequest at line 1 column 6\"}`", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with user error: sent a bad request to embedding server\n  - Hint: check that the `request` in the embedder configuration matches the remote server's API\n  - server replied with `{\"error\":\"Invalid request: invalid type: string \\\"test\\\", expected struct MultipleRequest at line 1 column 6\"}`", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1247,7 +1247,7 @@ async fn server_returns_bad_request() { | |||||||
|         "indexedDocuments": 0 |         "indexedDocuments": 0 | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "While embedding documents for embedder `rest`: user error: sent a bad request to embedding server\n  - Hint: check that the `request` in the embedder configuration matches the remote server's API\n  - server replied with `{\"error\":\"Invalid request: invalid type: string \\\"name: kefir\\\\n\\\", expected struct MultipleRequest at line 1 column 15\"}`", |         "message": "Index `doggo`: While embedding documents for embedder `rest`: user error: sent a bad request to embedding server\n  - Hint: check that the `request` in the embedder configuration matches the remote server's API\n  - server replied with `{\"error\":\"Invalid request: invalid type: string \\\"name: kefir\\\\n\\\", expected struct MultipleRequest at line 1 column 15\"}`", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1306,7 +1306,7 @@ async fn server_returns_bad_response() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response`, while extracting the array of \"{{embedding}}\"s, configuration expects `response` to be an array with at least 1 item(s) but server sent an object with 1 field(s)", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response`, while extracting the array of \"{{embedding}}\"s, configuration expects `response` to be an array with at least 1 item(s) but server sent an object with 1 field(s)", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1362,7 +1362,7 @@ async fn server_returns_bad_response() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response`, while extracting item #0 from the array of \"{{embedding}}\"s, expected `response` to be an array of numbers, but failed to parse server response:\n  - invalid type: map, expected a sequence", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response`, while extracting item #0 from the array of \"{{embedding}}\"s, expected `response` to be an array of numbers, but failed to parse server response:\n  - invalid type: map, expected a sequence", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1414,7 +1414,7 @@ async fn server_returns_bad_response() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response.output`, while extracting a single \"{{embedding}}\", expected `output` to be an array of numbers, but failed to parse server response:\n  - invalid type: map, expected f32", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response.output`, while extracting a single \"{{embedding}}\", expected `output` to be an array of numbers, but failed to parse server response:\n  - invalid type: map, expected f32", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1478,7 +1478,7 @@ async fn server_returns_bad_response() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response.embedding`, while extracting item #0 from the array of \"{{embedding}}\"s, configuration expects `embedding` to be an object with key `data` but server sent an array of size 3", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response.embedding`, while extracting item #0 from the array of \"{{embedding}}\"s, configuration expects `embedding` to be an object with key `data` but server sent an array of size 3", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1542,7 +1542,7 @@ async fn server_returns_bad_response() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response.output[0]`, while extracting a single \"{{embedding}}\", configuration expects key \"embeddings\", which is missing in response\n  - Hint: item #0 inside `output` has key `embedding`, did you mean `response.output[0].embedding` in embedder configuration?", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with runtime error: error extracting embeddings from the response:\n  - in `response.output[0]`, while extracting a single \"{{embedding}}\", configuration expects key \"embeddings\", which is missing in response\n  - Hint: item #0 inside `output` has key `embedding`, did you mean `response.output[0].embedding` in embedder configuration?", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1908,7 +1908,7 @@ async fn server_custom_header() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with user error: could not authenticate against embedding server\n  - server replied with `{\"error\":\"missing header 'my-nonstandard-auth'\"}`\n  - Hint: Check the `apiKey` parameter in the embedder configuration", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with user error: could not authenticate against embedding server\n  - server replied with `{\"error\":\"missing header 'my-nonstandard-auth'\"}`\n  - Hint: Check the `apiKey` parameter in the embedder configuration", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -1951,7 +1951,7 @@ async fn server_custom_header() { | |||||||
|         } |         } | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with user error: could not authenticate against embedding server\n  - server replied with `{\"error\":\"thou shall not pass, Balrog\"}`\n  - Hint: Check the `apiKey` parameter in the embedder configuration", |         "message": "Index `doggo`: Error while generating embeddings: runtime error: could not determine model dimensions:\n  - test embedding failed with user error: could not authenticate against embedding server\n  - server replied with `{\"error\":\"thou shall not pass, Balrog\"}`\n  - Hint: Check the `apiKey` parameter in the embedder configuration", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
| @@ -2099,7 +2099,7 @@ async fn searchable_reindex() { | |||||||
|         ] |         ] | ||||||
|       }, |       }, | ||||||
|       "error": { |       "error": { | ||||||
|         "message": "While embedding documents for embedder `rest`: error: received unexpected HTTP 404 from embedding server\n  - server replied with `{\"error\":\"text not found\",\"text\":\"breed: patou\\n\"}`", |         "message": "Index `doggo`: While embedding documents for embedder `rest`: error: received unexpected HTTP 404 from embedding server\n  - server replied with `{\"error\":\"text not found\",\"text\":\"breed: patou\\n\"}`", | ||||||
|         "code": "vector_embedding_error", |         "code": "vector_embedding_error", | ||||||
|         "type": "invalid_request", |         "type": "invalid_request", | ||||||
|         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" |         "link": "https://docs.meilisearch.com/errors#vector_embedding_error" | ||||||
|   | |||||||
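Across the snapshots above, the only change to each error message is the new `Index `<uid>`:` prefix identifying which index the failure came from. Below is a hypothetical, self-contained sketch of how such a prefix can be attached when an error is rendered; the `IndexedError` type is illustrative only and is not the repository's error type.

```rust
// Hypothetical sketch: attach an optional index name to an error message so
// it renders as "Index `doggo`: <inner message>", matching the prefixes
// visible in the updated snapshots. Not the repository's actual types.
use std::fmt;

#[derive(Debug)]
struct IndexedError {
    index_uid: Option<String>,
    inner: String,
}

impl fmt::Display for IndexedError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match &self.index_uid {
            Some(uid) => write!(f, "Index `{uid}`: {}", self.inner),
            None => write!(f, "{}", self.inner),
        }
    }
}

fn main() {
    let err = IndexedError {
        index_uid: Some("doggo".to_string()),
        inner: "While embedding documents for embedder `manual`: no vectors provided for document `42`".to_string(),
    };
    // Prints: Index `doggo`: While embedding documents for embedder `manual`: ...
    println!("{err}");
}
```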