Compare commits

...

5 Commits

SHA1 Message Date
5ce4d5f552 Fix result logic 2024-04-11 18:54:03 +02:00
9cef8ec087 add prompt and context 2024-04-10 09:43:33 +02:00
f505fa4ae8 Add recommendation route 2024-04-09 12:30:24 +02:00
b4deb9b8db filtered_universe accepts index and txn instead of SearchContext 2024-04-09 12:03:03 +02:00
7476ad6599 Add error codes 2024-04-09 12:02:07 +02:00
14 changed files with 602 additions and 11 deletions

View File

@@ -245,6 +245,9 @@ InvalidSearchCropMarker , InvalidRequest , BAD_REQUEST ;
InvalidSearchFacets , InvalidRequest , BAD_REQUEST ;
InvalidSearchSemanticRatio , InvalidRequest , BAD_REQUEST ;
InvalidFacetSearchFacetName , InvalidRequest , BAD_REQUEST ;
InvalidRecommendContext , InvalidRequest , BAD_REQUEST ;
InvalidRecommendId , InvalidRequest , BAD_REQUEST ;
InvalidRecommendPrompt , InvalidRequest , BAD_REQUEST ;
InvalidSearchFilter , InvalidRequest , BAD_REQUEST ;
InvalidSearchHighlightPostTag , InvalidRequest , BAD_REQUEST ;
InvalidSearchHighlightPreTag , InvalidRequest , BAD_REQUEST ;
@@ -308,6 +311,8 @@ MissingFacetSearchFacetName , InvalidRequest , BAD_REQUEST ;
MissingIndexUid , InvalidRequest , BAD_REQUEST ;
MissingMasterKey , Auth , UNAUTHORIZED ;
MissingPayload , InvalidRequest , BAD_REQUEST ;
MissingPrompt , InvalidRequest , BAD_REQUEST ;
MissingPromptOrId , InvalidRequest , BAD_REQUEST ;
MissingSearchHybrid , InvalidRequest , BAD_REQUEST ;
MissingSwapIndexes , InvalidRequest , BAD_REQUEST ;
MissingTaskFilters , InvalidRequest , BAD_REQUEST ;

View File

@@ -23,6 +23,8 @@ pub enum MeilisearchHttpError {
InvalidContentType(String, Vec<String>),
#[error("Document `{0}` not found.")]
DocumentNotFound(String),
#[error("Document `{0}` not found.")]
InvalidDocumentId(String),
#[error("Sending an empty filter is forbidden.")]
EmptyFilter,
#[error("Invalid syntax for the filter parameter: `expected {}, found: {1}`.", .0.join(", "))]
@@ -59,6 +61,10 @@ pub enum MeilisearchHttpError {
Join(#[from] JoinError),
#[error("Invalid request: missing `hybrid` parameter when both `q` and `vector` are present.")]
MissingSearchHybrid,
#[error("Invalid request: `prompt` parameter is required when `context` is present.")]
RecommendMissingPrompt,
#[error("Invalid request: one of the `prompt` or `id` parameters is required.")]
RecommendMissingPromptOrId,
}
impl ErrorCode for MeilisearchHttpError {
@@ -70,6 +76,7 @@ impl ErrorCode for MeilisearchHttpError {
MeilisearchHttpError::MissingPayload(_) => Code::MissingPayload,
MeilisearchHttpError::InvalidContentType(_, _) => Code::InvalidContentType,
MeilisearchHttpError::DocumentNotFound(_) => Code::DocumentNotFound,
MeilisearchHttpError::InvalidDocumentId(_) => Code::InvalidDocumentId,
MeilisearchHttpError::EmptyFilter => Code::InvalidDocumentFilter,
MeilisearchHttpError::InvalidExpression(_, _) => Code::InvalidSearchFilter,
MeilisearchHttpError::PayloadTooLarge(_) => Code::PayloadTooLarge,
@@ -86,6 +93,8 @@ impl ErrorCode for MeilisearchHttpError {
MeilisearchHttpError::DocumentFormat(e) => e.error_code(),
MeilisearchHttpError::Join(_) => Code::Internal,
MeilisearchHttpError::MissingSearchHybrid => Code::MissingSearchHybrid,
MeilisearchHttpError::RecommendMissingPrompt => Code::MissingPrompt,
MeilisearchHttpError::RecommendMissingPromptOrId => Code::MissingPromptOrId,
}
}
}

View File

@@ -27,6 +27,7 @@ use crate::Opt;
pub mod documents;
pub mod facet_search;
pub mod recommend;
pub mod search;
pub mod settings;
@@ -48,6 +49,7 @@ pub fn configure(cfg: &mut web::ServiceConfig) {
.service(web::scope("/documents").configure(documents::configure))
.service(web::scope("/search").configure(search::configure))
.service(web::scope("/facet-search").configure(facet_search::configure))
.service(web::scope("/recommend").configure(recommend::configure))
.service(web::scope("/settings").configure(settings::configure)),
);
}

View File

@@ -0,0 +1,53 @@
use actix_web::web::{self, Data};
use actix_web::{HttpRequest, HttpResponse};
use deserr::actix_web::AwebJson;
use index_scheduler::IndexScheduler;
use meilisearch_types::deserr::DeserrJsonError;
use meilisearch_types::error::ResponseError;
use meilisearch_types::index_uid::IndexUid;
use meilisearch_types::keys::actions;
use tracing::debug;
use super::ActionPolicy;
use crate::analytics::Analytics;
use crate::extractors::authentication::GuardedData;
use crate::extractors::sequential_extractor::SeqHandler;
use crate::search::{perform_recommend, RecommendQuery, SearchKind};
pub fn configure(cfg: &mut web::ServiceConfig) {
cfg.service(web::resource("").route(web::post().to(SeqHandler(recommend))));
}
pub async fn recommend(
index_scheduler: GuardedData<ActionPolicy<{ actions::SEARCH }>, Data<IndexScheduler>>,
index_uid: web::Path<String>,
params: AwebJson<RecommendQuery, DeserrJsonError>,
_req: HttpRequest,
_analytics: web::Data<dyn Analytics>,
) -> Result<HttpResponse, ResponseError> {
let index_uid = IndexUid::try_from(index_uid.into_inner())?;
// TODO analytics
let query = params.into_inner();
debug!(parameters = ?query, "Recommend post");
let index = index_scheduler.index(&index_uid)?;
let features = index_scheduler.features();
features.check_vector("Using the recommend API.")?;
let (embedder_name, embedder) =
SearchKind::embedder(&index_scheduler, &index, query.embedder.as_deref(), None)?;
let recommendations = tokio::task::spawn_blocking(move || {
perform_recommend(&index, query, embedder_name, embedder)
})
.await?;
let recommendations = recommendations?;
debug!(returns = ?recommendations, "Recommend post");
Ok(HttpResponse::Ok().json(recommendations))
}
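For illustration, a hypothetical request body this handler would accept, using the `RecommendQuery` fields defined later in this compare (the document id, field values, and endpoint path are assumptions, not part of the diff):

use serde_json::json;

// Hypothetical payload for POST /indexes/{index_uid}/recommend.
// Either `id` or `prompt` must be present, and `context` additionally
// requires `prompt`, as perform_recommend enforces below.
let body = json!({
    "id": "movie-287947",
    "prompt": "movies for a rainy sunday",
    "context": { "mood": "cozy" },
    "limit": 10,
    "showRankingScore": true
});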

View File

@@ -126,7 +126,7 @@ impl SearchKind {
Ok(Self::Hybrid { embedder_name, embedder, semantic_ratio })
}
fn embedder(
pub(crate) fn embedder(
index_scheduler: &index_scheduler::IndexScheduler,
index: &Index,
embedder_name: Option<&str>,
@@ -312,6 +312,32 @@ impl SearchQueryWithIndex {
}
}
#[derive(Debug, Clone, Default, PartialEq, Deserr)]
#[deserr(error = DeserrJsonError, rename_all = camelCase, deny_unknown_fields)]
pub struct RecommendQuery {
#[deserr(default, error = DeserrJsonError<InvalidRecommendId>)]
pub id: Option<String>,
#[deserr(default = DEFAULT_SEARCH_OFFSET(), error = DeserrJsonError<InvalidSearchOffset>)]
pub offset: usize,
#[deserr(default = DEFAULT_SEARCH_LIMIT(), error = DeserrJsonError<InvalidSearchLimit>)]
pub limit: usize,
#[deserr(default, error = DeserrJsonError<InvalidSearchFilter>)]
pub filter: Option<Value>,
#[deserr(default, error = DeserrJsonError<InvalidEmbedder>)]
pub embedder: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidSearchAttributesToRetrieve>)]
pub attributes_to_retrieve: Option<BTreeSet<String>>,
#[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScore>)]
pub show_ranking_score: bool,
#[deserr(default, error = DeserrJsonError<InvalidSearchShowRankingScoreDetails>)]
pub show_ranking_score_details: bool,
#[deserr(default, error = DeserrJsonError<InvalidRecommendPrompt>)]
pub prompt: Option<String>,
#[deserr(default, error = DeserrJsonError<InvalidRecommendContext>)]
pub context: Option<Value>,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Deserr)]
#[deserr(rename_all = camelCase)]
pub enum MatchingStrategy {
@@ -393,6 +419,17 @@ pub struct SearchResult {
pub used_negative_operator: bool,
}
#[derive(Serialize, Debug, Clone, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct RecommendResult {
pub hits: Vec<SearchHit>,
pub id: Option<String>,
pub prompt: Option<String>,
pub processing_time_ms: u128,
#[serde(flatten)]
pub hits_info: HitsInfo,
}
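Serialized with the camelCase rename above, a `RecommendResult` could look like this sketch (values invented; the flattened `hits_info` contributes the top-level `limit`, `offset`, and `estimatedTotalHits` fields for the offset/limit variant used by `perform_recommend`):

use serde_json::json;

// Illustrative response shape only; hit fields depend on the index.
let response = json!({
    "hits": [{ "title": "Dune", "_rankingScore": 0.87 }],
    "id": "movie-287947",
    "prompt": "movies for a rainy sunday",
    "processingTimeMs": 12,
    "limit": 10,
    "offset": 0,
    "estimatedTotalHits": 42
});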
#[derive(Serialize, Debug, Clone, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct SearchResultWithIndex {
@@ -796,6 +833,153 @@ pub fn perform_facet_search(
})
}
pub fn perform_recommend(
index: &Index,
query: RecommendQuery,
embedder_name: String,
embedder: Arc<Embedder>,
) -> Result<RecommendResult, MeilisearchHttpError> {
let before_search = Instant::now();
let rtxn = index.read_txn()?;
let internal_id = query
.id
.as_deref()
.map(|id| -> Result<_, MeilisearchHttpError> {
Ok(index
.external_documents_ids()
.get(&rtxn, id)?
.ok_or_else(|| MeilisearchHttpError::DocumentNotFound(id.to_owned()))?)
})
.transpose()?;
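// Dispatch on (prompt, id, context): an id alone is a pure nearest-neighbors
// lookup; any prompt goes through template rendering, optionally seeded by the
// target document and the caller-supplied context; a context without a prompt
// is rejected, as is a query with neither prompt nor id.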
let mut recommend = match (query.prompt.as_deref(), internal_id, query.context) {
(None, Some(internal_id), None) => milli::Recommend::with_docid(
internal_id,
query.offset,
query.limit,
index,
&rtxn,
embedder_name,
embedder,
),
(Some(prompt), internal_id, context) => milli::Recommend::with_prompt(
prompt,
internal_id,
context,
query.offset,
query.limit,
index,
&rtxn,
embedder_name,
embedder,
),
(None, _, Some(_)) => return Err(MeilisearchHttpError::RecommendMissingPrompt.into()),
(None, None, None) => return Err(MeilisearchHttpError::RecommendMissingPromptOrId.into()),
};
if let Some(ref filter) = query.filter {
if let Some(facets) = parse_filter(filter)? {
recommend.filter(facets);
}
}
let milli::SearchResult {
documents_ids,
matching_words: _,
candidates,
document_scores,
degraded: _,
used_negative_operator: _,
} = recommend.execute()?;
let fields_ids_map = index.fields_ids_map(&rtxn).unwrap();
let displayed_ids = index
.displayed_fields_ids(&rtxn)?
.map(|fields| fields.into_iter().collect::<BTreeSet<_>>())
.unwrap_or_else(|| fields_ids_map.iter().map(|(id, _)| id).collect());
let fids = |attrs: &BTreeSet<String>| {
let mut ids = BTreeSet::new();
for attr in attrs {
if attr == "*" {
ids = displayed_ids.clone();
break;
}
if let Some(id) = fields_ids_map.id(attr) {
ids.insert(id);
}
}
ids
};
// The attributes to retrieve are the ones explicitly marked as to retrieve (all by default),
// but these attributes must also be present
// - in the fields_ids_map
// - in the displayed attributes
let to_retrieve_ids: BTreeSet<_> = query
.attributes_to_retrieve
.as_ref()
.map(fids)
.unwrap_or_else(|| displayed_ids.clone())
.intersection(&displayed_ids)
.cloned()
.collect();
let mut documents = Vec::new();
let documents_iter = index.documents(&rtxn, documents_ids)?;
for ((_id, obkv), score) in documents_iter.into_iter().zip(document_scores.into_iter()) {
// First generate a document with all the displayed fields
let displayed_document = make_document(&displayed_ids, &fields_ids_map, obkv)?;
// select the attributes to retrieve
let attributes_to_retrieve = to_retrieve_ids
.iter()
.map(|&fid| fields_ids_map.name(fid).expect("Missing field name"));
let document =
permissive_json_pointer::select_values(&displayed_document, attributes_to_retrieve);
let ranking_score =
query.show_ranking_score.then(|| ScoreDetails::global_score(score.iter()));
let ranking_score_details =
query.show_ranking_score_details.then(|| ScoreDetails::to_json_map(score.iter()));
let hit = SearchHit {
document,
formatted: Default::default(),
matches_position: None,
ranking_score_details,
ranking_score,
};
documents.push(hit);
}
let max_total_hits = index
.pagination_max_total_hits(&rtxn)
.map_err(milli::Error::from)?
.map(|x| x as usize)
.unwrap_or(DEFAULT_PAGINATION_MAX_TOTAL_HITS);
let number_of_hits = min(candidates.len() as usize, max_total_hits);
let hits_info = HitsInfo::OffsetLimit {
limit: query.limit,
offset: query.offset,
estimated_total_hits: number_of_hits,
};
let result = RecommendResult {
hits: documents,
hits_info,
id: query.id,
prompt: query.prompt,
processing_time_ms: before_search.elapsed().as_millis(),
};
Ok(result)
}
fn insert_geo_distance(sorts: &[String], document: &mut Document) {
lazy_static::lazy_static! {
static ref GEO_REGEX: Regex =

View File

@@ -49,7 +49,7 @@ fn main() -> Result<(), Box<dyn Error>> {
let start = Instant::now();
let mut ctx = SearchContext::new(&index, &txn);
let universe = filtered_universe(&ctx, &None)?;
let universe = filtered_universe(ctx.index, ctx.txn, &None)?;
let docs = execute_search(
&mut ctx,

View File

@@ -59,6 +59,7 @@ pub use self::heed_codec::{
};
pub use self::index::Index;
pub use self::search::facet::{FacetValueHit, SearchForFacetValues};
pub use self::search::recommend::Recommend;
pub use self::search::{
FacetDistribution, Filter, FormatOptions, MatchBounds, MatcherBuilder, MatchingWords, OrderBy,
Search, SearchResult, SemanticSearch, TermsMatchingStrategy, DEFAULT_VALUES_PER_FACET,

View File

@@ -29,7 +29,7 @@ impl ParsedValue {
}
impl<'a> Document<'a> {
pub fn new(
pub fn from_deladd_obkv(
data: obkv::KvReaderU16<'a>,
side: DelAdd,
inverted_field_map: &'a FieldsIdsMap,
@@ -48,6 +48,20 @@ impl<'a> Document<'a> {
Self(out_data)
}
pub fn from_doc_obkv(
data: obkv::KvReaderU16<'a>,
inverted_field_map: &'a FieldsIdsMap,
) -> Self {
let mut out_data = BTreeMap::new();
for (fid, raw) in data {
let Some(name) = inverted_field_map.name(fid) else {
continue;
};
out_data.insert(name, (raw, ParsedValue::empty()));
}
Self(out_data)
}
fn is_empty(&self) -> bool {
self.0.is_empty()
}
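A quick sketch of when each constructor applies (variable names hypothetical): prompt rendering during indexing reads del/add obkvs, while the recommend path introduced in this compare reads stored documents, which carry no sides.

// Indexing side: the obkv stores deletion/addition variants per field.
let old_doc = Document::from_deladd_obkv(deladd_kv, DelAdd::Deletion, &fields_ids_map);
// Recommend side: a stored document is a plain obkv.
let doc = Document::from_doc_obkv(stored_kv, &fields_ids_map);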

View File

@@ -2,6 +2,7 @@ mod context;
mod document;
pub(crate) mod error;
mod fields;
pub mod recommend;
mod template_checker;
use std::convert::TryFrom;
@@ -9,7 +10,7 @@ use std::convert::TryFrom;
use error::{NewPromptError, RenderPromptError};
use self::context::Context;
use self::document::Document;
pub use self::document::Document;
use crate::update::del_add::DelAdd;
use crate::FieldsIdsMap;
@@ -95,7 +96,7 @@ impl Prompt {
side: DelAdd,
field_id_map: &FieldsIdsMap,
) -> Result<String, RenderPromptError> {
let document = Document::new(document, side, field_id_map);
let document = Document::from_deladd_obkv(document, side, field_id_map);
let context = Context::new(&document, field_id_map);
self.template.render(&context).map_err(RenderPromptError::missing_context)

View File

@@ -0,0 +1,112 @@
use liquid::model::{
DisplayCow, KStringCow, ObjectRender, ObjectSource, State, Value as LiquidValue,
};
use liquid::{ObjectView, ValueView};
use super::document::Document;
#[derive(Clone, Debug)]
pub struct Context<'a> {
document: Option<&'a Document<'a>>,
context: Option<liquid::Object>,
}
impl<'a> Context<'a> {
pub fn new(document: Option<&'a Document<'a>>, context: Option<serde_json::Value>) -> Self {
// FIXME: don't unwrap; fail gracefully when `context` is not a JSON object
let context = context.map(|context| liquid::to_object(&context).unwrap());
Self { document, context }
}
}
impl<'a> ObjectView for Context<'a> {
fn as_value(&self) -> &dyn ValueView {
self
}
fn size(&self) -> i64 {
match (self.context.as_ref(), self.document.as_ref()) {
(None, None) => 0,
(None, Some(_)) => 1,
(Some(_), None) => 1,
(Some(_), Some(_)) => 2,
}
}
fn keys<'k>(&'k self) -> Box<dyn Iterator<Item = KStringCow<'k>> + 'k> {
let keys = match (self.context.as_ref(), self.document.as_ref()) {
(None, None) => [].as_slice(),
(None, Some(_)) => ["doc"].as_slice(),
(Some(_), None) => ["context"].as_slice(),
(Some(_), Some(_)) => ["context", "doc"].as_slice(),
};
Box::new(keys.iter().map(|s| KStringCow::from_static(s)))
}
fn values<'k>(&'k self) -> Box<dyn Iterator<Item = &'k dyn ValueView> + 'k> {
Box::new(
self.context
.as_ref()
.map(|context| context.as_value())
.into_iter()
.chain(self.document.map(|document| document.as_value()).into_iter()),
)
}
fn iter<'k>(&'k self) -> Box<dyn Iterator<Item = (KStringCow<'k>, &'k dyn ValueView)> + 'k> {
Box::new(self.keys().zip(self.values()))
}
fn contains_key(&self, index: &str) -> bool {
index == "context" || index == "doc"
}
fn get<'s>(&'s self, index: &str) -> Option<&'s dyn ValueView> {
match index {
"context" => self.context.as_ref().map(|context| context.as_value()),
"doc" => self.document.as_ref().map(|doc| doc.as_value()),
_ => None,
}
}
}
impl<'a> ValueView for Context<'a> {
fn as_debug(&self) -> &dyn std::fmt::Debug {
self
}
fn render(&self) -> liquid::model::DisplayCow<'_> {
DisplayCow::Owned(Box::new(ObjectRender::new(self)))
}
fn source(&self) -> liquid::model::DisplayCow<'_> {
DisplayCow::Owned(Box::new(ObjectSource::new(self)))
}
fn type_name(&self) -> &'static str {
"object"
}
fn query_state(&self, state: liquid::model::State) -> bool {
match state {
State::Truthy => true,
State::DefaultValue | State::Empty | State::Blank => false,
}
}
fn to_kstr(&self) -> liquid::model::KStringCow<'_> {
let s = ObjectRender::new(self).to_string();
KStringCow::from_string(s)
}
fn to_value(&self) -> LiquidValue {
LiquidValue::Object(
self.iter().map(|(k, x)| (k.to_string().into(), x.to_value())).collect(),
)
}
fn as_object(&self) -> Option<&dyn ObjectView> {
Some(self)
}
}
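To make the `doc`/`context` split concrete, here is a minimal sketch of a template this `Context` could serve as globals for (template text and field names are invented; the actual parser setup lives in `Recommend::execute` below):

use liquid::ParserBuilder;

// `doc` resolves to the seed document (when an `id` was given) and
// `context` to the caller-supplied JSON; both keys are optional, matching
// the size()/keys() implementations above.
let template = ParserBuilder::with_stdlib()
    .build()
    .expect("parser")
    .parse("Similar to {{ doc.title }}, for fans of {{ context.genre }}")
    .expect("template");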

View File

@@ -24,6 +24,7 @@ pub mod facet;
mod fst_utils;
pub mod hybrid;
pub mod new;
pub mod recommend;
#[derive(Debug, Clone)]
pub struct SemanticSearch {
@@ -148,7 +149,7 @@ impl<'a> Search<'a> {
pub fn execute_for_candidates(&self, has_vector_search: bool) -> Result<RoaringBitmap> {
if has_vector_search {
let ctx = SearchContext::new(self.index, self.rtxn);
filtered_universe(&ctx, &self.filter)
filtered_universe(ctx.index, ctx.txn, &self.filter)
} else {
Ok(self.execute()?.candidates)
}
@@ -161,7 +162,7 @@ impl<'a> Search<'a> {
ctx.searchable_attributes(searchable_attributes)?;
}
let universe = filtered_universe(&ctx, &self.filter)?;
let universe = filtered_universe(ctx.index, ctx.txn, &self.filter)?;
let PartialSearchResult {
located_query_terms,
candidates,

View File

@@ -507,7 +507,7 @@ mod tests {
impl<'a> MatcherBuilder<'a> {
fn new_test(rtxn: &'a heed::RoTxn, index: &'a TempIndex, query: &str) -> Self {
let mut ctx = SearchContext::new(index, rtxn);
let universe = filtered_universe(&ctx, &None).unwrap();
let universe = filtered_universe(ctx.index, ctx.txn, &None).unwrap();
let crate::search::PartialSearchResult { located_query_terms, .. } = execute_search(
&mut ctx,
Some(query),

View File

@@ -530,11 +530,15 @@ fn resolve_sort_criteria<'ctx, Query: RankingRuleQueryTrait>(
Ok(())
}
pub fn filtered_universe(ctx: &SearchContext, filters: &Option<Filter>) -> Result<RoaringBitmap> {
pub fn filtered_universe(
index: &Index,
txn: &RoTxn<'_>,
filters: &Option<Filter>,
) -> Result<RoaringBitmap> {
Ok(if let Some(filters) = filters {
filters.evaluate(ctx.txn, ctx.index)?
filters.evaluate(txn, index)?
} else {
ctx.index.documents_ids(ctx.txn)?
index.documents_ids(txn)?
})
}
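The point of the signature change is that computing the filtered universe no longer requires a `SearchContext`, so vector-only code paths can call it with just an index and a read transaction; a minimal sketch (assuming an `index` in scope):

// Keyword search keeps passing the context's own fields:
//     filtered_universe(ctx.index, ctx.txn, &self.filter)?
// while Recommend::execute, new in this compare, needs no context at all:
let rtxn = index.read_txn()?;
let universe = filtered_universe(&index, &rtxn, &None)?;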

View File

@@ -0,0 +1,205 @@
use std::sync::Arc;
use ordered_float::OrderedFloat;
use roaring::RoaringBitmap;
use serde_json::Value;
use crate::score_details::{self, ScoreDetails};
use crate::vector::Embedder;
use crate::{filtered_universe, DocumentId, Filter, Index, Result, SearchResult};
enum RecommendKind<'a> {
Id(DocumentId),
Prompt { prompt: &'a str, context: Option<Value>, id: Option<DocumentId> },
}
pub struct Recommend<'a> {
kind: RecommendKind<'a>,
// note: `Filter<'a>` borrows from the filter string carried by the query
filter: Option<Filter<'a>>,
offset: usize,
limit: usize,
rtxn: &'a heed::RoTxn<'a>,
index: &'a Index,
embedder_name: String,
embedder: Arc<Embedder>,
}
impl<'a> Recommend<'a> {
pub fn with_docid(
id: DocumentId,
offset: usize,
limit: usize,
index: &'a Index,
rtxn: &'a heed::RoTxn<'a>,
embedder_name: String,
embedder: Arc<Embedder>,
) -> Self {
Self {
kind: RecommendKind::Id(id),
filter: None,
offset,
limit,
rtxn,
index,
embedder_name,
embedder,
}
}
pub fn with_prompt(
prompt: &'a str,
id: Option<DocumentId>,
context: Option<Value>,
offset: usize,
limit: usize,
index: &'a Index,
rtxn: &'a heed::RoTxn<'a>,
embedder_name: String,
embedder: Arc<Embedder>,
) -> Self {
Self {
kind: RecommendKind::Prompt { prompt, context, id },
filter: None,
offset,
limit,
rtxn,
index,
embedder_name,
embedder,
}
}
pub fn filter(&mut self, filter: Filter<'a>) -> &mut Self {
self.filter = Some(filter);
self
}
pub fn execute(&self) -> Result<SearchResult> {
let universe = filtered_universe(self.index, self.rtxn, &self.filter)?;
let embedder_index =
self.index
.embedder_category_id
.get(self.rtxn, &self.embedder_name)?
.ok_or_else(|| crate::UserError::InvalidEmbedder(self.embedder_name.to_owned()))?;
let writer_index = (embedder_index as u16) << 8;
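// Each embedder owns a block of 256 arroy trees: the embedder's category id
// forms the high byte of the reader index, and `k` below selects the tree,
// stopping at the first tree with no metadata.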
let readers: std::result::Result<Vec<_>, _> = (0..=u8::MAX)
.map_while(|k| {
arroy::Reader::open(self.rtxn, writer_index | (k as u16), self.index.vector_arroy)
.map(Some)
.or_else(|e| match e {
arroy::Error::MissingMetadata => Ok(None),
e => Err(e),
})
.transpose()
})
.collect();
let readers = readers?;
let mut results = Vec::new();
// FIXME: make `id` optional; a prompt-only query currently panics on this unwrap
let id = match &self.kind {
RecommendKind::Id(id) => *id,
RecommendKind::Prompt { id, .. } => id.unwrap(),
};
let personalization_vector = if let RecommendKind::Prompt { prompt, context, id } =
&self.kind
{
let fields_ids_map = self.index.fields_ids_map(self.rtxn)?;
let document = if let Some(id) = id {
Some(self.index.iter_documents(self.rtxn, std::iter::once(*id))?.next().unwrap()?.1)
} else {
None
};
let document = document
.map(|document| crate::prompt::Document::from_doc_obkv(document, &fields_ids_map));
let context =
crate::prompt::recommend::Context::new(document.as_ref(), context.clone());
// FIXME: handle parse errors from a bad template instead of unwrapping
let template =
liquid::ParserBuilder::new().stdlib().build().unwrap().parse(prompt).unwrap();
// FIXME: handle render errors from a bad context instead of unwrapping
let rendered = template.render(&context).unwrap();
// FIXME: handle embedding errors instead of unwrapping
Some(self.embedder.embed_one(rendered).unwrap())
} else {
None
};
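// For each tree: seed the neighbor search with the target document's own
// vectors, then, when a prompt produced a personalization vector, re-rank
// those same candidates by proximity to that vector.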
for reader in readers.iter() {
let nns_by_item = reader.nns_by_item(
self.rtxn,
id,
self.limit + self.offset + 1,
None,
Some(&universe),
)?;
if let Some(nns_by_item) = nns_by_item {
let mut nns = match &personalization_vector {
Some(vector) => {
let candidates: RoaringBitmap =
nns_by_item.iter().map(|(docid, _)| docid).collect();
reader.nns_by_vector(
self.rtxn,
vector,
self.limit + self.offset + 1,
None,
Some(&candidates),
)?
}
None => nns_by_item,
};
results.append(&mut nns);
}
}
results.sort_unstable_by_key(|(_, distance)| OrderedFloat(*distance));
let mut documents_ids = Vec::with_capacity(self.limit);
let mut document_scores = Vec::with_capacity(self.limit);
// we fetched limit + offset + 1 neighbors so that, after skipping `offset`
// results and dropping the seed document (which is its own nearest neighbor),
// `limit` hits can remain
for (docid, distance) in results.into_iter().skip(self.offset) {
if documents_ids.len() == self.limit {
break;
}
if id == docid {
continue;
}
documents_ids.push(docid);
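// Convert the arroy distance into a similarity-style score, then apply the
// embedder's score distribution shift when one is configured.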
let score = 1.0 - distance;
let score = self
.embedder
.distribution()
.map(|distribution| distribution.shift(score))
.unwrap_or(score);
let score = ScoreDetails::Vector(score_details::Vector { similarity: Some(score) });
document_scores.push(vec![score]);
}
Ok(SearchResult {
matching_words: Default::default(),
candidates: universe,
documents_ids,
document_scores,
degraded: false,
used_negative_operator: false,
})
}
}
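Finally, a hedged sketch of driving `Recommend` directly, mirroring what `perform_recommend` does in the route layer (the index, embedder, and internal docid setup are elided and hypothetical):

let rtxn = index.read_txn()?;
let mut recommend = Recommend::with_prompt(
    "movies for a rainy sunday",                 // prompt
    Some(internal_docid),                        // optional seed document
    Some(serde_json::json!({ "mood": "cozy" })), // optional context
    0,                                           // offset
    20,                                          // limit
    &index,
    &rtxn,
    "default".to_string(),                       // embedder name
    embedder.clone(),
);
let SearchResult { documents_ids, document_scores, .. } = recommend.execute()?;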