feat: Improve the indexing time a little bit

...by a factor of 17.6x.
Clément Renault
2018-07-10 21:29:17 +02:00
parent a2152a4064
commit e5c54c4399
14 changed files with 497 additions and 1197 deletions
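Of the 14 changed files, six appear below. The visible part of the change deletes the bincode/serde-backed map.rs and introduces raptor/src/metadata.rs, which stores doc indexes as #[repr(C)] records that are written out as raw bytes and mapped straight back into memory. The following is a minimal sketch of that idea, with an illustrative record type and helper names that are not part of the crate; it assumes both sides share the same struct layout and endianness and that the buffer is suitably aligned:

use std::io::{self, Write};
use std::mem::size_of;
use std::slice::from_raw_parts;

use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};

// Illustrative plain-old-data record; the real crate stores `DocIndex` this way.
#[repr(C)]
#[derive(Debug, Copy, Clone)]
struct Record { document: u64, attribute: u32, attribute_index: u32 }

// Index time: a little-endian length header followed by the raw bytes of the
// records, similar in spirit to what `MetadataBuilder::into_inner` writes below.
fn write_records<W: Write>(mut wtr: W, records: &[Record]) -> io::Result<()> {
    wtr.write_u64::<LittleEndian>(records.len() as u64)?;
    let bytes = unsafe {
        from_raw_parts(records.as_ptr() as *const u8, records.len() * size_of::<Record>())
    };
    wtr.write_all(bytes)
}

// Query time: reinterpret the stored bytes as records, with no decoding pass.
// Safety: the caller guarantees the bytes come from `write_records` on the same
// target (same layout and endianness) and that the buffer is aligned for `Record`.
unsafe fn read_records(mut bytes: &[u8]) -> io::Result<&[Record]> {
    let len = bytes.read_u64::<LittleEndian>()? as usize;
    Ok(from_raw_parts(bytes.as_ptr() as *const Record, len))
}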


@@ -4,9 +4,7 @@ version = "0.1.0"
authors = ["Kerollmops <renault.cle@gmail.com>"]
[dependencies]
bincode = "1.0"
serde = "1.0"
serde_derive = "1.0"
byteorder = "1.2"
[dependencies.fst]
git = "https://github.com/Kerollmops/fst.git"
@@ -17,5 +15,8 @@ git = "https://github.com/Kerollmops/levenshtein-automata.git"
branch = "custom-fst"
features = ["fst_automaton"]
[dependencies.rocksdb]
git = "https://github.com/pingcap/rust-rocksdb.git"
[dependencies.group-by]
git = "https://github.com/Kerollmops/group-by.git"


@@ -1,28 +1,22 @@
#[macro_use] extern crate serde_derive;
extern crate bincode;
extern crate fst;
extern crate group_by;
extern crate levenshtein_automata;
extern crate serde;
extern crate byteorder;
extern crate rocksdb;
pub mod map;
pub mod rank;
mod levenshtein;
pub mod metadata;
pub mod levenshtein;
use std::path::Path;
use std::fs;
pub use self::map::{Map, MapBuilder, Values};
pub use self::map::{
OpBuilder, IndexedValues,
OpWithStateBuilder, IndexedValuesWithState,
pub use self::metadata::{
Metadata, MetadataBuilder,
StreamWithState, StreamWithStateBuilder,
UnionWithState, OpWithStateBuilder,
IndexedValuesWithState,
};
pub use self::rank::{RankedStream};
pub use self::levenshtein::LevBuilder;
pub type DocIndexMap = Map<DocIndex>;
pub type DocIndexMapBuilder = MapBuilder<DocIndex>;
pub type DocumentId = u64;
/// This structure represents the position of a word
@@ -30,7 +24,8 @@ pub type DocumentId = u64;
///
/// This is stored in the map, generated at index time,
/// extracted and interpreted at search time.
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[derive(Debug, Copy, Clone, Eq, PartialEq, PartialOrd, Ord, Hash)]
#[repr(C)]
pub struct DocIndex {
/// The document identifier where the word was found.
@@ -109,12 +104,3 @@ impl Match {
}
}
}
pub fn load_map<P, Q>(map: P, values: Q) -> fst::Result<DocIndexMap>
where P: AsRef<Path>, Q: AsRef<Path>,
{
let fst = fs::read(map)?;
let values = fs::read(values)?;
DocIndexMap::from_bytes(fst, &values)
}
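For callers, the `load_map` helper removed above is superseded by the `Metadata` type re-exported from the new `metadata` module (defined further down in this diff). A hedged before/after sketch, with placeholder file names:

extern crate raptor;

use std::error::Error;
use std::fs;

use raptor::Metadata;

// Before this commit, loading meant a full bincode deserialization up front:
//     let index: raptor::DocIndexMap = raptor::load_map("map.fst", "values.vecs")?;
fn open_index() -> Result<Metadata, Box<Error>> {
    // File names are placeholders; the crate does not impose any.
    let map = fs::read("metadata.map")?;
    let indexes = fs::read("metadata.idx")?;
    Ok(Metadata::from_bytes(map, indexes)?)
}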


@@ -1,404 +0,0 @@
use bincode;
use fst::{self, Automaton};
use serde::de::DeserializeOwned;
use serde::ser::Serialize;
use std::collections::BTreeMap;
use std::collections::btree_map::Entry;
use std::fs::File;
use std::io::{Write, BufReader};
use std::ops::Range;
use std::path::Path;
#[derive(Debug)]
pub struct Map<T> {
inner: fst::Map,
values: Values<T>,
}
impl<T> Map<T> {
pub unsafe fn from_paths<P, Q>(map: P, values: Q) -> fst::Result<Self>
where
T: DeserializeOwned,
P: AsRef<Path>,
Q: AsRef<Path>
{
let inner = fst::Map::from_path(map)?;
// TODO handle errors !!!
let values = File::open(values).unwrap();
let values = BufReader::new(values);
let values = bincode::deserialize_from(values).unwrap();
Ok(Self { inner, values })
}
pub fn from_bytes(map: Vec<u8>, values: &[u8]) -> fst::Result<Self>
where
T: DeserializeOwned
{
let inner = fst::Map::from_bytes(map)?;
let values = bincode::deserialize(values).unwrap();
Ok(Self { inner, values })
}
pub fn stream(&self) -> Stream<T> {
Stream {
inner: self.inner.stream(),
values: &self.values,
}
}
pub fn contains_key<K: AsRef<[u8]>>(&self, key: K) -> bool {
self.inner.contains_key(key)
}
pub fn get<K: AsRef<[u8]>>(&self, key: K) -> Option<&[T]> {
self.inner.get(key).map(|i| unsafe { self.values.get_unchecked(i as usize) })
}
pub fn search<A: Automaton>(&self, aut: A) -> StreamBuilder<T, A> {
StreamBuilder {
inner: self.inner.search(aut),
values: &self.values,
}
}
pub fn as_map(&self) -> &fst::Map {
&self.inner
}
pub fn values(&self) -> &Values<T> {
&self.values
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct Values<T> {
ranges: Box<[Range<u64>]>,
values: Box<[T]>,
}
impl<T> Values<T> {
fn new(raw: Vec<Vec<T>>) -> Self {
let cap = raw.len();
let mut ranges = Vec::with_capacity(cap);
let cap = raw.iter().map(Vec::len).sum();
let mut values = Vec::with_capacity(cap);
for mut v in &raw {
let len = v.len() as u64;
let start = ranges.last().map(|&Range { end, .. }| end).unwrap_or(0);
let range = Range { start, end: start + len };
ranges.push(range);
}
values.extend(raw.into_iter().flat_map(IntoIterator::into_iter));
let ranges = ranges.into_boxed_slice();
let values = values.into_boxed_slice();
Self { ranges, values }
}
pub unsafe fn get_unchecked(&self, index: usize) -> &[T] {
let range = self.ranges.get_unchecked(index);
let range = Range { start: range.start as usize, end: range.end as usize };
self.values.get_unchecked(range)
}
}
#[derive(Debug)]
pub struct MapBuilder<T> {
map: BTreeMap<String, u64>,
// This introduces many memory indirections, but it is only used
// at index time; it is not kept around for query time.
values: Vec<Vec<T>>,
}
impl<T> MapBuilder<T> {
pub fn new() -> Self {
Self {
map: BTreeMap::new(),
values: Vec::new(),
}
}
pub fn insert<S: Into<String>>(&mut self, key: S, value: T) {
let key = key.into();
match self.map.entry(key) {
Entry::Vacant(e) => {
self.values.push(vec![value]);
let index = (self.values.len() - 1) as u64;
e.insert(index);
},
Entry::Occupied(e) => {
let index = *e.get();
let values = &mut self.values[index as usize];
values.push(value);
},
}
}
pub fn build_in_memory(self) -> fst::Result<Map<T>> {
Ok(Map {
inner: fst::Map::from_iter(self.map)?,
values: Values::new(self.values),
})
}
pub fn build<W, X>(self, map_wrt: W, mut values_wrt: X) -> fst::Result<(W, X)>
where
T: Serialize,
W: Write,
X: Write
{
let mut builder = fst::MapBuilder::new(map_wrt)?;
builder.extend_iter(self.map)?;
let map = builder.into_inner()?;
let values = Values::new(self.values);
// TODO handle that error !!!
bincode::serialize_into(&mut values_wrt, &values).unwrap();
Ok((map, values_wrt))
}
}
pub struct OpBuilder<'m, 'v, T: 'v> {
inner: fst::map::OpBuilder<'m>,
values: &'v Values<T>,
}
impl<'m, 'v, T: 'v> OpBuilder<'m, 'v, T> {
pub fn new(values: &'v Values<T>) -> Self {
OpBuilder {
inner: fst::map::OpBuilder::new(),
values: values,
}
}
pub fn add<I, S>(mut self, streamable: I) -> Self
where
I: for<'a> fst::IntoStreamer<'a, Into=S, Item=(&'a [u8], u64)>,
S: 'm + for<'a> fst::Streamer<'a, Item=(&'a [u8], u64)>,
{
self.push(streamable);
self
}
pub fn push<I, S>(&mut self, streamable: I)
where
I: for<'a> fst::IntoStreamer<'a, Into=S, Item=(&'a [u8], u64)>,
S: 'm + for<'a> fst::Streamer<'a, Item=(&'a [u8], u64)>,
{
self.inner.push(streamable);
}
pub fn union(self) -> Union<'m, 'v, T> {
Union {
inner: self.inner.union(),
outs: Vec::new(),
values: self.values,
}
}
}
pub struct Union<'m, 'v, T: 'v> {
inner: fst::map::Union<'m>,
outs: Vec<IndexedValues<'v, T>>,
values: &'v Values<T>,
}
impl<'a, 'm, 'v, T: 'v + 'a> fst::Streamer<'a> for Union<'m, 'v, T> {
type Item = (&'a [u8], &'a [IndexedValues<'a, T>]);
fn next(&'a mut self) -> Option<Self::Item> {
match self.inner.next() {
Some((s, ivalues)) => {
self.outs.clear();
for ivalue in ivalues {
let index = ivalue.index;
let values = unsafe { self.values.get_unchecked(ivalue.value as usize) };
self.outs.push(IndexedValues { index, values })
}
Some((s, &self.outs))
},
None => None,
}
}
}
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct IndexedValues<'a, T: 'a> {
pub index: usize,
pub values: &'a [T],
}
pub struct OpWithStateBuilder<'m, 'v, T: 'v, U> {
inner: fst::map::OpWithStateBuilder<'m, U>,
values: &'v Values<T>,
}
impl<'m, 'v, T: 'v, U: 'static> OpWithStateBuilder<'m, 'v, T, U> {
pub fn new(values: &'v Values<T>) -> Self {
Self {
inner: fst::map::OpWithStateBuilder::new(),
values: values,
}
}
pub fn add<I, S>(mut self, streamable: I) -> Self
where
I: for<'a> fst::IntoStreamer<'a, Into=S, Item=(&'a [u8], u64, U)>,
S: 'm + for<'a> fst::Streamer<'a, Item=(&'a [u8], u64, U)>,
{
self.push(streamable);
self
}
pub fn push<I, S>(&mut self, streamable: I)
where
I: for<'a> fst::IntoStreamer<'a, Into=S, Item=(&'a [u8], u64, U)>,
S: 'm + for<'a> fst::Streamer<'a, Item=(&'a [u8], u64, U)>,
{
self.inner.push(streamable);
}
pub fn union(self) -> UnionWithState<'m, 'v, T, U> {
UnionWithState {
inner: self.inner.union(),
outs: Vec::new(),
values: self.values,
}
}
}
pub struct UnionWithState<'m, 'v, T: 'v, U> {
inner: fst::map::UnionWithState<'m, U>,
outs: Vec<IndexedValuesWithState<'v, T, U>>,
values: &'v Values<T>,
}
impl<'a, 'm, 'v, T: 'v + 'a, U: 'a> fst::Streamer<'a> for UnionWithState<'m, 'v, T, U>
where
U: Clone,
{
// TODO prefer returning (&[u8], index, value T, state) one by one
type Item = (&'a [u8], &'a [IndexedValuesWithState<'a, T, U>]);
fn next(&'a mut self) -> Option<Self::Item> {
match self.inner.next() {
Some((s, ivalues)) => {
self.outs.clear();
self.outs.reserve(ivalues.len());
for ivalue in ivalues {
let index = ivalue.index;
let values = unsafe { self.values.get_unchecked(ivalue.value as usize) };
let state = ivalue.state.clone();
self.outs.push(IndexedValuesWithState { index, values, state })
}
Some((s, &self.outs))
},
None => None,
}
}
}
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct IndexedValuesWithState<'a, T: 'a, U> {
pub index: usize,
pub values: &'a [T],
pub state: U,
}
pub struct StreamBuilder<'m, 'v, T: 'v, A> {
inner: fst::map::StreamBuilder<'m, A>,
values: &'v Values<T>,
}
impl<'m, 'v, T: 'v, A> StreamBuilder<'m, 'v, T, A> {
pub fn with_state(self) -> StreamWithStateBuilder<'m, 'v, T, A> {
StreamWithStateBuilder {
inner: self.inner.with_state(),
values: self.values,
}
}
}
impl<'m, 'v, 'a, T: 'v + 'a, A: Automaton> fst::IntoStreamer<'a> for StreamBuilder<'m, 'v, T, A> {
type Item = <Self::Into as fst::Streamer<'a>>::Item;
type Into = Stream<'m, 'v, T, A>;
fn into_stream(self) -> Self::Into {
Stream {
inner: self.inner.into_stream(),
values: self.values,
}
}
}
pub struct Stream<'m, 'v, T: 'v, A: Automaton = fst::automaton::AlwaysMatch> {
inner: fst::map::Stream<'m, A>,
values: &'v Values<T>,
}
impl<'m, 'v, 'a, T: 'v + 'a, A: Automaton> fst::Streamer<'a> for Stream<'m, 'v, T, A> {
type Item = (&'a [u8], &'a [T]);
fn next(&'a mut self) -> Option<Self::Item> {
// Here we can't just `map` because of some borrow rules
match self.inner.next() {
Some((key, i)) => {
let values = unsafe { self.values.get_unchecked(i as usize) };
Some((key, values))
},
None => None,
}
}
}
pub struct StreamWithStateBuilder<'m, 'v, T: 'v, A> {
inner: fst::map::StreamWithStateBuilder<'m, A>,
values: &'v Values<T>,
}
impl<'m, 'v, 'a, T: 'v + 'a, A: 'a> fst::IntoStreamer<'a> for StreamWithStateBuilder<'m, 'v, T, A>
where
A: Automaton,
A::State: Clone,
{
type Item = <Self::Into as fst::Streamer<'a>>::Item;
type Into = StreamWithState<'m, 'v, T, A>;
fn into_stream(self) -> Self::Into {
StreamWithState {
inner: self.inner.into_stream(),
values: self.values,
}
}
}
pub struct StreamWithState<'m, 'v, T: 'v, A: Automaton = fst::automaton::AlwaysMatch> {
inner: fst::map::StreamWithState<'m, A>,
values: &'v Values<T>,
}
impl<'m, 'v, 'a, T: 'v + 'a, A: 'a> fst::Streamer<'a> for StreamWithState<'m, 'v, T, A>
where
A: Automaton,
A::State: Clone,
{
type Item = (&'a [u8], &'a [T], A::State);
fn next(&'a mut self) -> Option<Self::Item> {
match self.inner.next() {
Some((key, i, state)) => {
let values = unsafe { self.values.get_unchecked(i as usize) };
Some((key, values, state))
},
None => None,
}
}
}

raptor/src/metadata.rs (new file, 427 lines)

@@ -0,0 +1,427 @@
use std::sync::Arc;
use std::ops::Deref;
use std::error::Error;
use std::path::Path;
use std::collections::btree_map::{Entry, BTreeMap};
use std::slice::from_raw_parts;
use std::io::{self, Write};
use std::mem;
use byteorder::{LittleEndian, ReadBytesExt, WriteBytesExt};
use fst::{self, Map, MapBuilder, Automaton};
use fst::raw::MmapReadOnly;
use DocIndex;
#[repr(C)]
struct Range {
start: u64,
end: u64,
}
#[derive(Clone)]
enum DocIndexesData {
Shared {
vec: Arc<Vec<u8>>,
offset: usize,
len: usize,
},
Mmap(MmapReadOnly),
}
impl Deref for DocIndexesData {
type Target = [u8];
fn deref(&self) -> &Self::Target {
match self {
DocIndexesData::Shared { vec, offset, len } => {
&vec[*offset..offset + len]
},
DocIndexesData::Mmap(m) => m.as_slice(),
}
}
}
#[derive(Clone)]
pub struct DocIndexes {
ranges: DocIndexesData,
indexes: DocIndexesData,
}
impl DocIndexes {
pub unsafe fn from_path<P: AsRef<Path>>(path: P) -> io::Result<Self> {
let mmap = MmapReadOnly::open_path(path)?;
let range_len = mmap.as_slice().read_u64::<LittleEndian>()?;
let range_len = range_len as usize * mem::size_of::<Range>();
let offset = mem::size_of::<u64>() as usize;
let ranges = DocIndexesData::Mmap(mmap.range(offset, range_len));
let len = mmap.len() - range_len - offset;
let offset = offset + range_len;
let indexes = DocIndexesData::Mmap(mmap.range(offset, len));
Ok(DocIndexes { ranges, indexes })
}
pub fn from_bytes(vec: Vec<u8>) -> io::Result<Self> {
let vec = Arc::new(vec);
let range_len = vec.as_slice().read_u64::<LittleEndian>()?;
let range_len = range_len as usize * mem::size_of::<Range>();
let offset = mem::size_of::<u64>() as usize;
let ranges = DocIndexesData::Shared {
vec: vec.clone(),
offset,
len: range_len
};
let len = vec.len() - range_len - offset;
let offset = offset + range_len;
let indexes = DocIndexesData::Shared { vec, offset, len };
Ok(DocIndexes { ranges, indexes })
}
pub fn get(&self, index: u64) -> Option<&[DocIndex]> {
self.ranges().get(index as usize).map(|Range { start, end }| {
let start = *start as usize;
let end = *end as usize;
&self.indexes()[start..end]
})
}
fn ranges(&self) -> &[Range] {
let slice = &self.ranges;
let ptr = slice.as_ptr() as *const Range;
let len = slice.len() / mem::size_of::<Range>();
unsafe { from_raw_parts(ptr, len) }
}
fn indexes(&self) -> &[DocIndex] {
let slice = &self.indexes;
let ptr = slice.as_ptr() as *const DocIndex;
let len = slice.len() / mem::size_of::<DocIndex>();
unsafe { from_raw_parts(ptr, len) }
}
}
pub struct Metadata {
map: Map,
indexes: DocIndexes,
}
impl Metadata {
pub unsafe fn from_paths<P, Q>(map: P, indexes: Q) -> Result<Self, Box<Error>>
where P: AsRef<Path>,
Q: AsRef<Path>,
{
let map = Map::from_path(map)?;
let indexes = DocIndexes::from_path(indexes)?;
Ok(Metadata { map, indexes })
}
pub fn from_bytes(map: Vec<u8>, indexes: Vec<u8>) -> Result<Self, Box<Error>> {
let map = Map::from_bytes(map)?;
let indexes = DocIndexes::from_bytes(indexes)?;
Ok(Metadata { map, indexes })
}
pub fn get<K: AsRef<[u8]>>(&self, key: K) -> Option<&[DocIndex]> {
self.map.get(key).and_then(|index| self.indexes.get(index))
}
pub fn as_map(&self) -> &Map {
&self.map
}
pub fn as_indexes(&self) -> &DocIndexes {
&self.indexes
}
pub fn explode(self) -> (Map, DocIndexes) {
(self.map, self.indexes)
}
}
pub struct Inner {
keys: BTreeMap<String, u64>,
indexes: Vec<Vec<DocIndex>>,
number_docs: usize,
}
impl Inner {
pub fn new() -> Self {
Inner {
keys: BTreeMap::new(),
indexes: Vec::new(),
number_docs: 0,
}
}
pub fn number_doc_indexes(&self) -> usize {
self.number_docs
}
pub fn insert(&mut self, key: String, value: DocIndex) {
match self.keys.entry(key) {
Entry::Vacant(e) => {
let index = self.indexes.len() as u64;
self.indexes.push(vec![value]);
e.insert(index);
},
Entry::Occupied(e) => {
let index = *e.get();
let vec = &mut self.indexes[index as usize];
vec.push(value);
},
}
self.number_docs += 1;
}
}
pub struct MetadataBuilder<W, X> {
inner: Inner,
map: W,
indexes: X,
}
impl<W: Write, X: Write> MetadataBuilder<W, X> {
pub fn new(map: W, indexes: X) -> Self {
Self { inner: Inner::new(), map, indexes }
}
pub fn insert(&mut self, key: String, index: DocIndex) {
self.inner.insert(key, index)
}
pub fn finish(self) -> Result<(), Box<Error>> {
self.into_inner().map(|_| ())
}
pub fn into_inner(mut self) -> Result<(W, X), Box<Error>> {
let number_docs = self.inner.number_doc_indexes();
let mut keys_builder = MapBuilder::new(self.map)?;
keys_builder.extend_iter(self.inner.keys)?;
let map = keys_builder.into_inner()?;
// write down doc_indexes into the indexes Writer
let (ranges, values) = into_sliced_ranges(self.inner.indexes, number_docs);
let len = ranges.len() as u64;
// TODO check if this is correct
self.indexes.write_u64::<LittleEndian>(len)?;
unsafe {
// write Ranges first
let slice = into_u8_slice(ranges.as_slice());
self.indexes.write_all(slice)?;
// write Values after
let slice = into_u8_slice(values.as_slice());
self.indexes.write_all(slice)?;
}
self.indexes.flush()?;
Ok((map, self.indexes))
}
}
fn into_sliced_ranges<T>(vecs: Vec<Vec<T>>, number_docs: usize) -> (Vec<Range>, Vec<T>) {
let cap = vecs.len();
let mut ranges = Vec::with_capacity(cap);
let mut values = Vec::with_capacity(number_docs);
for mut v in &vecs {
let len = v.len() as u64;
let start = ranges.last().map(|&Range { end, .. }| end).unwrap_or(0);
let range = Range { start, end: start + len };
ranges.push(range);
}
values.extend(vecs.into_iter().flatten());
(ranges, values)
}
unsafe fn into_u8_slice<T>(slice: &[T]) -> &[u8] {
let ptr = slice.as_ptr() as *const u8;
let len = slice.len() * mem::size_of::<T>();
from_raw_parts(ptr, len)
}
pub struct OpWithStateBuilder<'m, 'v, U> {
inner: fst::map::OpWithStateBuilder<'m, U>,
indexes: &'v DocIndexes,
}
impl<'m, 'v, U: 'static> OpWithStateBuilder<'m, 'v, U> {
pub fn new(indexes: &'v DocIndexes) -> Self {
Self {
inner: fst::map::OpWithStateBuilder::new(),
indexes: indexes,
}
}
pub fn add<I, S>(mut self, streamable: I) -> Self
where
I: for<'a> fst::IntoStreamer<'a, Into=S, Item=(&'a [u8], u64, U)>,
S: 'm + for<'a> fst::Streamer<'a, Item=(&'a [u8], u64, U)>,
{
self.push(streamable);
self
}
pub fn push<I, S>(&mut self, streamable: I)
where
I: for<'a> fst::IntoStreamer<'a, Into=S, Item=(&'a [u8], u64, U)>,
S: 'm + for<'a> fst::Streamer<'a, Item=(&'a [u8], u64, U)>,
{
self.inner.push(streamable);
}
pub fn union(self) -> UnionWithState<'m, 'v, U> {
UnionWithState {
inner: self.inner.union(),
outs: Vec::new(),
indexes: self.indexes,
}
}
}
#[derive(Copy, Clone, Debug, Eq, Hash, Ord, PartialEq, PartialOrd)]
pub struct IndexedValuesWithState<'a, U> {
pub index: usize,
pub values: &'a [DocIndex],
pub state: U,
}
pub struct UnionWithState<'m, 'v, U> {
inner: fst::map::UnionWithState<'m, U>,
outs: Vec<IndexedValuesWithState<'v, U>>,
indexes: &'v DocIndexes,
}
impl<'a, 'm, 'v, U: 'a> fst::Streamer<'a> for UnionWithState<'m, 'v, U>
where
U: Clone,
{
type Item = (&'a [u8], &'a [IndexedValuesWithState<'a, U>]);
fn next(&'a mut self) -> Option<Self::Item> {
match self.inner.next() {
Some((s, ivalues)) => {
self.outs.clear();
self.outs.reserve(ivalues.len());
for ivalue in ivalues {
if let Some(values) = self.indexes.get(ivalue.value) {
let index = ivalue.index;
let state = ivalue.state.clone();
self.outs.push(IndexedValuesWithState { index, values, state })
}
}
Some((s, &self.outs))
},
None => None,
}
}
}
pub struct StreamWithStateBuilder<'m, 'v, A> {
inner: fst::map::StreamWithStateBuilder<'m, A>,
indexes: &'v DocIndexes,
}
impl<'m, 'v, 'a, A: 'a> fst::IntoStreamer<'a> for StreamWithStateBuilder<'m, 'v, A>
where
A: Automaton,
A::State: Clone,
{
type Item = <Self::Into as fst::Streamer<'a>>::Item;
type Into = StreamWithState<'m, 'v, A>;
fn into_stream(self) -> Self::Into {
StreamWithState {
inner: self.inner.into_stream(),
indexes: self.indexes,
}
}
}
pub struct StreamWithState<'m, 'v, A: Automaton = fst::automaton::AlwaysMatch> {
inner: fst::map::StreamWithState<'m, A>,
indexes: &'v DocIndexes,
}
impl<'m, 'v, 'a, A: 'a> fst::Streamer<'a> for StreamWithState<'m, 'v, A>
where
A: Automaton,
A::State: Clone,
{
type Item = (&'a [u8], &'a [DocIndex], A::State);
fn next(&'a mut self) -> Option<Self::Item> {
match self.inner.next() {
Some((key, i, state)) => {
match self.indexes.get(i) {
Some(values) => Some((key, values, state)),
None => None,
}
},
None => None,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn empty_serialize_deserialize() {
let mapw = Vec::new();
let indexesw = Vec::new();
let builder = MetadataBuilder::new(mapw, indexesw);
let (map, indexes) = builder.into_inner().unwrap();
let metas = Metadata::from_bytes(map, indexes).unwrap();
assert_eq!(metas.get("chameau"), None);
}
#[test]
fn one_doc_serialize_deserialize() {
let mapw = Vec::new();
let indexesw = Vec::new();
let mut builder = MetadataBuilder::new(mapw, indexesw);
let doc = DocIndex { document: 12, attribute: 1, attribute_index: 22 };
builder.insert("chameau".into(), doc);
let (map, indexes) = builder.into_inner().unwrap();
let metas = Metadata::from_bytes(map, indexes).unwrap();
assert_eq!(metas.get("chameau"), Some(&[doc][..]));
}
#[test]
fn multiple_docs_serialize_deserialize() {
let mapw = Vec::new();
let indexesw = Vec::new();
let mut builder = MetadataBuilder::new(mapw, indexesw);
let doc1 = DocIndex { document: 12, attribute: 1, attribute_index: 22 };
let doc2 = DocIndex { document: 31, attribute: 0, attribute_index: 1 };
builder.insert("chameau".into(), doc1);
builder.insert("chameau".into(), doc2);
let (map, indexes) = builder.into_inner().unwrap();
let metas = Metadata::from_bytes(map, indexes).unwrap();
assert_eq!(metas.get("chameau"), Some(&[doc1, doc2][..]));
}
}
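The tests above round-trip through in-memory Vec<u8> writers; the same builder can also target files, which is what the memory-mapped `from_paths` constructor is for. A sketch of that flow, assuming the crate is consumed as `raptor`, that `DocIndex`'s fields are public to callers (the in-crate test constructs it directly), and using placeholder file names:

extern crate raptor;

use std::error::Error;
use std::fs::File;

use raptor::DocIndex;
use raptor::metadata::{Metadata, MetadataBuilder};

fn main() -> Result<(), Box<Error>> {
    // Index time: stream (word, DocIndex) pairs into the two on-disk parts.
    let map_wtr = File::create("metadata.map")?;
    let indexes_wtr = File::create("metadata.idx")?;
    let mut builder = MetadataBuilder::new(map_wtr, indexes_wtr);
    builder.insert("chameau".into(), DocIndex { document: 12, attribute: 1, attribute_index: 22 });
    builder.finish()?;

    // Query time: memory-map both parts; the doc indexes are read in place.
    let metadata = unsafe { Metadata::from_paths("metadata.map", "metadata.idx")? };
    assert!(metadata.get("chameau").is_some());
    Ok(())
}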


@@ -8,11 +8,10 @@ mod exact;
use std::cmp::Ordering;
use std::collections::HashMap;
use std::{mem, vec};
use DocIndexMap;
use fst;
use levenshtein::Levenshtein;
use map::{OpWithStateBuilder, UnionWithState, Values};
use {Match, DocIndex, DocumentId};
use metadata::{DocIndexes, OpWithStateBuilder, UnionWithState};
use {Match, DocumentId};
use group_by::GroupByMut;
use self::sum_of_typos::sum_of_typos;
@@ -117,7 +116,7 @@ impl IntoIterator for Pool {
pub enum RankedStream<'m, 'v> {
Fed {
inner: UnionWithState<'m, 'v, DocIndex, u32>,
inner: UnionWithState<'m, 'v, u32>,
automatons: Vec<Levenshtein>,
pool: Pool,
},
@@ -127,11 +126,11 @@ pub enum RankedStream<'m, 'v> {
}
impl<'m, 'v> RankedStream<'m, 'v> {
pub fn new(map: &'m DocIndexMap, values: &'v Values<DocIndex>, automatons: Vec<Levenshtein>, limit: usize) -> Self {
let mut op = OpWithStateBuilder::new(values);
pub fn new(map: &'m fst::Map, indexes: &'v DocIndexes, automatons: Vec<Levenshtein>, limit: usize) -> Self {
let mut op = OpWithStateBuilder::new(indexes);
for automaton in automatons.iter().map(|l| l.dfa.clone()) {
let stream = map.as_map().search(automaton).with_state();
let stream = map.search(automaton).with_state();
op.push(stream);
}


@@ -3,10 +3,10 @@ use Match;
use rank::{match_query_index, Document};
use group_by::GroupBy;
pub fn sum_of_words_position(lhs: &Document, rhs: &Document) -> Ordering {
let key = |matches: &[Match]| -> u32 {
GroupBy::new(matches, match_query_index).map(|m| m[0].attribute_index).sum()
};
fn key(matches: &[Match]) -> u32 {
GroupBy::new(matches, match_query_index).map(|m| m[0].attribute_index).sum()
}
pub fn sum_of_words_position(lhs: &Document, rhs: &Document) -> Ordering {
key(&lhs.matches).cmp(&key(&rhs.matches))
}