Fix cargo clippy errors

Don't apply clippy to tests for now

Fix clippy warnings of filter-parser package


Update .github/workflows/rust.yml

Co-authored-by: Clémentine Urquizar - curqui <clementine@meilisearch.com>

Allow clippy lint too_many_arguments

Allow clippy lint needless_collect

Allow clippy lints too_many_arguments and type_complexity
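
For reference, a minimal sketch of these item-level allows (the function and field names are illustrative, not from this changeset): silencing each lint on the offending item rather than crate-wide keeps new violations elsewhere visible.

#[allow(clippy::too_many_arguments)]
fn score(a: u32, b: u32, c: u32, d: u32, e: u32, f: u32, g: u32, h: u32) -> u32 {
    // Eight parameters exceeds clippy's default threshold of seven.
    a + b + c + d + e + f + g + h
}

struct Lists {
    // Deeply nested generic types trip clippy::type_complexity.
    #[allow(clippy::type_complexity)]
    inner: Vec<Box<dyn Iterator<Item = Result<(String, u32), std::io::Error>>>>,
}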

Fix clippy comparison_chain warnings
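
A minimal sketch of what clippy::comparison_chain flags (names and values are illustrative): chained `if a > b { .. } else if a < b { .. }` comparisons over the same operands, for which clippy suggests matching on `std::cmp::Ordering` instead; the corresponding change in this commit appears in one of the hunks below.

use std::cmp::Ordering;

fn sign(a: i32, b: i32) -> i32 {
    // One `cmp` call replaces the chained comparisons clippy warns about.
    match a.cmp(&b) {
        Ordering::Greater => 1,
        Ordering::Less => -1,
        Ordering::Equal => 0,
    }
}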

Fix clippy vec_init_then_push warnings
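
A minimal sketch of the vec_init_then_push pattern (values are illustrative): clippy flags `Vec::new()` immediately followed by `push` and suggests building the vector in one step with the `vec![]` macro, as the matcher hunk at the end of this diff does.

fn main() {
    // Before: let mut potential = Vec::new(); potential.push((0, 0, 3));
    let potential = vec![(0, 0, 3)];
    println!("{:?}", potential);
}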

Allow clippy lint should_implement_trait

Allow clippy lint drop_non_drop

Fix lifetime clippy warnings in filter-parser

Execute cargo fmt

Fix remaining clippy warnings

Fix remaining clippy warnings again and allow lints at each offending place
unvalley
2022-10-14 23:44:10 +09:00
parent 811f156031
commit c7322f704c
19 changed files with 40 additions and 37 deletions

View File

@@ -242,6 +242,7 @@ fn iterative_facet_number_ordered_iter<'t>(
// The itertools GroupBy iterator doesn't provide an owned version, we are therefore
// required to collect the result into an owned collection (a Vec).
// https://github.com/rust-itertools/itertools/issues/499
#[allow(clippy::needless_collect)]
let vec: Vec<_> = iter
.group_by(|(_, v)| *v)
.into_iter()
@@ -284,6 +285,7 @@ fn iterative_facet_string_ordered_iter<'t>(
// The itertools GroupBy iterator doesn't provide an owned version, we are therefore
// required to collect the result into an owned collection (a Vec).
// https://github.com/rust-itertools/itertools/issues/499
#[allow(clippy::needless_collect)]
let vec: Vec<_> = iter
.group_by(|(_, v)| *v)
.into_iter()
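
The collect kept in the two hunks above exists because itertools' GroupBy borrows from the adapter it iterates; a self-contained sketch of the same pattern (simplified data, illustrative names) shows why clippy's needless_collect heuristic flags it anyway.

use itertools::Itertools;

fn owned_groups(pairs: Vec<(u32, u32)>) -> impl Iterator<Item = (u32, Vec<u32>)> {
    // GroupBy borrows from itself while iterated, so the groups must be
    // collected into an owned Vec before they can outlive the adapter.
    // See https://github.com/rust-itertools/itertools/issues/499
    #[allow(clippy::needless_collect)]
    let vec: Vec<_> = pairs
        .into_iter()
        .group_by(|&(_, v)| v)
        .into_iter()
        .map(|(value, group)| (value, group.map(|(id, _)| id).collect()))
        .collect();
    vec.into_iter()
}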

View File

@@ -179,6 +179,7 @@ impl<'t> Criterion for Attribute<'t> {
/// QueryPositionIterator is an Iterator over positions of a Query,
/// It contains iterators over words positions.
struct QueryPositionIterator<'t> {
#[allow(clippy::type_complexity)]
inner:
Vec<Peekable<Box<dyn Iterator<Item = heed::Result<((&'t str, u32), RoaringBitmap)>> + 't>>>,
}

View File

@@ -96,6 +96,7 @@ pub trait Context<'c> {
&self,
docid: DocumentId,
) -> heed::Result<HashMap<String, RoaringBitmap>>;
#[allow(clippy::type_complexity)]
fn word_position_iterator(
&self,
word: &str,
@@ -610,11 +611,7 @@ fn query_pair_proximity_docids(
}
(QueryKind::Exact { word: left, .. }, QueryKind::Tolerant { typo, word: right }) => {
let r_words = word_derivations(right, prefix, *typo, ctx.words_fst(), wdcache)?;
all_word_pair_overall_proximity_docids(ctx, &[(left, 0)], r_words, proximity)
}
(
QueryKind::Tolerant { typo: l_typo, word: left },

View File

@@ -123,6 +123,7 @@ impl<'a> FacetDistinctIter<'a> {
}
}
#[allow(clippy::drop_non_drop)]
fn facet_values_prefix_key(distinct: FieldId, id: DocumentId) -> [u8; FID_SIZE + DOCID_SIZE] {
concat_arrays!(distinct.to_be_bytes(), id.to_be_bytes())
}
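
A minimal illustration of what clippy::drop_non_drop warns about (the type here is illustrative): calling `drop` on a value whose type has no Drop impl is a no-op. In `facet_values_prefix_key` the warning is presumably triggered inside the `concat_arrays!` macro expansion, hence the allow on the function itself.

struct Pair(u8, u8);

fn main() {
    let p = Pair(1, 2);
    // `Pair` has no Drop impl, so this drop does nothing; clippy would
    // warn here without the allow.
    #[allow(clippy::drop_non_drop)]
    drop(p);
}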

View File

@@ -100,10 +100,10 @@ impl<'a> Filter<'a> {
}
}
if ors.len() > 1 {
ands.push(FilterCondition::Or(ors));
} else if ors.len() == 1 {
ands.push(ors.pop().unwrap());
match ors.len() {
1 => ands.push(ors.pop().unwrap()),
n if n > 1 => ands.push(FilterCondition::Or(ors)),
_ => (),
}
}
Either::Right(rule) => {
@@ -128,6 +128,7 @@ impl<'a> Filter<'a> {
Ok(Some(Self { condition: and }))
}
#[allow(clippy::should_implement_trait)]
pub fn from_str(expression: &'a str) -> Result<Option<Self>> {
let condition = match FilterCondition::parse(expression) {
Ok(Some(fc)) => Ok(fc),
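
One reason the standard trait doesn't fit here, in a simplified sketch (the type is illustrative): `std::str::FromStr` has no lifetime tying the input `&str` to `Self`, so a parser that borrows from its input, like `Filter<'a>`, cannot implement the trait and keeps an inherent `from_str` instead.

struct Borrowed<'a> {
    expression: &'a str,
}

impl<'a> Borrowed<'a> {
    // FromStr::from_str(s: &str) -> Result<Self, _> cannot return a value
    // borrowing from `s`, so the inherent method keeps the familiar name.
    #[allow(clippy::should_implement_trait)]
    pub fn from_str(expression: &'a str) -> Option<Self> {
        (!expression.is_empty()).then(|| Self { expression })
    }
}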

View File

@@ -125,10 +125,7 @@ impl<'t, A: AsRef<[u8]>> Matcher<'t, '_, A> {
words_positions: &mut impl Iterator<Item = (usize, usize, &'a Token<'a>)>,
matches: &mut Vec<Match>,
) -> bool {
let mut potential_matches = Vec::new();
// Add first match to potential matches.
potential_matches.push((token_position, word_position, partial.char_len()));
let mut potential_matches = vec![(token_position, word_position, partial.char_len())];
for (token_position, word_position, word) in words_positions {
partial = match partial.match_token(word) {