This repository has been archived by the owner on Apr 4, 2023. It is now read-only.

Commit

Merge remote-tracking branch 'origin/main' into facet-levels-refactor
loiclec committed Oct 26, 2022
2 parents 2fa85a2 + 365f44c commit 54c0cf9
Showing 35 changed files with 132 additions and 149 deletions.
2 changes: 1 addition & 1 deletion benchmarks/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "benchmarks"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2018"
 publish = false

2 changes: 1 addition & 1 deletion cli/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "cli"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2018"
 description = "A CLI to interact with a milli index"
 publish = false

2 changes: 1 addition & 1 deletion filter-parser/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "filter-parser"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2021"
 description = "The parser for the Meilisearch filter syntax"
 publish = false

2 changes: 1 addition & 1 deletion flatten-serde-json/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "flatten-serde-json"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2021"
 description = "Flatten serde-json objects like elastic search"
 readme = "README.md"

2 changes: 1 addition & 1 deletion json-depth-checker/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "json-depth-checker"
-version = "0.34.0"
+version = "0.35.0"
 edition = "2021"
 description = "A library that indicates if a JSON must be flattened"
 publish = false

2 changes: 1 addition & 1 deletion milli/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "milli"
-version = "0.34.0"
+version = "0.35.0"
 authors = ["Kerollmops <[email protected]>"]
 edition = "2018"

2 changes: 1 addition & 1 deletion milli/src/asc_desc.rs
@@ -70,7 +70,7 @@ impl FromStr for Member {
     type Err = AscDescError;

     fn from_str(text: &str) -> Result<Member, Self::Err> {
-        match text.strip_prefix("_geoPoint(").and_then(|text| text.strip_suffix(")")) {
+        match text.strip_prefix("_geoPoint(").and_then(|text| text.strip_suffix(')')) {
             Some(point) => {
                 let (lat, lng) = point
                     .split_once(',')

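The hunk above swaps the one-character `&str` pattern `")"` for the char `')'`, the form clippy's `single_char_pattern` lint prefers. A minimal standalone sketch of the two forms (hypothetical input string, not taken from this repository):

fn main() {
    let text = "_geoPoint(48.87,2.35)";
    // Both patterns compile and return the same value; clippy simply prefers the char form.
    let with_str = text.strip_prefix("_geoPoint(").and_then(|t| t.strip_suffix(")"));
    let with_char = text.strip_prefix("_geoPoint(").and_then(|t| t.strip_suffix(')'));
    assert_eq!(with_str, with_char);
    assert_eq!(with_char, Some("48.87,2.35"));
}
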
2 changes: 1 addition & 1 deletion milli/src/documents/builder.rs
@@ -60,7 +60,7 @@ impl<W: Write> DocumentsBatchBuilder<W> {
     /// Appends a new JSON object into the batch and updates the `DocumentsBatchIndex` accordingly.
     pub fn append_json_object(&mut self, object: &Object) -> io::Result<()> {
         // Make sure that we insert the fields ids in order as the obkv writer has this requirement.
-        let mut fields_ids: Vec<_> = object.keys().map(|k| self.fields_index.insert(&k)).collect();
+        let mut fields_ids: Vec<_> = object.keys().map(|k| self.fields_index.insert(k)).collect();
         fields_ids.sort_unstable();

         self.obkv_buffer.clear();

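The builder.rs change drops the extra `&` in `insert(&k)`: `k` is already a reference borrowed from the map's keys, so re-borrowing it only trips clippy's `needless_borrow` lint. A reduced sketch with a hypothetical index type (not the real `fields_index`):

use std::collections::BTreeMap;

#[derive(Default)]
struct FieldsIndex {
    ids: BTreeMap<String, u16>,
}

impl FieldsIndex {
    // Takes &str; a `&&String` argument also coerces, which is why `insert(&k)` compiled before.
    fn insert(&mut self, name: &str) -> u16 {
        let next = self.ids.len() as u16;
        *self.ids.entry(name.to_string()).or_insert(next)
    }
}

fn main() {
    let object = BTreeMap::from([("id".to_string(), 1), ("title".to_string(), 2)]);
    let mut index = FieldsIndex::default();
    // `k` is already a `&String` and derefs to `&str`, so no extra borrow is needed.
    let mut fields_ids: Vec<u16> = object.keys().map(|k| index.insert(k)).collect();
    fields_ids.sort_unstable();
    assert_eq!(fields_ids, vec![0, 1]);
}
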
6 changes: 3 additions & 3 deletions milli/src/documents/mod.rs
@@ -25,9 +25,9 @@ const DOCUMENTS_BATCH_INDEX_KEY: [u8; 8] = u64::MAX.to_be_bytes();
 pub fn obkv_to_object(obkv: &KvReader<FieldId>, index: &DocumentsBatchIndex) -> Result<Object> {
     obkv.iter()
         .map(|(field_id, value)| {
-            let field_name = index.name(field_id).ok_or_else(|| {
-                FieldIdMapMissingEntry::FieldId { field_id, process: "obkv_to_object" }
-            })?;
+            let field_name = index
+                .name(field_id)
+                .ok_or(FieldIdMapMissingEntry::FieldId { field_id, process: "obkv_to_object" })?;
             let value = serde_json::from_slice(value).map_err(InternalError::SerdeJson)?;
             Ok((field_name.to_string(), value))
         })

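The rewrite above replaces `ok_or_else` with `ok_or`: the error here is a plain struct literal, cheap to build eagerly, which is the situation clippy's `unnecessary_lazy_evaluations` guidance targets. A small sketch of the trade-off with hypothetical values:

fn main() {
    let name: Option<&str> = None;

    // `ok_or` builds its error eagerly, which is fine when the error is a cheap value.
    let eager: Result<&str, &str> = name.ok_or("missing field");

    // `ok_or_else` defers construction to a closure; worth it only when building the error costs something.
    let lazy: Result<&str, &str> = name.ok_or_else(|| "missing field");

    assert_eq!(eager, lazy);
    assert_eq!(eager, Err("missing field"));
}
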
2 changes: 1 addition & 1 deletion milli/src/fields_ids_map.rs
@@ -65,7 +65,7 @@ impl FieldsIdsMap {
     }

     /// Iterate over the ids in the order of the ids.
-    pub fn ids<'a>(&'a self) -> impl Iterator<Item = FieldId> + 'a {
+    pub fn ids(&'_ self) -> impl Iterator<Item = FieldId> + '_ {
         self.ids_names.keys().copied()
     }

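The signature change above drops the named `'a` in favour of the anonymous `'_` lifetime, which is what clippy's `needless_lifetimes` lint suggests when the name carries no extra information. A reduced sketch with a hypothetical struct (not the real `FieldsIdsMap`):

use std::collections::BTreeMap;

struct FieldsIds {
    ids_names: BTreeMap<u16, String>,
}

impl FieldsIds {
    // Explicitly named lifetime: works, but the name adds nothing.
    fn ids_named<'a>(&'a self) -> impl Iterator<Item = u16> + 'a {
        self.ids_names.keys().copied()
    }

    // Anonymous lifetime: same meaning, less noise.
    fn ids(&'_ self) -> impl Iterator<Item = u16> + '_ {
        self.ids_names.keys().copied()
    }
}

fn main() {
    let map = FieldsIds { ids_names: BTreeMap::from([(0, "id".into()), (1, "title".into())]) };
    assert_eq!(map.ids_named().collect::<Vec<_>>(), map.ids().collect::<Vec<_>>());
}
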
Empty file.
Empty file.
8 changes: 4 additions & 4 deletions milli/src/index.rs
@@ -202,7 +202,7 @@ impl Index {

     pub fn new<P: AsRef<Path>>(options: heed::EnvOpenOptions, path: P) -> Result<Index> {
         let now = OffsetDateTime::now_utc();
-        Self::new_with_creation_dates(options, path, now.clone(), now)
+        Self::new_with_creation_dates(options, path, now, now)
     }

     fn set_creation_dates(
@@ -322,7 +322,7 @@ impl Index {
     /// Writes the documents primary key, this is the field name that is used to store the id.
     pub(crate) fn put_primary_key(&self, wtxn: &mut RwTxn, primary_key: &str) -> heed::Result<()> {
         self.set_updated_at(wtxn, &OffsetDateTime::now_utc())?;
-        self.main.put::<_, Str, Str>(wtxn, main_key::PRIMARY_KEY_KEY, &primary_key)
+        self.main.put::<_, Str, Str>(wtxn, main_key::PRIMARY_KEY_KEY, primary_key)
     }

     /// Deletes the primary key of the documents, this can be done to reset indexes settings.
@@ -985,7 +985,7 @@ impl Index {
             let kv = self
                 .documents
                 .get(rtxn, &BEU32::new(id))?
-                .ok_or_else(|| UserError::UnknownInternalDocumentId { document_id: id })?;
+                .ok_or(UserError::UnknownInternalDocumentId { document_id: id })?;
             documents.push((id, kv));
         }

@@ -1044,7 +1044,7 @@ impl Index {
         wtxn: &mut RwTxn,
         time: &OffsetDateTime,
     ) -> heed::Result<()> {
-        self.main.put::<_, Str, SerdeJson<OffsetDateTime>>(wtxn, main_key::UPDATED_AT_KEY, &time)
+        self.main.put::<_, Str, SerdeJson<OffsetDateTime>>(wtxn, main_key::UPDATED_AT_KEY, time)
     }

     pub fn authorize_typos(&self, txn: &RoTxn) -> heed::Result<bool> {

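The four index.rs hunks are two recurring clippy cleanups: `OffsetDateTime` is `Copy`, so `now.clone()` is redundant (`clone_on_copy`), and passing `&primary_key` / `&time` where a `&str` / `&OffsetDateTime` is already in hand trips `needless_borrow`. A tiny sketch of the `Copy` point with a stand-in type (not the real `time::OffsetDateTime`):

#[derive(Clone, Copy, Debug, PartialEq)]
struct Timestamp(i64); // stand-in for a Copy datetime type

fn new_with_creation_dates(created_at: Timestamp, updated_at: Timestamp) -> (Timestamp, Timestamp) {
    (created_at, updated_at)
}

fn main() {
    let now = Timestamp(1_666_742_400);
    // Because Timestamp is Copy, `now` can be passed twice by value; no `.clone()` needed.
    let dates = new_with_creation_dates(now, now);
    assert_eq!(dates, (now, now));
}
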
3 changes: 2 additions & 1 deletion milli/src/lib.rs
@@ -1,5 +1,6 @@
 #![cfg_attr(all(test, fuzzing), feature(no_coverage))]
-
+#![allow(clippy::reversed_empty_ranges)]
+#![allow(clippy::too_many_arguments)]
 #[macro_use]
 pub mod documents;

2 changes: 1 addition & 1 deletion milli/src/search/criteria/asc_desc.rs
@@ -120,7 +120,7 @@ impl<'t> Criterion for AscDesc<'t> {
                 let mut candidates = match (&self.query_tree, candidates) {
                     (_, Some(candidates)) => candidates,
                     (Some(qt), None) => {
-                        let context = CriteriaBuilder::new(&self.rtxn, &self.index)?;
+                        let context = CriteriaBuilder::new(self.rtxn, self.index)?;
                         resolve_query_tree(&context, qt, params.wdcache)?
                     }
                     (None, None) => self.index.documents_ids(self.rtxn)?,

17 changes: 8 additions & 9 deletions milli/src/search/criteria/attribute.rs
@@ -89,7 +89,7 @@ impl<'t> Criterion for Attribute<'t> {
                     }
                 }
             } else {
-                let mut set_buckets = match self.set_buckets.as_mut() {
+                let set_buckets = match self.set_buckets.as_mut() {
                     Some(set_buckets) => set_buckets,
                     None => {
                         let new_buckets = initialize_set_buckets(
@@ -102,7 +102,7 @@ impl<'t> Criterion for Attribute<'t> {
                     }
                 };

-                match set_compute_candidates(&mut set_buckets, &allowed_candidates)? {
+                match set_compute_candidates(set_buckets, &allowed_candidates)? {
                     Some((_score, candidates)) => candidates,
                     None => {
                         return Ok(Some(CriterionResult {
@@ -199,18 +199,18 @@ impl<'t> QueryPositionIterator<'t> {
                         let iter = ctx.word_position_iterator(word, in_prefix_cache)?;
                         inner.push(iter.peekable());
                     } else {
-                        for (word, _) in word_derivations(&word, true, 0, ctx.words_fst(), wdcache)?
+                        for (word, _) in word_derivations(word, true, 0, ctx.words_fst(), wdcache)?
                         {
-                            let iter = ctx.word_position_iterator(&word, in_prefix_cache)?;
+                            let iter = ctx.word_position_iterator(word, in_prefix_cache)?;
                             inner.push(iter.peekable());
                         }
                     }
                 }
                 QueryKind::Tolerant { typo, word } => {
                     for (word, _) in
-                        word_derivations(&word, query.prefix, *typo, ctx.words_fst(), wdcache)?
+                        word_derivations(word, query.prefix, *typo, ctx.words_fst(), wdcache)?
                     {
-                        let iter = ctx.word_position_iterator(&word, in_prefix_cache)?;
+                        let iter = ctx.word_position_iterator(word, in_prefix_cache)?;
                         inner.push(iter.peekable());
                     }
                 }
@@ -476,8 +476,7 @@ fn initialize_linear_buckets(
                } else {
                    words_positions
                        .get(word)
-                        .map(|positions| positions.iter().next())
-                        .flatten()
+                        .and_then(|positions| positions.iter().next())
                }
            }
            QueryKind::Tolerant { typo, word } => {
@@ -574,7 +573,7 @@ fn flatten_query_tree(query_tree: &Operation) -> FlattenedQueryTree {
            if ops.iter().all(|op| op.query().is_some()) {
                vec![vec![ops.iter().flat_map(|op| op.query()).cloned().collect()]]
            } else {
-                ops.iter().map(recurse).flatten().collect()
+                ops.iter().flat_map(recurse).collect()
            }
        }
        Phrase(words) => {

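The last two attribute.rs hunks collapse `.map(...).flatten()` into the dedicated combinators: `Option::and_then` for the option chain and `Iterator::flat_map` for the iterator, as clippy's `map_flatten` lint suggests. A minimal sketch with made-up data:

fn main() {
    // Option: `.map(..).flatten()` on an Option<Option<T>> is just `and_then`.
    let positions: Option<Vec<u32>> = Some(vec![3, 7, 9]);
    let first_a = positions.as_ref().map(|p| p.iter().next()).flatten();
    let first_b = positions.as_ref().and_then(|p| p.iter().next());
    assert_eq!(first_a, first_b);

    // Iterator: `.map(..).flatten()` over iterators is `flat_map`.
    let groups = vec![vec![1, 2], vec![3]];
    let a: Vec<i32> = groups.iter().map(|g| g.iter().copied()).flatten().collect();
    let b: Vec<i32> = groups.iter().flat_map(|g| g.iter().copied()).collect();
    assert_eq!(a, b);
}
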
2 changes: 1 addition & 1 deletion milli/src/search/criteria/geo.rs
@@ -90,7 +90,7 @@ impl Criterion for Geo<'_> {
                 let mut candidates = match (&query_tree, candidates) {
                     (_, Some(candidates)) => candidates,
                     (Some(qt), None) => {
-                        let context = CriteriaBuilder::new(&self.rtxn, &self.index)?;
+                        let context = CriteriaBuilder::new(self.rtxn, self.index)?;
                         resolve_query_tree(&context, qt, params.wdcache)?
                     }
                     (None, None) => self.index.documents_ids(self.rtxn)?,

2 changes: 1 addition & 1 deletion milli/src/search/criteria/initial.rs
@@ -44,7 +44,7 @@ impl<D: Distinct> Criterion for Initial<'_, D> {
                 let mut candidates = resolve_query_tree(
                     self.ctx,
                     answer.query_tree.as_ref().unwrap(),
-                    &mut params.wdcache,
+                    params.wdcache,
                 )?;

                 // Apply the filters on the documents retrieved with the query tree.
