mirror of https://github.com/meilisearch/meilisearch.git

commit 263e825619
parent b0adc73ce6

Fix typos in comments
@@ -204,7 +204,7 @@ fn tokenizer_builder<'a>(
     tokenizer_builder
 }
 
-/// Extract words maped with their positions of a document,
+/// Extract words mapped with their positions of a document,
 /// ensuring no Language detection mistakes was made.
 #[allow(clippy::too_many_arguments)] // FIXME: consider grouping arguments in a struct
 fn lang_safe_tokens_from_document<'a>(
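For context on what `lang_safe_tokens_from_document` guards against: the idea is to tokenize once, inspect the per-language histogram, and re-run with a trusted language when detection looks unreliable. Below is a minimal, self-contained sketch of that pattern; the `Lang` enum, `detect`, and the confidence heuristic are all hypothetical stand-ins, not Meilisearch's actual implementation (which delegates to charabia).

```rust
use std::collections::HashMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
enum Lang { Eng, Unknown }

// Hypothetical detector; real code would use a proper language classifier.
fn detect(word: &str) -> Lang {
    if word.chars().all(|c| c.is_ascii_alphabetic()) { Lang::Eng } else { Lang::Unknown }
}

fn tokenize(text: &str) -> Vec<(&str, Lang)> {
    text.split_whitespace().map(|w| (w, detect(w))).collect()
}

fn lang_safe_tokens(text: &str) -> Vec<(&str, Lang)> {
    let tokens = tokenize(text);
    // Count words per detected language.
    let mut histogram: HashMap<Lang, usize> = HashMap::new();
    for (_, lang) in &tokens {
        *histogram.entry(*lang).or_insert(0) += 1;
    }
    // If too many words came back Unknown, detection is suspect:
    // fall back to the dominant detected language instead of trusting it.
    let unknown = histogram.get(&Lang::Unknown).copied().unwrap_or(0);
    if unknown * 2 > tokens.len() {
        let dominant = *histogram
            .iter()
            .filter(|(l, _)| **l != Lang::Unknown)
            .max_by_key(|(_, n)| **n)
            .map(|(l, _)| l)
            .unwrap_or(&Lang::Eng);
        return tokens.into_iter().map(|(w, _)| (w, dominant)).collect();
    }
    tokens
}

fn main() {
    // Mostly-numeric input trips the heuristic and falls back to Eng.
    let tokens = lang_safe_tokens("hello world 123 456 789");
    assert!(tokens.iter().all(|(_, lang)| *lang == Lang::Eng));
}
```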
@@ -273,7 +273,7 @@ fn lang_safe_tokens_from_document<'a>(
     Ok((&buffers.obkv_buffer, script_language_word_count))
 }
 
-/// Extract words maped with their positions of a document.
+/// Extract words mapped with their positions of a document.
 fn tokens_from_document<'a>(
     obkv: &KvReader<FieldId>,
     searchable_fields: &Option<HashSet<FieldId>>,
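The doc comment above describes mapping words to positions. A minimal sketch of that pairing, using plain whitespace splitting rather than a real tokenizer; positions fit in a `u16`, matching the `KvWriterU16` used later in this function:

```rust
// Pair each token with its index in the token stream.
fn words_with_positions(field: &str) -> Vec<(u16, &str)> {
    field
        .split_whitespace()
        .enumerate()
        .map(|(position, word)| (position as u16, word))
        .collect()
}

fn main() {
    let pairs = words_with_positions("the quick brown fox");
    assert_eq!(pairs, vec![(0, "the"), (1, "quick"), (2, "brown"), (3, "fox")]);
}
```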
@@ -294,11 +294,11 @@ fn tokens_from_document<'a>(
             let value =
                 serde_json::from_slice(field_bytes).map_err(InternalError::SerdeJson)?;
 
-            // prepare writting destination.
+            // prepare writing destination.
             buffers.obkv_positions_buffer.clear();
             let mut writer = KvWriterU16::new(&mut buffers.obkv_positions_buffer);
 
-            // convert json into an unique string.
+            // convert json into a unique string.
             buffers.field_buffer.clear();
             if let Some(field) = json_to_string(&value, &mut buffers.field_buffer) {
                 // create an iterator of token with their positions.
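The `json_to_string` call above flattens an arbitrary JSON value into one string so the tokenizer sees a single field. Here is a hedged sketch of that kind of helper, written against `serde_json`; it illustrates the idea only, not Meilisearch's actual function (which, as the `if let` shows, writes into a reusable buffer and returns an `Option`):

```rust
use serde_json::Value;

// Recursively append every scalar (and object key) to one flat buffer,
// separated by spaces, so the whole value tokenizes as a single field.
fn json_to_string(value: &Value, buffer: &mut String) {
    match value {
        Value::Null => {}
        Value::Bool(b) => buffer.push_str(if *b { "true" } else { "false" }),
        Value::Number(n) => buffer.push_str(&n.to_string()),
        Value::String(s) => buffer.push_str(s),
        Value::Array(items) => {
            for item in items {
                json_to_string(item, buffer);
                buffer.push(' ');
            }
        }
        Value::Object(map) => {
            for (key, item) in map {
                buffer.push_str(key);
                buffer.push(' ');
                json_to_string(item, buffer);
                buffer.push(' ');
            }
        }
    }
}

fn main() {
    let value: Value = serde_json::json!({"title": "Hello", "tags": ["a", "b"]});
    let mut buffer = String::new();
    json_to_string(&value, &mut buffer);
    assert!(buffer.contains("Hello") && buffer.contains("a b"));
}
```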
@@ -75,7 +75,7 @@ pub fn extract_fid_docid_facet_values<R: io::Read + io::Seek>(
     let mut facet_is_null_docids = BTreeMap::<FieldId, (RoaringBitmap, RoaringBitmap)>::new();
     let mut facet_is_empty_docids = BTreeMap::<FieldId, (RoaringBitmap, RoaringBitmap)>::new();
 
-    // We create two buffer for mutable ref issues with closures.
+    // We create two buffers for mutable ref issues with closures.
     let mut numbers_key_buffer = Vec::new();
     let mut strings_key_buffer = Vec::new();
 
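The corrected comment refers to a common Rust pattern: keeping two separate reusable buffers so that two closures can each take a mutable borrow without conflicting over a single `&mut Vec`. A small illustrative sketch of that pattern (the field/value data is invented):

```rust
fn main() {
    // One buffer per closure: a single shared buffer would make the two
    // closures below conflicting mutable borrows.
    let mut numbers_key_buffer: Vec<u8> = Vec::new();
    let mut strings_key_buffer: Vec<u8> = Vec::new();

    let values = [("price", "42"), ("tag", "blue")];
    for (field, value) in values {
        // Each closure mutably captures its own buffer, clearing it so the
        // allocation is reused across iterations instead of reallocated.
        let mut write_number = |n: &str| {
            numbers_key_buffer.clear();
            numbers_key_buffer.extend_from_slice(field.as_bytes());
            numbers_key_buffer.extend_from_slice(n.as_bytes());
        };
        let mut write_string = |s: &str| {
            strings_key_buffer.clear();
            strings_key_buffer.extend_from_slice(field.as_bytes());
            strings_key_buffer.extend_from_slice(s.as_bytes());
        };
        if value.parse::<f64>().is_ok() { write_number(value) } else { write_string(value) }
    }

    println!("{} / {}", numbers_key_buffer.len(), strings_key_buffer.len());
}
```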
@@ -333,7 +333,7 @@ where
                 key_buffer.extend_from_slice(&value_bytes);
                 key_buffer.extend_from_slice(&number.to_be_bytes());
 
-                // We insert only the Del part of the Obkv to inform
+                // We insert only the Add part of the Obkv to inform
                 // that we only want to remove all those numbers.
                 let mut obkv = KvWriterDelAdd::memory();
                 obkv.insert(DelAdd::Addition, ().as_bytes())?;
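The comment being fixed describes the del/add envelope: each stored value can carry a "deletion" side and an "addition" side, and writing only one side tells the merge step that the other is absent. A self-contained sketch of that idea, using a plain `BTreeMap` in place of the real `obkv`/`KvWriterDelAdd` writer:

```rust
use std::collections::BTreeMap;

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
enum DelAdd {
    Deletion,
    Addition,
}

fn main() {
    // Stand-in for KvWriterDelAdd::memory(): one slot per DelAdd side.
    let mut obkv: BTreeMap<DelAdd, Vec<u8>> = BTreeMap::new();

    // Write only the Addition side, as in the patched comment: the merge
    // step sees no Deletion entry and knows that side was never provided.
    obkv.insert(DelAdd::Addition, Vec::new());

    assert!(obkv.get(&DelAdd::Deletion).is_none());
    assert!(obkv.get(&DelAdd::Addition).is_some());
}
```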
@@ -449,7 +449,6 @@ where
                     otherwise => otherwise,
                 };
 
-                // FIXME: return newly added as well as newly deleted documents
                 let (docids, is_merged_database) =
                     write_typed_chunk_into_index(typed_chunk, self.index, self.wtxn, index_is_empty)?;
                 if !docids.is_empty() {
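For the `(docids, is_merged_database)` shape the removed FIXME refers to: a hedged sketch of a writer-style function that reports the document ids it touched as a `RoaringBitmap` plus a merge flag. The body is invented; only the return shape mirrors the call above:

```rust
use roaring::RoaringBitmap;

// Hypothetical writer: records which documents it affected.
fn write_chunk(ids: &[u32]) -> (RoaringBitmap, bool) {
    let mut docids = RoaringBitmap::new();
    for &id in ids {
        docids.insert(id);
    }
    let is_merged_database = false; // placeholder flag for the sketch
    (docids, is_merged_database)
}

fn main() {
    let (docids, _is_merged) = write_chunk(&[1, 2, 3]);
    // Mirrors the `if !docids.is_empty()` guard in the hunk above.
    if !docids.is_empty() {
        println!("touched {} documents", docids.len());
    }
}
```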
@@ -409,7 +409,7 @@ pub(crate) fn write_typed_chunk_into_index(
                 dims.into_iter().max_by_key(|(_, count)| *count).map(|(len, _)| len)
             };
 
-            // Ensure that the vector lenghts are correct and
+            // Ensure that the vector lengths are correct and
             // prepare the vectors before inserting them in the HNSW.
             let mut points = Vec::new();
             let mut docids = Vec::new();
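The fixed comment pairs with the `max_by_key` line above: infer the expected vector dimension from the most common length, then reject mismatched vectors before they reach the HNSW. A minimal sketch of that check (names illustrative, not Meilisearch's real API):

```rust
use std::collections::HashMap;

// Pick the most common dimension among the stored vectors, echoing the
// `max_by_key(|(_, count)| *count)` line in the hunk above.
fn expected_dimension(vectors: &[Vec<f32>]) -> Option<usize> {
    let mut dims: HashMap<usize, usize> = HashMap::new();
    for v in vectors {
        *dims.entry(v.len()).or_insert(0) += 1;
    }
    dims.into_iter().max_by_key(|(_, count)| *count).map(|(len, _)| len)
}

fn main() {
    let vectors = vec![vec![0.0; 3], vec![1.0; 3], vec![2.0; 4]];
    let expected = expected_dimension(&vectors).unwrap();
    // Keep only vectors whose length matches before indexing them.
    let (points, rejected): (Vec<_>, Vec<_>) =
        vectors.into_iter().partition(|v| v.len() == expected);
    assert_eq!(points.len(), 2);
    assert_eq!(rejected.len(), 1);
}
```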