From 263e82561973020c112cd7d74a76c479d76de1c8 Mon Sep 17 00:00:00 2001
From: ManyTheFish
Date: Mon, 20 Nov 2023 10:06:29 +0100
Subject: [PATCH] Fix typos in comments

---
 .../extract/extract_docid_word_positions.rs     | 8 ++++----
 .../extract/extract_fid_docid_facet_values.rs   | 4 ++--
 milli/src/update/index_documents/mod.rs         | 1 -
 milli/src/update/index_documents/typed_chunk.rs | 2 +-
 4 files changed, 7 insertions(+), 8 deletions(-)

diff --git a/milli/src/update/index_documents/extract/extract_docid_word_positions.rs b/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
index 0dcd6a42a..303b64271 100644
--- a/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
+++ b/milli/src/update/index_documents/extract/extract_docid_word_positions.rs
@@ -204,7 +204,7 @@ fn tokenizer_builder<'a>(
     tokenizer_builder
 }
 
-/// Extract words maped with their positions of a document,
+/// Extract words mapped with their positions of a document,
 /// ensuring no Language detection mistakes was made.
 #[allow(clippy::too_many_arguments)] // FIXME: consider grouping arguments in a struct
 fn lang_safe_tokens_from_document<'a>(
@@ -273,7 +273,7 @@ fn lang_safe_tokens_from_document<'a>(
     Ok((&buffers.obkv_buffer, script_language_word_count))
 }
 
-/// Extract words maped with their positions of a document.
+/// Extract words mapped with their positions of a document.
 fn tokens_from_document<'a>(
     obkv: &KvReader,
     searchable_fields: &Option>,
@@ -294,11 +294,11 @@ fn tokens_from_document<'a>(
             let value =
                 serde_json::from_slice(field_bytes).map_err(InternalError::SerdeJson)?;
 
-            // prepare writting destination.
+            // prepare writing destination.
             buffers.obkv_positions_buffer.clear();
             let mut writer = KvWriterU16::new(&mut buffers.obkv_positions_buffer);
 
-            // convert json into an unique string.
+            // convert json into a unique string.
             buffers.field_buffer.clear();
             if let Some(field) = json_to_string(&value, &mut buffers.field_buffer) {
                 // create an iterator of token with their positions.
diff --git a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs
index 2dce90cfc..3fcec3e79 100644
--- a/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs
+++ b/milli/src/update/index_documents/extract/extract_fid_docid_facet_values.rs
@@ -75,7 +75,7 @@ pub fn extract_fid_docid_facet_values(
     let mut facet_is_null_docids = BTreeMap::::new();
     let mut facet_is_empty_docids = BTreeMap::::new();
 
-    // We create two buffer for mutable ref issues with closures.
+    // We create two buffers for mutable ref issues with closures.
     let mut numbers_key_buffer = Vec::new();
     let mut strings_key_buffer = Vec::new();
 
@@ -333,7 +333,7 @@ where
         key_buffer.extend_from_slice(&value_bytes);
         key_buffer.extend_from_slice(&number.to_be_bytes());
 
-        // We insert only the Del part of the Obkv to inform
+        // We insert only the Add part of the Obkv to inform
         // that we only want to remove all those numbers.
         let mut obkv = KvWriterDelAdd::memory();
         obkv.insert(DelAdd::Addition, ().as_bytes())?;
diff --git a/milli/src/update/index_documents/mod.rs b/milli/src/update/index_documents/mod.rs
index 2289666ed..113114681 100644
--- a/milli/src/update/index_documents/mod.rs
+++ b/milli/src/update/index_documents/mod.rs
@@ -449,7 +449,6 @@ where
                 otherwise => otherwise,
             };
 
-            // FIXME: return newly added as well as newly deleted documents
             let (docids, is_merged_database) =
                 write_typed_chunk_into_index(typed_chunk, self.index, self.wtxn, index_is_empty)?;
             if !docids.is_empty() {
diff --git a/milli/src/update/index_documents/typed_chunk.rs b/milli/src/update/index_documents/typed_chunk.rs
index 90f9b7739..dda2ebc1c 100644
--- a/milli/src/update/index_documents/typed_chunk.rs
+++ b/milli/src/update/index_documents/typed_chunk.rs
@@ -409,7 +409,7 @@ pub(crate) fn write_typed_chunk_into_index(
                 dims.into_iter().max_by_key(|(_, count)| *count).map(|(len, _)| len)
             };
 
-            // Ensure that the vector lenghts are correct and
+            // Ensure that the vector lengths are correct and
             // prepare the vectors before inserting them in the HNSW.
            let mut points = Vec::new();
            let mut docids = Vec::new();