From 4626c77eac1b6fa4811b7079bfb8f768d8bdb96b Mon Sep 17 00:00:00 2001
From: Clément Renault
Date: Fri, 7 Dec 2018 15:09:55 +0100
Subject: [PATCH] fix: Change the way the BTreeMap store entries

---
 src/database/document_key.rs           |  2 +-
 src/database/update/positive/update.rs | 23 +++++++++++------------
 2 files changed, 12 insertions(+), 13 deletions(-)

diff --git a/src/database/document_key.rs b/src/database/document_key.rs
index 815953679..35180097e 100644
--- a/src/database/document_key.rs
+++ b/src/database/document_key.rs
@@ -57,7 +57,7 @@ impl fmt::Debug for DocumentKey {
     }
 }

-#[derive(Copy, Clone)]
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
 pub struct DocumentKeyAttr([u8; DOC_KEY_ATTR_LEN]);

 impl DocumentKeyAttr {
diff --git a/src/database/update/positive/update.rs b/src/database/update/positive/update.rs
index 4b57405ab..13daebe6c 100644
--- a/src/database/update/positive/update.rs
+++ b/src/database/update/positive/update.rs
@@ -26,7 +26,7 @@ pub struct PositiveUpdateBuilder<B> {
     schema: Schema,
     tokenizer_builder: B,
     builder: UnorderedPositiveBlobBuilder<Vec<u8>, Vec<u8>>,
-    new_states: BTreeMap<(DocumentId, SchemaAttr), NewState>,
+    new_states: BTreeMap<DocumentKeyAttr, NewState>,
 }

 impl<B> PositiveUpdateBuilder<B> {
@@ -55,13 +55,13 @@ impl<B> PositiveUpdateBuilder<B> {
     }

     // TODO value must be a field that can be indexed
-    pub fn update_field(&mut self, id: DocumentId, field: SchemaAttr, value: String) {
+    pub fn update_field(&mut self, id: DocumentId, attr: SchemaAttr, value: String) {
         let value = bincode::serialize(&value).unwrap();
-        self.new_states.insert((id, field), NewState::Updated { value });
+        self.new_states.insert(DocumentKeyAttr::new(id, attr), NewState::Updated { value });
     }

-    pub fn remove_field(&mut self, id: DocumentId, field: SchemaAttr) {
-        self.new_states.insert((id, field), NewState::Removed);
+    pub fn remove_field(&mut self, id: DocumentId, attr: SchemaAttr) {
+        self.new_states.insert(DocumentKeyAttr::new(id, attr), NewState::Removed);
     }
 }

@@ -101,7 +101,7 @@ struct Serializer<'a, B> {
     tokenizer_builder: &'a B,
     document_id: DocumentId,
     builder: &'a mut UnorderedPositiveBlobBuilder<Vec<u8>, Vec<u8>>,
-    new_states: &'a mut BTreeMap<(DocumentId, SchemaAttr), NewState>,
+    new_states: &'a mut BTreeMap<DocumentKeyAttr, NewState>,
 }

 macro_rules! forward_to_unserializable_type {
@@ -272,7 +272,7 @@ struct StructSerializer<'a, B> {
     tokenizer_builder: &'a B,
     document_id: DocumentId,
     builder: &'a mut UnorderedPositiveBlobBuilder<Vec<u8>, Vec<u8>>,
-    new_states: &'a mut BTreeMap<(DocumentId, SchemaAttr), NewState>,
+    new_states: &'a mut BTreeMap<DocumentKeyAttr, NewState>,
 }
 impl<'a, B> ser::SerializeStruct for StructSerializer<'a, B>
 where B: TokenizerBuilder
@@ -293,7 +293,8 @@ where B: TokenizerBuilder
         let props = self.schema.props(attr);
         if props.is_stored() {
             let value = bincode::serialize(value).unwrap();
-            self.new_states.insert((self.document_id, attr), NewState::Updated { value });
+            let key = DocumentKeyAttr::new(self.document_id, attr);
+            self.new_states.insert(key, NewState::Updated { value });
         }
         if props.is_indexed() {
             let serializer = IndexerSerializer {
@@ -498,11 +499,9 @@ impl<B> PositiveUpdateBuilder<B> {
         file_writer.merge(DATA_INDEX, &bytes)?;

         // write all the documents fields updates
-        for ((id, attr), state) in self.new_states {
-            let key = DocumentKeyAttr::new(id, attr);
-            let props = self.schema.props(attr);
+        for (key, state) in self.new_states {
             match state {
-                NewState::Updated { value } => if props.is_stored() {
+                NewState::Updated { value } => {
                     file_writer.put(key.as_ref(), &value)?
                 },
                 NewState::Removed => file_writer.delete(key.as_ref())?,
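
Note on the key change: BTreeMap requires its key type to implement Ord, which is why
DocumentKeyAttr now derives PartialEq, Eq, PartialOrd and Ord. Because the derive is on a
[u8; DOC_KEY_ATTR_LEN] newtype, entries compare by their raw key bytes, so the map can be
keyed directly by the serialized key instead of the (DocumentId, SchemaAttr) tuple. The
sketch below only illustrates that idea and is not code from this repository: KeyAttr, its
10-byte layout, the u64/u16 field sizes and the &str values are stand-ins for
DocumentKeyAttr, DOC_KEY_ATTR_LEN and NewState, and it assumes a big-endian (id, attr)
encoding so that byte-wise ordering matches ordering by (id, attr).

    use std::collections::BTreeMap;

    // Stand-in for DocumentKeyAttr: a fixed-size byte key holding a document id
    // followed by an attribute id. The derived Ord compares the inner bytes
    // lexicographically, which is what BTreeMap needs from its key type.
    #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug)]
    struct KeyAttr([u8; 10]);

    impl KeyAttr {
        // Hypothetical layout: 8 bytes of id then 2 bytes of attr, both big-endian,
        // so that comparing the bytes is the same as comparing (id, attr).
        fn new(id: u64, attr: u16) -> KeyAttr {
            let mut buf = [0u8; 10];
            buf[..8].copy_from_slice(&id.to_be_bytes());
            buf[8..].copy_from_slice(&attr.to_be_bytes());
            KeyAttr(buf)
        }
    }

    fn main() {
        let mut new_states: BTreeMap<KeyAttr, &str> = BTreeMap::new();
        new_states.insert(KeyAttr::new(2, 0), "Updated");
        new_states.insert(KeyAttr::new(1, 3), "Removed");
        new_states.insert(KeyAttr::new(1, 1), "Updated");

        // Entries come out ordered by (id, attr), just like the old tuple key,
        // but each key is already in the form the storage layer expects.
        for (key, state) in &new_states {
            println!("{:?} => {}", key, state);
        }
    }

One consequence of keying the map by DocumentKeyAttr is visible in the last hunk: the write
loop no longer rebuilds the key with DocumentKeyAttr::new or looks up schema.props per entry,
it simply forwards the prebuilt key to file_writer.put or file_writer.delete.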