diff --git a/crates/index-scheduler/src/batch.rs b/crates/index-scheduler/src/batch.rs
index 6255bf332..3d45ce1fe 100644
--- a/crates/index-scheduler/src/batch.rs
+++ b/crates/index-scheduler/src/batch.rs
@@ -29,7 +29,7 @@ use bumpalo::Bump;
 use dump::IndexMetadata;
 use meilisearch_types::batches::BatchId;
 use meilisearch_types::heed::{RoTxn, RwTxn};
-use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader, PrimaryKey};
+use meilisearch_types::milli::documents::PrimaryKey;
 use meilisearch_types::milli::heed::CompactionOption;
 use meilisearch_types::milli::progress::Progress;
 use meilisearch_types::milli::update::new::indexer::{self, UpdateByFunction};
diff --git a/crates/index-scheduler/src/lib.rs b/crates/index-scheduler/src/lib.rs
index 8a789da57..ca959d74c 100644
--- a/crates/index-scheduler/src/lib.rs
+++ b/crates/index-scheduler/src/lib.rs
@@ -2024,12 +2024,15 @@ impl<'a> Dump<'a> {
         let content_uuid = match content_file {
             Some(content_file) if task.status == Status::Enqueued => {
-                let (uuid, mut file) = self.index_scheduler.create_update_file(false)?;
-                let mut builder = DocumentsBatchBuilder::new(&mut file);
+                let (uuid, file) = self.index_scheduler.create_update_file(false)?;
+                let mut writer = io::BufWriter::new(file);
                 for doc in content_file {
-                    builder.append_json_object(&doc?)?;
+                    let doc = doc?;
+                    serde_json::to_writer(&mut writer, &doc).map_err(|e| {
+                        Error::from_milli(milli::InternalError::SerdeJson(e).into(), None)
+                    })?;
                 }
-                builder.into_inner()?;
+                let file = writer.into_inner().map_err(|e| e.into_error())?;
                 file.persist()?;
                 Some(uuid)
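
For reference, here is a minimal standalone sketch of the write path the second hunk switches to: dumped documents are streamed straight through a `BufWriter` with `serde_json::to_writer` instead of being packed by `DocumentsBatchBuilder`. The types used here (`std::fs::File`, `serde_json::Value`, plain `io::Result` error handling, the file name in `main`) are stand-ins for Meilisearch's update-file and error types, not the project's actual API.

```rust
use std::fs::File;
use std::io::{self, BufWriter};

use serde_json::{json, Value};

// Streams an iterator of JSON documents into a buffered file and returns the
// underlying File, roughly mirroring the new code in Dump above.
fn write_documents(
    docs: impl IntoIterator<Item = serde_json::Result<Value>>,
    file: File,
) -> io::Result<File> {
    let mut writer = BufWriter::new(file);
    for doc in docs {
        // `?` converts a serde_json::Error into io::Error via its From impl;
        // the real code wraps it in milli::InternalError::SerdeJson instead.
        let doc = doc?;
        // Objects are written back-to-back, as in the hunk above.
        serde_json::to_writer(&mut writer, &doc)?;
    }
    // into_inner flushes the buffer and hands the File back to the caller,
    // which in the real code then calls file.persist().
    writer.into_inner().map_err(|e| e.into_error())
}

fn main() -> io::Result<()> {
    // Hypothetical usage with a scratch file and two inline documents.
    let file = File::create("update-file.json")?;
    write_documents(
        vec![
            Ok(json!({ "id": 1, "title": "hello" })),
            Ok(json!({ "id": 2, "title": "world" })),
        ],
        file,
    )?;
    Ok(())
}
```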