bump strois version

Tamo 2023-10-10 19:25:12 +02:00
parent 98b67f217a
commit dfb84f80da
6 changed files with 15 additions and 27 deletions

Cargo.lock generated
View File

@@ -3706,8 +3706,9 @@ checksum = "4f3208ce4d8448b3f3e7d168a73f5e0c43a61e32930de3bceeccedb388b6bf06"
 [[package]]
 name = "rusty-s3"
-version = "0.4.1"
-source = "git+https://github.com/paolobarbolini/rusty-s3?rev=0b60b9d#0b60b9d10f2e3db01bb772768ae0afa0d57a7239"
+version = "0.5.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "31aa883f1b986a5249641e574ca0e11ac4fb9970b009c6fbb96fedaf4fa78db8"
 dependencies = [
  "base64 0.21.2",
  "hmac",
@@ -4008,8 +4009,9 @@ checksum = "a2eb9349b6444b326872e140eb1cf5e7c522154d69e7a0ffb0fb81c06b37543f"
 [[package]]
 name = "strois"
-version = "0.1.0"
-source = "git+http://github.com/meilisearch/strois?branch=main#f977fd50e8e082d631a1dbf98f4e75e263112981"
+version = "0.0.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "5dde87abcdf0638bf1e0e762e5f4466f68aa9862bc061093f3914641f9db26f6"
 dependencies = [
  "http",
  "log",

View File

@@ -34,7 +34,7 @@ uuid = { version = "1.3.1", features = ["serde", "v4"] }
 tokio = { version = "1.27.0", features = ["full"] }
 zookeeper = "0.8.0"
 parking_lot = "0.12.1"
-strois = { git = "http://github.com/meilisearch/strois", branch = "main" }
+strois = "0.0.4"

 [dev-dependencies]
 big_s = "1.0.2"

View File

@@ -46,7 +46,6 @@ use dump::{KindDump, TaskDump, UpdateFile};
 pub use error::Error;
 pub use features::RoFeatures;
 use file_store::FileStore;
-use strois::Bucket;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::features::{InstanceTogglableFeatures, RuntimeTogglableFeatures};
 use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
@@ -57,6 +56,7 @@ use meilisearch_types::milli::{self, CboRoaringBitmapCodec, Index, RoaringBitmap
 use meilisearch_types::tasks::{Kind, KindWithContent, Status, Task};
 use parking_lot::{MappedRwLockReadGuard, RwLock, RwLockReadGuard};
 use roaring::RoaringBitmap;
+use strois::Bucket;
 use synchronoise::SignalEvent;
 use tempfile::TempDir;
 use time::format_description::well_known::Rfc3339;
@@ -577,12 +577,7 @@
                     "{snapshot_dir}/{}",
                     meilisearch_types::VERSION_FILE_NAME
                 );
-                let mut version_file_path =
-                    File::open(&inner.version_file_path).unwrap();
-                s3.put_object_multipart(dst, &mut version_file_path, S3_PART_SIZE)
-                    .unwrap();
-                version_file_path.sync_data().unwrap();
-                drop(version_file_path);
+                s3.put_object_file(dst, &inner.version_file_path).unwrap();

                 // 2. Snapshot the index-scheduler LMDB env
                 log::info!("Snapshotting the tasks");
@@ -599,7 +594,6 @@
                 s3.put_object_multipart(
                     format!("{snapshot_dir}/tasks.mdb"),
                     &mut file,
-                    S3_PART_SIZE,
                 )
                 .unwrap();
                 temp.close().unwrap();
@@ -620,11 +614,7 @@
                         heed::CompactionOption::Enabled,
                     )
                     .unwrap();
-                    s3.put_object_multipart(
-                        format!("{dst}/{uuid}.mdb"),
-                        &mut file,
-                        S3_PART_SIZE,
-                    )
+                    s3.put_object_multipart(format!("{dst}/{uuid}.mdb"), &mut file)
                     .unwrap();
                     temp.close().unwrap();
                 }

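For context, the scheduler-side change boils down to the following. This is a minimal sketch, assuming the strois 0.0.4 `Bucket` exposes the `put_object_file` and two-argument `put_object_multipart` methods seen in the hunks above (the names and shapes come from the diff, not from the crate's documentation), and keeping the `unwrap()` style of the surrounding snapshot code:

    use std::fs::File;
    use std::path::Path;

    use strois::Bucket;

    // Upload the version file: with 0.0.4 the bucket takes a path directly, so the
    // manual File::open / sync_data / drop dance from the old code goes away.
    fn upload_version_file(s3: &Bucket, dst: String, version_file_path: &Path) {
        s3.put_object_file(dst, version_file_path).unwrap();
    }

    // Upload the compacted tasks.mdb: the explicit S3_PART_SIZE argument is gone,
    // the part size is now chosen by the library.
    fn upload_tasks_db(s3: &Bucket, snapshot_dir: &str, file: &mut File) {
        s3.put_object_multipart(format!("{snapshot_dir}/tasks.mdb"), file)
            .unwrap();
    }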
View File

@@ -80,7 +80,7 @@ reqwest = { version = "0.11.16", features = [
 ], default-features = false }
 rustls = "0.20.8"
 rustls-pemfile = "1.0.2"
-strois = { git = "http://github.com/meilisearch/strois", branch = "main" }
+strois = "0.0.4"
 segment = { version = "0.2.2", optional = true }
 serde = { version = "1.0.160", features = ["derive"] }
 serde_json = { version = "1.0.95", features = ["preserve_order"] }

View File

@@ -30,7 +30,6 @@ use extractors::payload::PayloadConfig;
 use http::header::CONTENT_TYPE;
 use index_scheduler::{IndexScheduler, IndexSchedulerOptions};
 use log::error;
-use strois::Client;
 use meilisearch_auth::AuthController;
 use meilisearch_types::milli::documents::{DocumentsBatchBuilder, DocumentsBatchReader};
 use meilisearch_types::milli::update::{IndexDocumentsConfig, IndexDocumentsMethod};
@@ -40,6 +39,7 @@ use meilisearch_types::versioning::{check_version_file, create_version_file};
 use meilisearch_types::{compression, milli, VERSION_FILE_NAME};
 pub use option::Opt;
 use option::ScheduleSnapshot;
+use strois::Client;
 use zookeeper::ZooKeeper;

 use crate::error::MeilisearchHttpError;
@@ -250,7 +250,7 @@ fn open_or_create_database_unchecked(
             .key(opt.s3_access_key.as_ref().expect("Need s3 key to work").clone())
             .secret(opt.s3_secret_key.as_ref().expect("Need s3 secret to work").clone())
             .maybe_token(opt.s3_security_token.clone())
-            .build()
+            .region(&opt.s3_region)
             .bucket(opt.s3_bucket.as_ref().expect("Need an s3 bucket to work"))
             .unwrap()
             .get_or_create()

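The only behavioral change in this file is in how the bucket is built: the old `.build()` step is replaced by an explicit `.region(...)` call before selecting the bucket. Below is a hedged sketch of the full chain; the `Client::builder(url)` entry point, the return type, and the trailing `unwrap()` after `get_or_create()` are assumptions (only the `.key`/`.secret`/`.maybe_token`/`.region`/`.bucket`/`.get_or_create` calls appear in the hunk):

    use strois::{Bucket, Client};

    // Hypothetical wrapper around the call site shown above.
    fn open_bucket(
        url: &str,
        access_key: String,
        secret_key: String,
        security_token: Option<String>,
        region: &str,
        bucket_name: &str,
    ) -> Bucket {
        Client::builder(url)          // assumed entry point, not visible in the hunk
            .key(access_key)
            .secret(secret_key)
            .maybe_token(security_token)
            .region(region)           // new in 0.0.4: replaces the old `.build()` step
            .bucket(bucket_name)
            .unwrap()
            .get_or_create()          // creates the bucket if it does not exist yet
            .unwrap()                 // assumed; the line after the hunk is not shown
    }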
View File

@@ -413,11 +413,7 @@ async fn document_addition(
     if let Some(s3) = s3 {
         update_file.seek(SeekFrom::Start(0)).unwrap();
         let mut reader = BufReader::new(&*update_file);
-        s3.put_object_multipart(
-            format!("update-files/{}", uuid),
-            &mut reader,
-            50 * 1024 * 1024,
-        )?;
+        s3.put_object_multipart(format!("update-files/{}", uuid), &mut reader)?;
     }

     // we NEED to persist the file here because we moved the `udpate_file` in another task.
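The route side gets the same simplification: the hard-coded 50 MiB part size is dropped from the call. A minimal sketch under the same assumptions as above (two-argument `put_object_multipart` taking any reader; error handling reduced to `unwrap()` since the route's actual error type is outside this hunk):

    use std::fs::File;
    use std::io::{BufReader, Seek, SeekFrom};

    use strois::Bucket;
    use uuid::Uuid;

    fn push_update_file(s3: &Bucket, update_file: &mut File, uuid: Uuid) {
        // Rewind the persisted update file before streaming it to S3.
        update_file.seek(SeekFrom::Start(0)).unwrap();
        let mut reader = BufReader::new(&*update_file);
        // No explicit part size anymore: strois 0.0.4 picks it internally
        // (assumption drawn from the removed `50 * 1024 * 1024` argument above).
        s3.put_object_multipart(format!("update-files/{}", uuid), &mut reader)
            .unwrap();
    }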