Mirror of https://github.com/meilisearch/meilisearch.git, synced 2024-11-26 20:15:07 +08:00
Merge #1319

1319: Stable into master r=MarinPostma a=MarinPostma

Co-authored-by: mpostma <postma.marin@protonmail.com>
Co-authored-by: bors[bot] <26634292+bors[bot]@users.noreply.github.com>
Co-authored-by: Clémentine Urquizar <clementine@meilisearch.com>
Co-authored-by: Marin Postma <postma.marin@protonmail.com>

Commit: a294462a06

.github/workflows/coverage.yml (vendored), 2 lines changed
@@ -7,7 +7,7 @@ name: Execute code coverage
 
 jobs:
   nightly-coverage:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v2
       - uses: actions-rs/toolchain@v1

.github/workflows/publish-binaries.yml (vendored), 4 lines changed
@@ -10,9 +10,9 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-latest, macos-latest, windows-latest]
+        os: [ubuntu-18.04, macos-latest, windows-latest]
         include:
-          - os: ubuntu-latest
+          - os: ubuntu-18.04
             artifact_name: meilisearch
             asset_name: meilisearch-linux-amd64
           - os: macos-latest

.github/workflows/publish-deb-brew-pkg.yml (vendored), 4 lines changed
@@ -7,7 +7,7 @@ on:
 jobs:
   debian:
     name: Publish debian packagge
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: hecrj/setup-rust-action@master
         with:
@@ -29,7 +29,7 @@ jobs:
 
   homebrew:
     name: Bump Homebrew formula
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - name: Create PR to Homebrew
         uses: mislav/bump-homebrew-formula-action@v1

.github/workflows/publish-docker-latest.yml (vendored), 2 lines changed
@@ -7,7 +7,7 @@ name: Publish latest image to Docker Hub
 
 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v2
       - name: Check if current release is latest

.github/workflows/publish-docker-tag.yml (vendored), 2 lines changed
@@ -8,7 +8,7 @@ name: Publish tagged image to Docker Hub
 
 jobs:
   build:
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v1
       - name: Publish to Registry

.github/workflows/test.yml (vendored), 10 lines changed
@@ -16,7 +16,7 @@ jobs:
     runs-on: ${{ matrix.os }}
     strategy:
       matrix:
-        os: [ubuntu-latest, macos-latest]
+        os: [ubuntu-18.04, macos-latest]
     steps:
       - uses: actions/checkout@v1
       - uses: actions-rs/toolchain@v1
@@ -34,11 +34,11 @@ jobs:
         uses: actions-rs/cargo@v1
         with:
           command: clippy
-          args: --all-targets -- --deny warnings
+          args: --all-targets
 
   build-image:
     name: Test the build of Docker image
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - uses: actions/checkout@v1
       - run: docker build . --file Dockerfile -t meilisearch
@@ -49,7 +49,7 @@ jobs:
     name: create prerelease
     needs: [check, build-image]
     if: ${{ contains(github.ref, 'release-') && github.event_name == 'push' }}
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - name: Checkout code
         uses: actions/checkout@v2
@@ -76,7 +76,7 @@ jobs:
     name: create release
     needs: [check, build-image]
     if: ${{ contains(github.ref, 'tags/v') }}
-    runs-on: ubuntu-latest
+    runs-on: ubuntu-18.04
     steps:
       - name: Checkout code
         uses: actions/checkout@v2

@@ -1,3 +1,3 @@
-status = ["Test on macos-latest", "Test on ubuntu-latest"]
+status = ["Test on macos-latest", "Test on ubuntu-18.04"]
 # 4 hours timeout
 timeout-sec = 14400

@@ -48,10 +48,10 @@ impl<'a> BytesDecode<'a> for FacetData {
         let mut size_buf = [0; LEN];
         size_buf.copy_from_slice(bytes.get(0..LEN)?);
         // decode size of the first item from the bytes
-        let first_size = usize::from_be_bytes(size_buf);
+        let first_size = u64::from_be_bytes(size_buf);
         // decode first and second items
-        let first_item = Str::bytes_decode(bytes.get(LEN..(LEN + first_size))?)?;
-        let second_item = CowSet::bytes_decode(bytes.get((LEN + first_size)..)?)?;
+        let first_item = Str::bytes_decode(bytes.get(LEN..(LEN + first_size as usize))?)?;
+        let second_item = CowSet::bytes_decode(bytes.get((LEN + first_size as usize)..)?)?;
         Some((first_item, second_item))
     }
 }
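
The hunk above swaps usize::from_be_bytes for u64::from_be_bytes, which suggests the size prefix is meant to be a fixed 8-byte big-endian field so the stored layout does not depend on the platform's pointer width; the decoded value is then cast to usize only for slicing. A minimal standalone sketch of the same length-prefix layout, with a hypothetical split_length_prefixed helper and a made-up LEN constant standing in for the FacetData codec's own definitions:

use std::convert::TryInto;

// Assumption: the on-disk size prefix is 8 bytes wide (size_of::<u64>()),
// matching the u64::from_be_bytes call in the diff.
const LEN: usize = std::mem::size_of::<u64>();

// Hypothetical helper (not meilisearch code): split `bytes` into the
// length-prefixed first item and the remaining payload.
fn split_length_prefixed(bytes: &[u8]) -> Option<(&[u8], &[u8])> {
    // read the fixed-width big-endian prefix, then cast to usize for slicing
    let size_buf: [u8; LEN] = bytes.get(0..LEN)?.try_into().ok()?;
    let first_size = u64::from_be_bytes(size_buf) as usize;
    let first_item = bytes.get(LEN..LEN + first_size)?;
    let rest = bytes.get(LEN + first_size..)?;
    Some((first_item, rest))
}

fn main() {
    // encode "key" (3 bytes) followed by an opaque payload
    let mut buf = 3u64.to_be_bytes().to_vec();
    buf.extend_from_slice(b"key");
    buf.extend_from_slice(b"payload");
    assert_eq!(split_length_prefixed(&buf), Some((&b"key"[..], &b"payload"[..])));
}
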

@@ -123,7 +123,7 @@ pub fn print_launch_resume(opt: &Opt, data: &Data) {
     eprintln!("{}", ascii_name);
 
     eprintln!("Database path:\t\t{:?}", opt.db_path);
-    eprintln!("Server listening on: http://\t{:?}", opt.http_addr);
+    eprintln!("Server listening on:\t\"http://{}\"", opt.http_addr);
     eprintln!("Environment:\t\t{:?}", opt.env);
     eprintln!("Commit SHA:\t\t{:?}", env!("VERGEN_SHA").to_string());
     eprintln!(
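
The only change in this hunk is the "Server listening on" line: the old format string kept the scheme inside the label and Debug-printed the bare address, while the new one prints a single quoted URL. A tiny illustration of the difference in output, using a made-up address value in place of opt.http_addr:

fn main() {
    let http_addr = "127.0.0.1:7700";
    // old style: scheme sits in the label and {:?} adds its own quotes around the address
    eprintln!("Server listening on: http://\t{:?}", http_addr);
    // new style: one readable, quoted URL after the label
    eprintln!("Server listening on:\t\"http://{}\"", http_addr);
}
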

@@ -7,7 +7,6 @@ use std::fs::create_dir_all;
 use std::path::Path;
 use std::thread;
 use std::time::Duration;
-use tempfile::TempDir;
 
 pub fn load_snapshot(
     db_path: &str,
@@ -28,12 +27,22 @@ pub fn load_snapshot(
     }
 }
 
-pub fn create_snapshot(data: &Data, snapshot_path: &Path) -> Result<(), Error> {
-    let tmp_dir = TempDir::new_in(snapshot_path)?;
+pub fn create_snapshot(data: &Data, snapshot_dir: impl AsRef<Path>, snapshot_name: impl AsRef<str>) -> Result<(), Error> {
+    create_dir_all(&snapshot_dir)?;
+    let tmp_dir = tempfile::tempdir_in(&snapshot_dir)?;
 
     data.db.copy_and_compact_to_path(tmp_dir.path())?;
 
-    compression::to_tar_gz(tmp_dir.path(), snapshot_path).map_err(|e| Error::Internal(format!("something went wrong during snapshot compression: {}", e)))
+    let temp_snapshot_file = tempfile::NamedTempFile::new_in(&snapshot_dir)?;
+
+    compression::to_tar_gz(tmp_dir.path(), temp_snapshot_file.path())
+        .map_err(|e| Error::Internal(format!("something went wrong during snapshot compression: {}", e)))?;
+
+    let snapshot_path = snapshot_dir.as_ref().join(snapshot_name.as_ref());
+
+    temp_snapshot_file.persist(snapshot_path).map_err(|e| Error::Internal(e.to_string()))?;
+
+    Ok(())
 }
 
 pub fn schedule_snapshot(data: Data, snapshot_dir: &Path, time_gap_s: u64) -> Result<(), Error> {
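
The rewritten create_snapshot builds the archive in a temporary file next to its destination and only then moves it onto the final name, so an interrupted run cannot leave a half-written .snapshot behind. A rough standalone sketch of that write-then-persist pattern, using the same tempfile crate calls (NamedTempFile::new_in and persist); the directory, file name, and placeholder contents below are illustrative, not meilisearch's actual code:

use std::fs;
use std::io::{self, Write};
use std::path::Path;

fn write_snapshot(snapshot_dir: impl AsRef<Path>, snapshot_name: &str) -> io::Result<()> {
    let snapshot_dir = snapshot_dir.as_ref();
    fs::create_dir_all(snapshot_dir)?;

    // Create the temp file in the destination directory so that persist()
    // is a same-filesystem rename rather than a copy across devices.
    let mut temp_file = tempfile::NamedTempFile::new_in(snapshot_dir)?;
    temp_file.write_all(b"...compressed archive bytes would go here...")?;
    temp_file.flush()?;

    // Atomically move the finished file onto its final name.
    let final_path = snapshot_dir.join(snapshot_name);
    temp_file.persist(final_path)?;
    Ok(())
}

fn main() -> io::Result<()> {
    write_snapshot("snapshots", "data.ms.snapshot")
}
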
@@ -42,10 +51,11 @@ pub fn schedule_snapshot(data: Data, snapshot_dir: &Path, time_gap_s: u64) -> Result<(), Error> {
     }
     let db_name = Path::new(&data.db_path).file_name().ok_or_else(|| Error::Internal("invalid database name".to_string()))?;
     create_dir_all(snapshot_dir)?;
-    let snapshot_path = snapshot_dir.join(format!("{}.snapshot", db_name.to_str().unwrap_or("data.ms")));
+    let snapshot_name = format!("{}.snapshot", db_name.to_str().unwrap_or("data.ms"));
+    let snapshot_dir = snapshot_dir.to_owned();
 
     thread::spawn(move || loop {
-        if let Err(e) = create_snapshot(&data, &snapshot_path) {
+        if let Err(e) = create_snapshot(&data, &snapshot_dir, &snapshot_name) {
             error!("Unsuccessful snapshot creation: {}", e);
         }
         thread::sleep(Duration::from_secs(time_gap_s));
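
schedule_snapshot now converts the borrowed snapshot_dir into an owned PathBuf (snapshot_dir.to_owned()) before spawning the background loop: the closure passed to thread::spawn must be 'static, so it cannot keep borrowing the caller's &Path. A small sketch of that ownership pattern, with a hypothetical schedule_job and a println! standing in for create_snapshot:

use std::path::{Path, PathBuf};
use std::thread;
use std::time::Duration;

fn schedule_job(dir: &Path, name: &str, period: Duration) {
    // Owned copies can be moved into the 'static closure; the original
    // borrows stay with the caller.
    let dir: PathBuf = dir.to_owned();
    let name: String = name.to_owned();
    thread::spawn(move || loop {
        // stand-in for create_snapshot(&data, &dir, &name)
        println!("would write snapshot {:?}/{}", dir, name);
        thread::sleep(period);
    });
}

fn main() {
    schedule_job(Path::new("data.ms"), "data.ms.snapshot", Duration::from_secs(1));
    // give the background thread a moment to run once in this demo
    thread::sleep(Duration::from_millis(50));
}
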
@@ -62,7 +72,7 @@ mod tests {
 
     #[test]
     fn test_pack_unpack() {
-        let tempdir = TempDir::new().unwrap();
+        let tempdir = tempfile::tempdir().unwrap();
 
         let test_dir = tempdir.path();
         let src_dir = test_dir.join("src");