diff --git a/.dockerignore b/.dockerignore
new file mode 100644
index 000000000..364510117
--- /dev/null
+++ b/.dockerignore
@@ -0,0 +1,5 @@
+target
+Dockerfile
+.dockerignore
+.git
+.gitignore
diff --git a/.github/workflows/README.md b/.github/workflows/README.md
new file mode 100644
index 000000000..71d046348
--- /dev/null
+++ b/.github/workflows/README.md
@@ -0,0 +1,13 @@
+# GitHub Actions workflows for MeiliDB
+
+> **Note:**
+
+> - We do not use [cache](https://github.com/actions/cache) yet but we could use it to speed up CI
+
+## Workflow
+
+- On each pull request, we trigger `cargo test`.
+- On each commit on master, we build the latest docker image; on each tag, we publish the tagged docker image.
+
+## TODO
+- On each tag, we should build the binaries for Debian & CentOS
diff --git a/.github/workflows/publish-docker-latest.yml b/.github/workflows/publish-docker-latest.yml
new file mode 100644
index 000000000..a206b22df
--- /dev/null
+++ b/.github/workflows/publish-docker-latest.yml
@@ -0,0 +1,19 @@
+---
+on:
+ push:
+ branches:
+ - master
+
+name: Publish latest image to Docker Hub
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v1
+ - name: Publish to Registry
+ uses: elgohr/Publish-Docker-Github-Action@master
+ with:
+ name: getmeili/meilisearch
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
diff --git a/.github/workflows/publish-docker-tag.yml b/.github/workflows/publish-docker-tag.yml
new file mode 100644
index 000000000..02ca3e64e
--- /dev/null
+++ b/.github/workflows/publish-docker-tag.yml
@@ -0,0 +1,20 @@
+---
+on:
+ push:
+ tags:
+ - '*'
+
+name: Publish tagged image to Docker Hub
+
+jobs:
+ build:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v1
+ - name: Publish to Registry
+ uses: elgohr/Publish-Docker-Github-Action@master
+ with:
+ name: getmeili/meilisearch
+ username: ${{ secrets.DOCKER_USERNAME }}
+ password: ${{ secrets.DOCKER_PASSWORD }}
+ tags: true
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
new file mode 100644
index 000000000..ae34f4a0c
--- /dev/null
+++ b/.github/workflows/test.yml
@@ -0,0 +1,20 @@
+---
+on: [pull_request]
+
+name: Cargo test
+
+jobs:
+ check:
+ name: MeiliDB
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v1
+ - uses: actions-rs/toolchain@v1
+ with:
+ profile: minimal
+ toolchain: stable
+ override: true
+ - name: Run cargo test
+ uses: actions-rs/cargo@v1
+ with:
+ command: test
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 000000000..24c8effd4
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,28 @@
+# Compile
+FROM alpine:3.10 AS compiler
+
+RUN apk update --quiet
+RUN apk add curl
+RUN apk add build-base
+RUN apk add libressl-dev
+
+RUN curl --proto '=https' --tlsv1.2 -sSf https://sh.rustup.rs | sh -s -- -y
+
+WORKDIR /meilisearch
+
+COPY . .
+
+ENV RUSTFLAGS="-C target-feature=-crt-static"
+
+RUN $HOME/.cargo/bin/cargo build --release
+
+# Run
+FROM alpine:3.10
+
+RUN apk update --quiet
+RUN apk add libressl
+RUN apk add build-base
+
+COPY --from=compiler /meilisearch/target/release/meilidb-http .
+
+CMD ./meilidb-http
diff --git a/README.md b/README.md
index 81cf68216..0a88e701c 100644
--- a/README.md
+++ b/README.md
@@ -1,6 +1,6 @@
# MeiliDB
-[![Build Status](https://dev.azure.com/thomas0884/thomas/_apis/build/status/meilisearch.MeiliDB?branchName=master)](https://dev.azure.com/thomas0884/thomas/_build/latest?definitionId=1&branchName=master)
+[![Build Status](https://github.com/meilisearch/MeiliDB/workflows/Cargo%20test/badge.svg)](https://github.com/meilisearch/MeiliDB/actions)
[![dependency status](https://deps.rs/repo/github/meilisearch/MeiliDB/status.svg)](https://deps.rs/repo/github/meilisearch/MeiliDB)
[![License](https://img.shields.io/badge/license-commons%20clause-lightgrey)](https://commonsclause.com/)
@@ -27,7 +27,7 @@ It uses [LMDB](https://en.wikipedia.org/wiki/Lightning_Memory-Mapped_Database) a
You can [read the deep dive](deep-dive.md) if you want more information on the engine, it describes the whole process of generating updates and handling queries or you can take a look at the [typos and ranking rules](typos-ranking-rules.md) if you want to know the default rules used to sort the documents.
-We will be proud if you submit issues and pull requests. You can help to grow this project and start contributing by checking [issues tagged "good-first-issue"](https://github.com/meilisearch/MeiliDB/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22). It is a good start!
+We will be glad if you submit issues and pull requests. You can help to grow this project and start contributing by checking [issues tagged "good-first-issue"](https://github.com/meilisearch/MeiliDB/issues?q=is%3Aissue+is%3Aopen+label%3A%22good+first+issue%22). It is a good start!
[![crates.io demo gif](misc/crates-io-demo.gif)](https://crates.meilisearch.com)
@@ -47,7 +47,6 @@ If you have not installed Rust and its package manager `cargo` yet, go to [the i
You can deploy the server on your own machine, it will listen to HTTP requests on the 8080 port by default.
```bash
-rustup override set nightly
cargo run --release
```
@@ -112,8 +111,8 @@ curl 'http://127.0.0.1:8080/indexes/movies/search?q=botman'
## Performances
-With a dataset composed of _100 353_ documents with _352_ attributes each and _3_ of them indexed.
-So more than _300 000_ fields indexed for _35 million_ stored we can handle more than _2.8k req/sec_ with an average response time of _9 ms_ on an Intel i7-7700 (8) @ 4.2GHz.
+With a dataset composed of _100 353_ documents with _352_ attributes each and _3_ of them indexed.
+So more than _300 000_ fields indexed for _35 million_ stored fields, we can handle more than _2.8k req/sec_ with an average response time of _9 ms_ on an Intel i7-7700 (8) @ 4.2GHz.
Requests are made using [wrk](https://github.com/wg/wrk) and scripted to simulate real users queries.
@@ -128,7 +127,7 @@ Requests/sec: 2806.46
Transfer/sec: 759.17KB
```
-We also indexed a dataset containing something like _12 millions_ cities names in _24 minutes_ on a machine with _8 cores_, _64 GB of RAM_ and a _300 GB NMVe_ SSD.
+We also indexed a dataset containing something like _12 million_ city names in _24 minutes_ on a machine with _8 cores_, _64 GB of RAM_ and a _300 GB NVMe_ SSD.
The resulting database was _16 GB_ and search results were between _30 ms_ and _4 seconds_ for short prefix queries.
### Notes
diff --git a/azure-pipelines.yml b/azure-pipelines.yml
deleted file mode 100644
index e1dc6fd9f..000000000
--- a/azure-pipelines.yml
+++ /dev/null
@@ -1,52 +0,0 @@
----
-trigger:
- branches:
- include: [ master ]
-
-pr: [ master ]
-
-jobs:
- - job: test
- pool:
- vmImage: 'Ubuntu 16.04'
- container: tpayet/chiquitita:latest
- steps:
- - script: |
- curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
- $HOME/.cargo/bin/rustup component add rustfmt
- displayName: 'Install rustc and components'
- - script: |
- $HOME/.cargo/bin/cargo check
- displayName: 'Check MeiliDB'
- - script: |
- $HOME/.cargo/bin/cargo test
- displayName: 'Test MeiliDB'
- - script: |
- $HOME/.cargo/bin/cargo fmt --all -- --check
- displayName: 'Fmt MeiliDB'
-
- - job: build
- dependsOn:
- - test
- condition: succeeded()
- pool:
- vmImage: 'Ubuntu 16.04'
- container: tpayet/chiquitita:latest
- steps:
- - script: |
- curl https://sh.rustup.rs -sSf | sh -s -- -y --default-toolchain stable
- $HOME/.cargo/bin/rustup component add rustfmt
- displayName: 'Install rustc and components'
- - script: |
- $HOME/.cargo/bin/cargo build --release
- displayName: 'Build MeiliDB'
- - task: CopyFiles@2
- inputs:
- contents: '$(System.DefaultWorkingDirectory)/target/release/meilidb-http'
- targetFolder: $(Build.ArtifactStagingDirectory)
- displayName: 'Copy build'
- - task: PublishBuildArtifacts@1
- inputs:
- artifactName: meilidb
- displayName: 'Upload artifacts'
-