Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-26 12:05:05 +08:00)

Commit 8e370ed9ab: Merge branch 'main' into stable
.github/ISSUE_TEMPLATE/config.yml (vendored): 3 lines changed

@@ -1,4 +1,7 @@
 contact_links:
+  - name: Language support request & feedback
+    url: https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal?discussions_q=label%3Aproduct%3Acore%3Atokenizer+category%3A%22Feedback+%26+Feature+Proposal%22
+    about: The requests and feedback regarding Language support are not managed in this repository. Please upvote the related discussion in our dedicated product repository or open a new one if it doesn't exist.
   - name: Feature request & feedback
     url: https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal
     about: The feature requests and feedback regarding the already existing features are not managed in this repository. Please open a discussion in our dedicated product repository
.github/dependabot.yml (vendored, new file): 13 lines changed

@@ -0,0 +1,13 @@
+# Set update schedule for GitHub Actions only
+
+version: 2
+updates:
+
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    schedule:
+      interval: "monthly"
+    labels:
+      - 'skip changelog'
+      - 'dependencies'
+    rebase-strategy: disabled
.github/workflows/README.md (vendored, deleted): 20 lines changed

@@ -1,20 +0,0 @@
-# GitHub Actions Workflow for Meilisearch
-
-> **Note:**
-
-> - We do not use [cache](https://github.com/actions/cache) yet but we could use it to speed up CI
-
-## Workflow
-
-- On each pull request, we trigger `cargo test`.
-- On each tag, we build:
-  - the tagged Docker image and publish it to Docker Hub
-  - the binaries for MacOS, Ubuntu, and Windows
-  - the Debian package
-- On each stable release (`v*.*.*` tag):
-  - we build the `latest` Docker image and publish it to Docker Hub
-  - we publish the binary to Hombrew and Gemfury
-
-## Problems
-
-- We do not test on Windows because we are unable to make it work, there is a disk space problem.
.github/workflows/coverage.yml (vendored): 4 lines changed

@@ -8,7 +8,7 @@ jobs:
   nightly-coverage:
     runs-on: ubuntu-18.04
     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - uses: actions-rs/toolchain@v1
        with:
          toolchain: nightly
@@ -25,7 +25,7 @@ jobs:
          RUSTFLAGS: "-Zprofile -Ccodegen-units=1 -Cinline-threshold=0 -Clink-dead-code -Coverflow-checks=off -Cpanic=unwind -Zpanic_abort_tests"
      - uses: actions-rs/grcov@v0.1
      - name: Upload coverage to Codecov
-        uses: codecov/codecov-action@v1
+        uses: codecov/codecov-action@v3
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          file: ${{ steps.coverage.outputs.report }}
.github/workflows/create-issue-dependencies.yml (vendored, new file): 23 lines changed

@@ -0,0 +1,23 @@
+name: Create issue to upgrade dependencies
+on:
+  schedule:
+    - cron: '0 0 1 */3 *'
+  workflow_dispatch:
+
+jobs:
+  create-issue:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - name: Create an issue
+        uses: actions-ecosystem/action-create-issue@v1
+        with:
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          title: Upgrade dependencies
+          body: |
+            We need to update the dependencies of the Meilisearch repository, and, if possible, the dependencies of all the core-team repositories that Meilisearch depends on (milli, charabia, heed...).
+
+            ⚠️ This issue should only be done at the beginning of the sprint!
+          labels: |
+            dependencies
+            maintenance
.github/workflows/flaky.yml (vendored): 2 lines changed

@@ -8,7 +8,7 @@ jobs:
     runs-on: ubuntu-18.04

     steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Install cargo-flaky
        run: cargo install cargo-flaky
      - name: Run cargo flaky 100 times
.github/workflows/publish-binaries.yml (vendored): 4 lines changed

@@ -51,7 +51,7 @@ jobs:
      - uses: hecrj/setup-rust-action@master
        with:
          rust-version: stable
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Build
        run: cargo build --release --locked
      - name: Upload binaries to release
@@ -80,7 +80,7 @@ jobs:

    steps:
      - name: Checkout repository
-        uses: actions/checkout@v2
+        uses: actions/checkout@v3

      - name: Installing Rust toolchain
        uses: actions-rs/toolchain@v1
.github/workflows/publish-deb-brew-pkg.yml (vendored): 2 lines changed

@@ -23,7 +23,7 @@ jobs:
          rust-version: stable
      - name: Install cargo-deb
        run: cargo install cargo-deb
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Build deb package
        run: cargo deb -p meilisearch-http -o target/debian/meilisearch.deb
      - name: Upload debian pkg to release
.github/workflows/publish-docker-images.yml (vendored): 28 lines changed

@@ -36,21 +36,35 @@ jobs:
        run: bash .github/scripts/check-release.sh

      - name: Set up QEMU
-        uses: docker/setup-qemu-action@v1
+        uses: docker/setup-qemu-action@v2

      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v1
+        uses: docker/setup-buildx-action@v2

      - name: Login to Docker Hub
        if: github.event_name != 'schedule'
-        uses: docker/login-action@v1
+        uses: docker/login-action@v2
        with:
-          username: ${{ secrets.DOCKER_USERNAME }}
-          password: ${{ secrets.DOCKER_PASSWORD }}
+          username: ${{ secrets.DOCKERHUB_USERNAME }}
+          password: ${{ secrets.DOCKERHUB_TOKEN }}
+
+      - name: Check tag format
+        id: check-tag-format
+        run: |
+          # Escape submitted tag name
+          escaped_tag=$(printf "%q" ${{ github.ref_name }})
+
+          # Check if tag has format v<nmumber>.<number>.<number> and set output.match
+          # to create a vX.Y (without patch version) Docker tag
+          if [[ $escaped_tag =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
+            echo ::set-output name=match::true
+          else
+            echo ::set-output name=match::false
+          fi

      - name: Docker meta
        id: meta
-        uses: docker/metadata-action@v3
+        uses: docker/metadata-action@v4
        with:
          images: getmeili/meilisearch
          # The lastest and `vX.Y` tags are only pushed for the official Meilisearch releases
@@ -63,7 +77,7 @@ jobs:

      - name: Build and push
        id: docker_build
-        uses: docker/build-push-action@v2
+        uses: docker/build-push-action@v3
        with:
          # We do not push tags for the cron jobs, this is only for test purposes
          push: ${{ github.event_name != 'schedule' }}
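The new `check-tag-format` step gates the short `vX.Y` Docker tag on the pushed tag matching `v<number>.<number>.<number>`. Purely for illustration (this is not part of the workflow or the repository), the same check and tag derivation could look like this in Rust:

```rust
/// Sketch of the bash tag check above: accept only `v<number>.<number>.<number>`
/// and derive the short `vX.Y` Docker tag from it.
fn docker_minor_tag(tag: &str) -> Option<String> {
    let rest = tag.strip_prefix('v')?;
    let parts: Vec<&str> = rest.split('.').collect();
    let all_numeric = parts
        .iter()
        .all(|p| !p.is_empty() && p.bytes().all(|b| b.is_ascii_digit()));
    if parts.len() == 3 && all_numeric {
        Some(format!("v{}.{}", parts[0], parts[1]))
    } else {
        None
    }
}

fn main() {
    assert_eq!(docker_minor_tag("v0.28.0").as_deref(), Some("v0.28"));
    // Release-candidate tags fail the pattern, so they get no `vX.Y` tag.
    assert_eq!(docker_minor_tag("v0.28.0-rc.1"), None);
}
```

This matches the comment in the Docker meta step: the `latest` and `vX.Y` tags are only pushed for official Meilisearch releases.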
.github/workflows/rust.yml (vendored): 16 lines changed

@@ -23,9 +23,9 @@ jobs:
    matrix:
      os: [ubuntu-18.04, macos-latest, windows-latest]
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+        uses: Swatinem/rust-cache@v1.4.0
      - name: Run cargo check without any default features
        uses: actions-rs/cargo@v1
        with:
@@ -42,14 +42,14 @@ jobs:
    name: Run tests in debug
    runs-on: ubuntu-18.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+        uses: Swatinem/rust-cache@v1.4.0
      - name: Run tests in debug
        uses: actions-rs/cargo@v1
        with:
@@ -60,7 +60,7 @@ jobs:
    name: Run Clippy
    runs-on: ubuntu-18.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
@@ -68,7 +68,7 @@ jobs:
          override: true
          components: clippy
      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+        uses: Swatinem/rust-cache@v1.4.0
      - name: Run cargo clippy
        uses: actions-rs/cargo@v1
        with:
@@ -79,7 +79,7 @@ jobs:
    name: Run Rustfmt
    runs-on: ubuntu-18.04
    steps:
-      - uses: actions/checkout@v2
+      - uses: actions/checkout@v3
      - uses: actions-rs/toolchain@v1
        with:
          profile: minimal
@@ -87,6 +87,6 @@ jobs:
          override: true
          components: rustfmt
      - name: Cache dependencies
-        uses: Swatinem/rust-cache@v1.3.0
+        uses: Swatinem/rust-cache@v1.4.0
      - name: Run cargo fmt
        run: cargo fmt --all -- --check
CONTRIBUTING.md:

@@ -5,6 +5,7 @@ First, thank you for contributing to Meilisearch! The goal of this document is t
 Remember that there are many ways to contribute other than writing code: writing [tutorials or blog posts](https://github.com/meilisearch/awesome-meilisearch), improving [the documentation](https://github.com/meilisearch/documentation), submitting [bug reports](https://github.com/meilisearch/meilisearch/issues/new?assignees=&labels=&template=bug_report.md&title=) and [feature requests](https://github.com/meilisearch/product/discussions/categories/feedback-feature-proposal)...
+
 ## Table of Contents

 - [Assumptions](#assumptions)
 - [How to Contribute](#how-to-contribute)
 - [Development Workflow](#development-workflow)
@@ -13,7 +14,7 @@ Remember that there are many ways to contribute other than writing code: writing
 ## Assumptions

-1. **You're familiar with [Github](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
+1. **You're familiar with [GitHub](https://github.com) and the [Pull Requests](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/about-pull-requests)(PR) workflow.**
 2. **You've read the Meilisearch [documentation](https://docs.meilisearch.com).**
 3. **You know about the [Meilisearch community](https://docs.meilisearch.com/learn/what_is_meilisearch/contact.html).
    Please use this for help.**
@@ -23,7 +24,7 @@ Remember that there are many ways to contribute other than writing code: writing
 1. Ensure your change has an issue! Find an
    [existing issue](https://github.com/meilisearch/meilisearch/issues/) or [open a new issue](https://github.com/meilisearch/meilisearch/issues/new).
    * This is where you can get a feel if the change will be accepted or not.
-2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own Github account.
+2. Once approved, [fork the Meilisearch repository](https://help.github.com/en/github/getting-started-with-github/fork-a-repo) in your own GitHub account.
 3. [Create a new Git branch](https://help.github.com/en/github/collaborating-with-issues-and-pull-requests/creating-and-deleting-branches-within-your-repository)
 4. Review the [Development Workflow](#development-workflow) section that describes the steps to maintain the repository.
 5. Make your changes on your branch.
@@ -45,6 +46,8 @@ We recommend using the `--release` flag to test the full performance of Meilisea
 cargo test
 ```

+This command will be triggered to each PR as a requirement for merging it.
+
 If you get a "Too many open files" error you might want to increase the open file limit using this command:

 ```bash
@@ -69,7 +72,7 @@ As minimal requirements, your commit message should:

 We don't follow any other convention, but if you want to use one, we recommend [the Chris Beams one](https://chris.beams.io/posts/git-commit/).

-### Github Pull Requests
+### GitHub Pull Requests

 Some notes on GitHub PRs:

@@ -92,6 +95,16 @@ _[Read more about this](https://github.com/meilisearch/integration-guides/blob/m

 The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/core-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.

+### Release assets
+
+For each release, the following assets are created:
+- Binaries for differents platforms (Linux, MacOS, Windows and ARM architectures) are attached to the GitHub release
+- Binaries are pushed to HomeBrew and APT (not published for RC)
+- Docker tags are created/updated:
+  - `vX.Y.Z`
+  - `vX.Y` (not published for RC)
+  - `latest` (not published for RC)
+
 <hr>

 Thank you again for reading this through, we can not wait to begin to work with you if you made your way through this contributing guide ❤️
@@ -1,19 +1,24 @@
 use enum_iterator::IntoEnumIterator;
 use serde::{Deserialize, Serialize};
+use std::hash::Hash;

-#[derive(IntoEnumIterator, Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq)]
+#[derive(IntoEnumIterator, Copy, Clone, Serialize, Deserialize, Debug, Eq, PartialEq, Hash)]
 #[repr(u8)]
 pub enum Action {
     #[serde(rename = "*")]
     All = actions::ALL,
     #[serde(rename = "search")]
     Search = actions::SEARCH,
+    #[serde(rename = "documents.*")]
+    DocumentsAll = actions::DOCUMENTS_ALL,
     #[serde(rename = "documents.add")]
     DocumentsAdd = actions::DOCUMENTS_ADD,
     #[serde(rename = "documents.get")]
     DocumentsGet = actions::DOCUMENTS_GET,
     #[serde(rename = "documents.delete")]
     DocumentsDelete = actions::DOCUMENTS_DELETE,
+    #[serde(rename = "indexes.*")]
+    IndexesAll = actions::INDEXES_ALL,
     #[serde(rename = "indexes.create")]
     IndexesAdd = actions::INDEXES_CREATE,
     #[serde(rename = "indexes.get")]
@@ -22,14 +27,22 @@ pub enum Action {
     IndexesUpdate = actions::INDEXES_UPDATE,
     #[serde(rename = "indexes.delete")]
     IndexesDelete = actions::INDEXES_DELETE,
+    #[serde(rename = "tasks.*")]
+    TasksAll = actions::TASKS_ALL,
     #[serde(rename = "tasks.get")]
     TasksGet = actions::TASKS_GET,
+    #[serde(rename = "settings.*")]
+    SettingsAll = actions::SETTINGS_ALL,
     #[serde(rename = "settings.get")]
     SettingsGet = actions::SETTINGS_GET,
     #[serde(rename = "settings.update")]
     SettingsUpdate = actions::SETTINGS_UPDATE,
+    #[serde(rename = "stats.*")]
+    StatsAll = actions::STATS_ALL,
     #[serde(rename = "stats.get")]
     StatsGet = actions::STATS_GET,
+    #[serde(rename = "dumps.*")]
+    DumpsAll = actions::DUMPS_ALL,
     #[serde(rename = "dumps.create")]
     DumpsCreate = actions::DUMPS_CREATE,
     #[serde(rename = "version")]
@@ -50,17 +63,23 @@ impl Action {
         match repr {
             ALL => Some(Self::All),
             SEARCH => Some(Self::Search),
+            DOCUMENTS_ALL => Some(Self::DocumentsAll),
             DOCUMENTS_ADD => Some(Self::DocumentsAdd),
             DOCUMENTS_GET => Some(Self::DocumentsGet),
             DOCUMENTS_DELETE => Some(Self::DocumentsDelete),
+            INDEXES_ALL => Some(Self::IndexesAll),
             INDEXES_CREATE => Some(Self::IndexesAdd),
             INDEXES_GET => Some(Self::IndexesGet),
             INDEXES_UPDATE => Some(Self::IndexesUpdate),
             INDEXES_DELETE => Some(Self::IndexesDelete),
+            TASKS_ALL => Some(Self::TasksAll),
             TASKS_GET => Some(Self::TasksGet),
+            SETTINGS_ALL => Some(Self::SettingsAll),
             SETTINGS_GET => Some(Self::SettingsGet),
             SETTINGS_UPDATE => Some(Self::SettingsUpdate),
+            STATS_ALL => Some(Self::StatsAll),
             STATS_GET => Some(Self::StatsGet),
+            DUMPS_ALL => Some(Self::DumpsAll),
             DUMPS_CREATE => Some(Self::DumpsCreate),
             VERSION => Some(Self::Version),
             KEYS_CREATE => Some(Self::KeysAdd),
@@ -76,17 +95,23 @@ impl Action {
         match self {
             Self::All => ALL,
             Self::Search => SEARCH,
+            Self::DocumentsAll => DOCUMENTS_ALL,
             Self::DocumentsAdd => DOCUMENTS_ADD,
             Self::DocumentsGet => DOCUMENTS_GET,
             Self::DocumentsDelete => DOCUMENTS_DELETE,
+            Self::IndexesAll => INDEXES_ALL,
             Self::IndexesAdd => INDEXES_CREATE,
             Self::IndexesGet => INDEXES_GET,
             Self::IndexesUpdate => INDEXES_UPDATE,
             Self::IndexesDelete => INDEXES_DELETE,
+            Self::TasksAll => TASKS_ALL,
             Self::TasksGet => TASKS_GET,
+            Self::SettingsAll => SETTINGS_ALL,
             Self::SettingsGet => SETTINGS_GET,
             Self::SettingsUpdate => SETTINGS_UPDATE,
+            Self::StatsAll => STATS_ALL,
             Self::StatsGet => STATS_GET,
+            Self::DumpsAll => DUMPS_ALL,
             Self::DumpsCreate => DUMPS_CREATE,
             Self::Version => VERSION,
             Self::KeysAdd => KEYS_CREATE,
@@ -100,21 +125,27 @@ impl Action {
 pub mod actions {
     pub(crate) const ALL: u8 = 0;
     pub const SEARCH: u8 = 1;
-    pub const DOCUMENTS_ADD: u8 = 2;
-    pub const DOCUMENTS_GET: u8 = 3;
-    pub const DOCUMENTS_DELETE: u8 = 4;
-    pub const INDEXES_CREATE: u8 = 5;
-    pub const INDEXES_GET: u8 = 6;
-    pub const INDEXES_UPDATE: u8 = 7;
-    pub const INDEXES_DELETE: u8 = 8;
-    pub const TASKS_GET: u8 = 9;
-    pub const SETTINGS_GET: u8 = 10;
-    pub const SETTINGS_UPDATE: u8 = 11;
-    pub const STATS_GET: u8 = 12;
-    pub const DUMPS_CREATE: u8 = 13;
-    pub const VERSION: u8 = 15;
-    pub const KEYS_CREATE: u8 = 16;
-    pub const KEYS_GET: u8 = 17;
-    pub const KEYS_UPDATE: u8 = 18;
-    pub const KEYS_DELETE: u8 = 19;
+    pub const DOCUMENTS_ALL: u8 = 2;
+    pub const DOCUMENTS_ADD: u8 = 3;
+    pub const DOCUMENTS_GET: u8 = 4;
+    pub const DOCUMENTS_DELETE: u8 = 5;
+    pub const INDEXES_ALL: u8 = 6;
+    pub const INDEXES_CREATE: u8 = 7;
+    pub const INDEXES_GET: u8 = 8;
+    pub const INDEXES_UPDATE: u8 = 9;
+    pub const INDEXES_DELETE: u8 = 10;
+    pub const TASKS_ALL: u8 = 11;
+    pub const TASKS_GET: u8 = 12;
+    pub const SETTINGS_ALL: u8 = 13;
+    pub const SETTINGS_GET: u8 = 14;
+    pub const SETTINGS_UPDATE: u8 = 15;
+    pub const STATS_ALL: u8 = 16;
+    pub const STATS_GET: u8 = 17;
+    pub const DUMPS_ALL: u8 = 18;
+    pub const DUMPS_CREATE: u8 = 19;
+    pub const VERSION: u8 = 20;
+    pub const KEYS_CREATE: u8 = 21;
+    pub const KEYS_GET: u8 = 22;
+    pub const KEYS_UPDATE: u8 = 23;
+    pub const KEYS_DELETE: u8 = 24;
 }
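The wildcard variants above keep three things in sync: the serde rename (e.g. `documents.*`), the enum variant, and the renumbered `u8` constant in `pub mod actions`. A sketch of a round-trip check, assuming the two conversion methods in the `impl Action` hunks are named `from_repr` and `repr` (the names are not visible in this diff):

```rust
// Hypothetical test sketch; `from_repr`/`repr` are assumed names for the two
// conversion methods shown in the `impl Action` hunks above.
#[cfg(test)]
mod tests {
    use super::*;
    use enum_iterator::IntoEnumIterator;

    #[test]
    fn action_repr_round_trips() {
        for action in Action::into_enum_iter() {
            // The constants in `pub mod actions` are the single source of
            // truth, so repr -> variant -> repr must be the identity.
            assert_eq!(Action::from_repr(action.repr()), Some(action));
        }
    }
}
```

Note the new `Hash` derive: it is what allows `Action` values to be collected into the `HashSet` used by the key store below.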
@@ -1,5 +1,6 @@
 use std::borrow::Cow;
 use std::cmp::Reverse;
+use std::collections::HashSet;
 use std::convert::TryFrom;
 use std::convert::TryInto;
 use std::fs::create_dir_all;
@@ -88,12 +89,48 @@ impl HeedAuthStore {
         // create inverted database.
         let db = self.action_keyid_index_expiration;

-        let actions = if key.actions.contains(&Action::All) {
-            // if key.actions contains All, we iterate over all actions.
-            Action::into_enum_iter().collect()
-        } else {
-            key.actions.clone()
-        };
+        let mut actions = HashSet::new();
+        for action in &key.actions {
+            match action {
+                Action::All => actions.extend(Action::into_enum_iter()),
+                Action::DocumentsAll => {
+                    actions.extend(
+                        [
+                            Action::DocumentsGet,
+                            Action::DocumentsDelete,
+                            Action::DocumentsAdd,
+                        ]
+                        .iter(),
+                    );
+                }
+                Action::IndexesAll => {
+                    actions.extend(
+                        [
+                            Action::IndexesAdd,
+                            Action::IndexesDelete,
+                            Action::IndexesGet,
+                            Action::IndexesUpdate,
+                        ]
+                        .iter(),
+                    );
+                }
+                Action::SettingsAll => {
+                    actions.extend([Action::SettingsGet, Action::SettingsUpdate].iter());
+                }
+                Action::DumpsAll => {
+                    actions.insert(Action::DumpsCreate);
+                }
+                Action::TasksAll => {
+                    actions.insert(Action::TasksGet);
+                }
+                Action::StatsAll => {
+                    actions.insert(Action::StatsGet);
+                }
+                other => {
+                    actions.insert(*other);
+                }
+            }
+        }

         let no_index_restriction = key.indexes.contains(&StarOr::Star);
         for action in actions {
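This is the behavioral core of the change: instead of special-casing only `Action::All`, every wildcard action now fans out into the concrete actions it grants before the inverted index entries are written. A self-contained sketch of the same fan-out, using a stripped-down stand-in enum rather than the real `Action` type:

```rust
use std::collections::HashSet;

// Stripped-down stand-in for the real `Action` enum above.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
enum Action {
    All,
    DocumentsAll,
    DocumentsAdd,
    DocumentsGet,
    DocumentsDelete,
    Search,
}

// Expand wildcard actions into the concrete actions they grant,
// mirroring the match in `HeedAuthStore` above.
fn expand(granted: &[Action]) -> HashSet<Action> {
    let mut actions = HashSet::new();
    for action in granted {
        match action {
            Action::All => {
                actions.extend([
                    Action::Search,
                    Action::DocumentsAdd,
                    Action::DocumentsGet,
                    Action::DocumentsDelete,
                ]);
            }
            Action::DocumentsAll => {
                actions.extend([
                    Action::DocumentsAdd,
                    Action::DocumentsGet,
                    Action::DocumentsDelete,
                ]);
            }
            other => {
                actions.insert(*other);
            }
        }
    }
    actions
}

fn main() {
    // A key granting `documents.*` ends up with all three concrete document actions.
    let actions = expand(&[Action::DocumentsAll]);
    assert!(actions.contains(&Action::DocumentsAdd));
    assert_eq!(actions.len(), 3);
}
```

Using a `HashSet` also deduplicates overlapping grants, e.g. a key listing both `documents.*` and `documents.add`.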
@@ -46,7 +46,7 @@ fn payload_to_stream(mut payload: Payload) -> impl Stream<Item = Result<Bytes, P
 }

 /// Extracts the mime type from the content type and return
-/// a meilisearch error if anyhthing bad happen.
+/// a meilisearch error if anything bad happen.
 fn extract_mime_type(req: &HttpRequest) -> Result<Option<Mime>, MeilisearchHttpError> {
     match req.mime_type() {
         Ok(Some(mime)) => Ok(Some(mime)),
Binary file not shown.
@@ -11,41 +11,41 @@ pub static AUTHORIZATIONS: Lazy<HashMap<(&'static str, &'static str), HashSet<&'
     hashmap! {
         ("POST", "/indexes/products/search") => hashset!{"search", "*"},
         ("GET", "/indexes/products/search") => hashset!{"search", "*"},
-        ("POST", "/indexes/products/documents") => hashset!{"documents.add", "*"},
-        ("GET", "/indexes/products/documents") => hashset!{"documents.get", "*"},
-        ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "*"},
-        ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "*"},
-        ("GET", "/tasks") => hashset!{"tasks.get", "*"},
-        ("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "*"},
-        ("GET", "/tasks/0") => hashset!{"tasks.get", "*"},
-        ("PATCH", "/indexes/products/") => hashset!{"indexes.update", "*"},
-        ("GET", "/indexes/products/") => hashset!{"indexes.get", "*"},
-        ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "*"},
-        ("POST", "/indexes") => hashset!{"indexes.create", "*"},
-        ("GET", "/indexes") => hashset!{"indexes.get", "*"},
-        ("GET", "/indexes/products/settings") => hashset!{"settings.get", "*"},
-        ("GET", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.get", "*"},
-        ("GET", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.get", "*"},
-        ("GET", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.get", "*"},
-        ("GET", "/indexes/products/settings/ranking-rules") => hashset!{"settings.get", "*"},
-        ("GET", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.get", "*"},
-        ("GET", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.get", "*"},
-        ("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "*"},
-        ("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "*"},
-        ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "*"},
-        ("PATCH", "/indexes/products/settings") => hashset!{"settings.update", "*"},
-        ("PATCH", "/indexes/products/settings/typo-tolerance") => hashset!{"settings.update", "*"},
-        ("PUT", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "*"},
-        ("PUT", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "*"},
-        ("PUT", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "*"},
-        ("PUT", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "*"},
-        ("PUT", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "*"},
-        ("PUT", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "*"},
-        ("PUT", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "*"},
-        ("PUT", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "*"},
-        ("GET", "/indexes/products/stats") => hashset!{"stats.get", "*"},
-        ("GET", "/stats") => hashset!{"stats.get", "*"},
-        ("POST", "/dumps") => hashset!{"dumps.create", "*"},
+        ("POST", "/indexes/products/documents") => hashset!{"documents.add", "documents.*", "*"},
+        ("GET", "/indexes/products/documents") => hashset!{"documents.get", "documents.*", "*"},
+        ("GET", "/indexes/products/documents/0") => hashset!{"documents.get", "documents.*", "*"},
+        ("DELETE", "/indexes/products/documents/0") => hashset!{"documents.delete", "documents.*", "*"},
+        ("GET", "/tasks") => hashset!{"tasks.get", "tasks.*", "*"},
+        ("GET", "/tasks?indexUid=products") => hashset!{"tasks.get", "tasks.*", "*"},
+        ("GET", "/tasks/0") => hashset!{"tasks.get", "tasks.*", "*"},
+        ("PATCH", "/indexes/products/") => hashset!{"indexes.update", "indexes.*", "*"},
+        ("GET", "/indexes/products/") => hashset!{"indexes.get", "indexes.*", "*"},
+        ("DELETE", "/indexes/products/") => hashset!{"indexes.delete", "indexes.*", "*"},
+        ("POST", "/indexes") => hashset!{"indexes.create", "indexes.*", "*"},
+        ("GET", "/indexes") => hashset!{"indexes.get", "indexes.*", "*"},
+        ("GET", "/indexes/products/settings") => hashset!{"settings.get", "settings.*", "*"},
+        ("GET", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.get", "settings.*", "*"},
+        ("GET", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.get", "settings.*", "*"},
+        ("GET", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.get", "settings.*", "*"},
+        ("GET", "/indexes/products/settings/ranking-rules") => hashset!{"settings.get", "settings.*", "*"},
+        ("GET", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.get", "settings.*", "*"},
+        ("GET", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.get", "settings.*", "*"},
+        ("GET", "/indexes/products/settings/stop-words") => hashset!{"settings.get", "settings.*", "*"},
+        ("GET", "/indexes/products/settings/synonyms") => hashset!{"settings.get", "settings.*", "*"},
+        ("DELETE", "/indexes/products/settings") => hashset!{"settings.update", "settings.*", "*"},
+        ("PATCH", "/indexes/products/settings") => hashset!{"settings.update", "settings.*", "*"},
+        ("PATCH", "/indexes/products/settings/typo-tolerance") => hashset!{"settings.update", "settings.*", "*"},
+        ("PUT", "/indexes/products/settings/displayed-attributes") => hashset!{"settings.update", "settings.*", "*"},
+        ("PUT", "/indexes/products/settings/distinct-attribute") => hashset!{"settings.update", "settings.*", "*"},
+        ("PUT", "/indexes/products/settings/filterable-attributes") => hashset!{"settings.update", "settings.*", "*"},
+        ("PUT", "/indexes/products/settings/ranking-rules") => hashset!{"settings.update", "settings.*", "*"},
+        ("PUT", "/indexes/products/settings/searchable-attributes") => hashset!{"settings.update", "settings.*", "*"},
+        ("PUT", "/indexes/products/settings/sortable-attributes") => hashset!{"settings.update", "settings.*", "*"},
+        ("PUT", "/indexes/products/settings/stop-words") => hashset!{"settings.update", "settings.*", "*"},
+        ("PUT", "/indexes/products/settings/synonyms") => hashset!{"settings.update", "settings.*", "*"},
+        ("GET", "/indexes/products/stats") => hashset!{"stats.get", "stats.*", "*"},
+        ("GET", "/stats") => hashset!{"stats.get", "stats.*", "*"},
+        ("POST", "/dumps") => hashset!{"dumps.create", "dumps.*", "*"},
         ("GET", "/version") => hashset!{"version", "*"},
         ("PATCH", "/keys/mykey/") => hashset!{"keys.update", "*"},
         ("GET", "/keys/mykey/") => hashset!{"keys.get", "*"},
@@ -115,7 +115,7 @@ static REFUSED_KEYS: Lazy<Vec<Value>> = Lazy::new(|| {
     ]
 });

-macro_rules! compute_autorized_search {
+macro_rules! compute_authorized_search {
     ($tenant_tokens:expr, $filter:expr, $expected_count:expr) => {
         let mut server = Server::new_auth().await;
         server.use_admin_key("MASTER_KEY").await;
@@ -251,7 +251,7 @@ async fn search_authorized_simple_token() {
         },
     ];

-    compute_autorized_search!(tenant_tokens, {}, 5);
+    compute_authorized_search!(tenant_tokens, {}, 5);
 }

 #[actix_rt::test]
@@ -305,7 +305,7 @@ async fn search_authorized_filter_token() {
         },
     ];

-    compute_autorized_search!(tenant_tokens, {}, 3);
+    compute_authorized_search!(tenant_tokens, {}, 3);
 }

 #[actix_rt::test]
@@ -359,7 +359,7 @@ async fn filter_search_authorized_filter_token() {
         },
     ];

-    compute_autorized_search!(tenant_tokens, "color = yellow", 1);
+    compute_authorized_search!(tenant_tokens, "color = yellow", 1);
 }

 #[actix_rt::test]
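Each route-specific entry in `AUTHORIZATIONS` now lists the matching wildcard action alongside the concrete action and `*`, and the `compute_autorized_search` macro is renamed to fix its spelling. A hypothetical assertion of the new map shape (not part of the diff):

```rust
// Hypothetical sketch: AUTHORIZATIONS maps (method, route) to the set of
// action names allowed to call it, so a lookup checks wildcard coverage.
#[test]
fn documents_wildcard_covers_document_routes() {
    let allowed = AUTHORIZATIONS
        .get(&("POST", "/indexes/products/documents"))
        .expect("route is declared in AUTHORIZATIONS");
    assert!(allowed.contains("documents.*"));
    assert!(allowed.contains("documents.add"));
    assert!(allowed.contains("*"));
}
```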
@@ -625,7 +625,7 @@ async fn add_documents_no_index_creation() {
         OffsetDateTime::parse(response["enqueuedAt"].as_str().unwrap(), &Rfc3339).unwrap();
     assert!(processed_at > enqueued_at);

-    // index was created, and primary key was infered.
+    // index was created, and primary key was inferred.
     let (response, code) = index.get().await;
     assert_eq!(code, 200);
     assert_eq!(response["primaryKey"], "id");
@@ -1,4 +1,4 @@
-// This modules contains all the test concerning search. Each particular feture of the search
+// This modules contains all the test concerning search. Each particular feature of the search
 // should be tested in its own module to isolate tests and keep the tests readable.

 mod errors;
@@ -39,7 +39,7 @@ async fn get_task_status() {
     index.wait_task(0).await;
     let (_response, code) = index.get_task(1).await;
     assert_eq!(code, 200);
-    // TODO check resonse format, as per #48
+    // TODO check response format, as per #48
 }

 #[actix_rt::test]
@@ -9,7 +9,7 @@ anyhow = { version = "1.0.56", features = ["backtrace"] }
 async-stream = "0.3.3"
 async-trait = "0.1.52"
 atomic_refcell = "0.1.8"
-byte-unit = { version = "4.0.14", default-features = false, features = ["std"] }
+byte-unit = { version = "4.0.14", default-features = false, features = ["std", "serde"] }
 bytes = "1.1.0"
 clap = { version = "3.1.6", features = ["derive", "env"] }
 crossbeam-channel = "0.5.2"
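The only change here enables the `serde` feature of `byte-unit`. A plausible reading (our assumption, the diff does not say) is that human-readable sizes can then deserialize directly into `byte_unit::Byte` in serde-backed options, roughly:

```rust
use byte_unit::Byte;
use serde::Deserialize;

// Minimal sketch assuming byte-unit 4.x with its `serde` feature enabled,
// which makes `Byte` implement `Deserialize`; serde_json is used here only
// to exercise the sketch.
#[derive(Debug, Deserialize)]
struct SizeOptions {
    max_index_size: Byte,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    let opts: SizeOptions = serde_json::from_str(r#"{ "max_index_size": "100 MB" }"#)?;
    println!("max_index_size = {} bytes", opts.max_index_size.get_bytes());
    Ok(())
}
```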
|
@ -38,7 +38,7 @@ impl BatchContent {
|
|||||||
|
|
||||||
#[derive(Debug)]
|
#[derive(Debug)]
|
||||||
pub struct Batch {
|
pub struct Batch {
|
||||||
// Only batches that contains a persistant tasks are given an id. Snapshot batches don't have
|
// Only batches that contains a persistent tasks are given an id. Snapshot batches don't have
|
||||||
// an id.
|
// an id.
|
||||||
pub id: Option<BatchId>,
|
pub id: Option<BatchId>,
|
||||||
pub created_at: OffsetDateTime,
|
pub created_at: OffsetDateTime,
|
||||||
|
@ -33,7 +33,7 @@ pub trait BatchHandler: Sync + Send + 'static {
|
|||||||
/// `accept` beforehand.
|
/// `accept` beforehand.
|
||||||
async fn process_batch(&self, batch: Batch) -> Batch;
|
async fn process_batch(&self, batch: Batch) -> Batch;
|
||||||
|
|
||||||
/// `finish` is called when the result of `process` has been commited to the task store. This
|
/// `finish` is called when the result of `process` has been committed to the task store. This
|
||||||
/// method can be used to perform cleanup after the update has been completed for example.
|
/// method can be used to perform cleanup after the update has been completed for example.
|
||||||
async fn finish(&self, batch: &Batch);
|
async fn finish(&self, batch: &Batch);
|
||||||
}
|
}
|
||||||
|
@@ -189,7 +189,7 @@ impl TaskQueue {
             Entry::Occupied(entry) => {
                 // A task list already exists for this index, all we have to to is to push the new
                 // update to the end of the list. This won't change the order since ids are
-                // monotically increasing.
+                // monotonically increasing.
                 let mut list = entry.get().borrow_mut();

                 // We only need the first element to be lower than the one we want to
@@ -78,7 +78,7 @@ impl TaskEvent {

 /// A task represents an operation that Meilisearch must do.
 /// It's stored on disk and executed from the lowest to highest Task id.
-/// Everytime a new task is created it has a higher Task id than the previous one.
+/// Every time a new task is created it has a higher Task id than the previous one.
 /// See also `Job`.
 #[derive(Clone, Debug, Serialize, Deserialize, PartialEq)]
 #[cfg_attr(test, derive(proptest_derive::Arbitrary))]
@@ -122,7 +122,7 @@ impl TaskStore {
 }

 /// This methods takes a `Processing` which contains the next task ids to process, and returns
-/// the coresponding tasks along with the ownership to the passed processing.
+/// the corresponding tasks along with the ownership to the passed processing.
 ///
 /// We need get_processing_tasks to take ownership over `Processing` because we need it to be
 /// valid for 'static.
@@ -49,7 +49,7 @@ impl UpdateLoop {
         };

         if let Err(e) = self.process_next_batch().await {
-            log::error!("an error occured while processing an update batch: {}", e);
+            log::error!("an error occurred while processing an update batch: {}", e);
         }
     }
 }