Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-26 12:05:05 +08:00)

Commit 8aa808d51b: Merge branch 'main' into enhance-language-detection

.github/scripts/check-release.sh (vendored, 10 changes)

@@ -3,7 +3,7 @@
 # check_tag $current_tag $file_tag $file_name
 function check_tag {
     if [[ "$1" != "$2" ]]; then
-        echo "Error: the current tag does not match the version in $3: found $2 - expected $1"
+        echo "Error: the current tag does not match the version in Cargo.toml: found $2 - expected $1"
         ret=1
     fi
 }
@@ -11,12 +11,8 @@ function check_tag {
 ret=0
 current_tag=${GITHUB_REF#'refs/tags/v'}
 
-toml_files='*/Cargo.toml'
-for toml_file in $toml_files;
-do
-    file_tag="$(grep '^version = ' $toml_file | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
-    check_tag $current_tag $file_tag $toml_file
-done
+file_tag="$(grep '^version = ' Cargo.toml | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')"
+check_tag $current_tag $file_tag
 
 lock_file='Cargo.lock'
 lock_tag=$(grep -A 1 'name = "meilisearch-auth"' $lock_file | grep version | cut -d '=' -f 2 | tr -d '"' | tr -d ' ')
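
With versioning centralized in the root manifest (see the Cargo.toml hunk near the end of this commit), the script now checks a single Cargo.toml instead of looping over every crate's manifest. Since it reads the tag from GITHUB_REF, it can be exercised locally; a minimal sketch, assuming a POSIX shell at the repository root and an illustrative tag:

    GITHUB_REF='refs/tags/v1.0.0' bash .github/scripts/check-release.sh
    # Prints an error line for any mismatch between the tag and the versions
    # recorded in Cargo.toml / Cargo.lock; the final exit status depends on
    # script lines outside these hunks.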

.github/uffizzi/Dockerfile (vendored, new file, 48 lines)

@@ -0,0 +1,48 @@
+# Compile
+FROM rust:alpine3.16 AS compiler
+
+RUN apk add -q --update-cache --no-cache build-base openssl-dev
+
+WORKDIR /meilisearch
+
+ARG COMMIT_SHA
+ARG COMMIT_DATE
+ARG GIT_TAG
+ENV COMMIT_SHA=${COMMIT_SHA} COMMIT_DATE=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
+ENV RUSTFLAGS="-C target-feature=-crt-static"
+
+COPY . .
+RUN set -eux; \
+    apkArch="$(apk --print-arch)"; \
+    if [ "$apkArch" = "aarch64" ]; then \
+        export JEMALLOC_SYS_WITH_LG_PAGE=16; \
+    fi && \
+    cargo build --release
+
+# Run
+FROM uffizzi/ttyd:alpine
+
+ENV MEILI_HTTP_ADDR 0.0.0.0:7700
+ENV MEILI_SERVER_PROVIDER docker
+ENV MEILI_NO_ANALYTICS true
+
+RUN apk update --quiet \
+    && apk add -q --no-cache libgcc tini curl
+
+# add meilisearch to the `/bin` so you can run it from anywhere and it's easy
+# to find.
+COPY --from=compiler /meilisearch/target/release/meilisearch /bin/meilisearch
+# To stay compatible with the older version of the container (pre v0.27.0) we're
+# going to symlink the meilisearch binary in the path to `/meilisearch`
+RUN ln -s /bin/meilisearch /meilisearch
+
+# This directory should hold all the data related to meilisearch so we're going
+# to move our PWD in there.
+# We don't want to put the meilisearch binary
+WORKDIR /meili_data
+
+
+EXPOSE 7700/tcp
+
+ENTRYPOINT ["tini", "--"]
+CMD ["ttyd", "/bin/zsh"]
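
A quick way to try the preview image locally; a sketch under assumptions (the local tag name is invented, and 7681 is ttyd's default port, which the compose file below also maps):

    docker build -f .github/uffizzi/Dockerfile -t meilisearch-preview:local .
    docker run --rm -p 7681:7681 -p 7700:7700 meilisearch-preview:local
    # open http://localhost:7681 for the web terminal, then run `meilisearch` inside it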

.github/uffizzi/docker-compose.uffizzi.yml (vendored, new file, 26 lines)

@@ -0,0 +1,26 @@
+version: "3"
+
+x-uffizzi:
+  ingress:
+    service: nginx
+    port: 8081
+
+services:
+  meilisearch:
+    image: "${MEILISEARCH_IMAGE}"
+    restart: unless-stopped
+    ports:
+      - "7681:7681"
+      - "7700:7700"
+    deploy:
+      resources:
+        limits:
+          memory: 500M
+
+  nginx:
+    image: nginx:alpine
+    restart: unless-stopped
+    ports:
+      - "8081:8081"
+    volumes:
+      - ./.github/uffizzi/nginx:/etc/nginx
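
The ${MEILISEARCH_IMAGE} placeholder is substituted with envsubst in the build workflow further down; a rough local equivalent, assuming the image tag from the sketch above:

    export MEILISEARCH_IMAGE=meilisearch-preview:local
    envsubst < .github/uffizzi/docker-compose.uffizzi.yml > docker-compose.rendered.yml
    docker compose -f docker-compose.rendered.yml up -d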

.github/uffizzi/nginx/nginx.conf (vendored, new file, 28 lines)

@@ -0,0 +1,28 @@
+
+events {
+  worker_connections 4096; ## Default: 1024
+}
+
+http {
+  map $http_upgrade $connection_upgrade {
+    default upgrade;
+    '' close;
+  }
+
+  server {
+    listen 8081;
+
+    location / {
+      proxy_pass http://localhost:7681;
+      proxy_http_version 1.1;
+      proxy_set_header Upgrade $http_upgrade;
+      proxy_set_header Connection $connection_upgrade;
+    }
+
+    location /meilisearch/ {
+      # rewrite /meilisearch/(.*) /$1 break;
+      proxy_pass http://localhost:7700/;
+    }
+  }
+}
+
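
The map block and the Upgrade/Connection headers give the ttyd terminal WebSocket pass-through, while /meilisearch/ is a plain reverse proxy onto port 7700. A hedged smoke test against a running preview (localhost and the /health route are assumptions about the deployed instance):

    curl -s http://localhost:8081/meilisearch/health                  # proxied to meilisearch on 7700
    curl -s -o /dev/null -w '%{http_code}\n' http://localhost:8081/   # ttyd UI behind the proxy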

.github/workflows/create-issue-dependencies.yml

@@ -1,6 +1,7 @@
 name: Create issue to upgrade dependencies
 on:
   schedule:
+    # Run the first of the month, every 3 month
     - cron: '0 0 1 */3 *'
   workflow_dispatch:
 
@@ -15,9 +16,13 @@ jobs:
           github_token: ${{ secrets.MEILI_BOT_GH_PAT }}
           title: Upgrade dependencies
           body: |
-            We need to update the dependencies of the Meilisearch repository, and, if possible, the dependencies of all the engine-team repositories that Meilisearch depends on (charabia, heed...).
+            This issue is about updating Meilisearch dependencies:
+            - [ ] Cargo toml dependencies of Meilisearch; but also the main engine-team repositories that Meilisearch depends on (charabia, heed...)
+            - [ ] If new Rust versions have been released, update the Rust version in the Clippy job of this [GitHub Action file](./.github/workflows/rust.yml)
 
-            ⚠️ This issue should only be done at the beginning of the sprint!
+            ⚠️ To avoid last minute bugs, this issue should only be done at the beginning of the sprint!
+
+            The GitHub action dependencies are managed by [Dependabot](./.github/dependabot.yml)
           labels: |
             dependencies
             maintenance

.github/workflows/publish-docker-images.yml (vendored, 3 changes)

@@ -84,7 +84,7 @@ jobs:
             type=raw,value=latest,enable=${{ steps.check-tag-format.outputs.stable == 'true' && steps.check-tag-format.outputs.latest == 'true' }}
 
       - name: Build and push
-        uses: docker/build-push-action@v3
+        uses: docker/build-push-action@v4
         with:
           push: true
           platforms: linux/amd64,linux/arm64
@@ -92,6 +92,7 @@ jobs:
           build-args: |
             COMMIT_SHA=${{ github.sha }}
             COMMIT_DATE=${{ steps.build-metadata.outputs.date }}
+            GIT_TAG=${{ github.ref_name }}
 
       # /!\ Don't touch this without checking with Cloud team
       - name: Send CI information to Cloud team
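
For a stable tag such as v1.0.0, the invocation the action assembles is roughly the following; this is an illustration only, not the action's exact command (angle-bracket values are placeholders):

    docker buildx build --platform linux/amd64,linux/arm64 \
      --build-arg COMMIT_SHA=<sha> \
      --build-arg COMMIT_DATE=<date> \
      --build-arg GIT_TAG=v1.0.0 \
      --push -t getmeili/meilisearch:v1.0.0 -t getmeili/meilisearch:latest .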

.github/workflows/rust.yml (vendored, 18 changes)

@@ -2,6 +2,9 @@ name: Rust
 
 on:
   workflow_dispatch:
+  schedule:
+    # Everyday at 5:00am
+    - cron: '0 5 * * *'
   pull_request:
   push:
     # trying and staging branches are for Bors config
@@ -27,10 +30,18 @@ jobs:
        run: |
          apt-get update && apt-get install -y curl
          apt-get install build-essential -y
-     - uses: actions-rs/toolchain@v1
+     - name: Run test with Rust stable
+       if: github.event_name != 'schedule'
+       uses: actions-rs/toolchain@v1
        with:
          toolchain: stable
          override: true
+     - name: Run test with Rust nightly
+       if: github.event_name == 'schedule'
+       uses: actions-rs/toolchain@v1
+       with:
+         toolchain: nightly
+         override: true
      # Disable cache due to disk space issues with Windows workers in CI
      # - name: Cache dependencies
      #   uses: Swatinem/rust-cache@v2.2.0
@@ -100,7 +111,7 @@ jobs:
     - uses: actions-rs/toolchain@v1
       with:
         profile: minimal
-        toolchain: stable
+        toolchain: 1.67.0
        override: true
        components: clippy
     # - name: Cache dependencies
@@ -109,7 +120,8 @@ jobs:
       uses: actions-rs/cargo@v1
       with:
         command: clippy
-        args: --all-targets -- --deny warnings
+        # allow unlined_format_args https://github.com/rust-lang/rust-clippy/issues/10087
+        args: --all-targets -- --deny warnings --allow clippy::uninlined_format_args
 
   fmt:
     name: Run Rustfmt
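
To reproduce the two toolchain paths locally; a sketch assuming rustup is installed:

    # scheduled (nightly) path
    rustup toolchain install nightly
    cargo +nightly test
    # clippy job, now pinned to 1.67.0
    rustup toolchain install 1.67.0 --component clippy
    cargo +1.67.0 clippy --all-targets -- --deny warnings --allow clippy::uninlined_format_args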
.github/workflows/uffizzi-build.yml (vendored, new file, 100 lines)

@@ -0,0 +1,100 @@
+name: Uffizzi - Build PR Image
+on:
+  pull_request:
+    types: [opened,synchronize,reopened,closed]
+
+jobs:
+  build-meilisearch:
+    name: Build and push `meilisearch`
+    runs-on: ubuntu-latest
+    outputs:
+      tags: ${{ steps.meta.outputs.tags }}
+    if: ${{ github.event.action != 'closed' }}
+    steps:
+      - name: checkout
+        uses: actions/checkout@v3
+
+      - name: Set up QEMU
+        uses: docker/setup-qemu-action@v2
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v2
+
+      - name: Generate UUID image name
+        id: uuid
+        run: echo "UUID_TAG=$(uuidgen)" >> $GITHUB_ENV
+
+      - name: Docker metadata
+        id: meta
+        uses: docker/metadata-action@v3
+        with:
+          images: registry.uffizzi.com/${{ env.UUID_TAG }}
+          tags: |
+            type=raw,value=60d
+
+      - name: Build Image
+        uses: docker/build-push-action@v3
+        with:
+          context: ./
+          file: .github/uffizzi/Dockerfile
+          tags: ${{ steps.meta.outputs.tags }}
+          labels: ${{ steps.meta.outputs.labels }}
+          push: true
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+
+  render-compose-file:
+    name: Render Docker Compose File
+    # Pass output of this workflow to another triggered by `workflow_run` event.
+    runs-on: ubuntu-latest
+    needs:
+      - build-meilisearch
+    outputs:
+      compose-file-cache-key: ${{ env.COMPOSE_FILE_HASH }}
+    steps:
+      - name: Checkout git repo
+        uses: actions/checkout@v3
+      - name: Render Compose File
+        run: |
+          MEILISEARCH_IMAGE=$(echo ${{ needs.build-meilisearch.outputs.tags }})
+          export MEILISEARCH_IMAGE
+          # Render simple template from environment variables.
+          envsubst < .github/uffizzi/docker-compose.uffizzi.yml > docker-compose.rendered.yml
+          cat docker-compose.rendered.yml
+      - name: Upload Rendered Compose File as Artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: preview-spec
+          path: docker-compose.rendered.yml
+          retention-days: 2
+      - name: Serialize PR Event to File
+        run: |
+          cat << EOF > event.json
+          ${{ toJSON(github.event) }}
+
+          EOF
+      - name: Upload PR Event as Artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: preview-spec
+          path: event.json
+          retention-days: 2
+
+  delete-preview:
+    name: Call for Preview Deletion
+    runs-on: ubuntu-latest
+    if: ${{ github.event.action == 'closed' }}
+    steps:
+      # If this PR is closing, we will not render a compose file nor pass it to the next workflow.
+      - name: Serialize PR Event to File
+        run: |
+          cat << EOF > event.json
+          ${{ toJSON(github.event) }}
+
+          EOF
+      - name: Upload PR Event as Artifact
+        uses: actions/upload-artifact@v3
+        with:
+          name: preview-spec
+          path: event.json
+          retention-days: 2
.github/workflows/uffizzi-preview-deploy.yml (vendored, new file, 103 lines)

@@ -0,0 +1,103 @@
+name: Uffizzi - Deploy Preview
+
+on:
+  workflow_run:
+    workflows:
+      - "Uffizzi - Build PR Image"
+    types:
+      - completed
+
+jobs:
+  cache-compose-file:
+    name: Cache Compose File
+    runs-on: ubuntu-latest
+    if: ${{ github.event.workflow_run.conclusion == 'success' }}
+    outputs:
+      compose-file-cache-key: ${{ env.COMPOSE_FILE_HASH }}
+      pr-number: ${{ env.PR_NUMBER }}
+      expected-url: ${{ env.EXPECTED_URL }}
+    steps:
+      - name: 'Download artifacts'
+        # Fetch output (zip archive) from the workflow run that triggered this workflow.
+        uses: actions/github-script@v6
+        with:
+          script: |
+            let allArtifacts = await github.rest.actions.listWorkflowRunArtifacts({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              run_id: context.payload.workflow_run.id,
+            });
+            let matchArtifact = allArtifacts.data.artifacts.filter((artifact) => {
+              return artifact.name == "preview-spec"
+            })[0];
+            let download = await github.rest.actions.downloadArtifact({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              artifact_id: matchArtifact.id,
+              archive_format: 'zip',
+            });
+            let fs = require('fs');
+            fs.writeFileSync(`${process.env.GITHUB_WORKSPACE}/preview-spec.zip`, Buffer.from(download.data));
+
+      - name: 'Unzip artifact'
+        run: unzip preview-spec.zip
+
+      - name: Read Event into ENV
+        run: |
+          echo 'EVENT_JSON<<EOF' >> $GITHUB_ENV
+          cat event.json >> $GITHUB_ENV
+          echo 'EOF' >> $GITHUB_ENV
+
+      - name: Hash Rendered Compose File
+        id: hash
+        # If the previous workflow was triggered by a PR close event, we will not have a compose file artifact.
+        if: ${{ fromJSON(env.EVENT_JSON).action != 'closed' }}
+        run: echo "COMPOSE_FILE_HASH=$(md5sum docker-compose.rendered.yml | awk '{ print $1 }')" >> $GITHUB_ENV
+
+      - name: Cache Rendered Compose File
+        if: ${{ fromJSON(env.EVENT_JSON).action != 'closed' }}
+        uses: actions/cache@v3
+        with:
+          path: docker-compose.rendered.yml
+          key: ${{ env.COMPOSE_FILE_HASH }}
+
+      - name: Read PR Number From Event Object
+        id: pr
+        run: echo "PR_NUMBER=${{ fromJSON(env.EVENT_JSON).number }}" >> $GITHUB_ENV
+
+      - name: DEBUG - Print Job Outputs
+        if: ${{ runner.debug }}
+        run: |
+          echo "PR number: ${{ env.PR_NUMBER }}"
+          echo "Compose file hash: ${{ env.COMPOSE_FILE_HASH }}"
+          cat event.json
+
+      - name: Add expected URL env var
+        if: ${{ runner.debug }}
+        run: |
+          REPO=$(echo ${{ github.repository }} | sed 's/\./+/g')
+          echo "EXPECTED_URL=${{ inputs.server }}/github.com/$REPO/pull/${{ env.PR_NUMBER }}" >> $GITHUB_ENV
+
+  deploy-uffizzi-preview:
+    name: Use Remote Workflow to Preview on Uffizzi
+    needs:
+      - cache-compose-file
+    uses: UffizziCloud/preview-action/.github/workflows/reusable.yaml@desc
+    with:
+      # If this workflow was triggered by a PR close event, cache-key will be an empty string
+      # and this reusable workflow will delete the preview deployment.
+      compose-file-cache-key: ${{ needs.cache-compose-file.outputs.compose-file-cache-key }}
+      compose-file-cache-path: docker-compose.rendered.yml
+      server: https://app.uffizzi.com
+      pr-number: ${{ needs.cache-compose-file.outputs.pr-number }}
+      description: |
+        The meilisearch preview environment contains a web terminal from where you can run the
+        `meilisearch` command. You should be able to access this instance of meilisearch running in
+        the preview from the link Meilisearch Endpoint link given below.
+
+        Web Terminal Endpoint : ${{ needs.cache-compose-file.outputs.expected-url }}
+        Meilisearch Endpoint : ${{ needs.cache-compose-file.outputs.expected-url }}/meilisearch
+    permissions:
+      contents: read
+      pull-requests: write
+      id-token: write

.github/workflows/update-cargo-toml-version.yml

@@ -29,7 +29,7 @@ jobs:
         run: |
           raw_new_version=$(echo $NEW_VERSION | cut -d 'v' -f 2)
           new_string="version = \"$raw_new_version\""
-          sd '^version = "\d+.\d+.\w+"$' "$new_string" */Cargo.toml
+          sd '^version = "\d+.\d+.\w+"$' "$new_string" Cargo.toml
       - name: Build Meilisearch to update Cargo.lock
         run: cargo build
       - name: Commit and push the changes to the ${{ env.NEW_BRANCH }} branch
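
What the narrowed command rewrites; a sketch assuming sd (https://github.com/chmln/sd) is installed and an illustrative NEW_VERSION:

    NEW_VERSION=v1.0.0
    raw_new_version=$(echo $NEW_VERSION | cut -d 'v' -f 2)
    sd '^version = "\d+.\d+.\w+"$' "version = \"$raw_new_version\"" Cargo.toml
    grep '^version = ' Cargo.toml   # expect: version = "1.0.0"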
|
2
.gitignore
vendored
2
.gitignore
vendored
@ -1,3 +1,5 @@
|
|||||||
|
.idea/
|
||||||
|
.vscode/
|
||||||
/target
|
/target
|
||||||
**/*.csv
|
**/*.csv
|
||||||
**/*.json_lines
|
**/*.json_lines
|
||||||
|

CONTRIBUTING.md

@@ -52,6 +52,23 @@ cargo test
 
 This command will be triggered to each PR as a requirement for merging it.
 
+#### Snapshot-based tests
+
+We are using [insta](https://insta.rs) to perform snapshot-based testing.
+We recommend using the insta tooling (such as `cargo-insta`) to update the snapshots if they change following a PR.
+
+New tests should use insta where possible rather than manual `assert` statements.
+
+Furthermore, we provide some macros on top of insta, notably a way to use snapshot hashes instead of inline snapshots, saving a lot of space in the repository.
+
+To effectively debug snapshot-based hashes, we recommend you export the `MEILI_TEST_FULL_SNAPS` environment variable so that snapshot are fully created locally:
+
+```
+export MEILI_TEST_FULL_SNAPS=true # add this to your .bashrc, .zshrc, ...
+```
+
+#### Test troubleshooting
+
 If you get a "Too many open files" error you might want to increase the open file limit using this command:
 
 ```bash
@@ -99,6 +116,34 @@ _[Read more about this](https://github.com/meilisearch/integration-guides/blob/m
 
 The full Meilisearch release process is described in [this guide](https://github.com/meilisearch/engine-team/blob/main/resources/meilisearch-release.md). Please follow it carefully before doing any release.
 
+### How to publish a prototype
+
+Depending on the developed feature, you might need to provide a prototyped version of Meilisearch to make it easier to test by the users.
+
+The prototype name must follow this convention: `prototype-X-Y` where
+- `X` is the feature name formatted in `kebab-case`. It should not end with a single number.
+- `Y` is the version of the prototype, starting from `0`.
+
+✅ Example: `prototype-auto-resize-0`. </br>
+❌ Bad example: `auto-resize-0`: lacks the `prototype` prefix. </br>
+❌ Bad example: `prototype-auto-resize`: lacks the version suffix. </br>
+❌ Bad example: `prototype-auto-resize-0-0`: feature name ends with a single number.
+
+Steps to create a prototype:
+
+1. In your terminal, go to the last commit of your branch (the one you want to provide as a prototype).
+2. Create a tag following the convention: `git tag prototype-X-Y`
+3. Run Meilisearch and check that its launch summary features a line: `Prototype: prototype-X-Y` (you may need to switch branches and back after tagging for this to work).
+3. Push the tag: `git push origin prototype-X-Y`
+4. Check the [Docker CI](https://github.com/meilisearch/meilisearch/actions/workflows/publish-docker-images.yml) is now running.
+
+🐳 Once the CI has finished to run (~1h30), a Docker image named `prototype-X-Y` will be available on [DockerHub](https://hub.docker.com/repository/docker/getmeili/meilisearch/general). People can use it with the following command: `docker run -p 7700:7700 -v $(pwd)/meili_data:/meili_data getmeili/meilisearch:prototype-X-Y`. <br>
+More information about [how to run Meilisearch with Docker](https://docs.meilisearch.com/learn/cookbooks/docker.html#download-meilisearch-with-docker).
+
+⚙️ However, no binaries will be created. If the users do not use Docker, they can go to the `prototype-X-Y` tag in the Meilisearch repository and compile from the source code.
+
+⚠️ When sharing a prototype with users, remind them to not use it in production. Prototypes are solely for test purposes.
+
 ### Release assets
 
 For each release, the following assets are created:
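
As a companion to the snapshot-testing section added above, the usual cargo-insta loop, assuming the tool is installed, looks like this:

    cargo install cargo-insta   # one-time setup
    cargo insta test            # run tests and collect new/changed snapshots
    cargo insta review          # interactively accept or reject snapshot diffs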

Cargo.lock (generated, 374 changes)

@@ -36,9 +36,9 @@ dependencies = [
 
 [[package]]
 name = "actix-http"
-version = "3.2.2"
+version = "3.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0c83abf9903e1f0ad9973cc4f7b9767fd5a03a583f51a5b7a339e07987cd2724"
+checksum = "0070905b2c4a98d184c4e81025253cb192aa8a73827553f38e9410801ceb35bb"
 dependencies = [
  "actix-codec",
  "actix-rt",
@@ -46,7 +46,7 @@ dependencies = [
  "actix-tls",
  "actix-utils",
  "ahash",
- "base64 0.13.1",
+ "base64 0.21.0",
  "bitflags",
  "brotli",
  "bytes",
@@ -68,7 +68,10 @@ dependencies = [
  "rand",
  "sha1",
  "smallvec",
+ "tokio",
+ "tokio-util",
  "tracing",
+ "zstd 0.12.3+zstd.1.5.2",
 ]
 
 [[package]]
@@ -77,8 +80,8 @@ version = "0.2.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "465a6172cf69b960917811022d8f29bc0b7fa1398bc4f78b3c466673db1213b6"
 dependencies = [
- "quote 1.0.23",
- "syn 1.0.107",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -164,9 +167,9 @@ dependencies = [
 
 [[package]]
 name = "actix-web"
-version = "4.2.1"
+version = "4.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d48f7b6534e06c7bfc72ee91db7917d4af6afe23e7d223b51e68fffbb21e96b9"
+checksum = "464e0fddc668ede5f26ec1f9557a8d44eda948732f40c6b0ad79126930eb775f"
 dependencies = [
  "actix-codec",
  "actix-http",
@@ -211,9 +214,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1fa9362663c8643d67b2d5eafba49e4cb2c8a053a29ed00a0bea121f17c76b13"
 dependencies = [
  "actix-router",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -330,9 +333,9 @@ version = "0.3.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "10f203db73a71dfa2fb6dd22763990fa26f3d2625a6da2da900d23b87d26be27"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -341,9 +344,9 @@ version = "0.1.61"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "705339e0e4a9690e2908d2b3d049d85682cf19fbd5782494498fbf7003a6a282"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -448,15 +451,6 @@ dependencies = [
  "serde",
 ]
 
-[[package]]
-name = "bit-set"
-version = "0.5.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "0700ddab506f33b20a03b13996eccd309a48e5ff77d0d95926aa0210fb4e95f1"
-dependencies = [
- "bit-vec",
-]
-
 [[package]]
 name = "bit-vec"
 version = "0.6.3"
@@ -560,9 +554,9 @@ version = "1.3.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5fe233b960f12f8007e3db2d136e3cb1c291bfd7396e384ee76025fc1a3932b4"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -609,9 +603,9 @@ dependencies = [
 
 [[package]]
 name = "cargo_toml"
-version = "0.13.3"
+version = "0.14.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "497049e9477329f8f6a559972ee42e117487d01d1e8c2cc9f836ea6fa23a9e1a"
+checksum = "2bfbc36312494041e2cdd5f06697b7e89d4b76f42773a0b5556ac290ff22acc2"
 dependencies = [
  "serde",
  "toml",
@@ -753,9 +747,9 @@ checksum = "0177313f9f02afc995627906bbd8967e2be069f5261954222dac78290c2b9014"
 dependencies = [
  "heck",
  "proc-macro-error",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -782,9 +776,9 @@ version = "0.1.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1df715824eb382e34b7afb7463b0247bf41538aeba731fba05241ecdb5dc3747"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -1026,10 +1020,10 @@ checksum = "a784d2ccaf7c98501746bf0be29b2022ba41fd62a2e622af997a03e9f972859f"
 dependencies = [
  "fnv",
  "ident_case",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
+ "proc-macro2",
+ "quote",
  "strsim",
- "syn 1.0.107",
+ "syn",
 ]
 
 [[package]]
@@ -1039,8 +1033,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "7618812407e9402654622dd402b0a89dff9ba93badd6540781526117b92aab7e"
 dependencies = [
  "darling_core",
- "quote 1.0.23",
- "syn 1.0.107",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -1059,9 +1053,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1f91d4cfa921f1c05904dc3c57b4a32c38aed3340cce209f3a6fd1478babafc4"
 dependencies = [
  "darling",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -1071,7 +1065,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8f0314b72bed045f3a68671b3c86328386762c93f82d98c65c3cb5e5f573dd68"
 dependencies = [
  "derive_builder_core",
- "syn 1.0.107",
+ "syn",
 ]
 
 [[package]]
@@ -1081,33 +1075,39 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "4fb810d30a7c1953f91334de7244731fc3f3c10d7fe163338a35b9f640960321"
 dependencies = [
  "convert_case 0.4.0",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
+ "proc-macro2",
+ "quote",
  "rustc_version",
- "syn 1.0.107",
+ "syn",
 ]
 
 [[package]]
 name = "deserr"
-version = "0.1.4"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "86290491a2b5c21a1a5083da8dae831006761258fabd5617309c3eebc5f89468"
+checksum = "6eee2844f21cf7fb5693aae1fb8f1658127acfdb2fc072167d68a9152584ae64"
 dependencies = [
+ "actix-http",
+ "actix-utils",
+ "actix-web",
  "deserr-internal",
+ "futures",
  "serde-cs",
  "serde_json",
+ "serde_urlencoded",
+ "strsim",
 ]
 
 [[package]]
 name = "deserr-internal"
-version = "0.1.4"
+version = "0.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7131de1c27581bc376a22166c9f570be91b76cb096be2f6aecf224c27bf7c49a"
+checksum = "c27246f8ca9eeba9dd70d614b664dc43b529251ed7bd9e633131010d340da4b9"
 dependencies = [
  "convert_case 0.5.0",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -1285,9 +1285,9 @@ version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "828de45d0ca18782232dfb8f3ea9cc428e8ced380eb26a520baaacfc70de39ce"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -1363,9 +1363,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "35c9bb4a2c13ffb3a93a39902aaf4e7190a1706a4779b6db0449aee433d26c4a"
 dependencies = [
  "darling",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
  "uuid 0.8.2",
 ]
 
@@ -1494,9 +1494,9 @@ version = "0.3.25"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "bdfb8ce053d86b91919aad980c220b1fb8401a9394410e1c289ed7e66b61835d"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -1568,9 +1568,9 @@ version = "0.12.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "30ce01e8bbb3e7e0758dcf907fe799f5998a54368963f766ae94b84624ba60c8"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -1625,9 +1625,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "e45727250e75cc04ff2846a66397da8ef2b3db8e40e0cef4df67950a07621eb9"
 dependencies = [
  "proc-macro-error",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -1730,8 +1730,8 @@ checksum = "2540771e65fc8cb83cd6e8a237f70c319bd5c29f78ed1084ba5d50eeac86f7f9"
 
 [[package]]
 name = "heed"
-version = "0.12.4"
-source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb"
+version = "0.12.5"
+source = "git+https://github.com/meilisearch/heed?tag=v0.12.5#4158a6c484752afaaf9e2530a6ee0e7ab0f24ee8"
 dependencies = [
  "byteorder",
  "heed-traits",
@@ -1748,12 +1748,12 @@ dependencies = [
 [[package]]
 name = "heed-traits"
 version = "0.7.0"
-source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb"
+source = "git+https://github.com/meilisearch/heed?tag=v0.12.5#4158a6c484752afaaf9e2530a6ee0e7ab0f24ee8"
 
 [[package]]
 name = "heed-types"
 version = "0.7.2"
-source = "git+https://github.com/meilisearch/heed?tag=v0.12.4#7a4542bc72dd60ef0f508c89900ea292218223fb"
+source = "git+https://github.com/meilisearch/heed?tag=v0.12.5#4158a6c484752afaaf9e2530a6ee0e7ab0f24ee8"
 dependencies = [
  "bincode",
  "heed-traits",
@@ -2413,9 +2413,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "10a9062912d7952c5588cc474795e0b9ee008e7e6781127945b85413d4b99d81"
 dependencies = [
  "log",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -2425,9 +2425,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f08150cf2bab1fc47c2196f4f41173a27fcd0f684165e5458c0046b53a472e2f"
 dependencies = [
  "once_cell",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -2465,6 +2465,7 @@ dependencies = [
  "assert-json-diff",
  "async-stream",
  "async-trait",
+ "atty",
  "brotli",
  "bstr 1.1.0",
  "byte-unit",
@@ -2513,7 +2514,6 @@ dependencies = [
  "rustls-pemfile",
  "segment",
  "serde",
- "serde-cs",
  "serde_json",
  "serde_urlencoded",
  "sha-1",
@@ -2525,6 +2525,7 @@ dependencies = [
  "tar",
  "temp-env",
  "tempfile",
+ "termcolor",
  "thiserror",
  "time",
  "tokio",
@@ -2545,6 +2546,7 @@ dependencies = [
  "base64 0.13.1",
  "enum-iterator",
  "hmac",
+ "maplit",
  "meilisearch-types",
  "rand",
  "roaring",
@@ -2574,10 +2576,9 @@ dependencies = [
  "meili-snap",
  "memmap2",
  "milli",
- "proptest",
- "proptest-derive",
  "roaring",
  "serde",
+ "serde-cs",
  "serde_json",
  "tar",
  "tempfile",
@@ -3001,9 +3002,9 @@ checksum = "46b53634d8c8196302953c74d5352f33d0c512a9499bd2ce468fc9f4128fa27c"
 dependencies = [
  "pest",
  "pest_meta",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -3129,9 +3130,9 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "da25490ff9892aab3fcf7c36f08cfb902dd3e71ca0f9f9517bea02a73a5ce38c"
 dependencies = [
  "proc-macro-error-attr",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
  "version_check",
 ]
 
@@ -3141,20 +3142,11 @@ version = "1.0.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "a1be40180e52ecc98ad80b184934baf3d0d29f979574e439af5a55274b35f869"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
+ "proc-macro2",
+ "quote",
  "version_check",
 ]
 
-[[package]]
-name = "proc-macro2"
-version = "0.4.30"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cf3d2011ab5c909338f7887f4fc896d35932e29146c12c8d01da6b22a80ba759"
-dependencies = [
- "unicode-xid 0.1.0",
-]
-
 [[package]]
 name = "proc-macro2"
 version = "1.0.49"
@@ -3194,71 +3186,19 @@ dependencies = [
  "thiserror",
 ]
 
-[[package]]
-name = "proptest"
-version = "1.0.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1e0d9cc07f18492d879586c92b485def06bc850da3118075cd45d50e9c95b0e5"
-dependencies = [
- "bit-set",
- "bitflags",
- "byteorder",
- "lazy_static",
- "num-traits",
- "quick-error 2.0.1",
- "rand",
- "rand_chacha",
- "rand_xorshift",
- "regex-syntax",
- "rusty-fork",
- "tempfile",
-]
-
-[[package]]
-name = "proptest-derive"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "90b46295382dc76166cb7cf2bb4a97952464e4b7ed5a43e6cd34e1fec3349ddc"
-dependencies = [
- "proc-macro2 0.4.30",
- "quote 0.6.13",
- "syn 0.15.44",
-]
-
 [[package]]
 name = "protobuf"
 version = "2.28.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "106dd99e98437432fed6519dedecfade6a06a73bb7b2a1e019fdd2bee5778d94"
 
-[[package]]
-name = "quick-error"
-version = "1.2.3"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a1d01941d82fa2ab50be1e79e6714289dd7cde78eba4c074bc5a4374f650dfe0"
-
-[[package]]
-name = "quick-error"
-version = "2.0.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a993555f31e5a609f617c12db6250dedcac1b0a85076912c436e6fc9b2c8e6a3"
-
-[[package]]
-name = "quote"
-version = "0.6.13"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6ce23b6b870e8f94f81fb0a363d65d86675884b34a09043c81e5562f11c1f8e1"
-dependencies = [
- "proc-macro2 0.4.30",
-]
-
 [[package]]
 name = "quote"
 version = "1.0.23"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8856d8364d252a14d474036ea1358d63c9e6965c8e5c1885c18f73d70bff9c7b"
 dependencies = [
- "proc-macro2 1.0.49",
+ "proc-macro2",
 ]
 
 [[package]]
@@ -3291,15 +3231,6 @@ dependencies = [
  "getrandom",
 ]
 
-[[package]]
-name = "rand_xorshift"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d25bf25ec5ae4a3f1b92f929810509a2f53d7dca2f50b794ff57e3face536c8f"
-dependencies = [
- "rand_core",
-]
-
 [[package]]
 name = "rayon"
 version = "1.6.1"
@@ -3514,18 +3445,6 @@ version = "1.0.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "5583e89e108996506031660fe09baa5011b9dd0341b89029313006d1fb508d70"
 
-[[package]]
-name = "rusty-fork"
-version = "0.3.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "cb3dcc6e454c328bb824492db107ab7c0ae8fcffe4ad210136ef014458c1bc4f"
-dependencies = [
- "fnv",
- "quick-error 1.2.3",
- "tempfile",
- "wait-timeout",
-]
-
 [[package]]
 name = "ryu"
 version = "1.0.12"
@@ -3601,9 +3520,9 @@ version = "1.0.152"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "af487d118eecd09402d70a5d72551860e788df87b464af30e5ea6a38c75c541e"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -3798,25 +3717,14 @@ version = "2.4.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "6bdef32e8150c2a081110b42772ffe7d7c9032b606bc226c8260fd97e0976601"
 
-[[package]]
-name = "syn"
-version = "0.15.44"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9ca4b3b69a77cbe1ffc9e198781b7acb0c7365a883670e8f1c1bc66fba79a5c5"
-dependencies = [
- "proc-macro2 0.4.30",
- "quote 0.6.13",
- "unicode-xid 0.1.0",
-]
-
 [[package]]
 name = "syn"
 version = "1.0.107"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1f4064b5b16e03ae50984a5a8ed5d4f8803e6bc1fd170a3cda91a1be4b18e3f5"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
+ "proc-macro2",
+ "quote",
  "unicode-ident",
 ]
 
@@ -3835,10 +3743,10 @@ version = "0.12.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
- "unicode-xid 0.2.4",
+ "proc-macro2",
+ "quote",
+ "syn",
+ "unicode-xid",
 ]
 
 [[package]]
@@ -3920,9 +3828,9 @@ version = "1.0.38"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1fb327af4685e4d03fa8cbcf1716380da910eeb2bb8be417e7f9fd3fb164f36f"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -3979,9 +3887,9 @@ checksum = "cda74da7e1a664f795bb1f8a87ec406fb89a02522cf6e50620d016add6dbbf5c"
 
 [[package]]
 name = "tokio"
-version = "1.24.1"
+version = "1.24.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "1d9f76183f91ecfb55e1d7d5602bd1d979e38a3a522fe900241cf195624d67ae"
+checksum = "597a12a59981d9e3c38d216785b0c37399f6e415e8d0712047620f189371b0bb"
 dependencies = [
  "autocfg",
  "bytes",
@@ -4003,9 +3911,9 @@ version = "1.8.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d266c00fde287f55d3f1c3e96c500c362a2b8c695076ec180f27918820bc6df8"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
 ]
 
 [[package]]
@@ -4146,12 +4054,6 @@ version = "0.1.10"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b"
 
-[[package]]
-name = "unicode-xid"
-version = "0.1.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "fc72304796d0818e357ead4e000d19c9c174ab23dc11093ac919054d20a6a7fc"
-
 [[package]]
 name = "unicode-xid"
 version = "0.2.4"
@@ -4234,15 +4136,6 @@ version = "0.9.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "49874b5167b65d7193b8aba1567f5c7d93d001cafc34600cee003eda787e483f"
 
-[[package]]
-name = "wait-timeout"
-version = "0.2.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "9f200f5b12eb75f8c1ed65abd4b2db8a6e1b138a20de009dacee265a2498f3f6"
-dependencies = [
- "libc",
-]
-
 [[package]]
 name = "walkdir"
 version = "2.3.2"
@@ -4300,9 +4193,9 @@ dependencies = [
  "bumpalo",
  "log",
  "once_cell",
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
  "wasm-bindgen-shared",
 ]
 
@@ -4324,7 +4217,7 @@ version = "0.2.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "052be0f94026e6cbc75cdefc9bae13fd6052cdcaf532fa6c45e7ae33a1e6c810"
 dependencies = [
- "quote 1.0.23",
+ "quote",
  "wasm-bindgen-macro-support",
 ]
 
@@ -4334,9 +4227,9 @@ version = "0.2.83"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "07bc0c051dc5f23e307b13285f9d75df86bfdf816c5721e573dec1f9b8aa193c"
 dependencies = [
- "proc-macro2 1.0.49",
- "quote 1.0.23",
- "syn 1.0.107",
+ "proc-macro2",
+ "quote",
+ "syn",
  "wasm-bindgen-backend",
  "wasm-bindgen-shared",
 ]
@@ -4533,8 +4426,8 @@ version = "0.2.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d498dbd1fd7beb83c86709ae1c33ca50942889473473d287d56ce4770a18edfb"
 dependencies = [
- "proc-macro2 1.0.49",
- "syn 1.0.107",
+ "proc-macro2",
+ "syn",
  "synstructure",
 ]
 
@@ -4555,7 +4448,7 @@ dependencies = [
  "pbkdf2",
  "sha1",
  "time",
- "zstd",
+ "zstd 0.11.2+zstd.1.5.2",
 ]
 
 [[package]]
@@ -4564,7 +4457,16 @@ version = "0.11.2+zstd.1.5.2"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "20cc960326ece64f010d2d2107537f26dc589a6573a316bd5b1dba685fa5fde4"
 dependencies = [
- "zstd-safe",
+ "zstd-safe 5.0.2+zstd.1.5.2",
+]
+
+[[package]]
+name = "zstd"
+version = "0.12.3+zstd.1.5.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "76eea132fb024e0e13fd9c2f5d5d595d8a967aa72382ac2f9d39fcc95afd0806"
+dependencies = [
+ "zstd-safe 6.0.4+zstd.1.5.4",
 ]
 
 [[package]]
@@ -4578,10 +4480,20 @@ dependencies = [
 ]
 
 [[package]]
-name = "zstd-sys"
-version = "2.0.5+zstd.1.5.2"
+name = "zstd-safe"
+version = "6.0.4+zstd.1.5.4"
 source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "edc50ffce891ad571e9f9afe5039c4837bede781ac4bb13052ed7ae695518596"
+checksum = "7afb4b54b8910cf5447638cb54bf4e8a65cbedd783af98b98c62ffe91f185543"
+dependencies = [
+ "libc",
+ "zstd-sys",
+]
+
+[[package]]
+name = "zstd-sys"
+version = "2.0.7+zstd.1.5.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "94509c3ba2fe55294d752b79842c530ccfab760192521df74a081a78d2b3c7f5"
 dependencies = [
  "cc",
  "libc",
@@ -16,6 +16,15 @@ members = [
   "benchmarks"
 ]
 
+[workspace.package]
+version = "1.0.0"
+authors = ["Quentin de Quelen <quentin@dequelen.me>", "Clément Renault <clement@meilisearch.com>"]
+description = "Meilisearch HTTP server"
+homepage = "https://meilisearch.com"
+readme = "README.md"
+edition = "2021"
+license = "MIT"
+
 [profile.release]
 codegen-units = 1
 
@@ -7,7 +7,8 @@ WORKDIR /meilisearch
 
 ARG COMMIT_SHA
 ARG COMMIT_DATE
-ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE}
+ARG GIT_TAG
+ENV VERGEN_GIT_SHA=${COMMIT_SHA} VERGEN_GIT_COMMIT_TIMESTAMP=${COMMIT_DATE} VERGEN_GIT_SEMVER_LIGHTWEIGHT=${GIT_TAG}
 ENV RUSTFLAGS="-C target-feature=-crt-static"
 
 COPY . .
12 README.md

@@ -101,6 +101,14 @@ Meilisearch is a search engine created by [Meili](https://www.welcometothejungle
 
 Thank you for your support!
 
-## 📦 Internal crates and their versioning
+## 👩‍💻 Contributing
 
-The crates in this repository are not currently available on crates.io and do not follow [semver conventions](https://semver.org). However, the Meilisearch search engine is well versioned, and releases follow the semver conventions.
+Meilisearch is, and will always be, open-source! If you want to contribute to the project, please take a look at [our contribution guidelines](CONTRIBUTING.md).
+
+## 📦 Versioning
+
+Meilisearch releases and their associated binaries are available [in this GitHub page](https://github.com/meilisearch/meilisearch/releases).
+
+The binaries are versioned following [SemVer conventions](https://semver.org/). To know more, read our [versioning policy](https://github.com/meilisearch/engine-team/blob/main/resources/versioning-policy.md).
+
+Differently from the binaries, crates in this repository are not currently available on [crates.io](https://crates.io/) and do not follow [SemVer conventions](https://semver.org).
@@ -1,9 +1,15 @@
 [package]
 name = "benchmarks"
-version = "1.0.0"
-edition = "2018"
 publish = false
 
+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 anyhow = "1.0.65"
 csv = "1.1.6"
@@ -45,7 +45,7 @@ log_level = "INFO"
 
 dump_dir = "dumps/"
 # Sets the directory where Meilisearch will create dump files.
-# https://docs.meilisearch.com/learn/configuration/instance_options.html#dumps-destination
+# https://docs.meilisearch.com/learn/configuration/instance_options.html#dump-directory
 
 # import_dump = "./path/to/my/file.dump"
 # Imports the dump file located at the specified path. Path must point to a .dump file.
@@ -1,7 +1,14 @@
 [package]
 name = "dump"
-version = "1.0.0"
-edition = "2021"
+publish = false
+
+version.workspace = true
+authors.workspace = true
+description.workspace = true
+edition.workspace = true
+homepage.workspace = true
+readme.workspace = true
+license.workspace = true
 
 [dependencies]
 anyhow = "1.0.65"
@@ -198,17 +198,16 @@ impl From<KindWithContent> for KindDump {
 #[cfg(test)]
 pub(crate) mod test {
     use std::fs::File;
-    use std::io::{Seek, SeekFrom};
+    use std::io::Seek;
     use std::str::FromStr;
 
     use big_s::S;
     use maplit::btreeset;
-    use meilisearch_types::index_uid::IndexUid;
+    use meilisearch_types::index_uid_pattern::IndexUidPattern;
     use meilisearch_types::keys::{Action, Key};
     use meilisearch_types::milli::update::Setting;
     use meilisearch_types::milli::{self};
     use meilisearch_types::settings::{Checked, Settings};
-    use meilisearch_types::star_or::StarOr;
     use meilisearch_types::tasks::{Details, Status};
     use serde_json::{json, Map, Value};
     use time::macros::datetime;
@@ -341,7 +340,7 @@ pub(crate) mod test {
             name: Some(S("doggos_key")),
             uid: Uuid::from_str("9f8a34da-b6b2-42f0-939b-dbd4c3448655").unwrap(),
             actions: vec![Action::DocumentsAll],
-            indexes: vec![StarOr::Other(IndexUid::from_str("doggos").unwrap())],
+            indexes: vec![IndexUidPattern::from_str("doggos").unwrap()],
             expires_at: Some(datetime!(4130-03-14 12:21 UTC)),
             created_at: datetime!(1960-11-15 0:00 UTC),
             updated_at: datetime!(2022-11-10 0:00 UTC),
@@ -351,7 +350,7 @@ pub(crate) mod test {
             name: Some(S("master_key")),
             uid: Uuid::from_str("4622f717-1c00-47bb-a494-39d76a49b591").unwrap(),
             actions: vec![Action::All],
-            indexes: vec![StarOr::Star],
+            indexes: vec![IndexUidPattern::all()],
             expires_at: None,
             created_at: datetime!(0000-01-01 00:01 UTC),
             updated_at: datetime!(1964-05-04 17:25 UTC),
@@ -410,7 +409,7 @@ pub(crate) mod test {
         // create the dump
         let mut file = tempfile::tempfile().unwrap();
         dump.persist_to(&mut file).unwrap();
-        file.seek(SeekFrom::Start(0)).unwrap();
+        file.rewind().unwrap();
 
         file
     }
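
A note on the `seek`-to-`rewind` change in the hunk above: `Seek::rewind` (stable since Rust 1.55) is shorthand for seeking to offset zero, so the `SeekFrom` import can be dropped wherever it was only used for that. A minimal standalone sketch of the equivalence, assuming the `tempfile` crate that these tests already use:

    use std::io::{Read, Seek, SeekFrom, Write};

    fn main() -> std::io::Result<()> {
        let mut file = tempfile::tempfile()?;
        file.write_all(b"dump contents")?;

        // These two calls are equivalent; `rewind` reads better and
        // needs no `SeekFrom` import.
        file.seek(SeekFrom::Start(0))?;
        file.rewind()?;

        let mut buf = String::new();
        file.read_to_string(&mut buf)?;
        assert_eq!(buf, "dump contents");
        Ok(())
    }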
@@ -10,6 +10,7 @@ expression: products.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
+  "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@@ -13,13 +13,17 @@ expression: movies.settings().unwrap()
     "genres",
     "id"
   ],
+  "sortableAttributes": [
+    "genres",
+    "id"
+  ],
   "rankingRules": [
     "typo",
     "words",
     "proximity",
     "attribute",
     "exactness",
-    "asc(release_date)"
+    "release_date:asc"
   ],
   "stopWords": [],
   "synonyms": {},
@@ -10,6 +10,7 @@ expression: spells.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
+  "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@@ -1,4 +1,3 @@
-use std::collections::BTreeSet;
 use std::str::FromStr;
 
 use super::v2_to_v3::CompatV2ToV3;
@@ -102,14 +101,15 @@ impl CompatIndexV1ToV2 {
 
 impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
     fn from(source: v1::settings::Settings) -> Self {
-        let displayed_attributes = source
-            .displayed_attributes
-            .map(|opt| opt.map(|displayed_attributes| displayed_attributes.into_iter().collect()));
-        let attributes_for_faceting = source.attributes_for_faceting.map(|opt| {
-            opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect())
-        });
-        let ranking_rules = source.ranking_rules.map(|opt| {
-            opt.map(|ranking_rules| {
+        Self {
+            displayed_attributes: option_to_setting(source.displayed_attributes)
+                .map(|displayed| displayed.into_iter().collect()),
+            searchable_attributes: option_to_setting(source.searchable_attributes),
+            filterable_attributes: option_to_setting(source.attributes_for_faceting.clone())
+                .map(|filterable| filterable.into_iter().collect()),
+            sortable_attributes: option_to_setting(source.attributes_for_faceting)
+                .map(|sortable| sortable.into_iter().collect()),
+            ranking_rules: option_to_setting(source.ranking_rules).map(|ranking_rules| {
                 ranking_rules
                     .into_iter()
                     .filter_map(|ranking_rule| {

@@ -119,26 +119,33 @@ impl From<v1::settings::Settings> for v2::Settings<v2::Unchecked> {
                             ranking_rule.into();
                         criterion.as_ref().map(ToString::to_string)
                     }
-                    Err(()) => Some(ranking_rule),
+                    Err(()) => {
+                        log::warn!(
+                            "Could not import the following ranking rule: `{}`.",
+                            ranking_rule
+                        );
+                        None
+                    }
                 }
             })
             .collect()
-            })
-        });
-
-        Self {
-            displayed_attributes,
-            searchable_attributes: source.searchable_attributes,
-            filterable_attributes: attributes_for_faceting,
-            ranking_rules,
-            stop_words: source.stop_words,
-            synonyms: source.synonyms,
-            distinct_attribute: source.distinct_attribute,
+            }),
+            stop_words: option_to_setting(source.stop_words),
+            synonyms: option_to_setting(source.synonyms),
+            distinct_attribute: option_to_setting(source.distinct_attribute),
             _kind: std::marker::PhantomData,
         }
     }
 }
 
+fn option_to_setting<T>(opt: Option<Option<T>>) -> v2::Setting<T> {
+    match opt {
+        Some(Some(t)) => v2::Setting::Set(t),
+        None => v2::Setting::NotSet,
+        Some(None) => v2::Setting::Reset,
+    }
+}
+
 impl From<v1::update::UpdateStatus> for Option<v2::updates::UpdateStatus> {
     fn from(source: v1::update::UpdateStatus) -> Self {
         use v1::update::UpdateStatus as UpdateStatusV1;
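
The `option_to_setting` helper added above is the heart of this migration: it turns the old implicit `Option<Option<T>>` convention into an explicit tri-state. A self-contained sketch of the mapping, with the `Setting` enum inlined so the snippet compiles on its own:

    #[derive(Debug, PartialEq)]
    enum Setting<T> {
        Set(T),
        Reset,
        NotSet,
    }

    fn option_to_setting<T>(opt: Option<Option<T>>) -> Setting<T> {
        match opt {
            Some(Some(t)) => Setting::Set(t), // a concrete value was provided
            Some(None) => Setting::Reset,     // the field was explicitly cleared
            None => Setting::NotSet,          // the field was never mentioned
        }
    }

    fn main() {
        assert_eq!(option_to_setting(Some(Some(42))), Setting::Set(42));
        assert_eq!(option_to_setting::<u32>(Some(None)), Setting::Reset);
        assert_eq!(option_to_setting::<u32>(None), Setting::NotSet);
    }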
@@ -251,38 +258,27 @@ impl From<v1::update::UpdateType> for Option<v2::updates::UpdateMeta> {
 
 impl From<v1::settings::SettingsUpdate> for v2::Settings<v2::Unchecked> {
     fn from(source: v1::settings::SettingsUpdate) -> Self {
-        let displayed_attributes: Option<Option<BTreeSet<String>>> =
-            source.displayed_attributes.into();
-
-        let attributes_for_faceting: Option<Option<Vec<String>>> =
-            source.attributes_for_faceting.into();
-
-        let ranking_rules: Option<Option<Vec<v1::settings::RankingRule>>> =
-            source.ranking_rules.into();
+        let ranking_rules = v2::Setting::from(source.ranking_rules);
 
         // go from the concrete types of v1 (RankingRule) to the concrete type of v2 (Criterion),
         // and then back to string as this is what the settings manipulate
-        let ranking_rules = ranking_rules.map(|opt| {
-            opt.map(|ranking_rules| {
-                ranking_rules
-                    .into_iter()
-                    // filter out the WordsPosition ranking rule that exists in v1 but not v2
-                    .filter_map(|ranking_rule| {
-                        Option::<v2::settings::Criterion>::from(ranking_rule)
-                    })
-                    .map(|criterion| criterion.to_string())
-                    .collect()
-            })
+        let ranking_rules = ranking_rules.map(|ranking_rules| {
+            ranking_rules
+                .into_iter()
+                // filter out the WordsPosition ranking rule that exists in v1 but not v2
+                .filter_map(Option::<v2::settings::Criterion>::from)
+                .map(|criterion| criterion.to_string())
+                .collect()
         });
 
         Self {
-            displayed_attributes: displayed_attributes.map(|opt| {
-                opt.map(|displayed_attributes| displayed_attributes.into_iter().collect())
-            }),
+            displayed_attributes: v2::Setting::from(source.displayed_attributes)
+                .map(|displayed_attributes| displayed_attributes.into_iter().collect()),
             searchable_attributes: source.searchable_attributes.into(),
-            filterable_attributes: attributes_for_faceting.map(|opt| {
-                opt.map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect())
-            }),
+            filterable_attributes: v2::Setting::from(source.attributes_for_faceting.clone())
+                .map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()),
+            sortable_attributes: v2::Setting::from(source.attributes_for_faceting)
+                .map(|attributes_for_faceting| attributes_for_faceting.into_iter().collect()),
             ranking_rules,
             stop_words: source.stop_words.into(),
             synonyms: source.synonyms.into(),
@@ -314,12 +310,12 @@ impl From<v1::settings::RankingRule> for Option<v2::settings::Criterion> {
     }
 }
 
-impl<T> From<v1::settings::UpdateState<T>> for Option<Option<T>> {
+impl<T> From<v1::settings::UpdateState<T>> for v2::Setting<T> {
     fn from(source: v1::settings::UpdateState<T>) -> Self {
         match source {
-            v1::settings::UpdateState::Update(new_value) => Some(Some(new_value)),
-            v1::settings::UpdateState::Clear => Some(None),
-            v1::settings::UpdateState::Nothing => None,
+            v1::settings::UpdateState::Update(new_value) => v2::Setting::Set(new_value),
+            v1::settings::UpdateState::Clear => v2::Setting::Reset,
+            v1::settings::UpdateState::Nothing => v2::Setting::NotSet,
         }
     }
 }
@@ -352,7 +348,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"ad6245d98d1a8e30535f3339a9a8d223");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2298010973ee98cf4670787314176a3a");
         assert_eq!(update_files.len(), 9);
         assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dumps v1
|
@ -361,28 +361,29 @@ impl From<String> for v3::Code {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
fn option_to_setting<T>(opt: Option<Option<T>>) -> v3::Setting<T> {
|
impl<A> From<v2::Setting<A>> for v3::Setting<A> {
|
||||||
match opt {
|
fn from(setting: v2::Setting<A>) -> Self {
|
||||||
Some(Some(t)) => v3::Setting::Set(t),
|
match setting {
|
||||||
None => v3::Setting::NotSet,
|
v2::settings::Setting::Set(a) => v3::settings::Setting::Set(a),
|
||||||
Some(None) => v3::Setting::Reset,
|
v2::settings::Setting::Reset => v3::settings::Setting::Reset,
|
||||||
|
v2::settings::Setting::NotSet => v3::settings::Setting::NotSet,
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
impl<T> From<v2::Settings<T>> for v3::Settings<v3::Unchecked> {
|
impl<T> From<v2::Settings<T>> for v3::Settings<v3::Unchecked> {
|
||||||
fn from(settings: v2::Settings<T>) -> Self {
|
fn from(settings: v2::Settings<T>) -> Self {
|
||||||
v3::Settings {
|
v3::Settings {
|
||||||
displayed_attributes: option_to_setting(settings.displayed_attributes),
|
displayed_attributes: settings.displayed_attributes.into(),
|
||||||
searchable_attributes: option_to_setting(settings.searchable_attributes),
|
searchable_attributes: settings.searchable_attributes.into(),
|
||||||
filterable_attributes: option_to_setting(settings.filterable_attributes)
|
filterable_attributes: settings.filterable_attributes.into(),
|
||||||
.map(|f| f.into_iter().collect()),
|
sortable_attributes: settings.sortable_attributes.into(),
|
||||||
sortable_attributes: v3::Setting::NotSet,
|
ranking_rules: v3::Setting::from(settings.ranking_rules).map(|criteria| {
|
||||||
ranking_rules: option_to_setting(settings.ranking_rules).map(|criteria| {
|
|
||||||
criteria.into_iter().map(|criterion| patch_ranking_rules(&criterion)).collect()
|
criteria.into_iter().map(|criterion| patch_ranking_rules(&criterion)).collect()
|
||||||
}),
|
}),
|
||||||
stop_words: option_to_setting(settings.stop_words),
|
stop_words: settings.stop_words.into(),
|
||||||
synonyms: option_to_setting(settings.synonyms),
|
synonyms: settings.synonyms.into(),
|
||||||
distinct_attribute: option_to_setting(settings.distinct_attribute),
|
distinct_attribute: settings.distinct_attribute.into(),
|
||||||
_kind: std::marker::PhantomData,
|
_kind: std::marker::PhantomData,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
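
Swapping the free function for a generic `From<v2::Setting<A>> for v3::Setting<A>` impl is what lets each field above shrink to `settings.<field>.into()`. A reduced sketch of the pattern with both enums inlined (the names here are stand-ins, not the real module paths):

    #[derive(Debug, PartialEq)]
    enum SettingV2<T> { Set(T), Reset, NotSet }

    #[derive(Debug, PartialEq)]
    enum SettingV3<T> { Set(T), Reset, NotSet }

    // One generic conversion between the structurally identical enums...
    impl<A> From<SettingV2<A>> for SettingV3<A> {
        fn from(setting: SettingV2<A>) -> Self {
            match setting {
                SettingV2::Set(a) => SettingV3::Set(a),
                SettingV2::Reset => SettingV3::Reset,
                SettingV2::NotSet => SettingV3::NotSet,
            }
        }
    }

    fn main() {
        // ...so every call site can simply write `.into()`.
        let converted: SettingV3<u32> = SettingV2::Set(1).into();
        assert_eq!(converted, SettingV3::Set(1));
    }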
@@ -394,6 +395,7 @@ fn patch_ranking_rules(ranking_rule: &str) -> String {
         Ok(v2::settings::Criterion::Typo) => String::from("typo"),
         Ok(v2::settings::Criterion::Proximity) => String::from("proximity"),
         Ok(v2::settings::Criterion::Attribute) => String::from("attribute"),
+        Ok(v2::settings::Criterion::Sort) => String::from("sort"),
         Ok(v2::settings::Criterion::Exactness) => String::from("exactness"),
         Ok(v2::settings::Criterion::Asc(name)) => format!("{name}:asc"),
         Ok(v2::settings::Criterion::Desc(name)) => format!("{name}:desc"),
|
@ -181,10 +181,8 @@ impl CompatV5ToV6 {
|
|||||||
.indexes
|
.indexes
|
||||||
.into_iter()
|
.into_iter()
|
||||||
.map(|index| match index {
|
.map(|index| match index {
|
||||||
v5::StarOr::Star => v6::StarOr::Star,
|
v5::StarOr::Star => v6::IndexUidPattern::all(),
|
||||||
v5::StarOr::Other(uid) => {
|
v5::StarOr::Other(uid) => v6::IndexUidPattern::new_unchecked(uid.as_str()),
|
||||||
v6::StarOr::Other(v6::IndexUid::new_unchecked(uid.as_str()))
|
|
||||||
}
|
|
||||||
})
|
})
|
||||||
.collect(),
|
.collect(),
|
||||||
expires_at: key.expires_at,
|
expires_at: key.expires_at,
|
||||||
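
For readers following the key-import change above: the two-variant `StarOr` wrapper is folded into `IndexUidPattern`, where the wildcard is just one more pattern. A sketch of the mapping with simplified stand-ins for both types (the real ones live in `meilisearch_types`):

    enum StarOr<T> {
        Star,
        Other(T),
    }

    #[derive(Debug, PartialEq)]
    struct IndexUidPattern(String);

    impl IndexUidPattern {
        fn all() -> Self {
            IndexUidPattern("*".to_string())
        }
        fn new_unchecked(s: &str) -> Self {
            IndexUidPattern(s.to_string())
        }
    }

    fn convert(index: StarOr<String>) -> IndexUidPattern {
        match index {
            // the wildcard becomes the "match everything" pattern...
            StarOr::Star => IndexUidPattern::all(),
            // ...and a concrete uid becomes a literal pattern.
            StarOr::Other(uid) => IndexUidPattern::new_unchecked(uid.as_str()),
        }
    }

    fn main() {
        assert_eq!(convert(StarOr::Star), IndexUidPattern::all());
        assert_eq!(
            convert(StarOr::Other("doggos".to_string())),
            IndexUidPattern::new_unchecked("doggos")
        );
    }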
@@ -260,7 +258,7 @@ impl From<v5::ResponseError> for v6::ResponseError {
             "index_already_exists" => v6::Code::IndexAlreadyExists,
             "index_not_found" => v6::Code::IndexNotFound,
             "invalid_index_uid" => v6::Code::InvalidIndexUid,
-            "invalid_min_word_length_for_typo" => v6::Code::InvalidMinWordLengthForTypo,
+            "invalid_min_word_length_for_typo" => v6::Code::InvalidSettingsTypoTolerance,
             "invalid_state" => v6::Code::InvalidState,
             "primary_key_inference_failed" => v6::Code::IndexPrimaryKeyNoCandidateFound,
             "index_primary_key_already_exists" => v6::Code::IndexPrimaryKeyAlreadyExists,
@@ -439,7 +437,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"10c673c97f053830aa659876d7aa0b53");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c");
         assert_eq!(update_files.len(), 22);
         assert!(update_files[0].is_none()); // the dump creation
         assert!(update_files[1].is_some()); // the enqueued document addition
@@ -201,7 +201,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"10c673c97f053830aa659876d7aa0b53");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"41f91d3a94911b2735ec41b07540df5c");
         assert_eq!(update_files.len(), 22);
         assert!(update_files[0].is_none()); // the dump creation
         assert!(update_files[1].is_some()); // the enqueued document addition
@@ -279,7 +279,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"12eca43d5d1e1f334200eb4df653b0c9");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"c2445ddd1785528b80f2ba534d3bd00c");
         assert_eq!(update_files.len(), 10);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -356,7 +356,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2f51c6345fabccf47b18c82bad618ffe");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"cd12efd308fe3ed226356a727ab42ed3");
         assert_eq!(update_files.len(), 10);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -449,7 +449,7 @@ pub(crate) mod test {
         // tasks
         let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
         let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
-        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"b27292d0bb86d4b4dd1b375a46b33890");
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"bc616290adfe7d09a624cf6065ca9069");
         assert_eq!(update_files.len(), 9);
         assert!(update_files[0].is_some()); // the enqueued document addition
         assert!(update_files[1..].iter().all(|u| u.is_none())); // everything already processed
@@ -530,6 +530,82 @@ pub(crate) mod test {
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
 
+    #[test]
+    fn import_dump_v2_from_meilisearch_v0_22_0_issue_3435() {
+        let dump = File::open("tests/assets/v2-v0.22.0.dump").unwrap();
+        let mut dump = DumpReader::open(dump).unwrap();
+
+        // top level infos
+        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
+        assert_eq!(dump.instance_uid().unwrap(), None);
+
+        // tasks
+        let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"2db37756d8af1fb7623436b76e8956a6");
+        assert_eq!(update_files.len(), 8);
+        assert!(update_files[0..].iter().all(|u| u.is_none())); // everything already processed
+
+        // keys
+        let keys = dump.keys().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        meili_snap::snapshot_hash!(meili_snap::json_string!(keys), @"d751713988987e9331980363e24189ce");
+
+        // indexes
+        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        // the index are not ordered in any way by default
+        indexes.sort_by_key(|index| index.metadata().uid.to_string());
+
+        let mut products = indexes.pop().unwrap();
+        let mut movies = indexes.pop().unwrap();
+        let mut spells = indexes.pop().unwrap();
+        assert!(indexes.is_empty());
+
+        // products
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "products",
+          "primaryKey": "sku",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(products.settings().unwrap());
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
+
+        // movies
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "movies",
+          "primaryKey": "id",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(movies.settings().unwrap());
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");
+
+        // spells
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "dnd_spells",
+          "primaryKey": "index",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(spells.settings().unwrap());
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
+    }
+
     #[test]
     fn import_dump_v1() {
         let dump = File::open("tests/assets/v1.dump").unwrap();
|
|||||||
// tasks
|
// tasks
|
||||||
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
let tasks = dump.tasks().unwrap().collect::<Result<Vec<_>>>().unwrap();
|
||||||
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
|
let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
|
||||||
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"9725ccfceea3f8d5846c44006c9e1e7b");
|
meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"8df6eab075a44b3c1af6b726f9fd9a43");
|
||||||
assert_eq!(update_files.len(), 9);
|
assert_eq!(update_files.len(), 9);
|
||||||
assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dump v1
|
assert!(update_files[..].iter().all(|u| u.is_none())); // no update file in dump v1
|
||||||
|
|
||||||
|
@@ -10,6 +10,7 @@ expression: spells.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
+  "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@@ -10,6 +10,7 @@ expression: products.settings().unwrap()
     "*"
   ],
   "filterableAttributes": [],
+  "sortableAttributes": [],
   "rankingRules": [
     "typo",
     "words",
@@ -13,6 +13,10 @@ expression: movies.settings().unwrap()
     "genres",
     "id"
   ],
+  "sortableAttributes": [
+    "genres",
+    "id"
+  ],
   "rankingRules": [
     "typo",
     "words",
@@ -0,0 +1,25 @@
+---
+source: dump/src/reader/mod.rs
+expression: spells.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@@ -0,0 +1,39 @@
+---
+source: dump/src/reader/mod.rs
+expression: products.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {
+    "android": [
+      "phone",
+      "smartphone"
+    ],
+    "iphone": [
+      "phone",
+      "smartphone"
+    ],
+    "phone": [
+      "android",
+      "iphone",
+      "smartphone"
+    ]
+  },
+  "distinctAttribute": null
+}
@@ -0,0 +1,30 @@
+---
+source: dump/src/reader/mod.rs
+expression: movies.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [
+    "genres",
+    "id"
+  ],
+  "sortableAttributes": [
+    "release_date"
+  ],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "exactness",
+    "release_date:asc"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@@ -41,6 +41,7 @@ use super::Document;
 use crate::{IndexMetadata, Result, Version};
 
 pub type Settings<T> = settings::Settings<T>;
+pub type Setting<T> = settings::Setting<T>;
 pub type Checked = settings::Checked;
 pub type Unchecked = settings::Unchecked;
@@ -306,4 +307,81 @@ pub(crate) mod test {
         assert_eq!(documents.len(), 10);
         meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
     }
+
+    #[test]
+    fn read_dump_v2_from_meilisearch_v0_22_0_issue_3435() {
+        let dump = File::open("tests/assets/v2-v0.22.0.dump").unwrap();
+        let dir = TempDir::new().unwrap();
+        let mut dump = BufReader::new(dump);
+        let gz = GzDecoder::new(&mut dump);
+        let mut archive = tar::Archive::new(gz);
+        archive.unpack(dir.path()).unwrap();
+
+        let mut dump = V2Reader::open(dir).unwrap();
+
+        // top level infos
+        insta::assert_display_snapshot!(dump.date().unwrap(), @"2023-01-30 16:26:09.247261 +00:00:00");
+
+        // tasks
+        let tasks = dump.tasks().collect::<Result<Vec<_>>>().unwrap();
+        let (tasks, update_files): (Vec<_>, Vec<_>) = tasks.into_iter().unzip();
+        meili_snap::snapshot_hash!(meili_snap::json_string!(tasks), @"aca8ba13046272664eb3ea2da3031633");
+        assert_eq!(update_files.len(), 8);
+        assert!(update_files[0..].iter().all(|u| u.is_none())); // everything has already been processed
+
+        // indexes
+        let mut indexes = dump.indexes().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        // the index are not ordered in any way by default
+        indexes.sort_by_key(|index| index.metadata().uid.to_string());
+
+        let mut products = indexes.pop().unwrap();
+        let mut movies = indexes.pop().unwrap();
+        let mut spells = indexes.pop().unwrap();
+        assert!(indexes.is_empty());
+
+        // products
+        insta::assert_json_snapshot!(products.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "products",
+          "primaryKey": "sku",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(products.settings().unwrap());
+        let documents = products.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"548284a84de510f71e88e6cdea495cf5");
+
+        // movies
+        insta::assert_json_snapshot!(movies.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "movies",
+          "primaryKey": "id",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(movies.settings().unwrap());
+        let documents = movies.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"0227598af846e574139ee0b80e03a720");
+
+        // spells
+        insta::assert_json_snapshot!(spells.metadata(), { ".createdAt" => "[now]", ".updatedAt" => "[now]" }, @r###"
+        {
+          "uid": "dnd_spells",
+          "primaryKey": "index",
+          "createdAt": "[now]",
+          "updatedAt": "[now]"
+        }
+        "###);
+
+        insta::assert_json_snapshot!(spells.settings().unwrap());
+        let documents = spells.documents().unwrap().collect::<Result<Vec<_>>>().unwrap();
+        assert_eq!(documents.len(), 10);
+        meili_snap::snapshot_hash!(format!("{:#?}", documents), @"235016433dd04262c7f2da01d1e808ce");
+    }
 }
@@ -1,35 +1,33 @@
 use std::collections::{BTreeMap, BTreeSet};
-use std::fmt::Display;
+use std::fmt;
 use std::marker::PhantomData;
 use std::str::FromStr;
 
-use once_cell::sync::Lazy;
-use regex::Regex;
 use serde::{Deserialize, Deserializer};
 
 #[cfg(test)]
 fn serialize_with_wildcard<S>(
-    field: &Option<Option<Vec<String>>>,
+    field: &Setting<Vec<String>>,
     s: S,
 ) -> std::result::Result<S::Ok, S::Error>
 where
     S: serde::Serializer,
 {
-    let wildcard = vec!["*".to_string()];
-    s.serialize_some(&field.as_ref().map(|o| o.as_ref().unwrap_or(&wildcard)))
-}
+    use serde::Serialize;
 
-fn deserialize_some<'de, T, D>(deserializer: D) -> std::result::Result<Option<T>, D::Error>
-where
-    T: Deserialize<'de>,
-    D: Deserializer<'de>,
-{
-    Deserialize::deserialize(deserializer).map(Some)
+    let wildcard = vec!["*".to_string()];
+    match field {
+        Setting::Set(value) => Some(value),
+        Setting::Reset => Some(&wildcard),
+        Setting::NotSet => None,
+    }
+    .serialize(s)
 }
 
 #[derive(Clone, Default, Debug)]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct Checked;
 
 #[derive(Clone, Default, Debug, Deserialize)]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct Unchecked;
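
The rewritten `serialize_with_wildcard` keeps the old observable behavior: a reset field serializes as the `["*"]` wildcard instead of `null`. A self-contained sketch of the cases using `serde_json`, with the `Setting` enum inlined (in the real struct, `skip_serializing_if` keeps the `NotSet` case from being serialized at all):

    #[derive(serde::Serialize)]
    struct Doc {
        #[serde(serialize_with = "serialize_with_wildcard")]
        displayed: Setting<Vec<String>>,
    }

    enum Setting<T> {
        Set(T),
        Reset,
        NotSet,
    }

    fn serialize_with_wildcard<S>(
        field: &Setting<Vec<String>>,
        s: S,
    ) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        use serde::Serialize;

        let wildcard = vec!["*".to_string()];
        match field {
            Setting::Set(value) => Some(value), // explicit value passes through
            Setting::Reset => Some(&wildcard),  // reset renders as ["*"]
            Setting::NotSet => None,            // untouched renders as null here
        }
        .serialize(s)
    }

    fn main() {
        let set = Doc { displayed: Setting::Set(vec!["title".to_string()]) };
        let reset = Doc { displayed: Setting::Reset };
        assert_eq!(serde_json::to_string(&set).unwrap(), r#"{"displayed":["title"]}"#);
        assert_eq!(serde_json::to_string(&reset).unwrap(), r#"{"displayed":["*"]}"#);
    }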
@@ -42,75 +40,54 @@ pub struct Unchecked;
 pub struct Settings<T> {
     #[serde(
         default,
-        deserialize_with = "deserialize_some",
         serialize_with = "serialize_with_wildcard",
-        skip_serializing_if = "Option::is_none"
+        skip_serializing_if = "Setting::is_not_set"
     )]
-    pub displayed_attributes: Option<Option<Vec<String>>>,
+    pub displayed_attributes: Setting<Vec<String>>,
 
     #[serde(
         default,
-        deserialize_with = "deserialize_some",
         serialize_with = "serialize_with_wildcard",
-        skip_serializing_if = "Option::is_none"
+        skip_serializing_if = "Setting::is_not_set"
     )]
-    pub searchable_attributes: Option<Option<Vec<String>>>,
+    pub searchable_attributes: Setting<Vec<String>>,
 
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub filterable_attributes: Option<Option<BTreeSet<String>>>,
-
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub ranking_rules: Option<Option<Vec<String>>>,
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub stop_words: Option<Option<BTreeSet<String>>>,
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub synonyms: Option<Option<BTreeMap<String, Vec<String>>>>,
-    #[serde(
-        default,
-        deserialize_with = "deserialize_some",
-        skip_serializing_if = "Option::is_none"
-    )]
-    pub distinct_attribute: Option<Option<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub filterable_attributes: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub sortable_attributes: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub ranking_rules: Setting<Vec<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub stop_words: Setting<BTreeSet<String>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub synonyms: Setting<BTreeMap<String, Vec<String>>>,
+    #[serde(default, skip_serializing_if = "Setting::is_not_set")]
+    pub distinct_attribute: Setting<String>,
 
     #[serde(skip)]
     pub _kind: PhantomData<T>,
 }
 
 impl Settings<Unchecked> {
-    pub fn check(mut self) -> Settings<Checked> {
-        let displayed_attributes = match self.displayed_attributes.take() {
-            Some(Some(fields)) => {
+    pub fn check(self) -> Settings<Checked> {
+        let displayed_attributes = match self.displayed_attributes {
+            Setting::Set(fields) => {
                 if fields.iter().any(|f| f == "*") {
-                    Some(None)
+                    Setting::Reset
                 } else {
-                    Some(Some(fields))
+                    Setting::Set(fields)
                 }
             }
             otherwise => otherwise,
         };
 
-        let searchable_attributes = match self.searchable_attributes.take() {
-            Some(Some(fields)) => {
+        let searchable_attributes = match self.searchable_attributes {
+            Setting::Set(fields) => {
                 if fields.iter().any(|f| f == "*") {
-                    Some(None)
+                    Setting::Reset
                 } else {
-                    Some(Some(fields))
+                    Setting::Set(fields)
                 }
             }
             otherwise => otherwise,
@@ -120,6 +97,7 @@ impl Settings<Unchecked> {
             displayed_attributes,
             searchable_attributes,
             filterable_attributes: self.filterable_attributes,
+            sortable_attributes: self.sortable_attributes,
             ranking_rules: self.ranking_rules,
             stop_words: self.stop_words,
             synonyms: self.synonyms,
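
One behavioral note the `check` rewrite above preserves: a displayed or searchable list containing `"*"` is collapsed into a reset, since a wildcard means "all attributes". A reduced sketch over a single field, with `Setting` inlined:

    #[derive(Debug, PartialEq)]
    enum Setting<T> {
        Set(T),
        Reset,
        NotSet,
    }

    fn check(displayed_attributes: Setting<Vec<String>>) -> Setting<Vec<String>> {
        match displayed_attributes {
            Setting::Set(fields) => {
                if fields.iter().any(|f| f == "*") {
                    // a wildcard anywhere in the list means "all attributes",
                    // which is exactly what Reset expresses
                    Setting::Reset
                } else {
                    Setting::Set(fields)
                }
            }
            otherwise => otherwise,
        }
    }

    fn main() {
        assert_eq!(check(Setting::Set(vec!["*".to_string()])), Setting::Reset);
        assert_eq!(
            check(Setting::Set(vec!["title".to_string()])),
            Setting::Set(vec!["title".to_string()])
        );
        assert_eq!(check(Setting::NotSet), Setting::NotSet);
    }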
@@ -129,10 +107,61 @@ impl Settings<Unchecked> {
     }
 }
 
-static ASC_DESC_REGEX: Lazy<Regex> =
-    Lazy::new(|| Regex::new(r#"(asc|desc)\(([\w_-]+)\)"#).unwrap());
+#[derive(Debug, Clone, PartialEq)]
+pub enum Setting<T> {
+    Set(T),
+    Reset,
+    NotSet,
+}
+
+impl<T> Default for Setting<T> {
+    fn default() -> Self {
+        Self::NotSet
+    }
+}
+
+impl<T> Setting<T> {
+    pub const fn is_not_set(&self) -> bool {
+        matches!(self, Self::NotSet)
+    }
+
+    pub fn map<A>(self, f: fn(T) -> A) -> Setting<A> {
+        match self {
+            Setting::Set(a) => Setting::Set(f(a)),
+            Setting::Reset => Setting::Reset,
+            Setting::NotSet => Setting::NotSet,
+        }
+    }
+}
+
+#[cfg(test)]
+impl<T: serde::Serialize> serde::Serialize for Setting<T> {
+    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
+    where
+        S: serde::Serializer,
+    {
+        match self {
+            Self::Set(value) => Some(value),
+            // Usually not_set isn't serialized by setting skip_serializing_if field attribute
+            Self::NotSet | Self::Reset => None,
+        }
+        .serialize(serializer)
+    }
+}
+
+impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting<T> {
+    fn deserialize<D>(deserializer: D) -> std::result::Result<Self, D::Error>
+    where
+        D: Deserializer<'de>,
+    {
+        Deserialize::deserialize(deserializer).map(|x| match x {
+            Some(x) => Self::Set(x),
+            None => Self::Reset, // Reset is forced by sending null value
+        })
+    }
+}
 
-#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
+#[derive(Debug, Clone, PartialEq, Eq)]
 pub enum Criterion {
     /// Sorted by decreasing number of matched query terms.
     /// Query words at the front of an attribute is considered better than if it was at the back.
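
The custom `Deserialize` impl above is what tells the three states apart on the wire: an absent field never reaches the deserializer and stays `NotSet` through `#[serde(default)]`, an explicit `null` becomes `Reset`, and a value becomes `Set`. A standalone sketch of that round trip (the `Payload` struct is a hypothetical stand-in):

    use serde::{Deserialize, Deserializer};

    #[derive(Debug, PartialEq)]
    enum Setting<T> {
        Set(T),
        Reset,
        NotSet,
    }

    impl<T> Default for Setting<T> {
        fn default() -> Self {
            Self::NotSet
        }
    }

    impl<'de, T: Deserialize<'de>> Deserialize<'de> for Setting<T> {
        fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
        where
            D: Deserializer<'de>,
        {
            Deserialize::deserialize(deserializer).map(|x| match x {
                Some(x) => Self::Set(x),
                None => Self::Reset, // only an explicit `null` lands here
            })
        }
    }

    #[derive(Debug, Deserialize)]
    struct Payload {
        #[serde(default)] // a missing field never calls the deserializer
        distinct: Setting<String>,
    }

    fn main() {
        let set: Payload = serde_json::from_str(r#"{"distinct":"sku"}"#).unwrap();
        let reset: Payload = serde_json::from_str(r#"{"distinct":null}"#).unwrap();
        let not_set: Payload = serde_json::from_str("{}").unwrap();
        assert_eq!(set.distinct, Setting::Set("sku".to_string()));
        assert_eq!(reset.distinct, Setting::Reset);
        assert_eq!(not_set.distinct, Setting::NotSet);
    }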
@@ -142,8 +171,11 @@ pub enum Criterion {
     /// Sorted by increasing distance between matched query terms.
     Proximity,
     /// Documents with quey words contained in more important
-    /// attributes are considred better.
+    /// attributes are considered better.
     Attribute,
+    /// Dynamically sort at query time the documents. None, one or multiple Asc/Desc sortable
+    /// attributes can be used in place of this criterion at query time.
+    Sort,
     /// Sorted by the similarity of the matched words with the query words.
     Exactness,
     /// Sorted by the increasing value of the field specified.
@@ -152,40 +184,86 @@ pub enum Criterion {
     Desc(String),
 }
 
+impl Criterion {
+    /// Returns the field name parameter of this criterion.
+    pub fn field_name(&self) -> Option<&str> {
+        match self {
+            Criterion::Asc(name) | Criterion::Desc(name) => Some(name),
+            _otherwise => None,
+        }
+    }
+}
+
 impl FromStr for Criterion {
+    // since we're not going to show the custom error message we can override the
+    // error type.
     type Err = ();
 
-    fn from_str(txt: &str) -> Result<Criterion, Self::Err> {
-        match txt {
+    fn from_str(text: &str) -> Result<Criterion, Self::Err> {
+        match text {
             "words" => Ok(Criterion::Words),
             "typo" => Ok(Criterion::Typo),
             "proximity" => Ok(Criterion::Proximity),
             "attribute" => Ok(Criterion::Attribute),
+            "sort" => Ok(Criterion::Sort),
             "exactness" => Ok(Criterion::Exactness),
-            text => {
-                let caps = ASC_DESC_REGEX.captures(text).ok_or(())?;
-                let order = caps.get(1).unwrap().as_str();
-                let field_name = caps.get(2).unwrap().as_str();
-                match order {
-                    "asc" => Ok(Criterion::Asc(field_name.to_string())),
-                    "desc" => Ok(Criterion::Desc(field_name.to_string())),
-                    _text => Err(()),
-                }
-            }
+            text => match AscDesc::from_str(text) {
+                Ok(AscDesc::Asc(field)) => Ok(Criterion::Asc(field)),
+                Ok(AscDesc::Desc(field)) => Ok(Criterion::Desc(field)),
+                Err(_) => Err(()),
+            },
         }
     }
 }
 
-impl Display for Criterion {
-    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
-        match self {
-            Criterion::Words => write!(f, "words"),
-            Criterion::Typo => write!(f, "typo"),
-            Criterion::Proximity => write!(f, "proximity"),
-            Criterion::Attribute => write!(f, "attribute"),
-            Criterion::Exactness => write!(f, "exactness"),
-            Criterion::Asc(field_name) => write!(f, "asc({})", field_name),
-            Criterion::Desc(field_name) => write!(f, "desc({})", field_name),
+#[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
+pub enum AscDesc {
+    Asc(String),
+    Desc(String),
+}
+
+impl FromStr for AscDesc {
+    type Err = ();
+
+    // since we don't know if this comes from the old or new syntax we need to check
+    // for both syntax.
+    // WARN: this code doesn't come from the original meilisearch v0.22.0 but was
+    // written specifically to be able to import the dump of meilisearch v0.21.0 AND
+    // meilisearch v0.22.0.
+    fn from_str(text: &str) -> Result<AscDesc, Self::Err> {
+        if let Some((field_name, asc_desc)) = text.rsplit_once(':') {
+            match asc_desc {
+                "asc" => Ok(AscDesc::Asc(field_name.to_string())),
+                "desc" => Ok(AscDesc::Desc(field_name.to_string())),
+                _ => Err(()),
+            }
+        } else if text.starts_with("asc(") && text.ends_with(')') {
+            Ok(AscDesc::Asc(
+                text.strip_prefix("asc(").unwrap().strip_suffix(')').unwrap().to_string(),
+            ))
+        } else if text.starts_with("desc(") && text.ends_with(')') {
+            Ok(AscDesc::Desc(
+                text.strip_prefix("desc(").unwrap().strip_suffix(')').unwrap().to_string(),
+            ))
+        } else {
+            Err(())
+        }
+    }
+}
+
+impl fmt::Display for Criterion {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        use Criterion::*;
+
+        match self {
+            Words => f.write_str("words"),
+            Typo => f.write_str("typo"),
+            Proximity => f.write_str("proximity"),
+            Attribute => f.write_str("attribute"),
+            Sort => f.write_str("sort"),
+            Exactness => f.write_str("exactness"),
+            Asc(attr) => write!(f, "{}:asc", attr),
+            Desc(attr) => write!(f, "{}:desc", attr),
         }
     }
 }
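
The `AscDesc::from_str` added above is the crux of the issue-3435 fix: per the patch's own comment, dumps written by Meilisearch v0.21 use the `asc(field)` syntax while v0.22 uses `field:asc`, and the reader now accepts both. A trimmed, standalone sketch:

    use std::str::FromStr;

    #[derive(Debug, PartialEq)]
    enum AscDesc {
        Asc(String),
        Desc(String),
    }

    impl FromStr for AscDesc {
        type Err = ();

        fn from_str(text: &str) -> Result<AscDesc, Self::Err> {
            // new syntax: `field:asc` / `field:desc`
            if let Some((field, order)) = text.rsplit_once(':') {
                match order {
                    "asc" => Ok(AscDesc::Asc(field.to_string())),
                    "desc" => Ok(AscDesc::Desc(field.to_string())),
                    _ => Err(()),
                }
            // old syntax: `asc(field)` / `desc(field)`
            } else if text.starts_with("asc(") && text.ends_with(')') {
                Ok(AscDesc::Asc(
                    text.strip_prefix("asc(").unwrap().strip_suffix(')').unwrap().to_string(),
                ))
            } else if text.starts_with("desc(") && text.ends_with(')') {
                Ok(AscDesc::Desc(
                    text.strip_prefix("desc(").unwrap().strip_suffix(')').unwrap().to_string(),
                ))
            } else {
                Err(())
            }
        }
    }

    fn main() {
        assert_eq!("release_date:asc".parse(), Ok(AscDesc::Asc("release_date".to_string())));
        assert_eq!("asc(release_date)".parse(), Ok(AscDesc::Asc("release_date".to_string())));
        assert_eq!("nonsense".parse::<AscDesc>(), Err(()));
    }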
@@ -0,0 +1,25 @@
+---
+source: dump/src/reader/v2/mod.rs
+expression: spells.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@@ -0,0 +1,39 @@
+---
+source: dump/src/reader/v2/mod.rs
+expression: products.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [],
+  "sortableAttributes": [],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "sort",
+    "exactness"
+  ],
+  "stopWords": [],
+  "synonyms": {
+    "android": [
+      "phone",
+      "smartphone"
+    ],
+    "iphone": [
+      "phone",
+      "smartphone"
+    ],
+    "phone": [
+      "android",
+      "iphone",
+      "smartphone"
+    ]
+  },
+  "distinctAttribute": null
+}
@@ -0,0 +1,30 @@
+---
+source: dump/src/reader/v2/mod.rs
+expression: movies.settings().unwrap()
+---
+{
+  "displayedAttributes": [
+    "*"
+  ],
+  "searchableAttributes": [
+    "*"
+  ],
+  "filterableAttributes": [
+    "genres",
+    "id"
+  ],
+  "sortableAttributes": [
+    "release_date"
+  ],
+  "rankingRules": [
+    "words",
+    "typo",
+    "proximity",
+    "attribute",
+    "exactness",
+    "release_date:asc"
+  ],
+  "stopWords": [],
+  "synonyms": {},
+  "distinctAttribute": null
+}
@@ -5,10 +5,8 @@ use serde::{Deserialize, Serialize};
 
 #[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
-#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
 pub struct ResponseError {
     #[serde(skip)]
-    #[cfg_attr(feature = "test-traits", proptest(strategy = "strategy::status_code_strategy()"))]
     pub code: StatusCode,
     pub message: String,
     #[serde(rename = "code")]
@@ -5,7 +5,6 @@ use serde::Deserialize;
 
 #[derive(Debug, Deserialize, Clone, PartialEq, Eq)]
 #[serde(rename_all = "camelCase")]
-#[cfg_attr(feature = "test-traits", derive(proptest_derive::Arbitrary))]
 #[cfg_attr(test, derive(serde::Serialize))]
 pub struct ResponseError {
     #[serde(skip)]
@@ -33,7 +33,7 @@
 //!
 
 use std::fs::{self, File};
-use std::io::{BufRead, BufReader, ErrorKind, Seek, SeekFrom};
+use std::io::{BufRead, BufReader, ErrorKind, Seek};
 use std::path::Path;
 
 use serde::{Deserialize, Serialize};
@@ -178,7 +178,7 @@ impl V5Reader {
     }
 
     pub fn keys(&mut self) -> Result<Box<dyn Iterator<Item = Result<Key>> + '_>> {
-        self.keys.seek(SeekFrom::Start(0))?;
+        self.keys.rewind()?;
         Ok(Box::new(
            (&mut self.keys).lines().map(|line| -> Result<_> { Ok(serde_json::from_str(&line?)?) }),
         ))
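Note: `Seek::rewind()` is the standard-library shorthand for `seek(SeekFrom::Start(0))` (stable since Rust 1.55), which is what allows the `SeekFrom` import to be dropped above.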
@@ -34,8 +34,7 @@ pub type PaginationSettings = meilisearch_types::settings::PaginationSettings;
 
 // everything related to the api keys
 pub type Action = meilisearch_types::keys::Action;
-pub type StarOr<T> = meilisearch_types::star_or::StarOr<T>;
-pub type IndexUid = meilisearch_types::index_uid::IndexUid;
+pub type IndexUidPattern = meilisearch_types::index_uid_pattern::IndexUidPattern;
 
 // everything related to the errors
 pub type ResponseError = meilisearch_types::error::ResponseError;
BIN dump/tests/assets/v2-v0.22.0.dump Normal file
Binary file not shown.
@@ -1,7 +1,14 @@
 [package]
 name = "file-store"
-version = "1.0.0"
-edition = "2021"
+publish = false
+
+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
 
 [dependencies]
 tempfile = "3.3.0"
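Note: the `key.workspace = true` entries here (and in the other manifests below) inherit their values from a `[workspace.package]` table in the repository's root Cargo.toml, which is not part of this diff. A hedged sketch of what such a table looks like; the concrete values are assumptions:

[workspace.package]
version = "1.0.0"   # single source of truth, previously repeated per crate
edition = "2021"
license = "MIT"     # assumed here; check the repository root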
@@ -1,4 +1,3 @@
-use std::collections::BTreeSet;
 use std::fs::File as StdFile;
 use std::ops::{Deref, DerefMut};
 use std::path::{Path, PathBuf};
@@ -11,10 +10,14 @@ const UPDATE_FILES_PATH: &str = "updates/updates_files";
 
 #[derive(Debug, thiserror::Error)]
 pub enum Error {
+    #[error("Could not parse file name as utf-8")]
+    CouldNotParseFileNameAsUtf8,
     #[error(transparent)]
     IoError(#[from] std::io::Error),
     #[error(transparent)]
     PersistError(#[from] tempfile::PersistError),
+    #[error(transparent)]
+    UuidError(#[from] uuid::Error),
 }
 
 pub type Result<T> = std::result::Result<T, Error>;
@@ -33,13 +36,11 @@ impl DerefMut for File {
     }
 }
 
-#[cfg_attr(test, faux::create)]
 #[derive(Clone, Debug)]
 pub struct FileStore {
     path: PathBuf,
 }
 
-#[cfg(not(test))]
 impl FileStore {
     pub fn new(path: impl AsRef<Path>) -> Result<FileStore> {
         let path = path.as_ref().to_path_buf();
@@ -48,7 +49,6 @@ impl FileStore {
     }
 }
 
-#[cfg_attr(test, faux::methods)]
 impl FileStore {
     /// Creates a new temporary update file.
     /// A call to `persist` is needed to persist the file in the database.
@@ -94,7 +94,17 @@ impl FileStore {
         Ok(())
     }
 
-    pub fn get_size(&self, uuid: Uuid) -> Result<u64> {
+    /// Compute the size of all the updates contained in the file store.
+    pub fn compute_total_size(&self) -> Result<u64> {
+        let mut total = 0;
+        for uuid in self.all_uuids()? {
+            total += self.compute_size(uuid?).unwrap_or_default();
+        }
+        Ok(total)
+    }
+
+    /// Compute the size of one update
+    pub fn compute_size(&self, uuid: Uuid) -> Result<u64> {
         Ok(self.get_update(uuid)?.metadata()?.len())
     }
 
@@ -105,17 +115,12 @@ impl FileStore {
     }
 
     /// List the Uuids of the files in the FileStore
-    ///
-    /// This function is meant to be used by tests only.
-    #[doc(hidden)]
-    pub fn __all_uuids(&self) -> BTreeSet<Uuid> {
-        let mut uuids = BTreeSet::new();
-        for entry in self.path.read_dir().unwrap() {
-            let entry = entry.unwrap();
-            let uuid = Uuid::from_str(entry.file_name().to_str().unwrap()).unwrap();
-            uuids.insert(uuid);
-        }
-        uuids
+    pub fn all_uuids(&self) -> Result<impl Iterator<Item = Result<Uuid>>> {
+        Ok(self.path.read_dir()?.map(|entry| {
+            Ok(Uuid::from_str(
+                entry?.file_name().to_str().ok_or(Error::CouldNotParseFileNameAsUtf8)?,
+            )?)
+        }))
     }
 }
 
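Note: the rewrite turns a test-only, unwrap-heavy helper into a public iterator of `Result<Uuid>`, so callers such as `compute_total_size` above can propagate per-entry I/O and parse errors instead of panicking. A dependency-free sketch of the same pattern using only std (`file_names` is an illustrative stand-in, not the crate's API):

use std::io;
use std::path::Path;

// Stream Results instead of unwrapping: each directory entry yields its own
// io::Result, and the caller decides how to handle failures.
fn file_names(dir: &Path) -> io::Result<impl Iterator<Item = io::Result<String>>> {
    Ok(dir.read_dir()?.map(|entry| {
        let entry = entry?; // propagate per-entry I/O errors to the caller
        entry
            .file_name()
            .into_string()
            .map_err(|_| io::Error::new(io::ErrorKind::InvalidData, "non-utf8 file name"))
    }))
}

fn main() -> io::Result<()> {
    for name in file_names(Path::new("."))? {
        println!("{}", name?);
    }
    Ok(())
}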
@@ -1,10 +1,16 @@
 [package]
 name = "filter-parser"
-version = "1.0.0"
-edition = "2021"
 description = "The parser for the Meilisearch filter syntax"
 publish = false
 
+version.workspace = true
+authors.workspace = true
+# description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 nom = "7.1.1"
 nom_locate = "4.0.0"
@@ -57,8 +57,10 @@ pub enum ExpectedValueKind {
 #[derive(Debug)]
 pub enum ErrorKind<'a> {
     ReservedGeo(&'a str),
-    Geo,
-    MisusedGeo,
+    GeoRadius,
+    GeoBoundingBox,
+    MisusedGeoRadius,
+    MisusedGeoBoundingBox,
     InvalidPrimary,
     ExpectedEof,
     ExpectedValue(ExpectedValueKind),
@@ -142,23 +144,29 @@ impl<'a> Display for Error<'a> {
                 writeln!(f, "Expression `{}` is missing the following closing delimiter: `{}`.", escaped_input, c)?
             }
             ErrorKind::InvalidPrimary if input.trim().is_empty() => {
-                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` but instead got nothing.")?
+                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.")?
             }
             ErrorKind::InvalidPrimary => {
-                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `{}`.", escaped_input)?
+                writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `{}`.", escaped_input)?
             }
             ErrorKind::ExpectedEof => {
                 writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
             }
-            ErrorKind::Geo => {
+            ErrorKind::GeoRadius => {
                 writeln!(f, "The `_geoRadius` filter expects three arguments: `_geoRadius(latitude, longitude, radius)`.")?
             }
-            ErrorKind::ReservedGeo(name) => {
-                writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance) built-in rule to filter on `_geo` coordinates.", name.escape_debug())?
+            ErrorKind::GeoBoundingBox => {
+                writeln!(f, "The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.")?
             }
-            ErrorKind::MisusedGeo => {
+            ErrorKind::ReservedGeo(name) => {
+                writeln!(f, "`{}` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox([latitude, longitude], [latitude, longitude]) built-in rules to filter on `_geo` coordinates.", name.escape_debug())?
+            }
+            ErrorKind::MisusedGeoRadius => {
                 writeln!(f, "The `_geoRadius` filter is an operation and can't be used as a value.")?
             }
+            ErrorKind::MisusedGeoBoundingBox => {
+                writeln!(f, "The `_geoBoundingBox` filter is an operation and can't be used as a value.")?
+            }
             ErrorKind::ReservedKeyword(word) => {
                 writeln!(f, "`{word}` is a reserved keyword and thus cannot be used as a field name unless it is put inside quotes. Use \"{word}\" or \'{word}\' instead.")?
             }
@@ -18,6 +18,7 @@
 //! doubleQuoted   = "\"" .* all but double quotes "\""
 //! word           = (alphanumeric | _ | - | .)+
 //! geoRadius      = "_geoRadius(" WS* float WS* "," WS* float WS* "," float WS* ")"
+//! geoBoundingBox = "_geoBoundingBox([" WS * float WS* "," WS* float WS* "], [" WS* float WS* "," WS* float WS* "]")
 //! ```
 //!
 //! Other BNF grammar used to handle some specific errors:
@@ -87,10 +88,15 @@ impl<'a> Token<'a> {
         Self { span, value }
     }
 
+    /// Returns the string contained in the span of the `Token`.
+    /// This is only useful in the tests. You should always use
+    /// the value.
+    #[cfg(test)]
     pub fn lexeme(&self) -> &str {
         &self.span
     }
 
+    /// Return the string contained in the token.
     pub fn value(&self) -> &str {
         self.value.as_ref().map_or(&self.span, |value| value)
     }
@@ -99,8 +105,13 @@ impl<'a> Token<'a> {
         Error::new_from_external(self.span, error)
     }
 
+    /// Returns a copy of the span this token was created with.
+    pub fn original_span(&self) -> Span<'a> {
+        self.span
+    }
+
     pub fn parse_finite_float(&self) -> Result<f64, Error> {
-        let value: f64 = self.span.parse().map_err(|e| self.as_external_error(e))?;
+        let value: f64 = self.value().parse().map_err(|e| self.as_external_error(e))?;
         if value.is_finite() {
             Ok(value)
         } else {
@@ -130,6 +141,7 @@ pub enum FilterCondition<'a> {
     Or(Vec<Self>),
     And(Vec<Self>),
     GeoLowerThan { point: [Token<'a>; 2], radius: Token<'a> },
+    GeoBoundingBox { top_left_point: [Token<'a>; 2], bottom_right_point: [Token<'a>; 2] },
 }
 
 impl<'a> FilterCondition<'a> {
@@ -310,12 +322,12 @@ fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
         // if we were able to parse `_geoRadius` and can't parse the rest of the input we return a failure
         cut(delimited(char('('), separated_list1(tag(","), ws(recognize_float)), char(')'))),
     )(input)
-    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::Geo)));
+    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoRadius)));
 
     let (input, args) = parsed?;
 
     if args.len() != 3 {
-        return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::Geo)));
+        return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoRadius)));
     }
 
     let res = FilterCondition::GeoLowerThan {
@@ -325,6 +337,37 @@ fn parse_geo_radius(input: Span) -> IResult<FilterCondition> {
     Ok((input, res))
 }
 
+/// geoBoundingBox = WS* "_geoBoundingBox([float WS* "," WS* float WS* "], [float WS* "," WS* float WS* "]")
+/// If we parse `_geoBoundingBox` we MUST parse the rest of the expression.
+fn parse_geo_bounding_box(input: Span) -> IResult<FilterCondition> {
+    // we want to allow space BEFORE the _geoBoundingBox but not after
+    let parsed = preceded(
+        tuple((multispace0, word_exact("_geoBoundingBox"))),
+        // if we were able to parse `_geoBoundingBox` and can't parse the rest of the input we return a failure
+        cut(delimited(
+            char('('),
+            separated_list1(
+                tag(","),
+                ws(delimited(char('['), separated_list1(tag(","), ws(recognize_float)), char(']'))),
+            ),
+            char(')'),
+        )),
+    )(input)
+    .map_err(|e| e.map(|_| Error::new_from_kind(input, ErrorKind::GeoBoundingBox)));
+
+    let (input, args) = parsed?;
+
+    if args.len() != 2 || args[0].len() != 2 || args[1].len() != 2 {
+        return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::GeoBoundingBox)));
+    }
+
+    let res = FilterCondition::GeoBoundingBox {
+        top_left_point: [args[0][0].into(), args[0][1].into()],
+        bottom_right_point: [args[1][0].into(), args[1][1].into()],
+    };
+    Ok((input, res))
+}
+
 /// geoPoint = WS* "_geoPoint(float WS* "," WS* float WS* "," WS* float)
 fn parse_geo_point(input: Span) -> IResult<FilterCondition> {
     // we want to forbid space BEFORE the _geoPoint but not after
|
|||||||
}),
|
}),
|
||||||
),
|
),
|
||||||
parse_geo_radius,
|
parse_geo_radius,
|
||||||
|
parse_geo_bounding_box,
|
||||||
parse_in,
|
parse_in,
|
||||||
parse_not_in,
|
parse_not_in,
|
||||||
parse_condition,
|
parse_condition,
|
||||||
@ -468,6 +512,12 @@ pub mod tests {
|
|||||||
// Test geo radius
|
// Test geo radius
|
||||||
insta::assert_display_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
|
insta::assert_display_snapshot!(p("_geoRadius(12, 13, 14)"), @"_geoRadius({12}, {13}, {14})");
|
||||||
insta::assert_display_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
|
insta::assert_display_snapshot!(p("NOT _geoRadius(12, 13, 14)"), @"NOT (_geoRadius({12}, {13}, {14}))");
|
||||||
|
insta::assert_display_snapshot!(p("_geoRadius(12,13,14)"), @"_geoRadius({12}, {13}, {14})");
|
||||||
|
|
||||||
|
// Test geo bounding box
|
||||||
|
insta::assert_display_snapshot!(p("_geoBoundingBox([12, 13], [14, 15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
|
||||||
|
insta::assert_display_snapshot!(p("NOT _geoBoundingBox([12, 13], [14, 15])"), @"NOT (_geoBoundingBox([{12}, {13}], [{14}, {15}]))");
|
||||||
|
insta::assert_display_snapshot!(p("_geoBoundingBox([12,13],[14,15])"), @"_geoBoundingBox([{12}, {13}], [{14}, {15}])");
|
||||||
|
|
||||||
// Test OR + AND
|
// Test OR + AND
|
||||||
insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
|
insta::assert_display_snapshot!(p("channel = ponce AND 'dog race' != 'bernese mountain'"), @"AND[{channel} = {ponce}, {dog race} != {bernese mountain}, ]");
|
||||||
@ -526,7 +576,7 @@ pub mod tests {
|
|||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_display_snapshot!(p("'OR'"), @r###"
|
insta::assert_display_snapshot!(p("'OR'"), @r###"
|
||||||
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `\'OR\'`.
|
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `\'OR\'`.
|
||||||
1:5 'OR'
|
1:5 'OR'
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
@ -536,12 +586,12 @@ pub mod tests {
|
|||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_display_snapshot!(p("channel Ponce"), @r###"
|
insta::assert_display_snapshot!(p("channel Ponce"), @r###"
|
||||||
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `channel Ponce`.
|
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `channel Ponce`.
|
||||||
1:14 channel Ponce
|
1:14 channel Ponce
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_display_snapshot!(p("channel = Ponce OR"), @r###"
|
insta::assert_display_snapshot!(p("channel = Ponce OR"), @r###"
|
||||||
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` but instead got nothing.
|
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` but instead got nothing.
|
||||||
19:19 channel = Ponce OR
|
19:19 channel = Ponce OR
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
@ -555,13 +605,28 @@ pub mod tests {
|
|||||||
1:16 _geoRadius = 12
|
1:16 _geoRadius = 12
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
|
insta::assert_display_snapshot!(p("_geoBoundingBox"), @r###"
|
||||||
|
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
|
||||||
|
1:16 _geoBoundingBox
|
||||||
|
"###);
|
||||||
|
|
||||||
|
insta::assert_display_snapshot!(p("_geoBoundingBox = 12"), @r###"
|
||||||
|
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
|
||||||
|
1:21 _geoBoundingBox = 12
|
||||||
|
"###);
|
||||||
|
|
||||||
|
insta::assert_display_snapshot!(p("_geoBoundingBox(1.0, 1.0)"), @r###"
|
||||||
|
The `_geoBoundingBox` filter expects two pairs of arguments: `_geoBoundingBox([latitude, longitude], [latitude, longitude])`.
|
||||||
|
1:26 _geoBoundingBox(1.0, 1.0)
|
||||||
|
"###);
|
||||||
|
|
||||||
insta::assert_display_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
|
insta::assert_display_snapshot!(p("_geoPoint(12, 13, 14)"), @r###"
|
||||||
`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance) built-in rule to filter on `_geo` coordinates.
|
`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox([latitude, longitude], [latitude, longitude]) built-in rules to filter on `_geo` coordinates.
|
||||||
1:22 _geoPoint(12, 13, 14)
|
1:22 _geoPoint(12, 13, 14)
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_display_snapshot!(p("position <= _geoPoint(12, 13, 14)"), @r###"
|
insta::assert_display_snapshot!(p("position <= _geoPoint(12, 13, 14)"), @r###"
|
||||||
`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance) built-in rule to filter on `_geo` coordinates.
|
`_geoPoint` is a reserved keyword and thus can't be used as a filter expression. Use the `_geoRadius(latitude, longitude, distance), or _geoBoundingBox([latitude, longitude], [latitude, longitude]) built-in rules to filter on `_geo` coordinates.
|
||||||
13:34 position <= _geoPoint(12, 13, 14)
|
13:34 position <= _geoPoint(12, 13, 14)
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
@ -591,12 +656,12 @@ pub mod tests {
|
|||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_display_snapshot!(p("colour NOT EXIST"), @r###"
|
insta::assert_display_snapshot!(p("colour NOT EXIST"), @r###"
|
||||||
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `colour NOT EXIST`.
|
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `colour NOT EXIST`.
|
||||||
1:17 colour NOT EXIST
|
1:17 colour NOT EXIST
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
insta::assert_display_snapshot!(p("subscribers 100 TO1000"), @r###"
|
insta::assert_display_snapshot!(p("subscribers 100 TO1000"), @r###"
|
||||||
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, or `_geoRadius` at `subscribers 100 TO1000`.
|
Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `_geoRadius`, or `_geoBoundingBox` at `subscribers 100 TO1000`.
|
||||||
1:23 subscribers 100 TO1000
|
1:23 subscribers 100 TO1000
|
||||||
"###);
|
"###);
|
||||||
|
|
||||||
@ -715,6 +780,16 @@ impl<'a> std::fmt::Display for FilterCondition<'a> {
|
|||||||
FilterCondition::GeoLowerThan { point, radius } => {
|
FilterCondition::GeoLowerThan { point, radius } => {
|
||||||
write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
|
write!(f, "_geoRadius({}, {}, {})", point[0], point[1], radius)
|
||||||
}
|
}
|
||||||
|
FilterCondition::GeoBoundingBox { top_left_point, bottom_right_point } => {
|
||||||
|
write!(
|
||||||
|
f,
|
||||||
|
"_geoBoundingBox([{}, {}], [{}, {}])",
|
||||||
|
top_left_point[0],
|
||||||
|
top_left_point[1],
|
||||||
|
bottom_right_point[0],
|
||||||
|
bottom_right_point[1]
|
||||||
|
)
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@@ -6,7 +6,10 @@ use nom::sequence::{delimited, terminated};
 use nom::{InputIter, InputLength, InputTake, Slice};
 
 use crate::error::{ExpectedValueKind, NomErrorExt};
-use crate::{parse_geo_point, parse_geo_radius, Error, ErrorKind, IResult, Span, Token};
+use crate::{
+    parse_geo_bounding_box, parse_geo_point, parse_geo_radius, Error, ErrorKind, IResult, Span,
+    Token,
+};
 
 /// This function goes through all characters in the [Span] if it finds any escaped character (`\`).
 /// It generates a new string with all `\` removed from the [Span].
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
match parse_geo_radius(input) {
|
match parse_geo_radius(input) {
|
||||||
Ok(_) => return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeo))),
|
Ok(_) => {
|
||||||
|
return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius)))
|
||||||
|
}
|
||||||
// if we encountered a failure it means the user badly wrote a _geoRadius filter.
|
// if we encountered a failure it means the user badly wrote a _geoRadius filter.
|
||||||
// But instead of showing him how to fix his syntax we are going to tell him he should not use this filter as a value.
|
// But instead of showing them how to fix his syntax we are going to tell them they should not use this filter as a value.
|
||||||
Err(e) if e.is_failure() => {
|
Err(e) if e.is_failure() => {
|
||||||
return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeo)))
|
return Err(nom::Err::Failure(Error::new_from_kind(input, ErrorKind::MisusedGeoRadius)))
|
||||||
|
}
|
||||||
|
_ => (),
|
||||||
|
}
|
||||||
|
|
||||||
|
match parse_geo_bounding_box(input) {
|
||||||
|
Ok(_) => {
|
||||||
|
return Err(nom::Err::Failure(Error::new_from_kind(
|
||||||
|
input,
|
||||||
|
ErrorKind::MisusedGeoBoundingBox,
|
||||||
|
)))
|
||||||
|
}
|
||||||
|
// if we encountered a failure it means the user badly wrote a _geoBoundingBox filter.
|
||||||
|
// But instead of showing them how to fix his syntax we are going to tell them they should not use this filter as a value.
|
||||||
|
Err(e) if e.is_failure() => {
|
||||||
|
return Err(nom::Err::Failure(Error::new_from_kind(
|
||||||
|
input,
|
||||||
|
ErrorKind::MisusedGeoBoundingBox,
|
||||||
|
)))
|
||||||
}
|
}
|
||||||
_ => (),
|
_ => (),
|
||||||
}
|
}
|
||||||
@ -155,7 +178,7 @@ fn is_syntax_component(c: char) -> bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
fn is_keyword(s: &str) -> bool {
|
fn is_keyword(s: &str) -> bool {
|
||||||
matches!(s, "AND" | "OR" | "IN" | "NOT" | "TO" | "EXISTS" | "_geoRadius")
|
matches!(s, "AND" | "OR" | "IN" | "NOT" | "TO" | "EXISTS" | "_geoRadius" | "_geoBoundingBox")
|
||||||
}
|
}
|
||||||
|
|
||||||
#[cfg(test)]
|
#[cfg(test)]
|
||||||
|
@@ -1,11 +1,17 @@
 [package]
 name = "flatten-serde-json"
-version = "1.0.0"
-edition = "2021"
 description = "Flatten serde-json objects like elastic search"
 readme = "README.md"
 publish = false
 
+version.workspace = true
+authors.workspace = true
+# description.workspace = true
+homepage.workspace = true
+# readme.workspace = true
+edition.workspace = true
+license.workspace = true
+
 [dependencies]
 serde_json = "1.0"
 
@@ -1,7 +1,14 @@
 [package]
 name = "index-scheduler"
-version = "1.0.0"
-edition = "2021"
+publish = false
+
+version.workspace = true
+authors.workspace = true
+description.workspace = true
+homepage.workspace = true
+readme.workspace = true
+edition.workspace = true
+license.workspace = true
 
 [dependencies]
 anyhow = "1.0.64"
@@ -19,10 +19,16 @@ use crate::KindWithContent;
 ///
 /// Only the non-prioritised tasks that can be grouped in a batch have a corresponding [`AutobatchKind`]
 enum AutobatchKind {
-    DocumentImport { method: IndexDocumentsMethod, allow_index_creation: bool },
+    DocumentImport {
+        method: IndexDocumentsMethod,
+        allow_index_creation: bool,
+        primary_key: Option<String>,
+    },
     DocumentDeletion,
     DocumentClear,
-    Settings { allow_index_creation: bool },
+    Settings {
+        allow_index_creation: bool,
+    },
     IndexCreation,
     IndexDeletion,
     IndexUpdate,
@@ -38,14 +44,24 @@ impl AutobatchKind {
             _ => None,
         }
     }
+
+    fn primary_key(&self) -> Option<Option<&str>> {
+        match self {
+            AutobatchKind::DocumentImport { primary_key, .. } => Some(primary_key.as_deref()),
+            _ => None,
+        }
+    }
 }
 
 impl From<KindWithContent> for AutobatchKind {
     fn from(kind: KindWithContent) -> Self {
         match kind {
-            KindWithContent::DocumentAdditionOrUpdate { method, allow_index_creation, .. } => {
-                AutobatchKind::DocumentImport { method, allow_index_creation }
-            }
+            KindWithContent::DocumentAdditionOrUpdate {
+                method,
+                allow_index_creation,
+                primary_key,
+                ..
+            } => AutobatchKind::DocumentImport { method, allow_index_creation, primary_key },
             KindWithContent::DocumentDeletion { .. } => AutobatchKind::DocumentDeletion,
             KindWithContent::DocumentClear { .. } => AutobatchKind::DocumentClear,
             KindWithContent::SettingsUpdate { allow_index_creation, is_deletion, .. } => {
@@ -72,10 +88,11 @@ pub enum BatchKind {
     DocumentClear {
         ids: Vec<TaskId>,
     },
-    DocumentImport {
+    DocumentOperation {
         method: IndexDocumentsMethod,
         allow_index_creation: bool,
-        import_ids: Vec<TaskId>,
+        primary_key: Option<String>,
+        operation_ids: Vec<TaskId>,
     },
     DocumentDeletion {
         deletion_ids: Vec<TaskId>,
@@ -85,11 +102,12 @@ pub enum BatchKind {
         allow_index_creation: bool,
         settings_ids: Vec<TaskId>,
     },
-    SettingsAndDocumentImport {
+    SettingsAndDocumentOperation {
         settings_ids: Vec<TaskId>,
         method: IndexDocumentsMethod,
         allow_index_creation: bool,
-        import_ids: Vec<TaskId>,
+        primary_key: Option<String>,
+        operation_ids: Vec<TaskId>,
     },
     Settings {
         allow_index_creation: bool,
@@ -113,13 +131,23 @@ impl BatchKind {
     #[rustfmt::skip]
     fn allow_index_creation(&self) -> Option<bool> {
         match self {
-            BatchKind::DocumentImport { allow_index_creation, .. }
+            BatchKind::DocumentOperation { allow_index_creation, .. }
             | BatchKind::ClearAndSettings { allow_index_creation, .. }
-            | BatchKind::SettingsAndDocumentImport { allow_index_creation, .. }
+            | BatchKind::SettingsAndDocumentOperation { allow_index_creation, .. }
            | BatchKind::Settings { allow_index_creation, .. } => Some(*allow_index_creation),
            _ => None,
        }
    }
+
+    fn primary_key(&self) -> Option<Option<&str>> {
+        match self {
+            BatchKind::DocumentOperation { primary_key, .. }
+            | BatchKind::SettingsAndDocumentOperation { primary_key, .. } => {
+                Some(primary_key.as_deref())
+            }
+            _ => None,
+        }
+    }
 }
 
 impl BatchKind {
@@ -131,6 +159,7 @@ impl BatchKind {
     pub fn new(
         task_id: TaskId,
         kind: KindWithContent,
+        primary_key: Option<&str>,
     ) -> (ControlFlow<BatchKind, BatchKind>, bool) {
         use AutobatchKind as K;
 
@@ -140,11 +169,26 @@ impl BatchKind {
             K::IndexUpdate => (Break(BatchKind::IndexUpdate { id: task_id }), false),
             K::IndexSwap => (Break(BatchKind::IndexSwap { id: task_id }), false),
             K::DocumentClear => (Continue(BatchKind::DocumentClear { ids: vec![task_id] }), false),
-            K::DocumentImport { method, allow_index_creation } => (
-                Continue(BatchKind::DocumentImport {
+            K::DocumentImport { method, allow_index_creation, primary_key: pk }
+                if primary_key.is_none() || pk.is_none() || primary_key == pk.as_deref() =>
+            {
+                (
+                    Continue(BatchKind::DocumentOperation {
+                        method,
+                        allow_index_creation,
+                        primary_key: pk,
+                        operation_ids: vec![task_id],
+                    }),
+                    allow_index_creation,
+                )
+            }
+            // if the primary key set in the task was different than ours we should stop and make this batch fail asap.
+            K::DocumentImport { method, allow_index_creation, primary_key } => (
+                Break(BatchKind::DocumentOperation {
                     method,
                     allow_index_creation,
-                    import_ids: vec![task_id],
+                    primary_key,
+                    operation_ids: vec![task_id],
                 }),
                 allow_index_creation,
             ),
@@ -163,7 +207,7 @@ impl BatchKind {
    /// To ease the writting of the code. `true` can be returned when you don't need to create an index
    /// but false can't be returned if you needs to create an index.
    #[rustfmt::skip]
-    fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool) -> ControlFlow<BatchKind, BatchKind> {
+    fn accumulate(self, id: TaskId, kind: AutobatchKind, index_already_exists: bool, primary_key: Option<&str>) -> ControlFlow<BatchKind, BatchKind> {
        use AutobatchKind as K;
 
        match (self, kind) {
@@ -173,11 +217,39 @@ impl BatchKind {
            (this, kind) if !index_already_exists && this.allow_index_creation() == Some(false) && kind.allow_index_creation() == Some(true) => {
                Break(this)
            },
+            // NOTE: We need to negate the whole condition since we're checking if we need to break instead of continue.
+            // I wrote it this way because it's easier to understand than the other way around.
+            (this, kind) if !(
+                // 1. If both task don't interact with primary key -> we can continue
+                (this.primary_key().is_none() && kind.primary_key().is_none()) ||
+                // 2. Else ->
+                (
+                    // 2.1 If we already have a primary-key ->
+                    (
+                        primary_key.is_some() &&
+                        // 2.1.1 If the task we're trying to accumulate have a pk it must be equal to our primary key
+                        // 2.1.2 If the task don't have a primary-key -> we can continue
+                        kind.primary_key().map_or(true, |pk| pk == primary_key)
+                    ) ||
+                    // 2.2 If we don't have a primary-key ->
+                    (
+                        // 2.2.1 If both the batch and the task have a primary key they should be equal
+                        // 2.2.2 If the batch is set to Some(None), the task should be too
+                        // 2.2.3 If the batch is set to None -> we can continue
+                        this.primary_key().zip(kind.primary_key()).map_or(true, |(this, kind)| this == kind)
+                    )
+                )
+
+            ) // closing the negation
+
+            => {
+                Break(this)
+            },
            // The index deletion can batch with everything but must stop after
            (
                BatchKind::DocumentClear { mut ids }
                | BatchKind::DocumentDeletion { deletion_ids: mut ids }
-                | BatchKind::DocumentImport { method: _, allow_index_creation: _, import_ids: mut ids }
+                | BatchKind::DocumentOperation { method: _, allow_index_creation: _, primary_key: _, operation_ids: mut ids }
                | BatchKind::Settings { allow_index_creation: _, settings_ids: mut ids },
                K::IndexDeletion,
            ) => {
@@ -186,7 +258,7 @@ impl BatchKind {
            }
            (
                BatchKind::ClearAndSettings { settings_ids: mut ids, allow_index_creation: _, mut other }
-                | BatchKind::SettingsAndDocumentImport { import_ids: mut ids, method: _, allow_index_creation: _, settings_ids: mut other },
+                | BatchKind::SettingsAndDocumentOperation { operation_ids: mut ids, method: _, allow_index_creation: _, primary_key: _, settings_ids: mut other },
                K::IndexDeletion,
            ) => {
                ids.push(id);
@@ -206,59 +278,108 @@ impl BatchKind {
                K::DocumentImport { .. } | K::Settings { .. },
            ) => Break(this),
            (
-                BatchKind::DocumentImport { method: _, allow_index_creation: _, import_ids: mut ids },
+                BatchKind::DocumentOperation { method: _, allow_index_creation: _, primary_key: _, mut operation_ids },
                K::DocumentClear,
            ) => {
-                ids.push(id);
-                Continue(BatchKind::DocumentClear { ids })
+                operation_ids.push(id);
+                Continue(BatchKind::DocumentClear { ids: operation_ids })
            }
 
            // we can autobatch the same kind of document additions / updates
            (
-                BatchKind::DocumentImport { method: ReplaceDocuments, allow_index_creation, mut import_ids },
-                K::DocumentImport { method: ReplaceDocuments, .. },
+                BatchKind::DocumentOperation { method: ReplaceDocuments, allow_index_creation, primary_key: _, mut operation_ids },
+                K::DocumentImport { method: ReplaceDocuments, primary_key: pk, .. },
            ) => {
-                import_ids.push(id);
-                Continue(BatchKind::DocumentImport {
+                operation_ids.push(id);
+                Continue(BatchKind::DocumentOperation {
                    method: ReplaceDocuments,
                    allow_index_creation,
-                    import_ids,
+                    primary_key: pk,
+                    operation_ids,
                })
            }
            (
-                BatchKind::DocumentImport { method: UpdateDocuments, allow_index_creation, mut import_ids },
-                K::DocumentImport { method: UpdateDocuments, .. },
+                BatchKind::DocumentOperation { method: UpdateDocuments, allow_index_creation, primary_key: _, mut operation_ids },
+                K::DocumentImport { method: UpdateDocuments, primary_key: pk, .. },
            ) => {
-                import_ids.push(id);
-                Continue(BatchKind::DocumentImport {
+                operation_ids.push(id);
+                Continue(BatchKind::DocumentOperation {
                    method: UpdateDocuments,
                    allow_index_creation,
-                    import_ids,
+                    primary_key: pk,
+                    operation_ids,
                })
            }
+            (
+                BatchKind::DocumentOperation { method, allow_index_creation, primary_key, mut operation_ids },
+                K::DocumentDeletion,
+            ) => {
+                operation_ids.push(id);
+
+                Continue(BatchKind::DocumentOperation {
+                    method,
+                    allow_index_creation,
+                    primary_key,
+                    operation_ids,
+                })
+            }
            // but we can't autobatch documents if it's not the same kind
            // this match branch MUST be AFTER the previous one
            (
-                this @ BatchKind::DocumentImport { .. },
-                K::DocumentDeletion | K::DocumentImport { .. },
+                this @ BatchKind::DocumentOperation { .. },
+                K::DocumentImport { .. },
            ) => Break(this),
+
            (
-                BatchKind::DocumentImport { method, allow_index_creation, import_ids },
+                BatchKind::DocumentOperation { method, allow_index_creation, primary_key, operation_ids },
                K::Settings { .. },
-            ) => Continue(BatchKind::SettingsAndDocumentImport {
+            ) => Continue(BatchKind::SettingsAndDocumentOperation {
                settings_ids: vec![id],
                method,
                allow_index_creation,
-                import_ids,
+                primary_key,
+                operation_ids,
            }),
 
            (BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentClear) => {
                deletion_ids.push(id);
                Continue(BatchKind::DocumentClear { ids: deletion_ids })
            }
-            (this @ BatchKind::DocumentDeletion { .. }, K::DocumentImport { .. }) => Break(this),
+            // we can autobatch the deletion and import if the index already exists
+            (
+                BatchKind::DocumentDeletion { mut deletion_ids },
+                K::DocumentImport { method, allow_index_creation, primary_key }
+            ) if index_already_exists => {
+                deletion_ids.push(id);
+
+                Continue(BatchKind::DocumentOperation {
+                    method,
+                    allow_index_creation,
+                    primary_key,
+                    operation_ids: deletion_ids,
+                })
+            }
+            // we can autobatch the deletion and import if both can't create an index
+            (
+                BatchKind::DocumentDeletion { mut deletion_ids },
+                K::DocumentImport { method, allow_index_creation, primary_key }
+            ) if !allow_index_creation => {
+                deletion_ids.push(id);
+
+                Continue(BatchKind::DocumentOperation {
+                    method,
+                    allow_index_creation,
+                    primary_key,
+                    operation_ids: deletion_ids,
+                })
+            }
+            // we can't autobatch a deletion and an import if the index does not exists but would be created by an addition
+            (
+                this @ BatchKind::DocumentDeletion { .. },
+                K::DocumentImport { .. }
+            ) => {
+                Break(this)
+            }
            (BatchKind::DocumentDeletion { mut deletion_ids }, K::DocumentDeletion) => {
                deletion_ids.push(id);
                Continue(BatchKind::DocumentDeletion { deletion_ids })
@@ -327,57 +448,60 @@ impl BatchKind {
                })
            }
            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: _, import_ids: mut other, allow_index_creation },
+                BatchKind::SettingsAndDocumentOperation { settings_ids, method: _, mut operation_ids, allow_index_creation, primary_key: _ },
                K::DocumentClear,
            ) => {
-                other.push(id);
+                operation_ids.push(id);
                Continue(BatchKind::ClearAndSettings {
                    settings_ids,
-                    other,
+                    other: operation_ids,
                    allow_index_creation,
                })
            }
 
            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: ReplaceDocuments, mut import_ids, allow_index_creation },
-                K::DocumentImport { method: ReplaceDocuments, .. },
+                BatchKind::SettingsAndDocumentOperation { settings_ids, method: ReplaceDocuments, mut operation_ids, allow_index_creation, primary_key: _},
+                K::DocumentImport { method: ReplaceDocuments, primary_key: pk2, .. },
            ) => {
-                import_ids.push(id);
-                Continue(BatchKind::SettingsAndDocumentImport {
+                operation_ids.push(id);
+                Continue(BatchKind::SettingsAndDocumentOperation {
                    settings_ids,
                    method: ReplaceDocuments,
                    allow_index_creation,
-                    import_ids,
+                    primary_key: pk2,
+                    operation_ids,
                })
            }
            (
-                BatchKind::SettingsAndDocumentImport { settings_ids, method: UpdateDocuments, allow_index_creation, mut import_ids },
-                K::DocumentImport { method: UpdateDocuments, .. },
+                BatchKind::SettingsAndDocumentOperation { settings_ids, method: UpdateDocuments, allow_index_creation, primary_key: _, mut operation_ids },
+                K::DocumentImport { method: UpdateDocuments, primary_key: pk2, .. },
            ) => {
-                import_ids.push(id);
-                Continue(BatchKind::SettingsAndDocumentImport {
+                operation_ids.push(id);
+                Continue(BatchKind::SettingsAndDocumentOperation {
                    settings_ids,
                    method: UpdateDocuments,
                    allow_index_creation,
-                    import_ids,
+                    primary_key: pk2,
+                    operation_ids,
                })
            }
            // But we can't batch a settings and a doc op with another doc op
            // this MUST be AFTER the two previous branch
            (
-                this @ BatchKind::SettingsAndDocumentImport { .. },
+                this @ BatchKind::SettingsAndDocumentOperation { .. },
                K::DocumentDeletion | K::DocumentImport { .. },
            ) => Break(this),
            (
-                BatchKind::SettingsAndDocumentImport { mut settings_ids, method, allow_index_creation, import_ids },
+                BatchKind::SettingsAndDocumentOperation { mut settings_ids, method, allow_index_creation, primary_key, operation_ids },
                K::Settings { .. },
            ) => {
                settings_ids.push(id);
-                Continue(BatchKind::SettingsAndDocumentImport {
+                Continue(BatchKind::SettingsAndDocumentOperation {
                    settings_ids,
                    method,
                    allow_index_creation,
-                    import_ids,
+                    primary_key,
+                    operation_ids,
                })
            }
            (
@@ -406,6 +530,7 @@ impl BatchKind {
 pub fn autobatch(
     enqueued: Vec<(TaskId, KindWithContent)>,
     index_already_exists: bool,
+    primary_key: Option<&str>,
 ) -> Option<(BatchKind, bool)> {
     let mut enqueued = enqueued.into_iter();
     let (id, kind) = enqueued.next()?;
@@ -413,7 +538,7 @@ pub fn autobatch(
     // index_exist will keep track of if the index should exist at this point after the tasks we batched.
     let mut index_exist = index_already_exists;
 
-    let (mut acc, must_create_index) = match BatchKind::new(id, kind) {
+    let (mut acc, must_create_index) = match BatchKind::new(id, kind, primary_key) {
         (Continue(acc), create) => (acc, create),
         (Break(acc), create) => return Some((acc, create)),
     };
@@ -422,7 +547,7 @@ pub fn autobatch(
     index_exist |= must_create_index;
 
     for (id, kind) in enqueued {
-        acc = match acc.accumulate(id, kind.into(), index_exist) {
+        acc = match acc.accumulate(id, kind.into(), index_exist, primary_key) {
             Continue(acc) => acc,
             Break(acc) => return Some((acc, must_create_index)),
         };
@@ -441,18 +566,24 @@ mod tests {
 
     fn autobatch_from(
         index_already_exists: bool,
+        primary_key: Option<&str>,
         input: impl IntoIterator<Item = KindWithContent>,
     ) -> Option<(BatchKind, bool)> {
         autobatch(
             input.into_iter().enumerate().map(|(id, kind)| (id as TaskId, kind)).collect(),
             index_already_exists,
+            primary_key,
         )
     }
 
-    fn doc_imp(method: IndexDocumentsMethod, allow_index_creation: bool) -> KindWithContent {
+    fn doc_imp(
+        method: IndexDocumentsMethod,
+        allow_index_creation: bool,
+        primary_key: Option<&str>,
+    ) -> KindWithContent {
         KindWithContent::DocumentAdditionOrUpdate {
             index_uid: String::from("doggo"),
-            primary_key: None,
+            primary_key: primary_key.map(|pk| pk.to_string()),
             method,
             content_file: Uuid::new_v4(),
             documents_count: 0,
@@ -502,226 +633,301 @@ mod tests {
     fn autobatch_simple_operation_together() {
         // we can autobatch one or multiple `ReplaceDocuments` together.
         // if the index exists.
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, false ), doc_imp(ReplaceDocuments, false )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, false , None), doc_imp(ReplaceDocuments, false , None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1, 2] }, false))");
 
         // if it doesn't exists.
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp( ReplaceDocuments, true ), doc_imp(ReplaceDocuments, true )]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1, 2] }, true))");
        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp( ReplaceDocuments, true , None), doc_imp(ReplaceDocuments, true , None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
 
         // we can autobatch one or multiple `UpdateDocuments` together.
         // if the index exists.
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1, 2] }, false))");
|
||||||
|
|
||||||
// if it doesn't exists.
|
// if it doesn't exists.
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 1, 2] }, true))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1, 2] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false), doc_imp(UpdateDocuments, false)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0, 1, 2] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1, 2] }, false))");
|
||||||
|
|
||||||
// we can autobatch one or multiple DocumentDeletion together
|
// we can autobatch one or multiple DocumentDeletion together
|
||||||
debug_snapshot!(autobatch_from(true, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [doc_del(), doc_del(), doc_del()]), @"Some((DocumentDeletion { deletion_ids: [0, 1, 2] }, false))");
|
||||||
|
|
||||||
// we can autobatch one or multiple Settings together
|
// we can autobatch one or multiple Settings together
|
||||||
debug_snapshot!(autobatch_from(true, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
debug_snapshot!(autobatch_from(true, None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(true, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
debug_snapshot!(autobatch_from(true, None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
||||||
debug_snapshot!(autobatch_from(true, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
||||||
|
|
||||||
debug_snapshot!(autobatch_from(false, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
debug_snapshot!(autobatch_from(false,None, [settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
debug_snapshot!(autobatch_from(false,None, [settings(true), settings(true), settings(true)]), @"Some((Settings { allow_index_creation: true, settings_ids: [0, 1, 2] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [settings(false), settings(false), settings(false)]), @"Some((Settings { allow_index_creation: false, settings_ids: [0, 1, 2] }, false))");
|
||||||
|
|
||||||
|
// We can autobatch document addition with document deletion
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, None), doc_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, true, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(ReplaceDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_imp(UpdateDocuments, false, Some("catto")), doc_del()]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
// And the other way around
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, false, Some("catto"))]), @r###"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: Some("catto"), operation_ids: [0, 1] }, false))"###);
|
||||||
}
|
}
|
||||||
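The newly added assertions encode a deliberate asymmetry: an addition followed by a deletion merges into one `DocumentOperation` that keeps the addition's `allow_index_creation` flag, while a deletion followed by an addition also merges but reports `must_create_index` as `false`, because the batch starts with a task that could never create the index. A stand-alone sketch of that flag computation (hypothetical enum, not the module's real types):

// Whether a merged batch may create a missing index is decided by the
// FIRST task: a deletion at the front can never create the index, so the
// returned bool stays false even if a later addition is allowed to create it.
#[derive(Clone, Copy)]
enum Task {
    Addition { allow_index_creation: bool },
    Deletion,
}

fn must_create_index(tasks: &[Task]) -> bool {
    match tasks.first() {
        Some(Task::Addition { allow_index_creation }) => *allow_index_creation,
        _ => false,
    }
}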

     #[test]
     fn simple_document_operation_dont_autobatch_with_other() {
         // addition, updates and deletion can't batch together
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(UpdateDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_del(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");

-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_create()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_create()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_create()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_create()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_create()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");

-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_update()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_update()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_update()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_update()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_update()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");

-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_swap()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_swap()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_swap()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_swap()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_swap()]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
     }
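As these snapshots show, an incompatible follow-up task does not fail the batch: the accumulator simply closes and the batch contains only the ids gathered so far (for example `operation_ids: [0]` when task 1 is an `idx_create()`). A stand-alone sketch of that closing behaviour (hypothetical string kinds instead of the real `KindWithContent`):

// Collect task ids into a batch until a task of an incompatible kind shows
// up; that task is left for a later batch rather than producing an error.
fn batch_ids(kinds: &[&str]) -> Vec<usize> {
    let mut ids = Vec::new();
    let first = match kinds.first() {
        Some(k) => *k,
        None => return ids,
    };
    for (id, &kind) in kinds.iter().enumerate() {
        if kind != first {
            break; // incompatible kind: close the batch here
        }
        ids.push(id);
    }
    ids
}

// batch_ids(&["import", "index_create"]) == vec![0], mirroring the
// `operation_ids: [0]` snapshots above.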

     #[test]
     fn document_addition_batch_with_settings() {
         // simple case
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");

         // multiple settings and doc addition
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), settings(true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), settings(true), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [2, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");

         // addition and setting unordered
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1, 3], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(UpdateDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, import_ids: [0, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1, 3], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 2] }, true))");

         // We ensure this kind of batch doesn't batch with forbidden operations
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_imp(UpdateDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_imp(UpdateDocuments, true, None)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_imp(ReplaceDocuments, true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_imp(ReplaceDocuments, true, None)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_del()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_del()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_create()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_create()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_update()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_update()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_swap()]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, import_ids: [0] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_swap()]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: UpdateDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
     }
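The `SettingsAndDocumentOperation` snapshots keep two id lists: every task retains its position in the input as its `TaskId`, and the batch records settings tasks and document operations separately. A stand-alone sketch of how the interleaved ids split (hypothetical string kinds):

// Partition task ids by kind while preserving the original ordering, the
// way `settings_ids: [1, 3]` and `operation_ids: [0, 2]` arise above.
fn split_ids(kinds: &[&str]) -> (Vec<u32>, Vec<u32>) {
    let mut settings_ids = Vec::new();
    let mut operation_ids = Vec::new();
    for (id, &kind) in kinds.iter().enumerate() {
        if kind == "settings" {
            settings_ids.push(id as u32);
        } else {
            operation_ids.push(id as u32);
        }
    }
    (settings_ids, operation_ids)
}

// split_ids(&["import", "settings", "import", "settings"])
// == (vec![1, 3], vec![0, 2])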

     #[test]
     fn clear_and_additions() {
         // these two doesn't need to batch
-        debug_snapshot!(autobatch_from(true, [doc_clr(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentClear { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_clr(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentClear { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_clr(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentClear { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_clr(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentClear { ids: [0] }, false))");

         // Basic use case
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");

         // This batch kind doesn't mix with other document addition
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr(), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr(), doc_imp(UpdateDocuments, true)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentClear { ids: [0, 1, 2] }, true))");

         // But you can batch multiple clear together
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), doc_imp(UpdateDocuments, true), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), doc_imp(UpdateDocuments, true, None), doc_clr(), doc_clr(), doc_clr()]), @"Some((DocumentClear { ids: [0, 1, 2, 3, 4] }, true))");
     }
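Three rules are exercised here: additions followed by a clear collapse into a single `DocumentClear`, consecutive clears keep joining it, and the first addition after a clear is left for the next batch. A stand-alone sketch of that id-collection logic for a clear-terminated batch (hypothetical string kinds):

// Collect the task ids of a clear-terminated batch: imports are absorbed
// until a clear is seen, further clears join, and an import after a clear
// closes the batch.
fn clear_batch_ids(kinds: &[&str]) -> Vec<usize> {
    let mut ids = Vec::new();
    let mut seen_clear = false;
    for (id, &kind) in kinds.iter().enumerate() {
        match kind {
            "clear" => {
                seen_clear = true;
                ids.push(id);
            }
            "import" if !seen_clear => ids.push(id),
            _ => break, // an import after a clear starts a new batch
        }
    }
    ids
}

// clear_batch_ids(&["import", "import", "clear", "import"]) == vec![0, 1, 2]
// clear_batch_ids(&["clear", "import"]) == vec![0]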

     #[test]
     fn clear_and_additions_and_settings() {
         // A clear don't need to autobatch the settings that happens AFTER there is no documents
-        debug_snapshot!(autobatch_from(true, [doc_clr(), settings(true)]), @"Some((DocumentClear { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_clr(), settings(true)]), @"Some((DocumentClear { ids: [0] }, false))");

-        debug_snapshot!(autobatch_from(true, [settings(true), doc_clr(), settings(true)]), @"Some((ClearAndSettings { other: [1], allow_index_creation: true, settings_ids: [0, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [settings(true), doc_clr(), settings(true)]), @"Some((ClearAndSettings { other: [1], allow_index_creation: true, settings_ids: [0, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr()]), @"Some((ClearAndSettings { other: [0, 2], allow_index_creation: true, settings_ids: [1] }, true))");
     }

     #[test]
     fn anything_and_index_deletion() {
         // The `IndexDeletion` doesn't batch with anything that happens AFTER.
-        debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(ReplaceDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(UpdateDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(ReplaceDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [idx_del(), doc_imp(UpdateDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(true, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");

-        debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(ReplaceDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(UpdateDocuments, true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(ReplaceDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(ReplaceDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [idx_del(), doc_imp(UpdateDocuments, false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_imp(UpdateDocuments, false, None)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_del()]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [idx_del(), doc_clr()]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(true)]), @"Some((IndexDeletion { ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [idx_del(), settings(false)]), @"Some((IndexDeletion { ids: [0] }, false))");

         // The index deletion can accept almost any type of `BatchKind` and transform it to an `IndexDeletion`.
         // First, the basic cases
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");

-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_del(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 1] }, false))");

         // Then the mixed cases.
         // The index already exists, whatever is the right of the tasks it shouldn't change the result.
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments,true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments,true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-        debug_snapshot!(autobatch_from(true, [doc_imp(UpdateDocuments, true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+        debug_snapshot!(autobatch_from(true, None, [doc_imp(UpdateDocuments, true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");

         // When the index doesn't exists yet it's more complicated.
         // Either the first task we encounter create it, in which case we can create a big batch with everything.
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(true), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(true), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
         // The right of the tasks following isn't really important.
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, true), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, true, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, true))");
         // Or, the second case; the first task doesn't create the index and thus we wants to batch it with only tasks that can't create an index.
         // that can be a second task that don't have the right to create an index. Or anything that can't create an index like an index deletion, document deletion, document clear, etc.
         // All theses tasks are going to throw an error `Index doesn't exist` once the batch is processed.
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), idx_del()]), @"Some((IndexDeletion { ids: [0, 2, 1] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(false), doc_clr(), idx_del()]), @"Some((IndexDeletion { ids: [1, 3, 0, 2] }, false))");
         // The third and final case is when the first task doesn't create an index but is directly followed by a task creating an index. In this case we can't batch whith what
         // follows because we first need to process the erronous batch.
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), idx_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), idx_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments,false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments,false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
-        debug_snapshot!(autobatch_from(false, [doc_imp(UpdateDocuments, false), settings(true), doc_clr(), idx_del()]), @"Some((DocumentImport { method: UpdateDocuments, allow_index_creation: false, import_ids: [0] }, false))");
+        debug_snapshot!(autobatch_from(false,None, [doc_imp(UpdateDocuments, false, None), settings(true), doc_clr(), idx_del()]), @"Some((DocumentOperation { method: UpdateDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
}
|
}
|
||||||
|
|
||||||
#[test]
|
#[test]
|
||||||
fn allowed_and_disallowed_index_creation() {
|
fn allowed_and_disallowed_index_creation() {
|
||||||
// `DocumentImport` can't be mixed with those disallowed to do so except if the index already exists.
|
// `DocumentImport` can't be mixed with those disallowed to do so except if the index already exists.
|
||||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
||||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(true, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
|
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), doc_imp(ReplaceDocuments, true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: true, import_ids: [0, 1] }, true))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), doc_imp(ReplaceDocuments, false)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0, 1] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), doc_imp(ReplaceDocuments, false, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0, 1] }, false))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, true), settings(true)]), @"Some((SettingsAndDocumentImport { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, import_ids: [0] }, true))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, true, None), settings(true)]), @"Some((SettingsAndDocumentOperation { settings_ids: [1], method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
debug_snapshot!(autobatch_from(false, [doc_imp(ReplaceDocuments, false), settings(true)]), @"Some((DocumentImport { method: ReplaceDocuments, allow_index_creation: false, import_ids: [0] }, false))");
|
debug_snapshot!(autobatch_from(false,None, [doc_imp(ReplaceDocuments, false, None), settings(true)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: false, primary_key: None, operation_ids: [0] }, false))");
|
||||||
|
|
||||||
|
// batch deletion and addition
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, Some("catto"))]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
|
debug_snapshot!(autobatch_from(false, None, [doc_del(), doc_imp(UpdateDocuments, true, None)]), @"Some((DocumentDeletion { deletion_ids: [0] }, false))");
|
||||||
|
}
|
||||||
|
|
||||||
|
#[test]
|
||||||
|
fn autobatch_primary_key() {
|
||||||
|
// ==> If I have a pk
|
||||||
|
// With a single update
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);
|
||||||
|
|
||||||
|
// With a multiple updates
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0, 1] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0, 1] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other"))]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
|
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0, 1] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0, 1] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###);
|
||||||
|
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, Some("id"), [doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("other")), doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);
|
||||||
|
|
||||||
|
// ==> If I don't have a pk
|
||||||
|
// With a single update
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###);
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("other"))]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("other"), operation_ids: [0] }, true))"###);
|
||||||
|
|
||||||
|
// With a multiple updates
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, None)]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0, 1] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, None), doc_imp(ReplaceDocuments, true, Some("id"))]), @"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: None, operation_ids: [0] }, true))");
|
||||||
|
debug_snapshot!(autobatch_from(true, None, [doc_imp(ReplaceDocuments, true, Some("id")), doc_imp(ReplaceDocuments, true, None)]), @r###"Some((DocumentOperation { method: ReplaceDocuments, allow_index_creation: true, primary_key: Some("id"), operation_ids: [0] }, true))"###);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
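A note on the test style above: these are inline-snapshot assertions. The string after `@` is the expected `Debug` rendering of the autobatcher's decision, so a behavioural change surfaces as a snapshot mismatch rather than a hand-written assertion failure. The following is a minimal, dependency-free sketch of the same pattern; every name below is a toy stand-in, not the scheduler's real API:

    // Toy version of the inline-snapshot pattern: render the value under
    // test with `Debug` and compare it against an expected literal.
    #[derive(Debug)]
    enum BatchKind {
        IndexDeletion { ids: Vec<u32> },
    }

    fn autobatch(ids: Vec<u32>) -> Option<(BatchKind, bool)> {
        // Stand-in for the real autobatcher: everything collapses into one deletion.
        Some((BatchKind::IndexDeletion { ids }, true))
    }

    fn main() {
        let got = format!("{:?}", autobatch(vec![0, 2, 1]));
        // With `insta`, this literal lives inline after `@` and can be refreshed
        // with `cargo insta review`; here we simply assert it directly.
        assert_eq!(got, r#"Some((IndexDeletion { ids: [0, 2, 1] }, true))"#);
    }

The `@r###"..."###` form appearing in the new tests is just the raw-string variant of the same literal, needed once the expected output itself contains double quotes.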
@@ -28,8 +28,7 @@ use meilisearch_types::heed::{RoTxn, RwTxn};
 use meilisearch_types::milli::documents::{obkv_to_object, DocumentsBatchReader};
 use meilisearch_types::milli::heed::CompactionOption;
 use meilisearch_types::milli::update::{
-    DocumentAdditionResult, DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod,
-    Settings as MilliSettings,
+    DocumentDeletionResult, IndexDocumentsConfig, IndexDocumentsMethod, Settings as MilliSettings,
 };
 use meilisearch_types::milli::{self, BEU32};
 use meilisearch_types::settings::{apply_settings_to_builder, Settings, Unchecked};
@@ -86,15 +85,21 @@ pub(crate) enum Batch {
     },
 }
 
+#[derive(Debug)]
+pub(crate) enum DocumentOperation {
+    Add(Uuid),
+    Delete(Vec<String>),
+}
+
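The `DocumentOperation` enum added above is the pivot of this change: a batch no longer carries a bare list of update-file uuids but an ordered mix of additions and deletions. A hedged sketch of a consumer draining such a sequence, with a plain integer standing in for `Uuid` so the sketch stays dependency-free:

    type Uuid = u128; // stand-in for uuid::Uuid

    enum DocumentOperation {
        Add(Uuid),
        Delete(Vec<String>),
    }

    // Walk the operations in the order they were enqueued; in the scheduler
    // each arm feeds milli's indexing builder instead of printing.
    fn apply(ops: Vec<DocumentOperation>) {
        for op in ops {
            match op {
                DocumentOperation::Add(uuid) => println!("add documents from update file {uuid}"),
                DocumentOperation::Delete(ids) => println!("delete {} documents by external id", ids.len()),
            }
        }
    }

    fn main() {
        apply(vec![
            DocumentOperation::Add(1),
            DocumentOperation::Delete(vec!["1".into(), "2".into()]),
            DocumentOperation::Add(2),
        ]);
    }

Keeping both kinds in one ordered list is what lets the scheduler batch a deletion together with the additions around it while preserving their relative order.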
 /// A [batch](Batch) that combines multiple tasks operating on an index.
 #[derive(Debug)]
 pub(crate) enum IndexOperation {
-    DocumentImport {
+    DocumentOperation {
         index_uid: String,
         primary_key: Option<String>,
         method: IndexDocumentsMethod,
         documents_counts: Vec<u64>,
-        content_files: Vec<Uuid>,
+        operations: Vec<DocumentOperation>,
         tasks: Vec<Task>,
     },
     DocumentDeletion {
@@ -121,13 +126,13 @@ pub(crate) enum IndexOperation {
         settings: Vec<(bool, Settings<Unchecked>)>,
         settings_tasks: Vec<Task>,
     },
-    SettingsAndDocumentImport {
+    SettingsAndDocumentOperation {
         index_uid: String,
 
         primary_key: Option<String>,
         method: IndexDocumentsMethod,
         documents_counts: Vec<u64>,
-        content_files: Vec<Uuid>,
+        operations: Vec<DocumentOperation>,
         document_import_tasks: Vec<Task>,
 
         // The boolean indicates if it's a settings deletion or creation.
@@ -149,13 +154,13 @@ impl Batch {
                 tasks.iter().map(|task| task.uid).collect()
             }
             Batch::IndexOperation { op, .. } => match op {
-                IndexOperation::DocumentImport { tasks, .. }
+                IndexOperation::DocumentOperation { tasks, .. }
                 | IndexOperation::DocumentDeletion { tasks, .. }
                 | IndexOperation::Settings { tasks, .. }
                 | IndexOperation::DocumentClear { tasks, .. } => {
                     tasks.iter().map(|task| task.uid).collect()
                 }
-                IndexOperation::SettingsAndDocumentImport {
+                IndexOperation::SettingsAndDocumentOperation {
                     document_import_tasks: tasks,
                     settings_tasks: other,
                     ..
@@ -169,17 +174,33 @@ impl Batch {
             Batch::IndexSwap { task } => vec![task.uid],
         }
     }
 
+    /// Return the index UID associated with this batch
+    pub fn index_uid(&self) -> Option<&str> {
+        use Batch::*;
+        match self {
+            TaskCancelation { .. }
+            | TaskDeletion(_)
+            | SnapshotCreation(_)
+            | Dump(_)
+            | IndexSwap { .. } => None,
+            IndexOperation { op, .. } => Some(op.index_uid()),
+            IndexCreation { index_uid, .. }
+            | IndexUpdate { index_uid, .. }
+            | IndexDeletion { index_uid, .. } => Some(index_uid),
+        }
+    }
 }
 
 impl IndexOperation {
     pub fn index_uid(&self) -> &str {
         match self {
-            IndexOperation::DocumentImport { index_uid, .. }
+            IndexOperation::DocumentOperation { index_uid, .. }
             | IndexOperation::DocumentDeletion { index_uid, .. }
            | IndexOperation::DocumentClear { index_uid, .. }
            | IndexOperation::Settings { index_uid, .. }
            | IndexOperation::DocumentClearAndSetting { index_uid, .. }
-            | IndexOperation::SettingsAndDocumentImport { index_uid, .. } => index_uid,
+            | IndexOperation::SettingsAndDocumentOperation { index_uid, .. } => index_uid,
         }
     }
 }
@@ -206,18 +227,24 @@ impl IndexScheduler {
                 },
                 must_create_index,
             })),
-            BatchKind::DocumentImport { method, import_ids, .. } => {
-                let tasks = self.get_existing_tasks(rtxn, import_ids)?;
-                let primary_key = match &tasks[0].kind {
-                    KindWithContent::DocumentAdditionOrUpdate { primary_key, .. } => {
-                        primary_key.clone()
-                    }
-                    _ => unreachable!(),
-                };
+            BatchKind::DocumentOperation { method, operation_ids, .. } => {
+                let tasks = self.get_existing_tasks(rtxn, operation_ids)?;
+                let primary_key = tasks
+                    .iter()
+                    .find_map(|task| match task.kind {
+                        KindWithContent::DocumentAdditionOrUpdate { ref primary_key, .. } => {
+                            // we want to stop on the first document addition
+                            Some(primary_key.clone())
+                        }
+                        KindWithContent::DocumentDeletion { .. } => None,
+                        _ => unreachable!(),
+                    })
+                    .flatten();
 
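The `find_map` chain above picks the primary key of the first document addition in the batch, skips deletions while scanning, and the trailing `.flatten()` collapses the resulting `Option<Option<String>>` into an `Option<String>`. A standalone sketch of that selection (a toy `Kind` type, not the real `KindWithContent`):

    enum Kind {
        Addition { primary_key: Option<String> },
        Deletion,
    }

    fn first_primary_key(tasks: &[Kind]) -> Option<String> {
        tasks
            .iter()
            .find_map(|task| match task {
                // stop on the first addition, whether or not it carries a key
                Kind::Addition { primary_key } => Some(primary_key.clone()),
                // keep scanning past deletions
                Kind::Deletion => None,
            })
            .flatten()
    }

    fn main() {
        let tasks = vec![
            Kind::Deletion,
            Kind::Addition { primary_key: Some("id".into()) },
            Kind::Addition { primary_key: Some("other".into()) },
        ];
        assert_eq!(first_primary_key(&tasks), Some("id".to_string()));
    }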
                 let mut documents_counts = Vec::new();
-                let mut content_files = Vec::new();
+                let mut operations = Vec::new();
-                for task in &tasks {
+                for task in tasks.iter() {
                     match task.kind {
                         KindWithContent::DocumentAdditionOrUpdate {
                             content_file,
@@ -225,19 +252,23 @@ impl IndexScheduler {
                             ..
                         } => {
                             documents_counts.push(documents_count);
-                            content_files.push(content_file);
+                            operations.push(DocumentOperation::Add(content_file));
+                        }
+                        KindWithContent::DocumentDeletion { ref documents_ids, .. } => {
+                            documents_counts.push(documents_ids.len() as u64);
+                            operations.push(DocumentOperation::Delete(documents_ids.clone()));
                         }
                         _ => unreachable!(),
                     }
                 }
 
                 Ok(Some(Batch::IndexOperation {
-                    op: IndexOperation::DocumentImport {
+                    op: IndexOperation::DocumentOperation {
                         index_uid,
                         primary_key,
                         method,
                         documents_counts,
-                        content_files,
+                        operations,
                         tasks,
                     },
                     must_create_index,
@@ -321,11 +352,12 @@ impl IndexScheduler {
                     must_create_index,
                 }))
             }
-            BatchKind::SettingsAndDocumentImport {
+            BatchKind::SettingsAndDocumentOperation {
                 settings_ids,
                 method,
                 allow_index_creation,
-                import_ids,
+                primary_key,
+                operation_ids,
             } => {
                 let settings = self.create_next_batch_index(
                     rtxn,
@@ -337,7 +369,12 @@ impl IndexScheduler {
                 let document_import = self.create_next_batch_index(
                     rtxn,
                     index_uid.clone(),
-                    BatchKind::DocumentImport { method, allow_index_creation, import_ids },
+                    BatchKind::DocumentOperation {
+                        method,
+                        allow_index_creation,
+                        primary_key,
+                        operation_ids,
+                    },
                     must_create_index,
                 )?;
 
@@ -345,10 +382,10 @@ impl IndexScheduler {
                 (
                     Some(Batch::IndexOperation {
                         op:
-                            IndexOperation::DocumentImport {
+                            IndexOperation::DocumentOperation {
                                 primary_key,
                                 documents_counts,
-                                content_files,
+                                operations,
                                 tasks: document_import_tasks,
                                 ..
                             },
@@ -359,12 +396,12 @@ impl IndexScheduler {
                         ..
                     }),
                 ) => Ok(Some(Batch::IndexOperation {
-                    op: IndexOperation::SettingsAndDocumentImport {
+                    op: IndexOperation::SettingsAndDocumentOperation {
                         index_uid,
                         primary_key,
                         method,
                         documents_counts,
-                        content_files,
+                        operations,
                         document_import_tasks,
                         settings,
                         settings_tasks,
@@ -467,6 +504,12 @@ impl IndexScheduler {
         };
 
         let index_already_exists = self.index_mapper.exists(rtxn, index_name)?;
+        let mut primary_key = None;
+        if index_already_exists {
+            let index = self.index_mapper.index(rtxn, index_name)?;
+            let rtxn = index.read_txn()?;
+            primary_key = index.primary_key(&rtxn)?.map(|pk| pk.to_string());
+        }
+
         let index_tasks = self.index_tasks(rtxn, index_name)? & enqueued;
 
@@ -484,7 +527,7 @@ impl IndexScheduler {
             .collect::<Result<Vec<_>>>()?;
 
         if let Some((batchkind, create_index)) =
-            autobatcher::autobatch(enqueued, index_already_exists)
+            autobatcher::autobatch(enqueued, index_already_exists, primary_key.as_deref())
         {
             return self.create_next_batch_index(
                 rtxn,
@@ -947,9 +990,9 @@ impl IndexScheduler {
     ///
     /// ## Return
     /// The list of processed tasks.
-    fn apply_index_operation<'txn, 'i>(
+    fn apply_index_operation<'i>(
         &self,
-        index_wtxn: &'txn mut RwTxn<'i, '_>,
+        index_wtxn: &mut RwTxn<'i, '_>,
         index: &'i Index,
         operation: IndexOperation,
     ) -> Result<Vec<Task>> {
@@ -974,28 +1017,42 @@ impl IndexScheduler {
 
                 Ok(tasks)
             }
-            IndexOperation::DocumentImport {
+            IndexOperation::DocumentOperation {
                 index_uid: _,
                 primary_key,
                 method,
-                documents_counts,
+                documents_counts: _,
-                content_files,
+                operations,
                 mut tasks,
             } => {
                 let mut primary_key_has_been_set = false;
                 let must_stop_processing = self.must_stop_processing.clone();
                 let indexer_config = self.index_mapper.indexer_config();
-                // TODO use the code from the IndexCreate operation
                 if let Some(primary_key) = primary_key {
-                    if index.primary_key(index_wtxn)?.is_none() {
-                        let mut builder =
-                            milli::update::Settings::new(index_wtxn, index, indexer_config);
-                        builder.set_primary_key(primary_key);
-                        builder.execute(
-                            |indexing_step| debug!("update: {:?}", indexing_step),
-                            || must_stop_processing.clone().get(),
-                        )?;
-                        primary_key_has_been_set = true;
+                    match index.primary_key(index_wtxn)? {
+                        // if a primary key was set AND had already been defined in the index
+                        // but to a different value, we can make the whole batch fail.
+                        Some(pk) => {
+                            if primary_key != pk {
+                                return Err(milli::Error::from(
+                                    milli::UserError::PrimaryKeyCannotBeChanged(pk.to_string()),
+                                )
+                                .into());
+                            }
+                        }
+                        // if the primary key was set and there was no primary key set for this index
+                        // we set it to the received value before starting the indexing process.
+                        None => {
+                            let mut builder =
+                                milli::update::Settings::new(index_wtxn, index, indexer_config);
+                            builder.set_primary_key(primary_key);
+                            builder.execute(
+                                |indexing_step| debug!("update: {:?}", indexing_step),
+                                || must_stop_processing.clone().get(),
+                            )?;
+                            primary_key_has_been_set = true;
+                        }
                     }
                 }
 
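The `match` that replaced the old `is_none()` check changes the semantics: a batch that asks for a primary key on an index that already has a different one now fails up front instead of silently keeping the old key, while a missing key is still set through a settings update before indexing starts. The guard reduces to this shape (the error name mirrors `milli::UserError::PrimaryKeyCannotBeChanged`; everything else below is illustrative):

    #[derive(Debug, PartialEq)]
    struct PrimaryKeyCannotBeChanged(String);

    /// Returns Ok(true) when the caller must run a settings update to set the key.
    fn check_primary_key(
        existing: Option<&str>,
        requested: &str,
    ) -> Result<bool, PrimaryKeyCannotBeChanged> {
        match existing {
            // already defined to a different value: fail the whole batch
            Some(pk) if pk != requested => Err(PrimaryKeyCannotBeChanged(pk.to_string())),
            // same value: nothing to do
            Some(_) => Ok(false),
            // not defined yet: set it before indexing
            None => Ok(true),
        }
    }

    fn main() {
        assert_eq!(check_primary_key(None, "id"), Ok(true));
        assert_eq!(check_primary_key(Some("id"), "id"), Ok(false));
        assert_eq!(
            check_primary_key(Some("id"), "other"),
            Err(PrimaryKeyCannotBeChanged("id".to_string()))
        );
    }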
@@ -1010,26 +1067,82 @@ impl IndexScheduler {
                     || must_stop_processing.get(),
                 )?;
 
-                let mut results = Vec::new();
-                for content_uuid in content_files.into_iter() {
-                    let content_file = self.file_store.get_update(content_uuid)?;
-                    let reader = DocumentsBatchReader::from_reader(content_file)
-                        .map_err(milli::Error::from)?;
-                    let (new_builder, user_result) = builder.add_documents(reader)?;
-                    builder = new_builder;
-
-                    let user_result = match user_result {
-                        Ok(count) => Ok(DocumentAdditionResult {
-                            indexed_documents: count,
-                            number_of_documents: count, // TODO: this is wrong, we should use the value stored in the Details.
-                        }),
-                        Err(e) => Err(milli::Error::from(e)),
-                    };
-
-                    results.push(user_result);
+                for (operation, task) in operations.into_iter().zip(tasks.iter_mut()) {
+                    match operation {
+                        DocumentOperation::Add(content_uuid) => {
+                            let content_file = self.file_store.get_update(content_uuid)?;
+                            let reader = DocumentsBatchReader::from_reader(content_file)
+                                .map_err(milli::Error::from)?;
+                            let (new_builder, user_result) = builder.add_documents(reader)?;
+                            builder = new_builder;
+
+                            let received_documents =
+                                if let Some(Details::DocumentAdditionOrUpdate {
+                                    received_documents,
+                                    ..
+                                }) = task.details
+                                {
+                                    received_documents
+                                } else {
+                                    // In the case of a `documentAdditionOrUpdate` the details MUST be set
+                                    unreachable!();
+                                };
+
+                            match user_result {
+                                Ok(count) => {
+                                    task.status = Status::Succeeded;
+                                    task.details = Some(Details::DocumentAdditionOrUpdate {
+                                        received_documents,
+                                        indexed_documents: Some(count),
+                                    })
+                                }
+                                Err(e) => {
+                                    task.status = Status::Failed;
+                                    task.details = Some(Details::DocumentAdditionOrUpdate {
+                                        received_documents,
+                                        indexed_documents: Some(0),
+                                    });
+                                    task.error = Some(milli::Error::from(e).into());
+                                }
+                            }
+                        }
+                        DocumentOperation::Delete(document_ids) => {
+                            let (new_builder, user_result) =
+                                builder.remove_documents(document_ids)?;
+                            builder = new_builder;
+
+                            let provided_ids =
+                                if let Some(Details::DocumentDeletion { provided_ids, .. }) =
+                                    task.details
+                                {
+                                    provided_ids
+                                } else {
+                                    // In the case of a `documentDeletion` the details MUST be set
+                                    unreachable!();
+                                };
+
+                            match user_result {
+                                Ok(count) => {
+                                    task.status = Status::Succeeded;
+                                    task.details = Some(Details::DocumentDeletion {
+                                        provided_ids,
+                                        deleted_documents: Some(count),
+                                    });
+                                }
+                                Err(e) => {
+                                    task.status = Status::Failed;
+                                    task.details = Some(Details::DocumentDeletion {
+                                        provided_ids,
+                                        deleted_documents: Some(0),
+                                    });
+                                    task.error = Some(milli::Error::from(e).into());
+                                }
+                            }
+                        }
+                    }
                 }
 
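Because each operation now records its outcome directly on its task as the loop runs, the post-hoc bookkeeping deleted below (the `results` vector zipped with `documents_counts`) becomes dead code. The commit-or-skip decision can then be read straight off the tasks; roughly, as a sketch:

    #[derive(Default)]
    struct Task {
        error: Option<String>, // set by the loop above on failure
    }

    fn main() {
        let mut tasks = vec![Task::default(), Task::default()];
        tasks[1].error = Some("bad document".into());

        // Execute the indexing builder as long as at least one operation succeeded.
        let commit = !tasks.iter().all(|t| t.error.is_some());
        assert!(commit);
    }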
-                if results.iter().any(|res| res.is_ok()) {
+                if !tasks.iter().all(|res| res.error.is_some()) {
                     let addition = builder.execute()?;
                     info!("document addition done: {:?}", addition);
                 } else if primary_key_has_been_set {
@@ -1044,28 +1157,6 @@ impl IndexScheduler {
                     )?;
                 }
 
-                for (task, (ret, count)) in
-                    tasks.iter_mut().zip(results.into_iter().zip(documents_counts))
-                {
-                    match ret {
-                        Ok(DocumentAdditionResult { indexed_documents, number_of_documents }) => {
-                            task.status = Status::Succeeded;
-                            task.details = Some(Details::DocumentAdditionOrUpdate {
-                                received_documents: number_of_documents,
-                                indexed_documents: Some(indexed_documents),
-                            });
-                        }
-                        Err(error) => {
-                            task.status = Status::Failed;
-                            task.details = Some(Details::DocumentAdditionOrUpdate {
-                                received_documents: count,
-                                indexed_documents: Some(count),
-                            });
-                            task.error = Some(error.into())
-                        }
-                    }
-                }
-
                 Ok(tasks)
             }
             IndexOperation::DocumentDeletion { index_uid: _, documents, mut tasks } => {
@@ -1108,12 +1199,12 @@ impl IndexScheduler {
 
                 Ok(tasks)
             }
-            IndexOperation::SettingsAndDocumentImport {
+            IndexOperation::SettingsAndDocumentOperation {
                 index_uid,
                 primary_key,
                 method,
                 documents_counts,
-                content_files,
+                operations,
                 document_import_tasks,
                 settings,
                 settings_tasks,
@@ -1131,12 +1222,12 @@ impl IndexScheduler {
                 let mut import_tasks = self.apply_index_operation(
                     index_wtxn,
                     index,
-                    IndexOperation::DocumentImport {
+                    IndexOperation::DocumentOperation {
                         index_uid,
                         primary_key,
                         method,
                         documents_counts,
-                        content_files,
+                        operations,
                         tasks: document_import_tasks,
                     },
                 )?;
 
@@ -100,9 +100,9 @@ pub enum Error {
     InvalidIndexUid { index_uid: String },
     #[error("Task `{0}` not found.")]
     TaskNotFound(TaskId),
-    #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
+    #[error("Query parameters to filter the tasks to delete are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
     TaskDeletionWithEmptyQuery,
-    #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
+    #[error("Query parameters to filter the tasks to cancel are missing. Available query parameters are: `uids`, `indexUids`, `statuses`, `types`, `canceledBy`, `beforeEnqueuedAt`, `afterEnqueuedAt`, `beforeStartedAt`, `afterStartedAt`, `beforeFinishedAt`, `afterFinishedAt`.")]
     TaskCancelationWithEmptyQuery,
 
     #[error(transparent)]
@@ -141,8 +141,8 @@ impl ErrorCode for Error {
             Error::IndexAlreadyExists(_) => Code::IndexAlreadyExists,
             Error::SwapDuplicateIndexesFound(_) => Code::InvalidSwapDuplicateIndexFound,
             Error::SwapDuplicateIndexFound(_) => Code::InvalidSwapDuplicateIndexFound,
-            Error::SwapIndexNotFound(_) => Code::InvalidSwapIndexes,
+            Error::SwapIndexNotFound(_) => Code::IndexNotFound,
-            Error::SwapIndexesNotFound(_) => Code::InvalidSwapIndexes,
+            Error::SwapIndexesNotFound(_) => Code::IndexNotFound,
             Error::InvalidTaskDate { field, .. } => (*field).into(),
             Error::InvalidTaskUids { .. } => Code::InvalidTaskUids,
             Error::InvalidTaskStatuses { .. } => Code::InvalidTaskStatuses,
 
@@ -9,10 +9,11 @@ use meilisearch_types::heed::types::Str;
 use meilisearch_types::heed::{Database, Env, EnvOpenOptions, RoTxn, RwTxn};
 use meilisearch_types::milli::update::IndexerConfig;
 use meilisearch_types::milli::Index;
+use synchronoise::SignalEvent;
 use time::OffsetDateTime;
 use uuid::Uuid;
 
-use self::IndexStatus::{Available, BeingDeleted};
+use self::IndexStatus::{Available, BeingDeleted, BeingResized};
 use crate::uuid_codec::UuidCodec;
 use crate::{clamp_to_page_size, Error, Result};
 
@@ -45,6 +46,8 @@ pub struct IndexMapper {
 pub enum IndexStatus {
     /// Do not insert it back in the index map as it is currently being deleted.
     BeingDeleted,
+    /// Temporarily do not insert the index in the index map as it is currently being resized.
+    BeingResized(Arc<SignalEvent>),
     /// You can use the index without worrying about anything.
     Available(Index),
 }
@@ -71,9 +74,10 @@ impl IndexMapper {
         &self,
         path: &Path,
         date: Option<(OffsetDateTime, OffsetDateTime)>,
+        map_size: usize,
     ) -> Result<Index> {
         let mut options = EnvOpenOptions::new();
-        options.map_size(clamp_to_page_size(self.index_size));
+        options.map_size(clamp_to_page_size(map_size));
         options.max_readers(1024);
 
         if let Some((created, updated)) = date {
@@ -102,14 +106,15 @@ impl IndexMapper {
         let index_path = self.base_path.join(uuid.to_string());
         fs::create_dir_all(&index_path)?;
 
-        let index = self.create_or_open_index(&index_path, date)?;
+        let index = self.create_or_open_index(&index_path, date, self.index_size)?;
 
         wtxn.commit()?;
+        // Error if the UUIDv4 somehow already exists in the map, since it should be fresh.
+        // This is very unlikely to happen in practice.
         // TODO: it would be better to lazily create the index. But we need an Index::open function for milli.
-        if let Some(BeingDeleted) =
-            self.index_map.write().unwrap().insert(uuid, Available(index.clone()))
+        if self.index_map.write().unwrap().insert(uuid, Available(index.clone())).is_some()
         {
-            panic!("Uuid v4 conflict.");
+            panic!("Uuid v4 conflict: index with UUID {uuid} already exists.");
         }
 
         Ok(index)
@@ -131,13 +136,23 @@ impl IndexMapper {
 
         wtxn.commit()?;
         // We remove the index from the in-memory index map.
-        let mut lock = self.index_map.write().unwrap();
-        let closing_event = match lock.insert(uuid, BeingDeleted) {
-            Some(Available(index)) => Some(index.prepare_for_closing()),
-            _ => None,
-        };
+        let closing_event = loop {
+            let mut lock = self.index_map.write().unwrap();
+            let resize_operation = match lock.insert(uuid, BeingDeleted) {
+                Some(Available(index)) => break Some(index.prepare_for_closing()),
+                // The target index is in the middle of a resize operation.
+                // Wait for this operation to complete, then try again.
+                Some(BeingResized(resize_operation)) => resize_operation.clone(),
+                // The index is already being deleted or doesn't exist.
+                // It's OK to remove it from the map again.
+                _ => break None,
+            };
 
-        drop(lock);
+            // Avoiding deadlocks: we need to drop the lock before waiting for the end of the resize, which
+            // will involve operations on the very map we're locking.
+            drop(lock);
+            resize_operation.wait();
+        };
 
         let index_map = self.index_map.clone();
         let index_path = self.base_path.join(uuid.to_string());
@@ -171,6 +186,87 @@ impl IndexMapper {
         Ok(self.index_mapping.get(rtxn, name)?.is_some())
     }
 
+    /// Resizes the maximum size of the specified index to the double of its current maximum size.
+    ///
+    /// This operation involves closing the underlying environment and so can take a long time to complete.
+    ///
+    /// # Panics
+    ///
+    /// - If the Index corresponding to the passed name is concurrently being deleted/resized or cannot be found in the
+    ///   in memory hash map.
+    pub fn resize_index(&self, rtxn: &RoTxn, name: &str) -> Result<()> {
+        // fixme: factor to a function?
+        let uuid = self
+            .index_mapping
+            .get(rtxn, name)?
+            .ok_or_else(|| Error::IndexNotFound(name.to_string()))?;
+
+        // We remove the index from the in-memory index map.
+        let mut lock = self.index_map.write().unwrap();
+        // signal that will be sent when the resize operation completes
+        let resize_operation = Arc::new(SignalEvent::manual(false));
+        let index = match lock.insert(uuid, BeingResized(resize_operation)) {
+            Some(Available(index)) => index,
+            Some(previous_status) => {
+                lock.insert(uuid, previous_status);
+                panic!(
+                    "Attempting to resize index {name} that is already being resized or deleted."
+                )
+            }
+            None => {
+                panic!("Could not find the status of index {name} in the in-memory index mapper.")
+            }
+        };
+
+        drop(lock);
+
+        let resize_succeeded = (move || {
+            let current_size = index.map_size()?;
+            let new_size = current_size * 2;
+            let closing_event = index.prepare_for_closing();
+
+            log::debug!("Waiting for index {name} to close");
+
+            if !closing_event.wait_timeout(std::time::Duration::from_secs(600)) {
+                // fail after 10 minutes waiting
+                panic!("Could not resize index {name} (unable to close it)");
+            }
+
+            log::info!("Resized index {name} from {current_size} to {new_size} bytes");
+            let index_path = self.base_path.join(uuid.to_string());
+            let index = self.create_or_open_index(&index_path, None, new_size)?;
+            Ok(index)
+        })();
+
+        // Put the map back to a consistent state.
+        // Even if there was an error we don't want to leave the map in an inconsistent state as it would cause
+        // deadlocks.
+        let mut lock = self.index_map.write().unwrap();
+        let (resize_operation, resize_succeeded) = match resize_succeeded {
+            Ok(index) => {
+                // insert the resized index
+                let Some(BeingResized(resize_operation)) = lock.insert(uuid, Available(index)) else {
+                    panic!("Index state for index {name} was modified while it was being resized")
+                };
+
+                (resize_operation, Ok(()))
+            }
+            Err(error) => {
+                // there was an error, not much we can do... delete the index from the in-memory map to prevent future errors
+                let Some(BeingResized(resize_operation)) = lock.remove(&uuid) else {
+                    panic!("Index state for index {name} was modified while it was being resized")
+                };
+                (resize_operation, Err(error))
+            }
+        };
+
+        // drop the lock before signaling completion so that other threads don't immediately await on the lock after waking up.
+        drop(lock);
+        resize_operation.signal();
+
+        resize_succeeded
+    }
+
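`resize_index` parks the index behind a `BeingResized(SignalEvent)` marker, performs the close-and-reopen with the map lock released, restores a consistent map state even on error, and only then signals waiters. The rendezvous can be sketched with the standard library alone; the `Signal` type below is a hand-rolled stand-in for synchronoise's `SignalEvent`, not its actual implementation:

    use std::sync::{Arc, Condvar, Mutex};
    use std::thread;

    // Manual-reset event: wait() blocks until signal() has been called.
    struct Signal {
        done: Mutex<bool>,
        cv: Condvar,
    }

    impl Signal {
        fn new() -> Self {
            Signal { done: Mutex::new(false), cv: Condvar::new() }
        }
        fn wait(&self) {
            let mut done = self.done.lock().unwrap();
            while !*done {
                done = self.cv.wait(done).unwrap();
            }
        }
        fn signal(&self) {
            *self.done.lock().unwrap() = true;
            self.cv.notify_all();
        }
    }

    fn main() {
        let resize = Arc::new(Signal::new());
        let waiter = {
            let resize = Arc::clone(&resize);
            thread::spawn(move || {
                // A reader that found `BeingResized` blocks here, with no map lock held.
                resize.wait();
                println!("resize finished, retrying lookup");
            })
        };

        // The resizing thread closes and reopens the environment, then wakes everyone.
        println!("doubling map size, reopening index");
        resize.signal();
        waiter.join().unwrap();
    }

Signalling only after the lock is dropped, as the added comment notes, matters: waking waiters while still holding the write lock would just make them queue up on it again.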
     /// Return an index, may open it if it wasn't already opened.
     pub fn index(&self, rtxn: &RoTxn, name: &str) -> Result<Index> {
         let uuid = self
@@ -179,31 +275,47 @@ impl IndexMapper {
             .ok_or_else(|| Error::IndexNotFound(name.to_string()))?;
 
         // we clone here to drop the lock before entering the match
-        let index = self.index_map.read().unwrap().get(&uuid).cloned();
-        let index = match index {
-            Some(Available(index)) => index,
-            Some(BeingDeleted) => return Err(Error::IndexNotFound(name.to_string())),
-            // since we're lazy, it's possible that the index has not been opened yet.
-            None => {
-                let mut index_map = self.index_map.write().unwrap();
-                // between the read lock and the write lock it's not impossible
-                // that someone already opened the index (eg if two search happens
-                // at the same time), thus before opening it we check a second time
-                // if it's not already there.
-                // Since there is a good chance it's not already there we can use
-                // the entry method.
-                match index_map.entry(uuid) {
-                    Entry::Vacant(entry) => {
-                        let index_path = self.base_path.join(uuid.to_string());
-
-                        let index = self.create_or_open_index(&index_path, None)?;
-                        entry.insert(Available(index.clone()));
-                        index
-                    }
-                    Entry::Occupied(entry) => match entry.get() {
-                        Available(index) => index.clone(),
-                        BeingDeleted => return Err(Error::IndexNotFound(name.to_string())),
-                    },
-                }
-            }
-        };
+        let index = loop {
+            let index = self.index_map.read().unwrap().get(&uuid).cloned();
+
+            match index {
+                Some(Available(index)) => break index,
+                Some(BeingResized(ref resize_operation)) => {
+                    // Avoiding deadlocks: no lock taken while doing this operation.
+                    resize_operation.wait();
+                    continue;
+                }
+                Some(BeingDeleted) => return Err(Error::IndexNotFound(name.to_string())),
+                // since we're lazy, it's possible that the index has not been opened yet.
+                None => {
+                    let mut index_map = self.index_map.write().unwrap();
+                    // between the read lock and the write lock it's not impossible
+                    // that someone already opened the index (eg if two searches happen
+                    // at the same time), thus before opening it we check a second time
+                    // if it's not already there.
+                    // Since there is a good chance it's not already there we can use
+                    // the entry method.
+                    match index_map.entry(uuid) {
+                        Entry::Vacant(entry) => {
+                            let index_path = self.base_path.join(uuid.to_string());
+
+                            let index =
+                                self.create_or_open_index(&index_path, None, self.index_size)?;
+                            entry.insert(Available(index.clone()));
+                            break index;
+                        }
+                        Entry::Occupied(entry) => match entry.get() {
+                            Available(index) => break index.clone(),
+                            BeingResized(resize_operation) => {
+                                // Avoiding the deadlock: we drop the lock before waiting
+                                let resize_operation = resize_operation.clone();
+                                drop(index_map);
+                                resize_operation.wait();
+                                continue;
+                            }
+                            BeingDeleted => return Err(Error::IndexNotFound(name.to_string())),
+                        },
+                    }
+                }
+            }
+        };
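`index` is now a retry loop: on `BeingResized` it waits for the signal with no lock held and retries from the top, so lookups issued during a resize block briefly instead of failing; only `BeingDeleted` surfaces as an error. The control flow, reduced to a lock-free skeleton with toy types:

    enum Status {
        Available(&'static str),
        BeingResized, // in the real code this carries the signal to wait on
        BeingDeleted,
    }

    fn lookup(mut states: Vec<Status>) -> Result<&'static str, &'static str> {
        Ok(loop {
            // take a fresh snapshot of the map entry on every turn,
            // as the read lock does in the real code
            match states.remove(0) {
                Status::Available(index) => break index,
                // drop the lock, wait for the resize signal, then retry
                Status::BeingResized => continue,
                Status::BeingDeleted => return Err("index not found"),
            }
        })
    }

    fn main() {
        let states = vec![Status::BeingResized, Status::Available("movies")];
        assert_eq!(lookup(states), Ok("movies"));
    }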
@@ -1,3 +1,4 @@
+use std::collections::BTreeSet;
 use std::fmt::Write;
 
 use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
@@ -92,7 +93,9 @@ pub fn snapshot_index_scheduler(scheduler: &IndexScheduler) -> String {
 
 pub fn snapshot_file_store(file_store: &file_store::FileStore) -> String {
     let mut snap = String::new();
-    for uuid in file_store.__all_uuids() {
+    // we store the uuids in a `BTreeSet` to keep them ordered.
+    let all_uuids = file_store.all_uuids().unwrap().collect::<Result<BTreeSet<_>, _>>().unwrap();
+    for uuid in all_uuids {
         snap.push_str(&format!("{uuid}\n"));
     }
     snap
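Routing the uuids through a `BTreeSet` before printing makes the snapshot deterministic: whatever order the file store yields, the set iterates sorted (and deduplicated), so the fixture text is stable across runs. For example:

    use std::collections::BTreeSet;
    use std::fmt::Write;

    fn main() {
        // iteration order from the store is arbitrary...
        let raw = vec![3u32, 1, 2, 3];
        // ...but a BTreeSet always yields a sorted, deduplicated sequence
        let ordered: BTreeSet<_> = raw.into_iter().collect();

        let mut snap = String::new();
        for uuid in ordered {
            writeln!(snap, "{uuid}").unwrap();
        }
        assert_eq!(snap, "1\n2\n3\n");
    }

The switch from the test-only `__all_uuids` to a fallible `all_uuids()` iterator also surfaces I/O errors instead of hiding them, which is why the snapshot helper now unwraps a `Result`.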
@@ -43,6 +43,7 @@ use file_store::FileStore;
 use meilisearch_types::error::ResponseError;
 use meilisearch_types::heed::types::{OwnedType, SerdeBincode, SerdeJson, Str};
 use meilisearch_types::heed::{self, Database, Env, RoTxn};
+use meilisearch_types::index_uid_pattern::IndexUidPattern;
 use meilisearch_types::milli;
 use meilisearch_types::milli::documents::DocumentsBatchBuilder;
 use meilisearch_types::milli::update::IndexerConfig;
@@ -422,12 +423,12 @@ impl IndexScheduler {
             #[cfg(test)]
             run.breakpoint(Breakpoint::Init);
 
-            loop {
-                run.wake_up.wait();
+            run.wake_up.wait();
 
+            loop {
                 match run.tick() {
-                    Ok(0) => (),
+                    Ok(TickOutcome::TickAgain(_)) => (),
-                    Ok(_) => run.wake_up.signal(),
+                    Ok(TickOutcome::WaitForSignal) => run.wake_up.wait(),
                     Err(e) => {
                         log::error!("{}", e);
                         // Wait one second when an irrecoverable error occurs.
@@ -440,7 +441,6 @@ impl IndexScheduler {
                         ) {
                             std::thread::sleep(Duration::from_secs(1));
                         }
-                        run.wake_up.signal();
                     }
                 }
             }
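The run loop inverts its shape here: instead of waking at the top of every iteration and unconditionally re-signalling itself after a productive tick, the thread waits once before entering the loop and lets `tick` decide what happens next: `TickAgain` spins immediately, `WaitForSignal` parks until a new task is registered. A sketch of that shape, with a scripted iterator standing in for real `tick` results:

    enum TickOutcome {
        TickAgain(u64), // processed something; more work may already be queued
        WaitForSignal,  // queue drained; park until a task is registered
    }

    fn main() {
        let mut script = vec![
            TickOutcome::TickAgain(3),
            TickOutcome::TickAgain(1),
            TickOutcome::WaitForSignal,
        ]
        .into_iter();

        println!("initial wait for the first task"); // run.wake_up.wait()
        loop {
            match script.next() {
                Some(TickOutcome::TickAgain(n)) => println!("processed {n} tasks, ticking again"),
                Some(TickOutcome::WaitForSignal) => {
                    // the real loop parks on run.wake_up.wait() and stays in the loop
                    println!("nothing left, waiting for a signal");
                    break;
                }
                None => break,
            }
        }
    }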
@@ -452,6 +452,10 @@ impl IndexScheduler {
         &self.index_mapper.indexer_config
     }
 
+    pub fn size(&self) -> Result<u64> {
+        Ok(self.env.real_disk_size()?)
+    }
+
     /// Return the index corresponding to the name.
     ///
     /// * If the index wasn't opened before, the index will be opened.
@@ -502,13 +506,22 @@ impl IndexScheduler {
         }
 
         if let Some(canceled_by) = &query.canceled_by {
+            let mut all_canceled_tasks = RoaringBitmap::new();
             for cancel_task_uid in canceled_by {
                 if let Some(canceled_by_uid) =
                     self.canceled_by.get(rtxn, &BEU32::new(*cancel_task_uid))?
                 {
-                    tasks &= canceled_by_uid;
+                    all_canceled_tasks |= canceled_by_uid;
                 }
             }
+
+            // if `canceled_by` was specified but no task matches, we prefer
+            // matching zero tasks rather than all of them.
+            if all_canceled_tasks.is_empty() {
+                return Ok(RoaringBitmap::new());
+            } else {
+                tasks &= all_canceled_tasks;
+            }
         }
 
         if let Some(kind) = &query.types {
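The fix above switches the `canceled_by` filter from an intersection per cancelation task to a union: the uids canceled by any of the requested cancelation tasks are first OR-ed together, and only then intersected with the running task set, with an explicit empty-match short-circuit. A small standalone sketch of the same bitmap algebra with the `roaring` crate:

use roaring::RoaringBitmap;

fn filter_by_canceled_by(tasks: &RoaringBitmap, canceled_sets: &[RoaringBitmap]) -> RoaringBitmap {
    let mut all_canceled = RoaringBitmap::new();
    // Union: a task qualifies if *any* requested cancelation task canceled it.
    for set in canceled_sets {
        all_canceled |= set;
    }
    // Prefer matching zero tasks over all tasks when nothing qualifies.
    if all_canceled.is_empty() {
        RoaringBitmap::new()
    } else {
        all_canceled &= tasks;
        all_canceled
    }
}

fn main() {
    let tasks: RoaringBitmap = (0..10).collect();
    let by_first: RoaringBitmap = [1u32, 2].into_iter().collect();
    let by_second: RoaringBitmap = [2u32, 7].into_iter().collect();
    let filtered = filter_by_canceled_by(&tasks, &[by_first, by_second]);
    assert_eq!(filtered.iter().collect::<Vec<_>>(), vec![1, 2, 7]);
}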
@@ -617,7 +630,7 @@ impl IndexScheduler {
         &self,
         rtxn: &RoTxn,
         query: &Query,
-        authorized_indexes: &Option<Vec<String>>,
+        authorized_indexes: &Option<Vec<IndexUidPattern>>,
     ) -> Result<RoaringBitmap> {
         let mut tasks = self.get_task_ids(rtxn, query)?;
 
@@ -635,7 +648,7 @@ impl IndexScheduler {
             let all_indexes_iter = self.index_tasks.iter(rtxn)?;
             for result in all_indexes_iter {
                 let (index, index_tasks) = result?;
-                if !authorized_indexes.contains(&index.to_owned()) {
+                if !authorized_indexes.iter().any(|p| p.matches_str(index)) {
                     tasks -= index_tasks;
                 }
             }
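`matches_str` lets an authorized key carry index *patterns* rather than exact names, so an exact `contains` check no longer works. The diff does not show `IndexUidPattern`'s internals; a plausible reading, sketched here purely as an assumption, is an exact match plus an optional trailing `*` wildcard:

// Hypothetical matcher in the spirit of `IndexUidPattern::matches_str`;
// the real type lives in meilisearch-types and is not shown in this diff.
fn pattern_matches(pattern: &str, index: &str) -> bool {
    match pattern.strip_suffix('*') {
        Some(prefix) => index.starts_with(prefix),
        None => pattern == index,
    }
}

fn main() {
    let authorized = ["doggo", "prod-*"];
    for index in ["doggo", "doggos", "prod-search"] {
        let allowed = authorized.iter().any(|p| pattern_matches(p, index));
        println!("{index}: {allowed}"); // doggo: true, doggos: false, prod-search: true
    }
}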
@@ -655,7 +668,7 @@ impl IndexScheduler {
     pub fn get_tasks_from_authorized_indexes(
         &self,
         query: Query,
-        authorized_indexes: Option<Vec<String>>,
+        authorized_indexes: Option<Vec<IndexUidPattern>>,
     ) -> Result<Vec<Task>> {
         let rtxn = self.env.read_txn()?;
 
@@ -751,8 +764,8 @@ impl IndexScheduler {
         Ok(task)
     }
 
-    /// Register a new task comming from a dump in the scheduler.
-    /// By takinig a mutable ref we're pretty sure no one will ever import a dump while actix is running.
+    /// Register a new task coming from a dump in the scheduler.
+    /// By taking a mutable ref we're pretty sure no one will ever import a dump while actix is running.
     pub fn register_dumped_task(
         &mut self,
         task: TaskDump,
@@ -889,6 +902,11 @@ impl IndexScheduler {
         Ok(self.file_store.new_update_with_uuid(uuid)?)
     }
 
+    /// The size on disk taken by all the update files contained in the `IndexScheduler`, in bytes.
+    pub fn compute_update_file_size(&self) -> Result<u64> {
+        Ok(self.file_store.compute_total_size()?)
+    }
+
     /// Delete a file from the index scheduler.
     ///
     /// Counterpart to the [`create_update_file`](IndexScheduler::create_update_file) method.
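The size computation itself is delegated to the file store and its body is not part of this diff. A plausible implementation, assumed here rather than taken from the source, simply sums the on-disk length of every regular file in the store's directory:

use std::io;
use std::path::Path;

// Assumed shape of a total-size computation over a directory of update files.
fn compute_total_size(dir: &Path) -> io::Result<u64> {
    let mut total = 0;
    for entry in std::fs::read_dir(dir)? {
        let metadata = entry?.metadata()?;
        if metadata.is_file() {
            total += metadata.len();
        }
    }
    Ok(total)
}

fn main() -> io::Result<()> {
    let size = compute_total_size(Path::new("."))?;
    println!("{size} bytes of update files");
    Ok(())
}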
@@ -908,7 +926,7 @@ impl IndexScheduler {
    /// 5. Reset the in-memory list of processed tasks.
    ///
    /// Returns the number of processed tasks.
-    fn tick(&self) -> Result<usize> {
+    fn tick(&self) -> Result<TickOutcome> {
        #[cfg(test)]
        {
            *self.run_loop_iteration.write().unwrap() += 1;
@@ -919,8 +937,9 @@ impl IndexScheduler {
         let batch =
             match self.create_next_batch(&rtxn).map_err(|e| Error::CreateBatch(Box::new(e)))? {
                 Some(batch) => batch,
-                None => return Ok(0),
+                None => return Ok(TickOutcome::WaitForSignal),
             };
+        let index_uid = batch.index_uid().map(ToOwned::to_owned);
         drop(rtxn);
 
         // 1. store the starting date with the bitmap of processing tasks.
@@ -991,7 +1010,23 @@ impl IndexScheduler {
                 // the `started_at` date times and `processings` of the current processing tasks.
                 // This date time is used by the task cancelation to store the right `started_at`
                 // date in the task on disk.
-                return Ok(0);
+                return Ok(TickOutcome::TickAgain(0));
+            }
+            // If an index said it was full, we need to:
+            // 1. identify which index is full
+            // 2. close the associated environment
+            // 3. resize it
+            // 4. re-schedule tasks
+            Err(Error::Milli(milli::Error::UserError(
+                milli::UserError::MaxDatabaseSizeReached,
+            ))) if index_uid.is_some() => {
+                // fixme: add index_uid to the match to avoid the unwrap
+                let index_uid = index_uid.unwrap();
+                // fixme: handle the error more gracefully? not sure when this could happen
+                self.index_mapper.resize_index(&wtxn, &index_uid)?;
+                wtxn.abort().map_err(Error::HeedTransaction)?;
+
+                return Ok(TickOutcome::TickAgain(0));
             }
             // In case of a failure we must get back and patch all the tasks with the error.
             Err(err) => {
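When an LMDB-backed index reports `MaxDatabaseSizeReached`, the new arm above aborts the write transaction, asks the index mapper to resize the environment, and returns `TickAgain(0)` so the same batch is retried on the next tick. The resize strategy itself is not shown in this hunk; a common approach, sketched here with a hypothetical environment handle, is to close and reopen the memory map with a doubled size:

// Hypothetical environment handle; the real code goes through heed/LMDB
// and the scheduler's IndexMapper, neither of which appears in this hunk.
struct Env {
    map_size: usize,
}

impl Env {
    fn reopen_with_map_size(self, map_size: usize) -> Env {
        // In the real system this would close the memory map and reopen
        // the environment with the larger size.
        Env { map_size }
    }
}

fn resize(env: Env) -> Env {
    // Doubling keeps the number of resizes logarithmic in the final size.
    let new_size = env.map_size.saturating_mul(2);
    env.reopen_with_map_size(new_size)
}

fn main() {
    let env = Env { map_size: 1 << 30 };
    let env = resize(env);
    assert_eq!(env.map_size, 2 << 30);
}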
@@ -1031,7 +1066,7 @@ impl IndexScheduler {
         #[cfg(test)]
         self.breakpoint(Breakpoint::AfterProcessing);
 
-        Ok(processed_tasks)
+        Ok(TickOutcome::TickAgain(processed_tasks))
     }
 
     pub(crate) fn delete_persisted_task_data(&self, task: &Task) -> Result<()> {
@@ -1066,6 +1101,16 @@ impl IndexScheduler {
     }
 }
 
+/// The outcome of calling the [`IndexScheduler::tick`] function.
+pub enum TickOutcome {
+    /// The scheduler should immediately attempt another `tick`.
+    ///
+    /// The `usize` field contains the number of processed tasks.
+    TickAgain(usize),
+    /// The scheduler should wait for an external signal before attempting another `tick`.
+    WaitForSignal,
+}
+
 #[cfg(test)]
 mod tests {
     use std::io::{BufWriter, Seek, Write};
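With `TickOutcome`, the run loop's control flow becomes data: a tick that did work asks to be called again immediately, while an idle tick parks the thread until something new is registered. A condensed sketch of that dispatch (the `tick` body below is a stub standing in for the real scheduler, which is not reproduced here):

/// Mirror of the enum added above.
pub enum TickOutcome {
    TickAgain(usize),
    WaitForSignal,
}

// Stubbed tick: pretend we process a few batches and then go idle.
fn tick(remaining: &mut u32) -> Result<TickOutcome, String> {
    if *remaining > 0 {
        *remaining -= 1;
        Ok(TickOutcome::TickAgain(1))
    } else {
        Ok(TickOutcome::WaitForSignal)
    }
}

fn main() {
    let mut remaining = 3;
    loop {
        match tick(&mut remaining) {
            // Work was done: loop again right away, there may be more batches.
            Ok(TickOutcome::TickAgain(_)) => (),
            // Nothing to do: the real scheduler would block on
            // `run.wake_up.wait()` here; this sketch just stops.
            Ok(TickOutcome::WaitForSignal) => break,
            Err(e) => {
                eprintln!("{e}");
                break;
            }
        }
    }
}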
@@ -1661,6 +1706,105 @@ mod tests {
         snapshot!(snapshot_index_scheduler(&index_scheduler), name: "both_task_succeeded");
     }
 
+    #[test]
+    fn document_addition_and_document_deletion() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        let content = r#"[
+            { "id": 1, "doggo": "jean bob" },
+            { "id": 2, "catto": "jorts" },
+            { "id": 3, "doggo": "bork" }
+        ]"#;
+
+        let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap();
+        let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+        file.persist().unwrap();
+        index_scheduler
+            .register(KindWithContent::DocumentAdditionOrUpdate {
+                index_uid: S("doggos"),
+                primary_key: Some(S("id")),
+                method: ReplaceDocuments,
+                content_file: uuid,
+                documents_count,
+                allow_index_creation: true,
+            })
+            .unwrap();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task");
+        index_scheduler
+            .register(KindWithContent::DocumentDeletion {
+                index_uid: S("doggos"),
+                documents_ids: vec![S("1"), S("2")],
+            })
+            .unwrap();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task");
+
+        handle.advance_one_successful_batch(); // The addition AND the deletion should've been batched together.
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_processing_the_batch");
+
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn document_deletion_and_document_addition() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+        index_scheduler
+            .register(KindWithContent::DocumentDeletion {
+                index_uid: S("doggos"),
+                documents_ids: vec![S("1"), S("2")],
+            })
+            .unwrap();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_first_task");
+
+        let content = r#"[
+            { "id": 1, "doggo": "jean bob" },
+            { "id": 2, "catto": "jorts" },
+            { "id": 3, "doggo": "bork" }
+        ]"#;
+
+        let (uuid, mut file) = index_scheduler.create_update_file_with_uuid(0).unwrap();
+        let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+        file.persist().unwrap();
+        index_scheduler
+            .register(KindWithContent::DocumentAdditionOrUpdate {
+                index_uid: S("doggos"),
+                primary_key: Some(S("id")),
+                method: ReplaceDocuments,
+                content_file: uuid,
+                documents_count,
+                allow_index_creation: true,
+            })
+            .unwrap();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "registered_the_second_task");
+
+        // The deletion should have failed because it can't create an index.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_failing_the_deletion");
+
+        // The addition should work.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_last_successful_addition");
+
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
     #[test]
     fn do_not_batch_task_of_different_indexes() {
         let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
@@ -1991,7 +2135,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2038,7 +2182,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2090,7 +2234,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2141,7 +2285,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2192,7 +2336,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[macro_export]
@@ -2503,7 +2647,11 @@ mod tests {
 
         let query = Query { index_uids: Some(vec!["catto".to_owned()]), ..Default::default() };
         let tasks = index_scheduler
-            .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()]))
+            .get_task_ids_from_authorized_indexes(
+                &rtxn,
+                &query,
+                &Some(vec![IndexUidPattern::new_unchecked("doggo")]),
+            )
             .unwrap();
         // we have asked for only the tasks associated with catto, but are only authorized to retrieve the tasks
         // associated with doggo -> empty result
@@ -2511,7 +2659,11 @@ mod tests {
 
         let query = Query::default();
         let tasks = index_scheduler
-            .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_owned()]))
+            .get_task_ids_from_authorized_indexes(
+                &rtxn,
+                &query,
+                &Some(vec![IndexUidPattern::new_unchecked("doggo")]),
+            )
             .unwrap();
         // we asked for all the tasks, but we are only authorized to retrieve the doggo tasks
         // -> only the index creation of doggo should be returned
@@ -2522,7 +2674,10 @@ mod tests {
             .get_task_ids_from_authorized_indexes(
                 &rtxn,
                 &query,
-                &Some(vec!["catto".to_owned(), "doggo".to_owned()]),
+                &Some(vec![
+                    IndexUidPattern::new_unchecked("catto"),
+                    IndexUidPattern::new_unchecked("doggo"),
+                ]),
             )
             .unwrap();
         // we asked for all the tasks, but we are only authorized to retrieve the doggo and catto tasks
@@ -2570,7 +2725,11 @@ mod tests {
 
         let query = Query { canceled_by: Some(vec![task_cancelation.uid]), ..Query::default() };
         let tasks = index_scheduler
-            .get_task_ids_from_authorized_indexes(&rtxn, &query, &Some(vec!["doggo".to_string()]))
+            .get_task_ids_from_authorized_indexes(
+                &rtxn,
+                &query,
+                &Some(vec![IndexUidPattern::new_unchecked("doggo")]),
+            )
             .unwrap();
         // Return only 1 because the user is not authorized to see task 2
         snapshot!(snapshot_bitmap(&tasks), @"[1,]");
@@ -2831,7 +2990,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2894,7 +3053,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -2954,7 +3113,7 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -3011,7 +3170,361 @@ mod tests {
             .unwrap()
             .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
             .collect::<Vec<_>>();
-        snapshot!(serde_json::to_string_pretty(&documents).unwrap());
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_multiple_primary_key() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in ["id", "bork", "bloup"].iter().enumerate() {
+            let content = format!(
+                r#"{{
+                    "id": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: Some(S(primary_key)),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_3_tasks");
+
+        // A first batch should be processed with only the first documentAddition.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_succeed");
+
+        // The second batch should fail.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
+
+        // The third batch should fail.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_fails");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"id");
+
+        // Is the document still the one we expect?
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_multiple_primary_key_batch_wrong_key() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in ["id", "bork", "bork"].iter().enumerate() {
+            let content = format!(
+                r#"{{
+                    "id": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: Some(S(primary_key)),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_3_tasks");
+
+        // A first batch should be processed with only the first documentAddition.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "only_first_task_succeed");
+
+        // The second batch should fail and contain two tasks.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_and_third_tasks_fails");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"id");
+
+        // Is the document still the one we expect?
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_bad_primary_key() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in ["bork", "bork", "id", "bork", "id"].iter().enumerate() {
+            let content = format!(
+                r#"{{
+                    "id": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: Some(S(primary_key)),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_5_tasks");
+
+        // A first batch should be processed with only the first two documentAdditions.
+        // It should fail because the documents don't contain any `bork` field.
+        // NOTE: it's marked as successful because the batch didn't fail; it's the individual tasks that failed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_and_second_task_fails");
+
+        // The primary key should be set to none since the batch failed.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap();
+        snapshot!(primary_key.is_none(), @"true");
+
+        // The second batch should succeed and contain only one task.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
+
+        // The primary key should be set to `id` since this batch succeeded.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"id");
+
+        // We're trying to `bork` again, but now there is already a primary key set for this index.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fourth_task_fails");
+
+        // Finally the last task should succeed since its primary key is the same as the valid one.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "fifth_task_succeeds");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"id");
+
+        // Is the document still the one we expect?
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_set_and_null_primary_key() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in
+            [None, Some("bork"), Some("paw"), None, None, Some("paw")].into_iter().enumerate()
+        {
+            let content = format!(
+                r#"{{
+                    "paw": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: primary_key.map(|pk| pk.to_string()),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_6_tasks");
+
+        // A first batch should contain only one task, which fails because we can't infer the primary key.
+        // NOTE: it's marked as successful because the batch didn't fail; it's the individual tasks that failed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_fails");
+
+        // The second batch should contain only one task, which fails because `bork` is not a valid primary key.
+        // NOTE: it's marked as successful because the batch didn't fail; it's the individual tasks that failed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
+
+        // No primary key should be set at this point.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap();
+        snapshot!(primary_key.is_none(), @"true");
+
+        // The third batch should succeed and contain only one task.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
+
+        // The primary key should be set to `paw` since this batch succeeded.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"paw");
+
+        // We should be able to batch together the next two tasks that don't specify any primary key
+        // + the last task that matches the current primary-key. Everything should succeed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_other_tasks_succeeds");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"paw");
+
+        // Is the document still the one we expect?
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
+    }
+
+    #[test]
+    fn test_document_addition_with_set_and_null_primary_key_inference_works() {
+        let (index_scheduler, mut handle) = IndexScheduler::test(true, vec![]);
+
+        for (id, primary_key) in [None, Some("bork"), Some("doggoid"), None, None, Some("doggoid")]
+            .into_iter()
+            .enumerate()
+        {
+            let content = format!(
+                r#"{{
+                    "doggoid": {id},
+                    "doggo": "jean bob"
+                }}"#,
+            );
+            let (uuid, mut file) =
+                index_scheduler.create_update_file_with_uuid(id as u128).unwrap();
+            let documents_count = read_json(content.as_bytes(), file.as_file_mut()).unwrap();
+            assert_eq!(documents_count, 1);
+            file.persist().unwrap();
+
+            index_scheduler
+                .register(KindWithContent::DocumentAdditionOrUpdate {
+                    index_uid: S("doggos"),
+                    primary_key: primary_key.map(|pk| pk.to_string()),
+                    method: ReplaceDocuments,
+                    content_file: uuid,
+                    documents_count,
+                    allow_index_creation: true,
+                })
+                .unwrap();
+            index_scheduler.assert_internally_consistent();
+        }
+
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "after_registering_the_6_tasks");
+
+        // A first batch should contain only one task; it succeeds and sets the primary key to `doggoid`.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "first_task_succeed");
+
+        // Checking the primary key.
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap();
+        snapshot!(primary_key.is_none(), @"false");
+
+        // The second batch should contain only one task; it fails because it tries to update the primary key to `bork`.
+        handle.advance_one_failed_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "second_task_fails");
+
+        // The third batch should succeed and contain only one task.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "third_task_succeeds");
+
+        // We should be able to batch together the next two tasks that don't specify any primary key
+        // + the last task that matches the current primary-key. Everything should succeed.
+        handle.advance_one_successful_batch();
+        snapshot!(snapshot_index_scheduler(&index_scheduler), name: "all_other_tasks_succeeds");
+
+        // Is the primary key still what we expect?
+        let index = index_scheduler.index("doggos").unwrap();
+        let rtxn = index.read_txn().unwrap();
+        let primary_key = index.primary_key(&rtxn).unwrap().unwrap();
+        snapshot!(primary_key, @"doggoid");
+
+        // Is the document still the one we expect?
+        let field_ids_map = index.fields_ids_map(&rtxn).unwrap();
+        let field_ids = field_ids_map.ids().collect::<Vec<_>>();
+        let documents = index
+            .all_documents(&rtxn)
+            .unwrap()
+            .map(|ret| obkv_to_json(&field_ids, &field_ids_map, ret.unwrap().1).unwrap())
+            .collect::<Vec<_>>();
+        snapshot!(serde_json::to_string_pretty(&documents).unwrap(), name: "documents");
     }
 
     #[test]
@@ -0,0 +1,42 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
+1 {uid: 1, status: succeeded, details: { received_document_ids: 2, deleted_documents: Some(2) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
+----------------------------------------------------------------------
+### Status:
+enqueued []
+succeeded [0,1,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,]
+"documentDeletion" [1,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,]
+----------------------------------------------------------------------
+### Index Mapper:
+["doggos"]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,1,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,1,]
+----------------------------------------------------------------------
+### File Store:
+
+----------------------------------------------------------------------
+

@@ -0,0 +1,9 @@
+---
+source: index-scheduler/src/lib.rs
+---
+[
+  {
+    "id": 3,
+    "doggo": "bork"
+  }
+]

@@ -0,0 +1,37 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [0,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,]
+----------------------------------------------------------------------
+### Index Mapper:
+[]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+----------------------------------------------------------------------
+### Started At:
+----------------------------------------------------------------------
+### Finished At:
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000000
+
+----------------------------------------------------------------------
+

@@ -0,0 +1,40 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
+1 {uid: 1, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
+----------------------------------------------------------------------
+### Status:
+enqueued [0,1,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,]
+"documentDeletion" [1,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,]
+----------------------------------------------------------------------
+### Index Mapper:
+[]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Started At:
+----------------------------------------------------------------------
+### Finished At:
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000000
+
+----------------------------------------------------------------------
+

@@ -0,0 +1,43 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
+1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [1,]
+failed [0,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [1,]
+"documentDeletion" [0,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,]
+----------------------------------------------------------------------
+### Index Mapper:
+[]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,]
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000000
+
+----------------------------------------------------------------------
+

@@ -0,0 +1,45 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_document_ids: 2, deleted_documents: Some(0) }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
+1 {uid: 1, status: succeeded, details: { received_documents: 3, indexed_documents: Some(3) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued []
+succeeded [1,]
+failed [0,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [1,]
+"documentDeletion" [0,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,]
+----------------------------------------------------------------------
+### Index Mapper:
+["doggos"]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### File Store:
+
+----------------------------------------------------------------------
+

@@ -0,0 +1,17 @@
+---
+source: index-scheduler/src/lib.rs
+---
+[
+  {
+    "id": 1,
+    "doggo": "jean bob"
+  },
+  {
+    "id": 2,
+    "catto": "jorts"
+  },
+  {
+    "id": 3,
+    "doggo": "bork"
+  }
+]

@@ -0,0 +1,36 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
+----------------------------------------------------------------------
+### Status:
+enqueued [0,]
+----------------------------------------------------------------------
+### Kind:
+"documentDeletion" [0,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,]
+----------------------------------------------------------------------
+### Index Mapper:
+[]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+----------------------------------------------------------------------
+### Started At:
+----------------------------------------------------------------------
+### Finished At:
+----------------------------------------------------------------------
+### File Store:
+
+----------------------------------------------------------------------
+

@@ -0,0 +1,40 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { received_document_ids: 2, deleted_documents: None }, kind: DocumentDeletion { index_uid: "doggos", documents_ids: ["1", "2"] }}
+1 {uid: 1, status: enqueued, details: { received_documents: 3, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 3, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [0,1,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [1,]
+"documentDeletion" [0,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,]
+----------------------------------------------------------------------
+### Index Mapper:
+[]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+----------------------------------------------------------------------
+### Started At:
+----------------------------------------------------------------------
+### Finished At:
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000000
+
+----------------------------------------------------------------------
+
@ -10,7 +10,7 @@ source: index-scheduler/src/lib.rs
|
|||||||
1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
|
1 {uid: 1, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "b", primary_key: Some("id") }}
|
||||||
2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }}
|
2 {uid: 2, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "c", primary_key: Some("id") }}
|
||||||
3 {uid: 3, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
|
3 {uid: 3, status: succeeded, details: { primary_key: Some("id") }, kind: IndexCreation { index_uid: "d", primary_key: Some("id") }}
|
||||||
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "invalid_swap_indexes", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#invalid-swap-indexes" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }}
|
4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Indexes `e`, `f` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }, kind: IndexSwap { swaps: [IndexSwap { indexes: ("a", "b") }, IndexSwap { indexes: ("c", "e") }, IndexSwap { indexes: ("d", "f") }] }}
|
||||||
----------------------------------------------------------------------
|
----------------------------------------------------------------------
|
||||||
### Status:
|
### Status:
|
||||||
enqueued []
|
enqueued []
|
||||||
|
@@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
-1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
-2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
+2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
-3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
+3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
-4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
+4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
-5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
+5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
-6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
+6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
-7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
+7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
-8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
+8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
-9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
+9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -6,16 +6,16 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
-1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
-2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
+2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
-3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
+3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
-4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
+4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
-5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
+5 {uid: 5, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
-6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
+6 {uid: 6, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
-7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
+7 {uid: 7, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
-8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
+8 {uid: 8, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000008, documents_count: 1, allow_index_creation: false }}
-9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
+9 {uid: 9, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000009, documents_count: 1, allow_index_creation: false }}
 ----------------------------------------------------------------------
 ### Status:
 enqueued []
@@ -6,11 +6,11 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
-1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: false }}
-2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
+2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
-3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
+3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: false }}
-4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
+4 {uid: 4, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: false }}
 5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: false }}
 6 {uid: 6, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000006, documents_count: 1, allow_index_creation: false }}
 7 {uid: 7, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000007, documents_count: 1, allow_index_creation: false }}
@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
 1 {uid: 1, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
 3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
@@ -6,7 +6,7 @@ source: index-scheduler/src/lib.rs
 []
 ----------------------------------------------------------------------
 ### All Tasks:
-0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index-not-found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Index `doggos` not found.", error_code: "index_not_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_not_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: false }}
 1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
 2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: false }}
 3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
@@ -0,0 +1,49 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
+1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
+2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
+3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
+4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Mapper:
+[]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+[timestamp] [2,]
+[timestamp] [3,]
+[timestamp] [4,]
+----------------------------------------------------------------------
+### Started At:
+----------------------------------------------------------------------
+### Finished At:
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000000
+00000000-0000-0000-0000-000000000001
+00000000-0000-0000-0000-000000000002
+00000000-0000-0000-0000-000000000003
+00000000-0000-0000-0000-000000000004
+
+----------------------------------------------------------------------
+
@@ -0,0 +1,13 @@
+---
+source: index-scheduler/src/lib.rs
+---
+[
+  {
+    "id": 2,
+    "doggo": "jean bob"
+  },
+  {
+    "id": 4,
+    "doggo": "jean bob"
+  }
+]
@@ -0,0 +1,54 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
+2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
+3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
+4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued []
+succeeded [2,4,]
+failed [0,1,3,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Mapper:
+["doggos"]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+[timestamp] [2,]
+[timestamp] [3,]
+[timestamp] [4,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,1,]
+[timestamp] [2,]
+[timestamp] [3,]
+[timestamp] [4,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,1,]
+[timestamp] [2,]
+[timestamp] [3,]
+[timestamp] [4,]
+----------------------------------------------------------------------
+### File Store:
+
+----------------------------------------------------------------------
+
@@ -0,0 +1,50 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
+2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
+3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
+4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [2,3,4,]
+failed [0,1,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Mapper:
+["doggos"]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+[timestamp] [2,]
+[timestamp] [3,]
+[timestamp] [4,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,1,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,1,]
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000002
+00000000-0000-0000-0000-000000000003
+00000000-0000-0000-0000-000000000004
+
+----------------------------------------------------------------------
+
@@ -0,0 +1,53 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
+2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
+3 {uid: 3, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
+4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [4,]
+succeeded [2,]
+failed [0,1,3,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Mapper:
+["doggos"]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+[timestamp] [2,]
+[timestamp] [3,]
+[timestamp] [4,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,1,]
+[timestamp] [2,]
+[timestamp] [3,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,1,]
+[timestamp] [2,]
+[timestamp] [3,]
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000004
+
+----------------------------------------------------------------------
+
@@ -0,0 +1,52 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":0,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
+1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"id\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
+2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
+3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
+4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [3,4,]
+succeeded [2,]
+failed [0,1,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,2,3,4,]
+----------------------------------------------------------------------
+### Index Mapper:
+["doggos"]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+[timestamp] [2,]
+[timestamp] [3,]
+[timestamp] [4,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,1,]
+[timestamp] [2,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,1,]
+[timestamp] [2,]
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000003
+00000000-0000-0000-0000-000000000004
+
+----------------------------------------------------------------------
+
@@ -0,0 +1,43 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
+1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
+2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [0,1,2,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,1,2,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,2,]
+----------------------------------------------------------------------
+### Index Mapper:
+[]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+[timestamp] [2,]
+----------------------------------------------------------------------
+### Started At:
+----------------------------------------------------------------------
+### Finished At:
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000000
+00000000-0000-0000-0000-000000000001
+00000000-0000-0000-0000-000000000002
+
+----------------------------------------------------------------------
+
@@ -0,0 +1,9 @@
+---
+source: index-scheduler/src/lib.rs
+---
+[
+  {
+    "id": 0,
+    "doggo": "jean bob"
+  }
+]
@@ -0,0 +1,45 @@
+---
+source: index-scheduler/src/lib.rs
+---
+### Autobatching Enabled = true
+### Processing Tasks:
+[]
+----------------------------------------------------------------------
+### All Tasks:
+0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
+1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
+2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
+----------------------------------------------------------------------
+### Status:
+enqueued [1,2,]
+succeeded [0,]
+----------------------------------------------------------------------
+### Kind:
+"documentAdditionOrUpdate" [0,1,2,]
+----------------------------------------------------------------------
+### Index Tasks:
+doggos [0,1,2,]
+----------------------------------------------------------------------
+### Index Mapper:
+["doggos"]
+----------------------------------------------------------------------
+### Canceled By:
+
+----------------------------------------------------------------------
+### Enqueued At:
+[timestamp] [0,]
+[timestamp] [1,]
+[timestamp] [2,]
+----------------------------------------------------------------------
+### Started At:
+[timestamp] [0,]
+----------------------------------------------------------------------
+### Finished At:
+[timestamp] [0,]
+----------------------------------------------------------------------
+### File Store:
+00000000-0000-0000-0000-000000000001
+00000000-0000-0000-0000-000000000002
+
+----------------------------------------------------------------------
+
@ -0,0 +1,47 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,]
succeeded [0,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,48 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bloup"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [0,]
failed [1,2,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@ -0,0 +1,43 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,9 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "id": 0,
    "doggo": "jean bob"
  }
]
@ -0,0 +1,45 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,]
succeeded [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,47 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("id"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `id`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,]
succeeded [0,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002

----------------------------------------------------------------------
@ -0,0 +1,52 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------
@ -0,0 +1,56 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [2,3,4,5,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@ -0,0 +1,21 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "paw": 2,
    "doggo": "jean bob"
  },
  {
    "paw": 3,
    "doggo": "jean bob"
  },
  {
    "paw": 4,
    "doggo": "jean bob"
  },
  {
    "paw": 5,
    "doggo": "jean bob"
  }
]
@ -0,0 +1,54 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [1,2,3,4,5,]
failed [0,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------
@ -0,0 +1,55 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [2,3,4,5,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------
@ -0,0 +1,57 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: failed, error: ResponseError { code: 200, message: "The primary key inference failed as the engine did not find any field ending with `id` in its name. Please specify the primary key manually using the `primaryKey` query parameter.", error_code: "index_primary_key_no_candidate_found", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_no_candidate_found" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Document doesn't have a `bork` attribute: `{\"paw\":1,\"doggo\":\"jean bob\"}`.", error_code: "missing_document_id", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#missing_document_id" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("paw"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [3,4,5,]
succeeded [2,]
failed [0,1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------
@ -0,0 +1,52 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: enqueued, details: { received_documents: 1, indexed_documents: None }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
[]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
----------------------------------------------------------------------
### Finished At:
----------------------------------------------------------------------
### File Store:
00000000-0000-0000-0000-000000000000
00000000-0000-0000-0000-000000000001
00000000-0000-0000-0000-000000000002
00000000-0000-0000-0000-000000000003
00000000-0000-0000-0000-000000000004
00000000-0000-0000-0000-000000000005

----------------------------------------------------------------------
@ -0,0 +1,56 @@
---
source: index-scheduler/src/lib.rs
---
### Autobatching Enabled = true
### Processing Tasks:
[]
----------------------------------------------------------------------
### All Tasks:
0 {uid: 0, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000000, documents_count: 1, allow_index_creation: true }}
1 {uid: 1, status: failed, error: ResponseError { code: 200, message: "Index already has a primary key: `doggoid`.", error_code: "index_primary_key_already_exists", error_type: "invalid_request", error_link: "https://docs.meilisearch.com/errors#index_primary_key_already_exists" }, details: { received_documents: 1, indexed_documents: Some(0) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("bork"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000001, documents_count: 1, allow_index_creation: true }}
2 {uid: 2, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000002, documents_count: 1, allow_index_creation: true }}
3 {uid: 3, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000003, documents_count: 1, allow_index_creation: true }}
4 {uid: 4, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: None, method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000004, documents_count: 1, allow_index_creation: true }}
5 {uid: 5, status: succeeded, details: { received_documents: 1, indexed_documents: Some(1) }, kind: DocumentAdditionOrUpdate { index_uid: "doggos", primary_key: Some("doggoid"), method: ReplaceDocuments, content_file: 00000000-0000-0000-0000-000000000005, documents_count: 1, allow_index_creation: true }}
----------------------------------------------------------------------
### Status:
enqueued []
succeeded [0,2,3,4,5,]
failed [1,]
----------------------------------------------------------------------
### Kind:
"documentAdditionOrUpdate" [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Tasks:
doggos [0,1,2,3,4,5,]
----------------------------------------------------------------------
### Index Mapper:
["doggos"]
----------------------------------------------------------------------
### Canceled By:

----------------------------------------------------------------------
### Enqueued At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,]
[timestamp] [4,]
[timestamp] [5,]
----------------------------------------------------------------------
### Started At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### Finished At:
[timestamp] [0,]
[timestamp] [1,]
[timestamp] [2,]
[timestamp] [3,4,5,]
----------------------------------------------------------------------
### File Store:

----------------------------------------------------------------------
@ -0,0 +1,25 @@
---
source: index-scheduler/src/lib.rs
---
[
  {
    "doggoid": 0,
    "doggo": "jean bob"
  },
  {
    "doggoid": 2,
    "doggo": "jean bob"
  },
  {
    "doggoid": 3,
    "doggo": "jean bob"
  },
  {
    "doggoid": 4,
    "doggo": "jean bob"
  },
  {
    "doggoid": 5,
    "doggo": "jean bob"
  }
]