mirror of https://github.com/meilisearch/meilisearch.git
synced 2024-11-22 18:17:39 +08:00

last review edits + fmt

parent c29b86849b
commit dd324807f9
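
Most of the hunks below are mechanical rustfmt rewrites from the "fmt" part of this commit: imports re-sorted, long method calls broken into builder-style chains, and match arms or struct literals that overflow the line width expanded into block form. As an illustration only (excerpted from the first file below, not an addition to the diff), a one-line arm such as

OpenIndex => ErrCode::internal("index_not_accessible", StatusCode::INTERNAL_SERVER_ERROR),

is rewritten by rustfmt as

OpenIndex => {
    ErrCode::internal("index_not_accessible", StatusCode::INTERNAL_SERVER_ERROR)
}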
@@ -81,7 +81,6 @@ pub enum Code {
}

impl Code {

/// ascociate a `Code` variant to the actual ErrCode
fn err_code(&self) -> ErrCode {
use Code::*;
@@ -94,17 +93,23 @@ impl Code {
// thrown when requesting an unexisting index
IndexNotFound => ErrCode::invalid("index_not_found", StatusCode::NOT_FOUND),
InvalidIndexUid => ErrCode::invalid("invalid_index_uid", StatusCode::BAD_REQUEST),
OpenIndex => ErrCode::internal("index_not_accessible", StatusCode::INTERNAL_SERVER_ERROR),
OpenIndex => {
ErrCode::internal("index_not_accessible", StatusCode::INTERNAL_SERVER_ERROR)
}

// invalid state error
InvalidState => ErrCode::internal("invalid_state", StatusCode::INTERNAL_SERVER_ERROR),
// thrown when no primary key has been set
MissingPrimaryKey => ErrCode::invalid("missing_primary_key", StatusCode::BAD_REQUEST),
// error thrown when trying to set an already existing primary key
PrimaryKeyAlreadyPresent => ErrCode::invalid("primary_key_already_present", StatusCode::BAD_REQUEST),
PrimaryKeyAlreadyPresent => {
ErrCode::invalid("primary_key_already_present", StatusCode::BAD_REQUEST)
}

// invalid document
MaxFieldsLimitExceeded => ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST),
MaxFieldsLimitExceeded => {
ErrCode::invalid("max_fields_limit_exceeded", StatusCode::BAD_REQUEST)
}
MissingDocumentId => ErrCode::invalid("missing_document_id", StatusCode::BAD_REQUEST),

// error related to facets
@@ -117,16 +122,26 @@ impl Code {
DocumentNotFound => ErrCode::invalid("document_not_found", StatusCode::NOT_FOUND),
Internal => ErrCode::internal("internal", StatusCode::INTERNAL_SERVER_ERROR),
InvalidToken => ErrCode::authentication("invalid_token", StatusCode::FORBIDDEN),
MissingAuthorizationHeader => ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED),
MissingAuthorizationHeader => {
ErrCode::authentication("missing_authorization_header", StatusCode::UNAUTHORIZED)
}
NotFound => ErrCode::invalid("not_found", StatusCode::NOT_FOUND),
PayloadTooLarge => ErrCode::invalid("payload_too_large", StatusCode::PAYLOAD_TOO_LARGE),
RetrieveDocument => ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST),
RetrieveDocument => {
ErrCode::internal("unretrievable_document", StatusCode::BAD_REQUEST)
}
SearchDocuments => ErrCode::internal("search_error", StatusCode::BAD_REQUEST),
UnsupportedMediaType => ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE),
UnsupportedMediaType => {
ErrCode::invalid("unsupported_media_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
}

// error related to dump
DumpAlreadyInProgress => ErrCode::invalid("dump_already_in_progress", StatusCode::CONFLICT),
DumpProcessFailed => ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR),
DumpAlreadyInProgress => {
ErrCode::invalid("dump_already_in_progress", StatusCode::CONFLICT)
}
DumpProcessFailed => {
ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
}
}
}
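
The hunks above only reflow the Code::err_code mapping; the pattern itself is a table from error variants to an error name plus HTTP status. A minimal runnable sketch of that pattern, assuming a simplified ErrCode that carries a plain u16 status instead of the crate's StatusCode type:

// Sketch only: mirrors the Code -> ErrCode table shown above, with simplified types.
#[derive(Debug)]
struct ErrCode {
    name: &'static str,
    status: u16,
}

impl ErrCode {
    // In the actual crate these constructors presumably also tag the error
    // category (invalid / internal / authentication); here they only differ by name.
    fn invalid(name: &'static str, status: u16) -> ErrCode {
        ErrCode { name, status }
    }
    fn internal(name: &'static str, status: u16) -> ErrCode {
        ErrCode { name, status }
    }
}

enum Code {
    IndexNotFound,
    OpenIndex,
}

impl Code {
    fn err_code(&self) -> ErrCode {
        use Code::*;
        match self {
            IndexNotFound => ErrCode::invalid("index_not_found", 404),
            OpenIndex => {
                ErrCode::internal("index_not_accessible", 500)
            }
        }
    }
}

fn main() {
    let err = Code::OpenIndex.err_code();
    println!("{} -> HTTP {}", err.name, err.status);
}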
@@ -7,9 +7,9 @@ use std::sync::Arc;

use sha2::Digest;

use crate::index_controller::{IndexMetadata, IndexSettings};
use crate::index_controller::IndexController;
use crate::index::Settings;
use crate::index_controller::IndexController;
use crate::index_controller::{IndexMetadata, IndexSettings};
use crate::option::Opt;

#[derive(Clone)]
@@ -72,7 +72,11 @@ impl Data {

api_keys.generate_missing_api_keys();

let inner = DataInner { index_controller, options, api_keys };
let inner = DataInner {
index_controller,
options,
api_keys,
};
let inner = Arc::new(inner);

Ok(Data { inner })
@@ -90,7 +94,11 @@ impl Data {
self.index_controller.get_index(uid).await
}

pub async fn create_index(&self, uid: String, primary_key: Option<String>) -> anyhow::Result<IndexMetadata> {
pub async fn create_index(
&self,
uid: String,
primary_key: Option<String>,
) -> anyhow::Result<IndexMetadata> {
let settings = IndexSettings {
uid: Some(uid),
primary_key,
@@ -1,7 +1,7 @@
use serde_json::{Map, Value};

use crate::index::{SearchQuery, SearchResult};
use super::Data;
use crate::index::{SearchQuery, SearchResult};

impl Data {
pub async fn search(
@@ -19,7 +19,9 @@ impl Data {
limit: usize,
attributes_to_retrieve: Option<Vec<String>>,
) -> anyhow::Result<Vec<Map<String, Value>>> {
self.index_controller.documents(index, offset, limit, attributes_to_retrieve).await
self.index_controller
.documents(index, offset, limit, attributes_to_retrieve)
.await
}

pub async fn retrieve_document(
@@ -27,8 +29,9 @@ impl Data {
index: String,
document_id: String,
attributes_to_retrieve: Option<Vec<String>>,
) -> anyhow::Result<Map<String, Value>>
{
self.index_controller.document(index, document_id, attributes_to_retrieve).await
) -> anyhow::Result<Map<String, Value>> {
self.index_controller
.document(index, document_id, attributes_to_retrieve)
.await
}
}
@@ -1,10 +1,9 @@
use milli::update::{IndexDocumentsMethod, UpdateFormat};
use actix_web::web::Payload;
use milli::update::{IndexDocumentsMethod, UpdateFormat};

use crate::index_controller::{IndexMetadata, IndexSettings, UpdateStatus};
use crate::index::Settings;
use super::Data;

use crate::index::Settings;
use crate::index_controller::{IndexMetadata, IndexSettings, UpdateStatus};

impl Data {
pub async fn add_documents(
@@ -14,9 +13,11 @@ impl Data {
format: UpdateFormat,
stream: Payload,
primary_key: Option<String>,
) -> anyhow::Result<UpdateStatus>
{
let update_status = self.index_controller.add_documents(index, method, format, stream, primary_key).await?;
) -> anyhow::Result<UpdateStatus> {
let update_status = self
.index_controller
.add_documents(index, method, format, stream, primary_key)
.await?;
Ok(update_status)
}

@@ -26,14 +27,14 @@ impl Data {
settings: Settings,
create: bool,
) -> anyhow::Result<UpdateStatus> {
let update = self.index_controller.update_settings(index, settings, create).await?;
let update = self
.index_controller
.update_settings(index, settings, create)
.await?;
Ok(update)
}

pub async fn clear_documents(
&self,
index: String,
) -> anyhow::Result<UpdateStatus> {
pub async fn clear_documents(&self, index: String) -> anyhow::Result<UpdateStatus> {
let update = self.index_controller.clear_documents(index).await?;
Ok(update)
}
@@ -43,14 +44,14 @@ impl Data {
index: String,
document_ids: Vec<String>,
) -> anyhow::Result<UpdateStatus> {
let update = self.index_controller.delete_documents(index, document_ids).await?;
let update = self
.index_controller
.delete_documents(index, document_ids)
.await?;
Ok(update)
}

pub async fn delete_index(
&self,
index: String,
) -> anyhow::Result<()> {
pub async fn delete_index(&self, index: String) -> anyhow::Result<()> {
self.index_controller.delete_index(index).await?;
Ok(())
}
@@ -67,7 +68,7 @@ impl Data {
&self,
uid: String,
primary_key: Option<String>,
new_uid: Option<String>
new_uid: Option<String>,
) -> anyhow::Result<IndexMetadata> {
let settings = IndexSettings {
uid: new_uid,
@@ -1,14 +1,13 @@
use std::error;
use std::fmt;

use actix_web::dev::HttpResponseBuilder;
use actix_web::http::Error as HttpError;
use actix_web as aweb;
use actix_web::dev::HttpResponseBuilder;
use actix_web::error::{JsonPayloadError, QueryPayloadError};
use actix_web::http::Error as HttpError;
use actix_web::http::StatusCode;
use serde::ser::{Serialize, Serializer, SerializeStruct};
use meilisearch_error::{ErrorCode, Code};

use meilisearch_error::{Code, ErrorCode};
use serde::ser::{Serialize, SerializeStruct, Serializer};

#[derive(Debug)]
pub struct ResponseError {
@@ -32,19 +31,25 @@ impl fmt::Display for ResponseError {
// TODO: remove this when implementing actual error handling
impl From<anyhow::Error> for ResponseError {
fn from(other: anyhow::Error) -> ResponseError {
ResponseError { inner: Box::new(Error::NotFound(other.to_string())) }
ResponseError {
inner: Box::new(Error::NotFound(other.to_string())),
}
}
}

impl From<Error> for ResponseError {
fn from(error: Error) -> ResponseError {
ResponseError { inner: Box::new(error) }
ResponseError {
inner: Box::new(error),
}
}
}

impl From<FacetCountError> for ResponseError {
fn from(err: FacetCountError) -> ResponseError {
ResponseError { inner: Box::new(err) }
ResponseError {
inner: Box::new(err),
}
}
}

@@ -130,7 +135,10 @@ impl ErrorCode for Error {
pub enum FacetCountError {
AttributeNotSet(String),
SyntaxError(String),
UnexpectedToken { found: String, expected: &'static [&'static str] },
UnexpectedToken {
found: String,
expected: &'static [&'static str],
},
NoFacetSet,
}

@@ -143,7 +151,10 @@ impl ErrorCode for FacetCountError {
}

impl FacetCountError {
pub fn unexpected_token(found: impl ToString, expected: &'static [&'static str]) -> FacetCountError {
pub fn unexpected_token(
found: impl ToString,
expected: &'static [&'static str],
) -> FacetCountError {
let found = found.to_string();
FacetCountError::UnexpectedToken { expected, found }
}
@@ -162,7 +173,9 @@ impl fmt::Display for FacetCountError {
match self {
AttributeNotSet(attr) => write!(f, "Attribute {} is not set as facet", attr),
SyntaxError(msg) => write!(f, "Syntax error: {}", msg),
UnexpectedToken { expected, found } => write!(f, "Unexpected {} found, expected {:?}", found, expected),
UnexpectedToken { expected, found } => {
write!(f, "Unexpected {} found, expected {:?}", found, expected)
}
NoFacetSet => write!(f, "Can't perform facet count, as no facet is set"),
}
}
@@ -276,10 +289,14 @@ impl From<serde_json::error::Error> for Error {
impl From<JsonPayloadError> for Error {
fn from(err: JsonPayloadError) -> Error {
match err {
JsonPayloadError::Deserialize(err) => Error::BadRequest(format!("Invalid JSON: {}", err)),
JsonPayloadError::Deserialize(err) => {
Error::BadRequest(format!("Invalid JSON: {}", err))
}
JsonPayloadError::Overflow => Error::PayloadTooLarge,
JsonPayloadError::ContentType => Error::UnsupportedMediaType,
JsonPayloadError::Payload(err) => Error::BadRequest(format!("Problem while decoding the request: {}", err)),
JsonPayloadError::Payload(err) => {
Error::BadRequest(format!("Problem while decoding the request: {}", err))
}
}
}
}
@@ -287,7 +304,9 @@ impl From<JsonPayloadError> for Error {
impl From<QueryPayloadError> for Error {
fn from(err: QueryPayloadError) -> Error {
match err {
QueryPayloadError::Deserialize(err) => Error::BadRequest(format!("Invalid query parameters: {}", err)),
QueryPayloadError::Deserialize(err) => {
Error::BadRequest(format!("Invalid query parameters: {}", err))
}
}
}
}
@@ -3,7 +3,7 @@ use std::pin::Pin;
use std::rc::Rc;
use std::task::{Context, Poll};

use actix_web::dev::{Transform, Service, ServiceResponse, ServiceRequest};
use actix_web::dev::{Service, ServiceRequest, ServiceResponse, Transform};
use actix_web::web;
use futures::future::{err, ok, Future, Ready};

@@ -70,10 +70,16 @@ where
let auth_header = match req.headers().get("X-Meili-API-Key") {
Some(auth) => match auth.to_str() {
Ok(auth) => auth,
Err(_) => return Box::pin(err(ResponseError::from(Error::MissingAuthorizationHeader).into())),
Err(_) => {
return Box::pin(err(
ResponseError::from(Error::MissingAuthorizationHeader).into()
))
}
},
None => {
return Box::pin(err(ResponseError::from(Error::MissingAuthorizationHeader).into()));
return Box::pin(err(
ResponseError::from(Error::MissingAuthorizationHeader).into()
));
}
};

@@ -93,9 +99,10 @@ where
if authenticated {
Box::pin(svc.call(req))
} else {
Box::pin(err(
ResponseError::from(Error::InvalidToken(auth_header.to_string())).into()
Box::pin(err(ResponseError::from(Error::InvalidToken(
auth_header.to_string(),
))
.into()))
}
}
}
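
The middleware hunk above only reflows the error returns; the decision it encodes is: a missing or unreadable X-Meili-API-Key header yields MissingAuthorizationHeader, a present but unrecognised key yields InvalidToken, and anything else lets the request through. A minimal framework-free sketch of that check (hypothetical function and error names, not the actual actix-web middleware):

// Sketch only: the header-checking logic from the Authentication middleware,
// stripped of actix-web types. `AuthError` and `check_api_key` are hypothetical names.
#[derive(Debug, PartialEq)]
enum AuthError {
    MissingAuthorizationHeader,
    InvalidToken(String),
}

fn check_api_key(header: Option<&str>, expected: &str) -> Result<(), AuthError> {
    match header {
        // no header (or an unreadable one) -> missing authorization
        None => Err(AuthError::MissingAuthorizationHeader),
        // header present but not matching the configured key -> invalid token
        Some(key) if key != expected => Err(AuthError::InvalidToken(key.to_string())),
        // otherwise the request is authenticated
        Some(_) => Ok(()),
    }
}

fn main() {
    assert_eq!(check_api_key(None, "secret"), Err(AuthError::MissingAuthorizationHeader));
    assert_eq!(check_api_key(Some("wrong"), "secret"), Err(AuthError::InvalidToken("wrong".into())));
    assert_eq!(check_api_key(Some("secret"), "secret"), Ok(()));
    println!("auth checks behave as expected");
}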
@@ -1,9 +1,9 @@
use flate2::Compression;
use flate2::read::GzDecoder;
use flate2::write::GzEncoder;
use flate2::Compression;
use std::fs::{create_dir_all, File};
use std::path::Path;
use tar::{Builder, Archive};
use tar::{Archive, Builder};

use crate::error::Error;

@@ -1,6 +1,4 @@
pub mod authentication;
//pub mod normalize_path;
pub mod compression;

pub use authentication::Authentication;
//pub use normalize_path::NormalizePath;
@ -1,86 +0,0 @@
|
||||
/// From https://docs.rs/actix-web/3.0.0-alpha.2/src/actix_web/middleware/normalize.rs.html#34
|
||||
use actix_web::http::Error;
|
||||
use actix_service::{Service, Transform};
|
||||
use actix_web::{
|
||||
dev::ServiceRequest,
|
||||
dev::ServiceResponse,
|
||||
http::uri::{PathAndQuery, Uri},
|
||||
};
|
||||
use futures::future::{ok, Ready};
|
||||
use regex::Regex;
|
||||
use std::task::{Context, Poll};
|
||||
pub struct NormalizePath;
|
||||
|
||||
impl<S, B> Transform<S> for NormalizePath
|
||||
where
|
||||
S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
||||
S::Future: 'static,
|
||||
{
|
||||
type Request = ServiceRequest;
|
||||
type Response = ServiceResponse<B>;
|
||||
type Error = Error;
|
||||
type InitError = ();
|
||||
type Transform = NormalizePathNormalization<S>;
|
||||
type Future = Ready<Result<Self::Transform, Self::InitError>>;
|
||||
|
||||
fn new_transform(&self, service: S) -> Self::Future {
|
||||
ok(NormalizePathNormalization {
|
||||
service,
|
||||
merge_slash: Regex::new("//+").unwrap(),
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
pub struct NormalizePathNormalization<S> {
|
||||
service: S,
|
||||
merge_slash: Regex,
|
||||
}
|
||||
|
||||
impl<S, B> Service for NormalizePathNormalization<S>
|
||||
where
|
||||
S: Service<Request = ServiceRequest, Response = ServiceResponse<B>, Error = Error>,
|
||||
S::Future: 'static,
|
||||
{
|
||||
type Request = ServiceRequest;
|
||||
type Response = ServiceResponse<B>;
|
||||
type Error = Error;
|
||||
type Future = S::Future;
|
||||
|
||||
fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
|
||||
self.service.poll_ready(cx)
|
||||
}
|
||||
|
||||
fn call(&mut self, mut req: ServiceRequest) -> Self::Future {
|
||||
let head = req.head_mut();
|
||||
|
||||
// always add trailing slash, might be an extra one
|
||||
let path = head.uri.path().to_string() + "/";
|
||||
|
||||
if self.merge_slash.find(&path).is_some() {
|
||||
// normalize multiple /'s to one /
|
||||
let path = self.merge_slash.replace_all(&path, "/");
|
||||
|
||||
let path = if path.len() > 1 {
|
||||
path.trim_end_matches('/')
|
||||
} else {
|
||||
&path
|
||||
};
|
||||
|
||||
let mut parts = head.uri.clone().into_parts();
|
||||
let pq = parts.path_and_query.as_ref().unwrap();
|
||||
|
||||
let path = if let Some(q) = pq.query() {
|
||||
bytes::Bytes::from(format!("{}?{}", path, q))
|
||||
} else {
|
||||
bytes::Bytes::copy_from_slice(path.as_bytes())
|
||||
};
|
||||
parts.path_and_query = Some(PathAndQuery::from_maybe_shared(path).unwrap());
|
||||
|
||||
let uri = Uri::from_parts(parts).unwrap();
|
||||
req.match_info_mut().get_mut().update(&uri);
|
||||
req.head_mut().uri = uri;
|
||||
}
|
||||
|
||||
self.service.call(req)
|
||||
}
|
||||
}
|
@ -59,19 +59,17 @@ impl Index {
|
||||
})
|
||||
}
|
||||
|
||||
pub fn retrieve_documents<S>(
|
||||
pub fn retrieve_documents<S: AsRef<str>>(
|
||||
&self,
|
||||
offset: usize,
|
||||
limit: usize,
|
||||
attributes_to_retrieve: Option<Vec<S>>,
|
||||
) -> anyhow::Result<Vec<Map<String, Value>>>
|
||||
where
|
||||
S: AsRef<str> + Send + Sync + 'static,
|
||||
{
|
||||
) -> anyhow::Result<Vec<Map<String, Value>>> {
|
||||
let txn = self.read_txn()?;
|
||||
|
||||
let fields_ids_map = self.fields_ids_map(&txn)?;
|
||||
let fields_to_display = self.fields_to_display(&txn, attributes_to_retrieve, &fields_ids_map)?;
|
||||
let fields_to_display =
|
||||
self.fields_to_display(&txn, attributes_to_retrieve, &fields_ids_map)?;
|
||||
|
||||
let iter = self.documents.range(&txn, &(..))?.skip(offset).take(limit);
|
||||
|
||||
@ -95,7 +93,8 @@ impl Index {
|
||||
|
||||
let fields_ids_map = self.fields_ids_map(&txn)?;
|
||||
|
||||
let fields_to_display = self.fields_to_display(&txn, attributes_to_retrieve, &fields_ids_map)?;
|
||||
let fields_to_display =
|
||||
self.fields_to_display(&txn, attributes_to_retrieve, &fields_ids_map)?;
|
||||
|
||||
let internal_id = self
|
||||
.external_documents_ids(&txn)?
|
||||
@ -109,11 +108,7 @@ impl Index {
|
||||
.map(|(_, d)| d);
|
||||
|
||||
match document {
|
||||
Some(document) => Ok(obkv_to_json(
|
||||
&fields_to_display,
|
||||
&fields_ids_map,
|
||||
document,
|
||||
)?),
|
||||
Some(document) => Ok(obkv_to_json(&fields_to_display, &fields_ids_map, document)?),
|
||||
None => bail!("Document with id {} not found", doc_id),
|
||||
}
|
||||
}
|
||||
|
@ -1,14 +1,14 @@
|
||||
use std::time::Instant;
|
||||
use std::collections::{HashSet, BTreeMap};
|
||||
use std::collections::{BTreeMap, HashSet};
|
||||
use std::mem;
|
||||
use std::time::Instant;
|
||||
|
||||
use either::Either;
|
||||
use anyhow::bail;
|
||||
use either::Either;
|
||||
use heed::RoTxn;
|
||||
use meilisearch_tokenizer::{Analyzer, AnalyzerConfig};
|
||||
use milli::{FacetCondition, MatchingWords, facet::FacetValue};
|
||||
use serde::{Serialize, Deserialize};
|
||||
use serde_json::{Value, Map};
|
||||
use milli::{facet::FacetValue, FacetCondition, MatchingWords};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use serde_json::{Map, Value};
|
||||
|
||||
use super::Index;
|
||||
|
||||
@ -78,13 +78,15 @@ impl Index {
|
||||
let mut documents = Vec::new();
|
||||
let fields_ids_map = self.fields_ids_map(&rtxn).unwrap();
|
||||
|
||||
let fields_to_display = self.fields_to_display(&rtxn, query.attributes_to_retrieve, &fields_ids_map)?;
|
||||
let fields_to_display =
|
||||
self.fields_to_display(&rtxn, query.attributes_to_retrieve, &fields_ids_map)?;
|
||||
|
||||
let stop_words = fst::Set::default();
|
||||
let highlighter = Highlighter::new(&stop_words);
|
||||
|
||||
for (_id, obkv) in self.documents(&rtxn, documents_ids)? {
|
||||
let mut object = milli::obkv_to_json(&fields_to_display, &fields_ids_map, obkv).unwrap();
|
||||
let mut object =
|
||||
milli::obkv_to_json(&fields_to_display, &fields_ids_map, obkv).unwrap();
|
||||
if let Some(ref attributes_to_highlight) = query.attributes_to_highlight {
|
||||
highlighter.highlight_record(&mut object, &matching_words, attributes_to_highlight);
|
||||
}
|
||||
@ -183,15 +185,15 @@ impl<'a, A: AsRef<[u8]>> Highlighter<'a, A> {
|
||||
}
|
||||
Value::Array(values) => Value::Array(
|
||||
values
|
||||
.into_iter()
|
||||
.map(|v| self.highlight_value(v, words_to_highlight))
|
||||
.collect(),
|
||||
.into_iter()
|
||||
.map(|v| self.highlight_value(v, words_to_highlight))
|
||||
.collect(),
|
||||
),
|
||||
Value::Object(object) => Value::Object(
|
||||
object
|
||||
.into_iter()
|
||||
.map(|(k, v)| (k, self.highlight_value(v, words_to_highlight)))
|
||||
.collect(),
|
||||
.into_iter()
|
||||
.map(|(k, v)| (k, self.highlight_value(v, words_to_highlight)))
|
||||
.collect(),
|
||||
),
|
||||
}
|
||||
}
|
||||
@ -221,9 +223,6 @@ fn parse_facets(
|
||||
// Disabled for now
|
||||
//Value::String(expr) => Ok(Some(FacetCondition::from_str(txn, index, expr)?)),
|
||||
Value::Array(arr) => parse_facets_array(txn, index, arr),
|
||||
v => bail!(
|
||||
"Invalid facet expression, expected Array, found: {:?}",
|
||||
v
|
||||
),
|
||||
v => bail!("Invalid facet expression, expected Array, found: {:?}", v),
|
||||
}
|
||||
}
|
||||
|
@ -4,8 +4,8 @@ use std::num::NonZeroUsize;
|
||||
|
||||
use flate2::read::GzDecoder;
|
||||
use log::info;
|
||||
use milli::update::{UpdateFormat, IndexDocumentsMethod, UpdateBuilder, DocumentAdditionResult};
|
||||
use serde::{Serialize, Deserialize, de::Deserializer};
|
||||
use milli::update::{DocumentAdditionResult, IndexDocumentsMethod, UpdateBuilder, UpdateFormat};
|
||||
use serde::{de::Deserializer, Deserialize, Serialize};
|
||||
|
||||
use super::Index;
|
||||
|
||||
@ -23,14 +23,14 @@ pub struct Settings {
|
||||
#[serde(
|
||||
default,
|
||||
deserialize_with = "deserialize_some",
|
||||
skip_serializing_if = "Option::is_none",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub displayed_attributes: Option<Option<Vec<String>>>,
|
||||
|
||||
#[serde(
|
||||
default,
|
||||
deserialize_with = "deserialize_some",
|
||||
skip_serializing_if = "Option::is_none",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub searchable_attributes: Option<Option<Vec<String>>>,
|
||||
|
||||
@ -40,7 +40,7 @@ pub struct Settings {
|
||||
#[serde(
|
||||
default,
|
||||
deserialize_with = "deserialize_some",
|
||||
skip_serializing_if = "Option::is_none",
|
||||
skip_serializing_if = "Option::is_none"
|
||||
)]
|
||||
pub ranking_rules: Option<Option<Vec<String>>>,
|
||||
}
|
||||
@ -65,8 +65,9 @@ pub struct Facets {
|
||||
}
|
||||
|
||||
fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
|
||||
where T: Deserialize<'de>,
|
||||
D: Deserializer<'de>
|
||||
where
|
||||
T: Deserialize<'de>,
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
Deserialize::deserialize(deserializer).map(Some)
|
||||
}
|
||||
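
The deserialize_some helper in the hunk above is the usual serde idiom for Option<Option<T>> settings fields: combined with #[serde(default)], an absent field stays None, an explicit null becomes Some(None), and a concrete value becomes Some(Some(v)), so a settings update can distinguish "leave unchanged" from "reset". A small self-contained sketch, assuming serde (with the derive feature) and serde_json as dependencies:

// Sketch only: how the deserialize_some helper shown above distinguishes a
// missing field, an explicit null, and a concrete value.
use serde::{Deserialize, Deserializer};

fn deserialize_some<'de, T, D>(deserializer: D) -> Result<Option<T>, D::Error>
where
    T: Deserialize<'de>,
    D: Deserializer<'de>,
{
    Deserialize::deserialize(deserializer).map(Some)
}

#[derive(Debug, Deserialize)]
struct Settings {
    #[serde(default, deserialize_with = "deserialize_some")]
    displayed_attributes: Option<Option<Vec<String>>>,
}

fn main() {
    let absent: Settings = serde_json::from_str("{}").unwrap();
    let null: Settings = serde_json::from_str(r#"{"displayed_attributes": null}"#).unwrap();
    let set: Settings = serde_json::from_str(r#"{"displayed_attributes": ["title"]}"#).unwrap();
    println!("{:?}", absent.displayed_attributes); // None            -> field not sent
    println!("{:?}", null.displayed_attributes);   // Some(None)      -> reset requested
    println!("{:?}", set.displayed_attributes);    // Some(Some(..))  -> new value
}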
@ -85,7 +86,7 @@ impl Index {
|
||||
let mut wtxn = self.write_txn()?;
|
||||
|
||||
// Set the primary key if not set already, ignore if already set.
|
||||
if let (None, Some(ref primary_key)) = (self.primary_key(&wtxn)?, primary_key) {
|
||||
if let (None, Some(ref primary_key)) = (self.primary_key(&wtxn)?, primary_key) {
|
||||
self.put_primary_key(&mut wtxn, primary_key)?;
|
||||
}
|
||||
|
||||
@ -106,10 +107,11 @@ impl Index {
|
||||
|
||||
info!("document addition done: {:?}", result);
|
||||
|
||||
result.and_then(|addition_result| wtxn
|
||||
.commit()
|
||||
.and(Ok(UpdateResult::DocumentsAddition(addition_result)))
|
||||
.map_err(Into::into))
|
||||
result.and_then(|addition_result| {
|
||||
wtxn.commit()
|
||||
.and(Ok(UpdateResult::DocumentsAddition(addition_result)))
|
||||
.map_err(Into::into)
|
||||
})
|
||||
}
|
||||
|
||||
pub fn clear_documents(&self, update_builder: UpdateBuilder) -> anyhow::Result<UpdateResult> {
|
||||
@ -210,14 +212,16 @@ impl Index {
|
||||
let mut builder = update_builder.delete_documents(&mut txn, self)?;
|
||||
|
||||
// We ignore unexisting document ids
|
||||
ids.iter().for_each(|id| { builder.delete_external_id(id); });
|
||||
ids.iter().for_each(|id| {
|
||||
builder.delete_external_id(id);
|
||||
});
|
||||
|
||||
match builder.execute() {
|
||||
Ok(deleted) => txn
|
||||
.commit()
|
||||
.and(Ok(UpdateResult::DocumentDeletion { deleted }))
|
||||
.map_err(Into::into),
|
||||
Err(e) => Err(e)
|
||||
Err(e) => Err(e),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
@ -12,13 +12,13 @@ use heed::EnvOpenOptions;
|
||||
use log::debug;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use thiserror::Error;
|
||||
use tokio::fs::remove_dir_all;
|
||||
use tokio::sync::{mpsc, oneshot, RwLock};
|
||||
use tokio::task::spawn_blocking;
|
||||
use tokio::fs::remove_dir_all;
|
||||
use uuid::Uuid;
|
||||
|
||||
use super::{IndexSettings, get_arc_ownership_blocking};
|
||||
use super::update_handler::UpdateHandler;
|
||||
use super::{get_arc_ownership_blocking, IndexSettings};
|
||||
use crate::index::UpdateResult as UResult;
|
||||
use crate::index::{Document, Index, SearchQuery, SearchResult, Settings};
|
||||
use crate::index_controller::{
|
||||
@ -49,7 +49,11 @@ impl IndexMeta {
|
||||
let created_at = index.created_at(&txn)?;
|
||||
let updated_at = index.updated_at(&txn)?;
|
||||
let primary_key = index.primary_key(&txn)?.map(String::from);
|
||||
Ok(Self { primary_key, updated_at, created_at })
|
||||
Ok(Self {
|
||||
primary_key,
|
||||
updated_at,
|
||||
created_at,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
@ -98,7 +102,7 @@ enum IndexMsg {
|
||||
uuid: Uuid,
|
||||
index_settings: IndexSettings,
|
||||
ret: oneshot::Sender<Result<IndexMeta>>,
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
struct IndexActor<S> {
|
||||
@ -136,8 +140,7 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
|
||||
store: S,
|
||||
) -> Result<Self> {
|
||||
let options = IndexerOpts::default();
|
||||
let update_handler =
|
||||
UpdateHandler::new(&options).map_err(IndexError::Error)?;
|
||||
let update_handler = UpdateHandler::new(&options).map_err(IndexError::Error)?;
|
||||
let update_handler = Arc::new(update_handler);
|
||||
let read_receiver = Some(read_receiver);
|
||||
let write_receiver = Some(write_receiver);
|
||||
@ -241,7 +244,11 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
|
||||
GetMeta { uuid, ret } => {
|
||||
let _ = ret.send(self.handle_get_meta(uuid).await);
|
||||
}
|
||||
UpdateIndex { uuid, index_settings, ret } => {
|
||||
UpdateIndex {
|
||||
uuid,
|
||||
index_settings,
|
||||
ret,
|
||||
} => {
|
||||
let _ = ret.send(self.handle_update_index(uuid, index_settings).await);
|
||||
}
|
||||
}
|
||||
@ -366,30 +373,34 @@ impl<S: IndexStore + Sync + Send> IndexActor<S> {
|
||||
}
|
||||
}
|
||||
|
||||
async fn handle_update_index(&self, uuid: Uuid, index_settings: IndexSettings) -> Result<IndexMeta> {
|
||||
let index = self.store
|
||||
async fn handle_update_index(
|
||||
&self,
|
||||
uuid: Uuid,
|
||||
index_settings: IndexSettings,
|
||||
) -> Result<IndexMeta> {
|
||||
let index = self
|
||||
.store
|
||||
.get(uuid)
|
||||
.await?
|
||||
.ok_or(IndexError::UnexistingIndex)?;
|
||||
|
||||
spawn_blocking(move || {
|
||||
match index_settings.primary_key {
|
||||
Some(ref primary_key) => {
|
||||
let mut txn = index.write_txn()?;
|
||||
if index.primary_key(&txn)?.is_some() {
|
||||
return Err(IndexError::ExistingPrimaryKey)
|
||||
}
|
||||
index.put_primary_key(&mut txn, primary_key)?;
|
||||
let meta = IndexMeta::new_txn(&index, &txn)?;
|
||||
txn.commit()?;
|
||||
Ok(meta)
|
||||
},
|
||||
None => {
|
||||
let meta = IndexMeta::new(&index)?;
|
||||
Ok(meta)
|
||||
},
|
||||
spawn_blocking(move || match index_settings.primary_key {
|
||||
Some(ref primary_key) => {
|
||||
let mut txn = index.write_txn()?;
|
||||
if index.primary_key(&txn)?.is_some() {
|
||||
return Err(IndexError::ExistingPrimaryKey);
|
||||
}
|
||||
index.put_primary_key(&mut txn, primary_key)?;
|
||||
let meta = IndexMeta::new_txn(&index, &txn)?;
|
||||
txn.commit()?;
|
||||
Ok(meta)
|
||||
}
|
||||
}).await
|
||||
None => {
|
||||
let meta = IndexMeta::new(&index)?;
|
||||
Ok(meta)
|
||||
}
|
||||
})
|
||||
.await
|
||||
.map_err(|e| IndexError::Error(e.into()))?
|
||||
}
|
||||
}
|
||||
@ -416,7 +427,8 @@ impl IndexActorHandle {
|
||||
|
||||
pub async fn create_index(&self, uuid: Uuid, primary_key: Option<String>) -> Result<IndexMeta> {
|
||||
let (ret, receiver) = oneshot::channel();
|
||||
let msg = IndexMsg::CreateIndex { ret,
|
||||
let msg = IndexMsg::CreateIndex {
|
||||
ret,
|
||||
uuid,
|
||||
primary_key,
|
||||
};
|
||||
@ -502,10 +514,14 @@ impl IndexActorHandle {
|
||||
pub async fn update_index(
|
||||
&self,
|
||||
uuid: Uuid,
|
||||
index_settings: IndexSettings
|
||||
index_settings: IndexSettings,
|
||||
) -> Result<IndexMeta> {
|
||||
let (ret, receiver) = oneshot::channel();
|
||||
let msg = IndexMsg::UpdateIndex { uuid, index_settings, ret };
|
||||
let msg = IndexMsg::UpdateIndex {
|
||||
uuid,
|
||||
index_settings,
|
||||
ret,
|
||||
};
|
||||
let _ = self.read_sender.send(msg).await;
|
||||
Ok(receiver.await.expect("IndexActor has been killed")?)
|
||||
}
|
||||
@ -571,10 +587,7 @@ impl IndexStore for HeedIndexStore {
|
||||
let index = spawn_blocking(move || open_index(path, index_size))
|
||||
.await
|
||||
.map_err(|e| IndexError::Error(e.into()))??;
|
||||
self.index_store
|
||||
.write()
|
||||
.await
|
||||
.insert(uuid, index.clone());
|
||||
self.index_store.write().await.insert(uuid, index.clone());
|
||||
Ok(Some(index))
|
||||
}
|
||||
}
|
||||
@ -582,7 +595,8 @@ impl IndexStore for HeedIndexStore {
|
||||
|
||||
async fn delete(&self, uuid: Uuid) -> Result<Option<Index>> {
|
||||
let db_path = self.path.join(format!("index-{}", uuid));
|
||||
remove_dir_all(db_path).await
|
||||
remove_dir_all(db_path)
|
||||
.await
|
||||
.map_err(|e| IndexError::Error(e.into()))?;
|
||||
let index = self.index_store.write().await.remove(&uuid);
|
||||
Ok(index)
|
||||
@ -590,11 +604,9 @@ impl IndexStore for HeedIndexStore {
|
||||
}
|
||||
|
||||
fn open_index(path: impl AsRef<Path>, size: usize) -> Result<Index> {
|
||||
create_dir_all(&path)
|
||||
.map_err(|e| IndexError::Error(e.into()))?;
|
||||
create_dir_all(&path).map_err(|e| IndexError::Error(e.into()))?;
|
||||
let mut options = EnvOpenOptions::new();
|
||||
options.map_size(size);
|
||||
let index = milli::Index::new(options, &path)
|
||||
.map_err(IndexError::Error)?;
|
||||
let index = milli::Index::new(options, &path).map_err(IndexError::Error)?;
|
||||
Ok(Index(Arc::new(index)))
|
||||
}
|
||||
|
@ -9,17 +9,17 @@ use std::path::Path;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use anyhow::bail;
|
||||
use actix_web::web::{Bytes, Payload};
|
||||
use anyhow::bail;
|
||||
use futures::stream::StreamExt;
|
||||
use milli::update::{IndexDocumentsMethod, UpdateFormat};
|
||||
use serde::{Serialize, Deserialize};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::mpsc;
|
||||
use tokio::time::sleep;
|
||||
|
||||
pub use updates::{Processed, Processing, Failed};
|
||||
use crate::index::{SearchResult, SearchQuery, Document};
|
||||
use crate::index::{UpdateResult, Settings, Facets};
|
||||
use crate::index::{Document, SearchQuery, SearchResult};
|
||||
use crate::index::{Facets, Settings, UpdateResult};
|
||||
pub use updates::{Failed, Processed, Processing};
|
||||
|
||||
pub type UpdateStatus = updates::UpdateStatus<UpdateMeta, UpdateResult, String>;
|
||||
|
||||
@ -51,7 +51,6 @@ pub struct IndexSettings {
|
||||
pub primary_key: Option<String>,
|
||||
}
|
||||
|
||||
|
||||
pub struct IndexController {
|
||||
uuid_resolver: uuid_resolver::UuidResolverHandle,
|
||||
index_handle: index_actor::IndexActorHandle,
|
||||
@ -59,11 +58,20 @@ pub struct IndexController {
|
||||
}
|
||||
|
||||
impl IndexController {
|
||||
pub fn new(path: impl AsRef<Path>, index_size: usize, update_store_size: usize) -> anyhow::Result<Self> {
|
||||
pub fn new(
|
||||
path: impl AsRef<Path>,
|
||||
index_size: usize,
|
||||
update_store_size: usize,
|
||||
) -> anyhow::Result<Self> {
|
||||
let uuid_resolver = uuid_resolver::UuidResolverHandle::new(&path)?;
|
||||
let index_actor = index_actor::IndexActorHandle::new(&path, index_size)?;
|
||||
let update_handle = update_actor::UpdateActorHandle::new(index_actor.clone(), &path, update_store_size)?;
|
||||
Ok(Self { uuid_resolver, index_handle: index_actor, update_handle })
|
||||
let update_handle =
|
||||
update_actor::UpdateActorHandle::new(index_actor.clone(), &path, update_store_size)?;
|
||||
Ok(Self {
|
||||
uuid_resolver,
|
||||
index_handle: index_actor,
|
||||
update_handle,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn add_documents(
|
||||
@ -75,7 +83,11 @@ impl IndexController {
|
||||
primary_key: Option<String>,
|
||||
) -> anyhow::Result<UpdateStatus> {
|
||||
let uuid = self.uuid_resolver.get_or_create(uid).await?;
|
||||
let meta = UpdateMeta::DocumentsAddition { method, format, primary_key };
|
||||
let meta = UpdateMeta::DocumentsAddition {
|
||||
method,
|
||||
format,
|
||||
primary_key,
|
||||
};
|
||||
let (sender, receiver) = mpsc::channel(10);
|
||||
|
||||
// It is necessary to spawn a local task to senf the payload to the update handle to
|
||||
@ -84,10 +96,13 @@ impl IndexController {
|
||||
tokio::task::spawn_local(async move {
|
||||
while let Some(bytes) = payload.next().await {
|
||||
match bytes {
|
||||
Ok(bytes) => { let _ = sender.send(Ok(bytes)).await; },
|
||||
Ok(bytes) => {
|
||||
let _ = sender.send(Ok(bytes)).await;
|
||||
}
|
||||
Err(e) => {
|
||||
let error: Box<dyn std::error::Error + Sync + Send + 'static> = Box::new(e);
|
||||
let _ = sender.send(Err(error)).await; },
|
||||
let _ = sender.send(Err(error)).await;
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
@ -105,7 +120,11 @@ impl IndexController {
|
||||
Ok(status)
|
||||
}
|
||||
|
||||
pub async fn delete_documents(&self, uid: String, document_ids: Vec<String>) -> anyhow::Result<UpdateStatus> {
|
||||
pub async fn delete_documents(
|
||||
&self,
|
||||
uid: String,
|
||||
document_ids: Vec<String>,
|
||||
) -> anyhow::Result<UpdateStatus> {
|
||||
let uuid = self.uuid_resolver.resolve(uid).await?;
|
||||
let meta = UpdateMeta::DeleteDocuments;
|
||||
let (sender, receiver) = mpsc::channel(10);
|
||||
@ -120,7 +139,12 @@ impl IndexController {
|
||||
Ok(status)
|
||||
}
|
||||
|
||||
pub async fn update_settings(&self, uid: String, settings: Settings, create: bool) -> anyhow::Result<UpdateStatus> {
|
||||
pub async fn update_settings(
|
||||
&self,
|
||||
uid: String,
|
||||
settings: Settings,
|
||||
create: bool,
|
||||
) -> anyhow::Result<UpdateStatus> {
|
||||
let uuid = if create {
|
||||
let uuid = self.uuid_resolver.get_or_create(uid).await?;
|
||||
// We need to create the index upfront, since it would otherwise only be created when
|
||||
@ -143,9 +167,12 @@ impl IndexController {
|
||||
Ok(status)
|
||||
}
|
||||
|
||||
pub async fn create_index(&self, index_settings: IndexSettings) -> anyhow::Result<IndexMetadata> {
|
||||
let IndexSettings { uid: name, primary_key } = index_settings;
|
||||
let uid = name.unwrap();
|
||||
pub async fn create_index(
|
||||
&self,
|
||||
index_settings: IndexSettings,
|
||||
) -> anyhow::Result<IndexMetadata> {
|
||||
let IndexSettings { uid, primary_key } = index_settings;
|
||||
let uid = uid.ok_or_else(|| anyhow::anyhow!("Can't create an index without a uid."))?;
|
||||
let uuid = self.uuid_resolver.create(uid.clone()).await?;
|
||||
let meta = self.index_handle.create_index(uuid, primary_key).await?;
|
||||
let _ = self.update_handle.create(uuid).await?;
|
||||
@ -155,25 +182,20 @@ impl IndexController {
|
||||
}
|
||||
|
||||
pub async fn delete_index(&self, uid: String) -> anyhow::Result<()> {
|
||||
let uuid = self.uuid_resolver
|
||||
.delete(uid)
|
||||
.await?;
|
||||
let uuid = self.uuid_resolver.delete(uid).await?;
|
||||
self.update_handle.delete(uuid).await?;
|
||||
self.index_handle.delete(uuid).await?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn update_status(&self, uid: String, id: u64) -> anyhow::Result<UpdateStatus> {
|
||||
let uuid = self.uuid_resolver
|
||||
.resolve(uid)
|
||||
.await?;
|
||||
let uuid = self.uuid_resolver.resolve(uid).await?;
|
||||
let result = self.update_handle.update_status(uuid, id).await?;
|
||||
Ok(result)
|
||||
}
|
||||
|
||||
pub async fn all_update_status(&self, uid: String) -> anyhow::Result<Vec<UpdateStatus>> {
|
||||
let uuid = self.uuid_resolver
|
||||
.resolve(uid).await?;
|
||||
let uuid = self.uuid_resolver.resolve(uid).await?;
|
||||
let result = self.update_handle.get_all_updates_status(uuid).await?;
|
||||
Ok(result)
|
||||
}
|
||||
@ -193,9 +215,7 @@ impl IndexController {
|
||||
}
|
||||
|
||||
pub async fn settings(&self, uid: String) -> anyhow::Result<Settings> {
|
||||
let uuid = self.uuid_resolver
|
||||
.resolve(uid.clone())
|
||||
.await?;
|
||||
let uuid = self.uuid_resolver.resolve(uid.clone()).await?;
|
||||
let settings = self.index_handle.settings(uuid).await?;
|
||||
Ok(settings)
|
||||
}
|
||||
@ -207,10 +227,11 @@ impl IndexController {
|
||||
limit: usize,
|
||||
attributes_to_retrieve: Option<Vec<String>>,
|
||||
) -> anyhow::Result<Vec<Document>> {
|
||||
let uuid = self.uuid_resolver
|
||||
.resolve(uid.clone())
|
||||
let uuid = self.uuid_resolver.resolve(uid.clone()).await?;
|
||||
let documents = self
|
||||
.index_handle
|
||||
.documents(uuid, offset, limit, attributes_to_retrieve)
|
||||
.await?;
|
||||
let documents = self.index_handle.documents(uuid, offset, limit, attributes_to_retrieve).await?;
|
||||
Ok(documents)
|
||||
}
|
||||
|
||||
@ -220,21 +241,24 @@ impl IndexController {
|
||||
doc_id: String,
|
||||
attributes_to_retrieve: Option<Vec<String>>,
|
||||
) -> anyhow::Result<Document> {
|
||||
let uuid = self.uuid_resolver
|
||||
.resolve(uid.clone())
|
||||
let uuid = self.uuid_resolver.resolve(uid.clone()).await?;
|
||||
let document = self
|
||||
.index_handle
|
||||
.document(uuid, doc_id, attributes_to_retrieve)
|
||||
.await?;
|
||||
let document = self.index_handle.document(uuid, doc_id, attributes_to_retrieve).await?;
|
||||
Ok(document)
|
||||
}
|
||||
|
||||
pub async fn update_index(&self, uid: String, index_settings: IndexSettings) -> anyhow::Result<IndexMetadata> {
|
||||
pub async fn update_index(
|
||||
&self,
|
||||
uid: String,
|
||||
index_settings: IndexSettings,
|
||||
) -> anyhow::Result<IndexMetadata> {
|
||||
if index_settings.uid.is_some() {
|
||||
bail!("Can't change the index uid.")
|
||||
}
|
||||
|
||||
let uuid = self.uuid_resolver
|
||||
.resolve(uid.clone())
|
||||
.await?;
|
||||
let uuid = self.uuid_resolver.resolve(uid.clone()).await?;
|
||||
let meta = self.index_handle.update_index(uuid, index_settings).await?;
|
||||
let meta = IndexMetadata { uid, meta };
|
||||
Ok(meta)
|
||||
@ -248,9 +272,7 @@ impl IndexController {
|
||||
|
||||
pub async fn get_index(&self, uid: String) -> anyhow::Result<IndexMetadata> {
|
||||
let uuid = self.uuid_resolver.resolve(uid.clone()).await?;
|
||||
let meta = self.index_handle
|
||||
.get_index_meta(uuid)
|
||||
.await?;
|
||||
let meta = self.index_handle.get_index_meta(uuid).await?;
|
||||
let meta = IndexMetadata { uid, meta };
|
||||
Ok(meta)
|
||||
}
|
||||
|
@ -52,7 +52,7 @@ enum UpdateMsg<D> {
|
||||
Create {
|
||||
uuid: Uuid,
|
||||
ret: oneshot::Sender<Result<()>>,
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
struct UpdateActor<D, S> {
|
||||
@ -213,7 +213,11 @@ impl<D> UpdateActorHandle<D>
|
||||
where
|
||||
D: AsRef<[u8]> + Sized + 'static + Sync + Send,
|
||||
{
|
||||
pub fn new(index_handle: IndexActorHandle, path: impl AsRef<Path>, update_store_size: usize) -> anyhow::Result<Self> {
|
||||
pub fn new(
|
||||
index_handle: IndexActorHandle,
|
||||
path: impl AsRef<Path>,
|
||||
update_store_size: usize,
|
||||
) -> anyhow::Result<Self> {
|
||||
let path = path.as_ref().to_owned().join("updates");
|
||||
let (sender, receiver) = mpsc::channel(100);
|
||||
let store = MapUpdateStoreStore::new(index_handle, &path, update_store_size);
|
||||
@ -278,7 +282,11 @@ struct MapUpdateStoreStore {
|
||||
}
|
||||
|
||||
impl MapUpdateStoreStore {
|
||||
fn new(index_handle: IndexActorHandle, path: impl AsRef<Path>, update_store_size: usize) -> Self {
|
||||
fn new(
|
||||
index_handle: IndexActorHandle,
|
||||
path: impl AsRef<Path>,
|
||||
update_store_size: usize,
|
||||
) -> Self {
|
||||
let db = Arc::new(RwLock::new(HashMap::new()));
|
||||
let path = path.as_ref().to_owned();
|
||||
Self {
|
||||
|
@ -1,14 +1,14 @@
|
||||
use std::fs::File;
|
||||
|
||||
use crate::index::Index;
|
||||
use anyhow::Result;
|
||||
use grenad::CompressionType;
|
||||
use milli::update::UpdateBuilder;
|
||||
use crate::index::Index;
|
||||
use rayon::ThreadPool;
|
||||
|
||||
use crate::index::UpdateResult;
|
||||
use crate::index_controller::updates::{Failed, Processed, Processing};
|
||||
use crate::index_controller::UpdateMeta;
|
||||
use crate::index::UpdateResult;
|
||||
use crate::option::IndexerOpts;
|
||||
|
||||
pub struct UpdateHandler {
|
||||
@ -23,9 +23,7 @@ pub struct UpdateHandler {
|
||||
}
|
||||
|
||||
impl UpdateHandler {
|
||||
pub fn new(
|
||||
opt: &IndexerOpts,
|
||||
) -> anyhow::Result<Self> {
|
||||
pub fn new(opt: &IndexerOpts) -> anyhow::Result<Self> {
|
||||
let thread_pool = rayon::ThreadPoolBuilder::new()
|
||||
.num_threads(opt.indexing_jobs.unwrap_or(0))
|
||||
.build()?;
|
||||
@ -59,7 +57,6 @@ impl UpdateHandler {
|
||||
update_builder
|
||||
}
|
||||
|
||||
|
||||
pub fn handle_update(
|
||||
&self,
|
||||
meta: Processing<UpdateMeta>,
|
||||
|
@ -4,11 +4,11 @@ use std::sync::Arc;
|
||||
|
||||
use heed::types::{DecodeIgnore, OwnedType, SerdeJson};
|
||||
use heed::{Database, Env, EnvOpenOptions};
|
||||
use parking_lot::RwLock;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs::File;
|
||||
use tokio::sync::mpsc;
|
||||
use uuid::Uuid;
|
||||
use parking_lot::RwLock;
|
||||
|
||||
use crate::index_controller::updates::*;
|
||||
|
||||
@ -252,24 +252,27 @@ where
|
||||
|
||||
updates.extend(pending);
|
||||
|
||||
let aborted =
|
||||
self.aborted_meta.iter(&rtxn)?
|
||||
let aborted = self
|
||||
.aborted_meta
|
||||
.iter(&rtxn)?
|
||||
.filter_map(Result::ok)
|
||||
.map(|(_, p)| p)
|
||||
.map(UpdateStatus::from);
|
||||
|
||||
updates.extend(aborted);
|
||||
|
||||
let processed =
|
||||
self.processed_meta.iter(&rtxn)?
|
||||
let processed = self
|
||||
.processed_meta
|
||||
.iter(&rtxn)?
|
||||
.filter_map(Result::ok)
|
||||
.map(|(_, p)| p)
|
||||
.map(UpdateStatus::from);
|
||||
|
||||
updates.extend(processed);
|
||||
|
||||
let failed =
|
||||
self.failed_meta.iter(&rtxn)?
|
||||
let failed = self
|
||||
.failed_meta
|
||||
.iter(&rtxn)?
|
||||
.filter_map(Result::ok)
|
||||
.map(|(_, p)| p)
|
||||
.map(UpdateStatus::from);
|
||||
@ -372,90 +375,90 @@ where
|
||||
|
||||
//#[cfg(test)]
|
||||
//mod tests {
|
||||
//use super::*;
|
||||
//use std::thread;
|
||||
//use std::time::{Duration, Instant};
|
||||
//use super::*;
|
||||
//use std::thread;
|
||||
//use std::time::{Duration, Instant};
|
||||
|
||||
//#[test]
|
||||
//fn simple() {
|
||||
//let dir = tempfile::tempdir().unwrap();
|
||||
//let mut options = EnvOpenOptions::new();
|
||||
//options.map_size(4096 * 100);
|
||||
//let update_store = UpdateStore::open(
|
||||
//options,
|
||||
//dir,
|
||||
//|meta: Processing<String>, _content: &_| -> Result<_, Failed<_, ()>> {
|
||||
//let new_meta = meta.meta().to_string() + " processed";
|
||||
//let processed = meta.process(new_meta);
|
||||
//Ok(processed)
|
||||
//},
|
||||
//)
|
||||
//.unwrap();
|
||||
//#[test]
|
||||
//fn simple() {
|
||||
//let dir = tempfile::tempdir().unwrap();
|
||||
//let mut options = EnvOpenOptions::new();
|
||||
//options.map_size(4096 * 100);
|
||||
//let update_store = UpdateStore::open(
|
||||
//options,
|
||||
//dir,
|
||||
//|meta: Processing<String>, _content: &_| -> Result<_, Failed<_, ()>> {
|
||||
//let new_meta = meta.meta().to_string() + " processed";
|
||||
//let processed = meta.process(new_meta);
|
||||
//Ok(processed)
|
||||
//},
|
||||
//)
|
||||
//.unwrap();
|
||||
|
||||
//let meta = String::from("kiki");
|
||||
//let update = update_store.register_update(meta, &[]).unwrap();
|
||||
//thread::sleep(Duration::from_millis(100));
|
||||
//let meta = update_store.meta(update.id()).unwrap().unwrap();
|
||||
//if let UpdateStatus::Processed(Processed { success, .. }) = meta {
|
||||
//assert_eq!(success, "kiki processed");
|
||||
//} else {
|
||||
//panic!()
|
||||
//}
|
||||
//}
|
||||
|
||||
//#[test]
|
||||
//#[ignore]
|
||||
//fn long_running_update() {
|
||||
//let dir = tempfile::tempdir().unwrap();
|
||||
//let mut options = EnvOpenOptions::new();
|
||||
//options.map_size(4096 * 100);
|
||||
//let update_store = UpdateStore::open(
|
||||
//options,
|
||||
//dir,
|
||||
//|meta: Processing<String>, _content: &_| -> Result<_, Failed<_, ()>> {
|
||||
//thread::sleep(Duration::from_millis(400));
|
||||
//let new_meta = meta.meta().to_string() + "processed";
|
||||
//let processed = meta.process(new_meta);
|
||||
//Ok(processed)
|
||||
//},
|
||||
//)
|
||||
//.unwrap();
|
||||
|
||||
//let before_register = Instant::now();
|
||||
|
||||
//let meta = String::from("kiki");
|
||||
//let update_kiki = update_store.register_update(meta, &[]).unwrap();
|
||||
//assert!(before_register.elapsed() < Duration::from_millis(200));
|
||||
|
||||
//let meta = String::from("coco");
|
||||
//let update_coco = update_store.register_update(meta, &[]).unwrap();
|
||||
//assert!(before_register.elapsed() < Duration::from_millis(200));
|
||||
|
||||
//let meta = String::from("cucu");
|
||||
//let update_cucu = update_store.register_update(meta, &[]).unwrap();
|
||||
//assert!(before_register.elapsed() < Duration::from_millis(200));
|
||||
|
||||
//thread::sleep(Duration::from_millis(400 * 3 + 100));
|
||||
|
||||
//let meta = update_store.meta(update_kiki.id()).unwrap().unwrap();
|
||||
//if let UpdateStatus::Processed(Processed { success, .. }) = meta {
|
||||
//assert_eq!(success, "kiki processed");
|
||||
//} else {
|
||||
//panic!()
|
||||
//}
|
||||
|
||||
//let meta = update_store.meta(update_coco.id()).unwrap().unwrap();
|
||||
//if let UpdateStatus::Processed(Processed { success, .. }) = meta {
|
||||
//assert_eq!(success, "coco processed");
|
||||
//} else {
|
||||
//panic!()
|
||||
//}
|
||||
|
||||
//let meta = update_store.meta(update_cucu.id()).unwrap().unwrap();
|
||||
//if let UpdateStatus::Processed(Processed { success, .. }) = meta {
|
||||
//assert_eq!(success, "cucu processed");
|
||||
//} else {
|
||||
//panic!()
|
||||
//}
|
||||
//}
|
||||
//let meta = String::from("kiki");
|
||||
//let update = update_store.register_update(meta, &[]).unwrap();
|
||||
//thread::sleep(Duration::from_millis(100));
|
||||
//let meta = update_store.meta(update.id()).unwrap().unwrap();
|
||||
//if let UpdateStatus::Processed(Processed { success, .. }) = meta {
|
||||
//assert_eq!(success, "kiki processed");
|
||||
//} else {
|
||||
//panic!()
|
||||
//}
|
||||
//}
|
||||
|
||||
//#[test]
|
||||
//#[ignore]
|
||||
//fn long_running_update() {
|
||||
//let dir = tempfile::tempdir().unwrap();
|
||||
//let mut options = EnvOpenOptions::new();
|
||||
//options.map_size(4096 * 100);
|
||||
//let update_store = UpdateStore::open(
|
||||
//options,
|
||||
//dir,
|
||||
//|meta: Processing<String>, _content: &_| -> Result<_, Failed<_, ()>> {
|
||||
//thread::sleep(Duration::from_millis(400));
|
||||
//let new_meta = meta.meta().to_string() + "processed";
|
||||
//let processed = meta.process(new_meta);
|
||||
//Ok(processed)
|
||||
//},
|
||||
//)
|
||||
//.unwrap();
|
||||
|
||||
//let before_register = Instant::now();
|
||||
|
||||
//let meta = String::from("kiki");
|
||||
//let update_kiki = update_store.register_update(meta, &[]).unwrap();
|
||||
//assert!(before_register.elapsed() < Duration::from_millis(200));
|
||||
|
||||
//let meta = String::from("coco");
|
||||
//let update_coco = update_store.register_update(meta, &[]).unwrap();
|
||||
//assert!(before_register.elapsed() < Duration::from_millis(200));
|
||||
|
||||
//let meta = String::from("cucu");
|
||||
//let update_cucu = update_store.register_update(meta, &[]).unwrap();
|
||||
//assert!(before_register.elapsed() < Duration::from_millis(200));
|
||||
|
||||
//thread::sleep(Duration::from_millis(400 * 3 + 100));
|
||||
|
||||
//let meta = update_store.meta(update_kiki.id()).unwrap().unwrap();
|
||||
//if let UpdateStatus::Processed(Processed { success, .. }) = meta {
|
||||
//assert_eq!(success, "kiki processed");
|
||||
//} else {
|
||||
//panic!()
|
||||
//}
|
||||
|
||||
//let meta = update_store.meta(update_coco.id()).unwrap().unwrap();
|
||||
//if let UpdateStatus::Processed(Processed { success, .. }) = meta {
|
||||
//assert_eq!(success, "coco processed");
|
||||
//} else {
|
||||
//panic!()
|
||||
//}
|
||||
|
||||
//let meta = update_store.meta(update_cucu.id()).unwrap().unwrap();
|
||||
//if let UpdateStatus::Processed(Processed { success, .. }) = meta {
|
||||
//assert_eq!(success, "cucu processed");
|
||||
//} else {
|
||||
//panic!()
|
||||
//}
|
||||
//}
|
||||
//}
|
||||
|
@ -1,5 +1,5 @@
|
||||
use chrono::{Utc, DateTime};
|
||||
use serde::{Serialize, Deserialize};
|
||||
use chrono::{DateTime, Utc};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use uuid::Uuid;
|
||||
|
||||
#[derive(Debug, PartialEq, Eq, Hash, Serialize, Deserialize, Clone)]
|
||||
|
@ -1,6 +1,9 @@
|
||||
use std::{fs::create_dir_all, path::Path};
|
||||
|
||||
use heed::{Database, Env, EnvOpenOptions, types::{ByteSlice, Str}};
|
||||
use heed::{
|
||||
types::{ByteSlice, Str},
|
||||
Database, Env, EnvOpenOptions,
|
||||
};
|
||||
use log::{info, warn};
|
||||
use thiserror::Error;
|
||||
use tokio::sync::{mpsc, oneshot};
|
||||
@ -73,14 +76,14 @@ impl<S: UuidStore> UuidResolverActor<S> {
|
||||
|
||||
async fn handle_create(&self, uid: String) -> Result<Uuid> {
|
||||
if !is_index_uid_valid(&uid) {
|
||||
return Err(UuidError::BadlyFormatted(uid))
|
||||
return Err(UuidError::BadlyFormatted(uid));
|
||||
}
|
||||
self.store.create_uuid(uid, true).await
|
||||
}
|
||||
|
||||
async fn handle_get_or_create(&self, uid: String) -> Result<Uuid> {
|
||||
if !is_index_uid_valid(&uid) {
|
||||
return Err(UuidError::BadlyFormatted(uid))
|
||||
return Err(UuidError::BadlyFormatted(uid));
|
||||
}
|
||||
self.store.create_uuid(uid, false).await
|
||||
}
|
||||
@ -106,7 +109,8 @@ impl<S: UuidStore> UuidResolverActor<S> {
|
||||
}
|
||||
|
||||
fn is_index_uid_valid(uid: &str) -> bool {
|
||||
uid.chars().all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|
||||
uid.chars()
|
||||
.all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
|
||||
}
|
||||
|
||||
#[derive(Clone)]
|
||||
@ -235,7 +239,8 @@ impl UuidStore for HeedUuidStore {
|
||||
Ok(uuid)
|
||||
}
|
||||
}
|
||||
}).await?
|
||||
})
|
||||
.await?
|
||||
}
|
||||
|
||||
async fn get_uuid(&self, name: String) -> Result<Option<Uuid>> {
|
||||
@ -250,7 +255,8 @@ impl UuidStore for HeedUuidStore {
|
||||
}
|
||||
None => Ok(None),
|
||||
}
|
||||
}).await?
|
||||
})
|
||||
.await?
|
||||
}
|
||||
|
||||
async fn delete(&self, uid: String) -> Result<Option<Uuid>> {
|
||||
@ -265,9 +271,10 @@ impl UuidStore for HeedUuidStore {
|
||||
txn.commit()?;
|
||||
Ok(Some(uuid))
|
||||
}
|
||||
None => Ok(None)
|
||||
None => Ok(None),
|
||||
}
|
||||
}).await?
|
||||
})
|
||||
.await?
|
||||
}
|
||||
|
||||
async fn list(&self) -> Result<Vec<(String, Uuid)>> {
|
||||
@ -282,6 +289,7 @@ impl UuidStore for HeedUuidStore {
|
||||
entries.push((name.to_owned(), uuid))
|
||||
}
|
||||
Ok(entries)
|
||||
}).await?
|
||||
})
|
||||
.await?
|
||||
}
|
||||
}
|
||||
|
@ -1,63 +1,59 @@
|
||||
pub mod data;
|
||||
pub mod error;
|
||||
pub mod helpers;
|
||||
pub mod option;
|
||||
pub mod routes;
|
||||
mod index;
|
||||
mod index_controller;
|
||||
pub mod option;
|
||||
pub mod routes;
|
||||
|
||||
pub use option::Opt;
|
||||
pub use self::data::Data;
|
||||
pub use option::Opt;
|
||||
|
||||
#[macro_export]
|
||||
macro_rules! create_app {
|
||||
($data:expr, $enable_frontend:expr) => {
|
||||
{
|
||||
use actix_cors::Cors;
|
||||
use actix_web::App;
|
||||
use actix_web::middleware::TrailingSlash;
|
||||
use actix_web::{web, middleware};
|
||||
use meilisearch_http::error::payload_error_handler;
|
||||
use meilisearch_http::routes::*;
|
||||
($data:expr, $enable_frontend:expr) => {{
|
||||
use actix_cors::Cors;
|
||||
use actix_web::middleware::TrailingSlash;
|
use actix_web::App;
use actix_web::{middleware, web};
use meilisearch_http::error::payload_error_handler;
use meilisearch_http::routes::*;

let app = App::new()
.data($data.clone())
.app_data(
web::JsonConfig::default()
let app = App::new()
.data($data.clone())
.app_data(
web::JsonConfig::default()
.limit($data.http_payload_size_limit())
.content_type(|_mime| true) // Accept all mime types
.error_handler(|err, _req| payload_error_handler(err).into()),
)
.app_data(
web::QueryConfig::default()
.error_handler(|err, _req| payload_error_handler(err).into())
)
.configure(document::services)
.configure(index::services)
.configure(search::services)
.configure(settings::services)
.configure(stop_words::services)
.configure(synonym::services)
.configure(health::services)
.configure(stats::services)
.configure(key::services);
//.configure(routes::dump::services);
let app = if $enable_frontend {
app
.service(load_html)
.service(load_css)
} else {
app
};
app.wrap(
Cors::default()
)
.app_data(
web::QueryConfig::default()
.error_handler(|err, _req| payload_error_handler(err).into()),
)
.configure(document::services)
.configure(index::services)
.configure(search::services)
.configure(settings::services)
.configure(stop_words::services)
.configure(synonym::services)
.configure(health::services)
.configure(stats::services)
.configure(key::services);
//.configure(routes::dump::services);
let app = if $enable_frontend {
app.service(load_html).service(load_css)
} else {
app
};
app.wrap(
Cors::default()
.send_wildcard()
.allowed_headers(vec!["content-type", "x-meili-api-key"])
.max_age(86_400) // 24h
)
.wrap(middleware::Logger::default())
.wrap(middleware::Compress::default())
.wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
}
};
.max_age(86_400), // 24h
)
.wrap(middleware::Logger::default())
.wrap(middleware::Compress::default())
.wrap(middleware::NormalizePath::new(TrailingSlash::Trim))
}};
}

@ -2,7 +2,7 @@ use std::env;

use actix_web::HttpServer;
use main_error::MainError;
use meilisearch_http::{Data, Opt, create_app};
use meilisearch_http::{create_app, Data, Opt};
use structopt::StructOpt;

//mod analytics;
@ -44,29 +44,30 @@ async fn main() -> Result<(), MainError> {
}
}
"development" => {
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info")).init();
env_logger::Builder::from_env(env_logger::Env::default().default_filter_or("info"))
.init();
}
_ => unreachable!(),
}

//if let Some(path) = &opt.import_snapshot {
//snapshot::load_snapshot(&opt.db_path, path, opt.ignore_snapshot_if_db_exists, opt.ignore_missing_snapshot)?;
//snapshot::load_snapshot(&opt.db_path, path, opt.ignore_snapshot_if_db_exists, opt.ignore_missing_snapshot)?;
//}

let data = Data::new(opt.clone())?;

//if !opt.no_analytics {
//let analytics_data = data.clone();
//let analytics_opt = opt.clone();
//thread::spawn(move || analytics::analytics_sender(analytics_data, analytics_opt));
//let analytics_data = data.clone();
//let analytics_opt = opt.clone();
//thread::spawn(move || analytics::analytics_sender(analytics_data, analytics_opt));
//}

//if let Some(path) = &opt.import_dump {
//dump::import_dump(&data, path, opt.dump_batch_size)?;
//dump::import_dump(&data, path, opt.dump_batch_size)?;
//}

//if opt.schedule_snapshot {
//snapshot::schedule_snapshot(data.clone(), &opt.snapshot_dir, opt.snapshot_interval_sec.unwrap_or(86400))?;
//snapshot::schedule_snapshot(data.clone(), &opt.snapshot_dir, opt.snapshot_interval_sec.unwrap_or(86400))?;
//}

print_launch_resume(&opt, &data);
@ -78,11 +79,14 @@ async fn main() -> Result<(), MainError> {
Ok(())
}

async fn run_http(data: Data, opt: Opt, enable_frontend: bool) -> Result<(), Box<dyn std::error::Error>> {

async fn run_http(
data: Data,
opt: Opt,
enable_frontend: bool,
) -> Result<(), Box<dyn std::error::Error>> {
let http_server = HttpServer::new(move || create_app!(&data, enable_frontend))
// Disable signals allows the server to terminate immediately when a user enter CTRL-C
.disable_signals();
// Disable signals allows the server to terminate immediately when a user enter CTRL-C
.disable_signals();

if let Some(config) = opt.get_ssl_config()? {
http_server
@ -95,7 +99,6 @@ async fn run_http(data: Data, opt: Opt, enable_frontend: bool) -> Result<(), Box
Ok(())
}

pub fn print_launch_resume(opt: &Opt, data: &Data) {
let ascii_name = r#"
888b d888 d8b 888 d8b .d8888b. 888

@ -1,15 +1,15 @@
use std::{error, fs};
use std::io::{BufReader, Read};
use std::path::PathBuf;
use std::sync::Arc;
use std::{error, fs};

use byte_unit::Byte;
use grenad::CompressionType;
use rustls::internal::pemfile::{certs, pkcs8_private_keys, rsa_private_keys};
use rustls::{
AllowAnyAnonymousOrAuthenticatedClient, AllowAnyAuthenticatedClient, NoClientAuth,
RootCertStore,
};
use grenad::CompressionType;
use structopt::StructOpt;

#[derive(Debug, Clone, StructOpt)]
@ -99,7 +99,11 @@ pub struct Opt {
/// The Sentry DSN to use for error reporting. This defaults to the MeiliSearch Sentry project.
/// You can disable sentry all together using the `--no-sentry` flag or `MEILI_NO_SENTRY` environment variable.
#[cfg(all(not(debug_assertions), feature = "sentry"))]
#[structopt(long, env = "SENTRY_DSN", default_value = "https://5ddfa22b95f241198be2271aaf028653@sentry.io/3060337")]
#[structopt(
long,
env = "SENTRY_DSN",
default_value = "https://5ddfa22b95f241198be2271aaf028653@sentry.io/3060337"
)]
pub sentry_dsn: String,

/// Disable Sentry error reporting.

@ -7,10 +7,10 @@ use milli::update::{IndexDocumentsMethod, UpdateFormat};
use serde::Deserialize;
use serde_json::Value;

use crate::Data;
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;

const DEFAULT_RETRIEVE_DOCUMENTS_OFFSET: usize = 0;
const DEFAULT_RETRIEVE_DOCUMENTS_LIMIT: usize = 20;
@ -19,7 +19,10 @@ macro_rules! guard_content_type {
($fn_name:ident, $guard_value:literal) => {
fn $fn_name(head: &actix_web::dev::RequestHead) -> bool {
if let Some(content_type) = head.headers.get("Content-Type") {
content_type.to_str().map(|v| v.contains($guard_value)).unwrap_or(false)
content_type
.to_str()
.map(|v| v.contains($guard_value))
.unwrap_or(false)
} else {
false
}
@ -57,7 +60,10 @@ async fn get_document(
) -> Result<HttpResponse, ResponseError> {
let index = path.index_uid.clone();
let id = path.document_id.clone();
match data.retrieve_document(index, id, None as Option<Vec<String>>).await {
match data
.retrieve_document(index, id, None as Option<Vec<String>>)
.await
{
Ok(document) => {
let json = serde_json::to_string(&document).unwrap();
Ok(HttpResponse::Ok().body(json))
@ -76,7 +82,10 @@ async fn delete_document(
data: web::Data<Data>,
path: web::Path<DocumentParam>,
) -> Result<HttpResponse, ResponseError> {
match data.delete_documents(path.index_uid.clone(), vec![path.document_id.clone()]).await {
match data
.delete_documents(path.index_uid.clone(), vec![path.document_id.clone()])
.await
{
Ok(result) => {
let json = serde_json::to_string(&result).unwrap();
Ok(HttpResponse::Ok().body(json))
@ -104,16 +113,17 @@ async fn get_all_documents(
let attributes_to_retrieve = params
.attributes_to_retrieve
.as_ref()
.map(|attrs| attrs
.split(',')
.map(String::from)
.collect::<Vec<_>>());
.map(|attrs| attrs.split(',').map(String::from).collect::<Vec<_>>());

match data.retrieve_documents(
path.index_uid.clone(),
params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
attributes_to_retrieve).await {
match data
.retrieve_documents(
path.index_uid.clone(),
params.offset.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_OFFSET),
params.limit.unwrap_or(DEFAULT_RETRIEVE_DOCUMENTS_LIMIT),
attributes_to_retrieve,
)
.await
{
Ok(docs) => {
let json = serde_json::to_string(&docs).unwrap();
Ok(HttpResponse::Ok().body(json))
@ -149,7 +159,8 @@ async fn add_documents_json(
UpdateFormat::Json,
body,
params.primary_key.clone(),
).await;
)
.await;

match addition_result {
Ok(update) => {
@ -163,7 +174,6 @@ async fn add_documents_json(
}
}

/// Default route for adding documents, this should return an error and redirect to the documentation
#[post("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
async fn add_documents_default(
@ -191,7 +201,7 @@ async fn update_documents_default(
#[put(
"/indexes/{index_uid}/documents",
wrap = "Authentication::Private",
guard = "guard_json",
guard = "guard_json"
)]
async fn update_documents(
data: web::Data<Data>,
@ -206,7 +216,8 @@ async fn update_documents(
UpdateFormat::Json,
body,
params.primary_key.clone(),
).await;
)
.await;

match addition_result {
Ok(update) => {
@ -231,7 +242,11 @@ async fn delete_documents(
) -> Result<HttpResponse, ResponseError> {
let ids = body
.iter()
.map(|v| v.as_str().map(String::from).unwrap_or_else(|| v.to_string()))
.map(|v| {
v.as_str()
.map(String::from)
.unwrap_or_else(|| v.to_string())
})
.collect();

match data.delete_documents(path.index_uid.clone(), ids).await {

@ -3,10 +3,10 @@ use actix_web::{web, HttpResponse};
use chrono::{DateTime, Utc};
use serde::{Deserialize, Serialize};

use crate::Data;
use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::routes::IndexParam;
use crate::Data;

pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(list_indexes)
@ -18,7 +18,6 @@ pub fn services(cfg: &mut web::ServiceConfig) {
.service(get_all_updates_status);
}

#[get("/indexes", wrap = "Authentication::Private")]
async fn list_indexes(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
match data.list_indexes().await {
@ -96,7 +95,10 @@ async fn update_index(
body: web::Json<UpdateIndexRequest>,
) -> Result<HttpResponse, ResponseError> {
let body = body.into_inner();
match data.update_index(path.into_inner().index_uid, body.primary_key, body.uid).await {
match data
.update_index(path.into_inner().index_uid, body.primary_key, body.uid)
.await
{
Ok(meta) => {
let json = serde_json::to_string(&meta).unwrap();
Ok(HttpResponse::Ok().body(json))
@ -135,7 +137,9 @@ async fn get_update_status(
path: web::Path<UpdateParam>,
) -> Result<HttpResponse, ResponseError> {
let params = path.into_inner();
let result = data.get_update_status(params.index_uid, params.update_id).await;
let result = data
.get_update_status(params.index_uid, params.update_id)
.await;
match result {
Ok(meta) => {
let json = serde_json::to_string(&meta).unwrap();

@ -1,6 +1,6 @@
use actix_web::get;
use actix_web::web;
use actix_web::HttpResponse;
use actix_web::get;
use serde::Serialize;

use crate::helpers::Authentication;

@ -6,9 +6,9 @@ use serde::Deserialize;

use crate::error::ResponseError;
use crate::helpers::Authentication;
use crate::index::{SearchQuery, DEFAULT_SEARCH_LIMIT};
use crate::routes::IndexParam;
use crate::Data;
use crate::index::{SearchQuery, DEFAULT_SEARCH_LIMIT};

pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(search_with_post).service(search_with_url_query);
@ -80,7 +80,9 @@ async fn search_with_url_query(
let query: SearchQuery = match params.into_inner().try_into() {
Ok(q) => q,
Err(e) => {
return Ok(HttpResponse::BadRequest().body(serde_json::json!({ "error": e.to_string() })))
return Ok(
HttpResponse::BadRequest().body(serde_json::json!({ "error": e.to_string() }))
)
}
};
let search_result = data.search(path.into_inner().index_uid, query).await;
@ -101,7 +103,9 @@ async fn search_with_post(
path: web::Path<IndexParam>,
params: web::Json<SearchQuery>,
) -> Result<HttpResponse, ResponseError> {
let search_result = data.search(path.into_inner().index_uid, params.into_inner()).await;
let search_result = data
.search(path.into_inner().index_uid, params.into_inner())
.await;
match search_result {
Ok(docs) => {
let docs = serde_json::to_string(&docs).unwrap();

@ -1,9 +1,9 @@
use actix_web::{web, HttpResponse, delete, get, post};
use actix_web::{delete, get, post, web, HttpResponse};

use crate::Data;
use crate::error::ResponseError;
use crate::index::Settings;
use crate::helpers::Authentication;
use crate::index::Settings;
use crate::Data;

#[macro_export]
macro_rules! make_setting_route {
@ -98,15 +98,15 @@ make_setting_route!(
);

//make_setting_route!(
//"/indexes/{index_uid}/settings/distinct-attribute",
//String,
//distinct_attribute
//"/indexes/{index_uid}/settings/distinct-attribute",
//String,
//distinct_attribute
//);

//make_setting_route!(
//"/indexes/{index_uid}/settings/ranking-rules",
//Vec<String>,
//ranking_rules
//"/indexes/{index_uid}/settings/ranking-rules",
//Vec<String>,
//ranking_rules
//);

macro_rules! create_services {
@ -137,7 +137,10 @@ async fn update_all(
index_uid: web::Path<String>,
body: web::Json<Settings>,
) -> Result<HttpResponse, ResponseError> {
match data.update_settings(index_uid.into_inner(), body.into_inner(), true).await {
match data
.update_settings(index_uid.into_inner(), body.into_inner(), true)
.await
{
Ok(update_result) => {
let json = serde_json::to_string(&update_result).unwrap();
Ok(HttpResponse::Ok().body(json))
@ -170,7 +173,10 @@ async fn delete_all(
index_uid: web::Path<String>,
) -> Result<HttpResponse, ResponseError> {
let settings = Settings::cleared();
match data.update_settings(index_uid.into_inner(), settings, false).await {
match data
.update_settings(index_uid.into_inner(), settings, false)
.await
{
Ok(update_result) => {
let json = serde_json::to_string(&update_result).unwrap();
Ok(HttpResponse::Ok().body(json))
@ -180,4 +186,3 @@ async fn delete_all(
}
}
}

@ -1,8 +1,8 @@
use std::collections::{HashMap, BTreeMap};
use std::collections::{BTreeMap, HashMap};

use actix_web::get;
use actix_web::web;
use actix_web::HttpResponse;
use actix_web::get;
use chrono::{DateTime, Utc};
use serde::Serialize;

@ -1,5 +1,5 @@
use actix_web::{web, HttpResponse};
use actix_web::{delete, get, post};
use actix_web::{web, HttpResponse};
use std::collections::BTreeSet;

use crate::error::ResponseError;

@ -1,7 +1,7 @@
use std::collections::BTreeMap;

use actix_web::{web, HttpResponse};
use actix_web::{delete, get, post};
use actix_web::{web, HttpResponse};

use crate::error::ResponseError;
use crate::helpers::Authentication;

@ -19,7 +19,10 @@ impl Index<'_> {

pub async fn load_test_set(&self) -> u64 {
let url = format!("/indexes/{}/documents", self.uid);
let (response, code) = self.service.post_str(url, include_str!("../assets/test_set.json")).await;
let (response, code) = self
.service
.post_str(url, include_str!("../assets/test_set.json"))
.await;
assert_eq!(code, 200);
let update_id = response["updateId"].as_i64().unwrap();
self.wait_update_id(update_id as u64).await;
@ -60,7 +63,11 @@ impl Index<'_> {
self.service.post(url, documents).await
}

pub async fn update_documents(&self, documents: Value, primary_key: Option<&str>) -> (Value, StatusCode) {
pub async fn update_documents(
&self,
documents: Value,
primary_key: Option<&str>,
) -> (Value, StatusCode) {
let url = match primary_key {
Some(key) => format!("/indexes/{}/documents?primaryKey={}", self.uid, key),
None => format!("/indexes/{}/documents", self.uid),
@ -95,7 +102,11 @@ impl Index<'_> {
self.service.get(url).await
}

pub async fn get_document(&self, id: u64, _options: Option<GetDocumentOptions>) -> (Value, StatusCode) {
pub async fn get_document(
&self,
id: u64,
_options: Option<GetDocumentOptions>,
) -> (Value, StatusCode) {
let url = format!("/indexes/{}/documents/{}", self.uid, id);
self.service.get(url).await
}
@ -111,7 +122,10 @@ impl Index<'_> {
}

if let Some(attributes_to_retrieve) = options.attributes_to_retrieve {
url.push_str(&format!("attributesToRetrieve={}&", attributes_to_retrieve.join(",")));
url.push_str(&format!(
"attributesToRetrieve={}&",
attributes_to_retrieve.join(",")
));
}

self.service.get(url).await
@ -129,7 +143,9 @@ impl Index<'_> {

pub async fn delete_batch(&self, ids: Vec<u64>) -> (Value, StatusCode) {
let url = format!("/indexes/{}/documents/delete-batch", self.uid);
self.service.post(url, serde_json::to_value(&ids).unwrap()).await
self.service
.post(url, serde_json::to_value(&ids).unwrap())
.await
}

pub async fn settings(&self) -> (Value, StatusCode) {

@ -2,8 +2,8 @@ mod index;
mod server;
mod service;

pub use index::{GetAllDocumentsOptions, GetDocumentOptions};
pub use server::Server;
pub use index::{GetDocumentOptions, GetAllDocumentsOptions};

/// Performs a search test on both post and get routes
#[macro_export]

@ -5,7 +5,7 @@ use tempdir::TempDir;
use urlencoding::encode;

use meilisearch_http::data::Data;
use meilisearch_http::option::{Opt, IndexerOpts};
use meilisearch_http::option::{IndexerOpts, Opt};

use super::index::Index;
use super::service::Service;
@ -55,10 +55,7 @@ impl Server {
let data = Data::new(opt).unwrap();
let service = Service(data);

Server {
service,
_dir: dir,
}
Server { service, _dir: dir }
}

/// Returns a view to an index. There is no guarantee that the index exists.

@ -1,15 +1,14 @@
use actix_web::{http::StatusCode, test};
use serde_json::Value;

use meilisearch_http::data::Data;
use meilisearch_http::create_app;
use meilisearch_http::data::Data;

pub struct Service(pub Data);

impl Service {
pub async fn post(&self, url: impl AsRef<str>, body: Value) -> (Value, StatusCode) {
let mut app =
test::init_service(create_app!(&self.0, true)).await;
let mut app = test::init_service(create_app!(&self.0, true)).await;

let req = test::TestRequest::post()
.uri(url.as_ref())
@ -24,9 +23,12 @@ impl Service {
}

/// Send a test post request from a text body, with a `content-type:application/json` header.
pub async fn post_str(&self, url: impl AsRef<str>, body: impl AsRef<str>) -> (Value, StatusCode) {
let mut app =
test::init_service(create_app!(&self.0, true)).await;
pub async fn post_str(
&self,
url: impl AsRef<str>,
body: impl AsRef<str>,
) -> (Value, StatusCode) {
let mut app = test::init_service(create_app!(&self.0, true)).await;

let req = test::TestRequest::post()
.uri(url.as_ref())
@ -42,8 +44,7 @@ impl Service {
}

pub async fn get(&self, url: impl AsRef<str>) -> (Value, StatusCode) {
let mut app =
test::init_service(create_app!(&self.0, true)).await;
let mut app = test::init_service(create_app!(&self.0, true)).await;

let req = test::TestRequest::get().uri(url.as_ref()).to_request();
let res = test::call_service(&mut app, req).await;
@ -55,8 +56,7 @@ impl Service {
}

pub async fn put(&self, url: impl AsRef<str>, body: Value) -> (Value, StatusCode) {
let mut app =
test::init_service(create_app!(&self.0, true)).await;
let mut app = test::init_service(create_app!(&self.0, true)).await;

let req = test::TestRequest::put()
.uri(url.as_ref())
@ -71,8 +71,7 @@ impl Service {
}

pub async fn delete(&self, url: impl AsRef<str>) -> (Value, StatusCode) {
let mut app =
test::init_service(create_app!(&self.0, true)).await;
let mut app = test::init_service(create_app!(&self.0, true)).await;

let req = test::TestRequest::delete().uri(url.as_ref()).to_request();
let res = test::call_service(&mut app, req).await;

@ -1,7 +1,7 @@
use serde_json::{json, Value};
use chrono::DateTime;
use serde_json::{json, Value};

use crate::common::{Server, GetAllDocumentsOptions};
use crate::common::{GetAllDocumentsOptions, Server};

#[actix_rt::test]
async fn add_documents_no_index_creation() {
@ -32,9 +32,12 @@ async fn add_documents_no_index_creation() {
assert_eq!(response["updateId"], 0);
assert_eq!(response["success"]["DocumentsAddition"]["nb_documents"], 1);

let processed_at = DateTime::parse_from_rfc3339(response["processedAt"].as_str().unwrap()).unwrap();
let enqueued_at = DateTime::parse_from_rfc3339(response["enqueuedAt"].as_str().unwrap()).unwrap();
let started_processing_at = DateTime::parse_from_rfc3339(response["startedProcessingAt"].as_str().unwrap()).unwrap();
let processed_at =
DateTime::parse_from_rfc3339(response["processedAt"].as_str().unwrap()).unwrap();
let enqueued_at =
DateTime::parse_from_rfc3339(response["enqueuedAt"].as_str().unwrap()).unwrap();
let started_processing_at =
DateTime::parse_from_rfc3339(response["startedProcessingAt"].as_str().unwrap()).unwrap();
assert!(processed_at > started_processing_at);
assert!(started_processing_at > enqueued_at);

@ -71,7 +74,8 @@ async fn document_addition_with_primary_key() {
"content": "foo",
}
]);
let (_response, code) = index.add_documents(documents, Some("primary")).await; assert_eq!(code, 200);
let (_response, code) = index.add_documents(documents, Some("primary")).await;
assert_eq!(code, 200);

index.wait_update_id(0).await;

@ -97,7 +101,8 @@ async fn document_update_with_primary_key() {
"content": "foo",
}
]);
let (_response, code) = index.update_documents(documents, Some("primary")).await; assert_eq!(code, 200);
let (_response, code) = index.update_documents(documents, Some("primary")).await;
assert_eq!(code, 200);

index.wait_update_id(0).await;

@ -158,7 +163,7 @@ async fn update_documents_with_primary_key_and_primary_key_already_exists() {
assert_eq!(code, 200);

index.wait_update_id(0).await;
let (response, code) = index.get_update(0).await;
let (response, code) = index.get_update(0).await;
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["updateId"], 0);
@ -263,7 +268,10 @@ async fn update_document() {

let (response, code) = index.get_document(1, None).await;
assert_eq!(code, 200);
assert_eq!(response.to_string(), r##"{"doc_id":1,"content":"foo","other":"bar"}"##);
assert_eq!(
response.to_string(),
r##"{"doc_id":1,"content":"foo","other":"bar"}"##
);
}

#[actix_rt::test]
@ -275,7 +283,12 @@ async fn add_larger_dataset() {
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["success"]["DocumentsAddition"]["nb_documents"], 77);
let (response, code) = index.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() }).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(1000),
..Default::default()
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 77);
}
@ -291,7 +304,12 @@ async fn update_larger_dataset() {
assert_eq!(code, 200);
assert_eq!(response["status"], "processed");
assert_eq!(response["success"]["DocumentsAddition"]["nb_documents"], 77);
let (response, code) = index.get_all_documents(GetAllDocumentsOptions { limit: Some(1000), ..Default::default() }).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(1000),
..Default::default()
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 77);
}

@ -1,6 +1,6 @@
use serde_json::json;

use crate::common::{Server, GetAllDocumentsOptions};
use crate::common::{GetAllDocumentsOptions, Server};

#[actix_rt::test]
async fn delete_one_document_unexisting_index() {
@ -24,7 +24,9 @@ async fn delete_one_unexisting_document() {
async fn delete_one_document() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!([{ "id": 0, "content": "foobar" }]), None).await;
index
.add_documents(json!([{ "id": 0, "content": "foobar" }]), None)
.await;
index.wait_update_id(0).await;
let (_response, code) = server.index("test").delete_document(0).await;
assert_eq!(code, 200);
@ -39,20 +41,26 @@ async fn clear_all_documents_unexisting_index() {
let server = Server::new().await;
let (_response, code) = server.index("test").clear_all_documents().await;
assert_eq!(code, 400);

}

#[actix_rt::test]
async fn clear_all_documents() {
let server = Server::new().await;
let index = server.index("test");
index.add_documents(json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]), None).await;
index
.add_documents(
json!([{ "id": 1, "content": "foobar" }, { "id": 0, "content": "foobar" }]),
None,
)
.await;
index.wait_update_id(0).await;
let (_response, code) = index.clear_all_documents().await;
assert_eq!(code, 200);

let _update = index.wait_update_id(1).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert!(response.as_array().unwrap().is_empty());
}
@ -67,7 +75,9 @@ async fn clear_all_documents_empty_index() {
assert_eq!(code, 200);

let _update = index.wait_update_id(0).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert!(response.as_array().unwrap().is_empty());
}
@ -89,13 +99,14 @@ async fn delete_batch() {
assert_eq!(code, 200);

let _update = index.wait_update_id(1).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 1);
assert_eq!(response.as_array().unwrap()[0]["id"], 3);
}

#[actix_rt::test]
async fn delete_no_document_batch() {
let server = Server::new().await;
@ -106,7 +117,9 @@ async fn delete_no_document_batch() {
assert_eq!(code, 200);

let _update = index.wait_update_id(1).await;
let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 3);
}

@ -1,5 +1,5 @@
use crate::common::Server;
use crate::common::GetAllDocumentsOptions;
use crate::common::Server;

use serde_json::json;

@ -8,10 +8,7 @@ use serde_json::json;
#[actix_rt::test]
async fn get_unexisting_index_single_document() {
let server = Server::new().await;
let (_response, code) = server
.index("test")
.get_document(1, None)
.await;
let (_response, code) = server.index("test").get_document(1, None).await;
assert_eq!(code, 400);
}

@ -20,9 +17,7 @@ async fn get_unexisting_document() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
let (_response, code) = index
.get_document(1, None)
.await;
let (_response, code) = index.get_document(1, None).await;
assert_eq!(code, 400);
}

@ -40,14 +35,15 @@ async fn get_document() {
let (_, code) = index.add_documents(documents, None).await;
assert_eq!(code, 200);
index.wait_update_id(0).await;
let (response, code) = index
.get_document(0, None)
.await;
let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);
assert_eq!(response, serde_json::json!( {
"id": 0,
"content": "foobar",
}));
assert_eq!(
response,
serde_json::json!( {
"id": 0,
"content": "foobar",
})
);
}

#[actix_rt::test]
@ -67,7 +63,9 @@ async fn get_no_documents() {
let (_, code) = index.create(None).await;
assert_eq!(code, 200);

let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert!(response.as_array().unwrap().is_empty());
}
@ -78,7 +76,9 @@ async fn get_all_documents_no_options() {
let index = server.index("test");
index.load_test_set().await;

let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
let arr = response.as_array().unwrap();
assert_eq!(arr.len(), 20);
@ -109,7 +109,12 @@ async fn test_get_all_documents_limit() {
let index = server.index("test");
index.load_test_set().await;

let (response, code) = index.get_all_documents(GetAllDocumentsOptions { limit: Some(5), ..Default::default() }).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
limit: Some(5),
..Default::default()
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 5);
assert_eq!(response.as_array().unwrap()[0]["id"], 0);
@ -121,7 +126,12 @@ async fn test_get_all_documents_offset() {
let index = server.index("test");
index.load_test_set().await;

let (response, code) = index.get_all_documents(GetAllDocumentsOptions { offset: Some(5), ..Default::default() }).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
offset: Some(5),
..Default::default()
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(response.as_array().unwrap()[0]["id"], 13);
@ -133,35 +143,90 @@ async fn test_get_all_documents_attributes_to_retrieve() {
let index = server.index("test");
index.load_test_set().await;

let (response, code) = index.get_all_documents(GetAllDocumentsOptions { attributes_to_retrieve: Some(vec!["name"]), ..Default::default() }).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
attributes_to_retrieve: Some(vec!["name"]),
..Default::default()
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(response.as_array().unwrap()[0].as_object().unwrap().keys().count(), 1);
assert!(response.as_array().unwrap()[0].as_object().unwrap().get("name").is_some());
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
1
);
assert!(response.as_array().unwrap()[0]
.as_object()
.unwrap()
.get("name")
.is_some());

let (response, code) = index.get_all_documents(GetAllDocumentsOptions { attributes_to_retrieve: Some(vec![]), ..Default::default() }).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
attributes_to_retrieve: Some(vec![]),
..Default::default()
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(response.as_array().unwrap()[0].as_object().unwrap().keys().count(), 0);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
0
);

let (response, code) = index.get_all_documents(GetAllDocumentsOptions { attributes_to_retrieve: Some(vec!["name", "tags"]), ..Default::default() }).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions {
attributes_to_retrieve: Some(vec!["name", "tags"]),
..Default::default()
})
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(response.as_array().unwrap()[0].as_object().unwrap().keys().count(), 2);
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
2
);
}

#[actix_rt::test]
async fn get_documents_displayed_attributes() {
let server = Server::new().await;
let index = server.index("test");
index.update_settings(json!({"displayedAttributes": ["gender"]})).await;
index
.update_settings(json!({"displayedAttributes": ["gender"]}))
.await;
index.load_test_set().await;

let (response, code) = index.get_all_documents(GetAllDocumentsOptions::default()).await;
let (response, code) = index
.get_all_documents(GetAllDocumentsOptions::default())
.await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 20);
assert_eq!(response.as_array().unwrap()[0].as_object().unwrap().keys().count(), 1);
assert!(response.as_array().unwrap()[0].as_object().unwrap().get("gender").is_some());
assert_eq!(
response.as_array().unwrap()[0]
.as_object()
.unwrap()
.keys()
.count(),
1
);
assert!(response.as_array().unwrap()[0]
.as_object()
.unwrap()
.get("gender")
.is_some());

let (response, code) = index.get_document(0, None).await;
assert_eq!(code, 200);

@ -1,3 +1,3 @@
mod add_documents;
mod get_documents;
mod delete_documents;
mod get_documents;

@ -7,7 +7,6 @@ async fn create_index_no_primary_key() {
let index = server.index("test");
let (response, code) = index.create(None).await;

assert_eq!(code, 200);
assert_eq!(response["uid"], "test");
assert!(response.get("createdAt").is_some());

@ -6,12 +6,10 @@ async fn create_and_delete_index() {
let index = server.index("test");
let (_response, code) = index.create(None).await;

assert_eq!(code, 200);

let (_response, code) = index.delete().await;

assert_eq!(code, 200);

assert_eq!(index.get().await.1, 400);

@ -52,6 +52,12 @@ async fn list_multiple_indexes() {
assert!(response.is_array());
let arr = response.as_array().unwrap();
assert_eq!(arr.len(), 2);
assert!(arr.iter().find(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null).is_some());
assert!(arr.iter().find(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key").is_some());
assert!(arr
.iter()
.find(|entry| entry["uid"] == "test" && entry["primaryKey"] == Value::Null)
.is_some());
assert!(arr
.iter()
.find(|entry| entry["uid"] == "test1" && entry["primaryKey"] == "key")
.is_some());
}

@ -1,4 +1,4 @@
mod create_index;
mod delete_index;
mod get_index;
mod update_index;
mod delete_index;

@ -1,8 +1,8 @@
mod common;
mod documents;
mod index;
mod search;
mod settings;
mod documents;
mod updates;

// Tests are isolated by features in different modules to allow better readability, test

@ -1,3 +1,2 @@
// This modules contains all the test concerning search. Each particular feture of the search
// should be tested in its own module to isolate tests and keep the tests readable.

@ -21,7 +21,17 @@ async fn get_settings() {
assert_eq!(settings["searchableAttributes"], json!(["*"]));
println!("{:?}", settings);
assert_eq!(settings["attributesForFaceting"], json!({}));
assert_eq!(settings["rankingRules"], json!(["typo", "words", "proximity", "attribute", "wordsPosition", "exactness"]));
assert_eq!(
settings["rankingRules"],
json!([
"typo",
"words",
"proximity",
"attribute",
"wordsPosition",
"exactness"
])
);
}

#[actix_rt::test]
@ -36,20 +46,24 @@ async fn update_settings_unknown_field() {
async fn test_partial_update() {
let server = Server::new().await;
let index = server.index("test");
let (_response, _code) = index.update_settings(json!({"displayedAttributes": ["foo"]})).await;
let (_response, _code) = index
.update_settings(json!({"displayedAttributes": ["foo"]}))
.await;
index.wait_update_id(0).await;
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"],json!(["foo"]));
assert_eq!(response["searchableAttributes"],json!(["*"]));
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));

let (_response, _) = index.update_settings(json!({"searchableAttributes": ["bar"]})).await;
let (_response, _) = index
.update_settings(json!({"searchableAttributes": ["bar"]}))
.await;
index.wait_update_id(1).await;

let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"],json!(["foo"]));
assert_eq!(response["searchableAttributes"],json!(["bar"]));
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["bar"]));
}

#[actix_rt::test]
@ -64,20 +78,22 @@ async fn delete_settings_unexisting_index() {
async fn reset_all_settings() {
let server = Server::new().await;
let index = server.index("test");
index.update_settings(json!({"displayedAttributes": ["foo"], "searchableAttributes": ["bar"]})).await;
index
.update_settings(json!({"displayedAttributes": ["foo"], "searchableAttributes": ["bar"]}))
.await;
index.wait_update_id(0).await;
let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"],json!(["foo"]));
assert_eq!(response["searchableAttributes"],json!(["bar"]));
assert_eq!(response["displayedAttributes"], json!(["foo"]));
assert_eq!(response["searchableAttributes"], json!(["bar"]));

index.delete_settings().await;
index.wait_update_id(1).await;

let (response, code) = index.settings().await;
assert_eq!(code, 200);
assert_eq!(response["displayedAttributes"],json!(["*"]));
assert_eq!(response["searchableAttributes"],json!(["*"]));
assert_eq!(response["displayedAttributes"], json!(["*"]));
assert_eq!(response["searchableAttributes"], json!(["*"]));
}

#[actix_rt::test]
@ -149,4 +165,5 @@ macro_rules! test_setting_routes {
test_setting_routes!(
attributes_for_faceting,
displayed_attributes,
searchable_attributes);
searchable_attributes
);

@ -21,13 +21,15 @@ async fn get_update_status() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.add_documents(
serde_json::json!([{
"id": 1,
"content": "foobar",
}]),
None
).await;
index
.add_documents(
serde_json::json!([{
"id": 1,
"content": "foobar",
}]),
None,
)
.await;
let (_response, code) = index.get_update(0).await;
assert_eq!(code, 200);
// TODO check resonse format, as per #48
@ -55,10 +57,12 @@ async fn list_updates() {
let server = Server::new().await;
let index = server.index("test");
index.create(None).await;
index.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None
).await;
index
.add_documents(
serde_json::from_str(include_str!("../assets/test_set.json")).unwrap(),
None,
)
.await;
let (response, code) = index.list_updates().await;
assert_eq!(code, 200);
assert_eq!(response.as_array().unwrap().len(), 1);