Mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-25 19:45:05 +08:00)
missing payload error

commit 4eb3817b03
parent 18cb514073
@@ -87,6 +87,7 @@ pub enum Code {
     InvalidContentType,
     MissingContentType,
     MalformedPayload,
+    MissingPayload,
 }

 impl Code {
@@ -158,9 +159,14 @@ impl Code {
             DumpProcessFailed => {
                 ErrCode::internal("dump_process_failed", StatusCode::INTERNAL_SERVER_ERROR)
             }
-            MissingContentType => ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE),
+            MissingContentType => {
+                ErrCode::invalid("missing_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
+            }
             MalformedPayload => ErrCode::invalid("malformed_payload", StatusCode::BAD_REQUEST),
-            InvalidContentType => ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE),
+            InvalidContentType => {
+                ErrCode::invalid("invalid_content_type", StatusCode::UNSUPPORTED_MEDIA_TYPE)
+            }
+            MissingPayload => ErrCode::invalid("missing_payload", StatusCode::BAD_REQUEST),
         }
     }
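Each Code variant is paired with a stable error name and an HTTP status; the new MissingPayload variant reuses that pattern with the name "missing_payload" and 400 Bad Request. A minimal, self-contained sketch of that pattern (the ErrorSpec type and spec function below are illustrative stand-ins, not meilisearch-error's actual ErrCode API):

// Illustrative sketch only: mirrors the name/status mapping shown in the hunk
// above, using hypothetical types instead of meilisearch-error's ErrCode.
#[derive(Debug, PartialEq)]
enum Code {
    MissingContentType,
    InvalidContentType,
    MalformedPayload,
    MissingPayload,
}

#[derive(Debug, PartialEq)]
struct ErrorSpec {
    name: &'static str,
    status: u16,
}

fn spec(code: &Code) -> ErrorSpec {
    match code {
        Code::MissingContentType => ErrorSpec { name: "missing_content_type", status: 415 },
        Code::InvalidContentType => ErrorSpec { name: "invalid_content_type", status: 415 },
        Code::MalformedPayload => ErrorSpec { name: "malformed_payload", status: 400 },
        // The new variant: an empty payload is a client error (400 Bad Request).
        Code::MissingPayload => ErrorSpec { name: "missing_payload", status: 400 },
    }
}

fn main() {
    assert_eq!(spec(&Code::MissingPayload), ErrorSpec { name: "missing_payload", status: 400 });
}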
@@ -11,9 +11,9 @@ use serde::{Deserialize, Serialize};

 #[derive(Debug, thiserror::Error)]
 pub enum MeilisearchHttpError {
-    #[error("A Content-Type header is missing. Accepted values for the Content-Type header are: \"application/json\", \"application/x-ndjson\", \"test/csv\"")]
+    #[error("A Content-Type header is missing. Accepted values for the Content-Type header are: \"application/json\", \"application/x-ndjson\", \"text/csv\"")]
     MissingContentType,
-    #[error("The Content-Type \"{0}\" is invalid. Accepted values for the Content-Type header are: \"application/json\", \"application/x-ndjson\", \"test/csv\"")]
+    #[error("The Content-Type \"{0}\" is invalid. Accepted values for the Content-Type header are: \"application/json\", \"application/x-ndjson\", \"text/csv\"")]
     InvalidContentType(String),
 }
@@ -133,13 +133,16 @@ pub async fn add_documents(
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
     document_addition(
-        req.headers().get("Content-type").map(|s| s.to_str().unwrap_or("unkown")),
+        req.headers()
+            .get("Content-type")
+            .map(|s| s.to_str().unwrap_or("unkown")),
         meilisearch,
         path.into_inner().index_uid,
         params.into_inner().primary_key,
         body,
-        IndexDocumentsMethod::ReplaceDocuments)
-    .await
+        IndexDocumentsMethod::ReplaceDocuments,
+    )
+    .await
 }

 pub async fn update_documents(
@@ -151,13 +154,16 @@ pub async fn update_documents(
 ) -> Result<HttpResponse, ResponseError> {
     debug!("called with params: {:?}", params);
     document_addition(
-        req.headers().get("Content-type").map(|s| s.to_str().unwrap_or("unkown")),
+        req.headers()
+            .get("Content-type")
+            .map(|s| s.to_str().unwrap_or("unkown")),
         meilisearch,
         path.into_inner().index_uid,
         params.into_inner().primary_key,
         body,
-        IndexDocumentsMethod::UpdateDocuments)
-    .await
+        IndexDocumentsMethod::UpdateDocuments,
+    )
+    .await
 }

 /// Route used when the payload type is "application/json"
@@ -174,7 +180,9 @@ async fn document_addition(
         Some("application/json") => DocumentAdditionFormat::Json,
         Some("application/x-ndjson") => DocumentAdditionFormat::Ndjson,
         Some("text/csv") => DocumentAdditionFormat::Csv,
-        Some(other) => return Err(MeilisearchHttpError::InvalidContentType(other.to_string()).into()),
+        Some(other) => {
+            return Err(MeilisearchHttpError::InvalidContentType(other.to_string()).into())
+        }
         None => return Err(MeilisearchHttpError::MissingContentType.into()),
     };

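The Content-Type dispatch above is a plain match on the raw header value: the three supported types select a DocumentAdditionFormat, any other value is rejected as InvalidContentType, and a missing header is rejected as MissingContentType. A condensed, self-contained sketch of the same decision logic (the pick_format helper and its error strings are illustrative, not the handler's real signature):

// Illustrative sketch of the Content-Type dispatch in document_addition:
// known types select a format, unknown types and a missing header are errors.
#[derive(Debug, PartialEq)]
enum Format { Json, Ndjson, Csv }

fn pick_format(content_type: Option<&str>) -> Result<Format, String> {
    match content_type {
        Some("application/json") => Ok(Format::Json),
        Some("application/x-ndjson") => Ok(Format::Ndjson),
        Some("text/csv") => Ok(Format::Csv),
        Some(other) => Err(format!("The Content-Type \"{}\" is invalid.", other)),
        None => Err("A Content-Type header is missing.".to_string()),
    }
}

fn main() {
    assert_eq!(pick_format(Some("text/csv")), Ok(Format::Csv));
    assert!(pick_format(Some("image/png")).is_err());
    assert!(pick_format(None).is_err());
}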
@@ -185,9 +193,7 @@ async fn document_addition(
         format,
     };

-    let update_status = meilisearch
-        .register_update(index_uid, update, true)
-        .await?;
+    let update_status = meilisearch.register_update(index_uid, update, true).await?;

     debug!("returns: {:?}", update_status);
     Ok(HttpResponse::Accepted().json(serde_json::json!({ "updateId": update_status.id() })))
@@ -1,4 +1,5 @@
 use std::collections::BTreeMap;
+use std::fmt;
 use std::path::{Path, PathBuf};
 use std::sync::Arc;
 use std::time::Duration;
@@ -75,6 +76,16 @@ pub enum DocumentAdditionFormat {
     Ndjson,
 }

+impl fmt::Display for DocumentAdditionFormat {
+    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self {
+            DocumentAdditionFormat::Json => write!(f, "json"),
+            DocumentAdditionFormat::Ndjson => write!(f, "ndjson"),
+            DocumentAdditionFormat::Csv => write!(f, "csv"),
+        }
+    }
+}
+
 #[derive(Serialize, Debug)]
 #[serde(rename_all = "camelCase")]
 pub struct Stats {
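This Display impl is what lets the format be interpolated into user-facing text, in particular the new "A {0} payload is missing." message further down. A quick usage sketch, re-declaring a minimal copy of the enum so it runs standalone:

use std::fmt;

// Minimal standalone copy of the enum, for illustration only.
enum DocumentAdditionFormat {
    Json,
    Csv,
    Ndjson,
}

impl fmt::Display for DocumentAdditionFormat {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            DocumentAdditionFormat::Json => write!(f, "json"),
            DocumentAdditionFormat::Ndjson => write!(f, "ndjson"),
            DocumentAdditionFormat::Csv => write!(f, "csv"),
        }
    }
}

fn main() {
    // "{}" formatting now works, which the error message relies on.
    assert_eq!(DocumentAdditionFormat::Json.to_string(), "json");
    assert_eq!(DocumentAdditionFormat::Ndjson.to_string(), "ndjson");
    assert_eq!(
        format!("A {} payload is missing.", DocumentAdditionFormat::Csv),
        "A csv payload is missing."
    );
}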
@@ -5,7 +5,7 @@ use meilisearch_error::{Code, ErrorCode};

 use crate::{
     document_formats::DocumentFormatError,
-    index_controller::update_file_store::UpdateFileStoreError,
+    index_controller::{update_file_store::UpdateFileStoreError, DocumentAdditionFormat},
 };

 pub type Result<T> = std::result::Result<T, UpdateLoopError>;
@@ -26,6 +26,8 @@ pub enum UpdateLoopError {
     // TODO: The reference to actix has to go.
     #[error("{0}")]
     PayloadError(#[from] actix_web::error::PayloadError),
+    #[error("A {0} payload is missing.")]
+    MissingPayload(DocumentAdditionFormat),
 }

 impl<T> From<tokio::sync::mpsc::error::SendError<T>> for UpdateLoopError
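In thiserror, the {0} placeholder in #[error(...)] is rendered with the Display impl of the first tuple field, which is exactly why DocumentAdditionFormat gained a Display impl above. A self-contained sketch of that mechanism (it uses a plain String field so it compiles on its own; the real variant carries a DocumentAdditionFormat):

// Sketch of how thiserror renders the new variant's message.
// Assumes only the thiserror crate, which this workspace already depends on.
#[derive(Debug, thiserror::Error)]
enum UpdateLoopError {
    #[error("A {0} payload is missing.")]
    MissingPayload(String),
}

fn main() {
    let err = UpdateLoopError::MissingPayload("json".to_string());
    // The Display impl is generated from the #[error] attribute.
    assert_eq!(err.to_string(), "A json payload is missing.");
}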
@@ -63,6 +65,7 @@ impl ErrorCode for UpdateLoopError {
                 actix_web::error::PayloadError::Overflow => Code::PayloadTooLarge,
                 _ => Code::Internal,
             },
+            Self::MissingPayload(_) => Code::MissingPayload,
         }
     }
 }
@@ -3,7 +3,7 @@ mod message;
 pub mod status;
 pub mod store;

-use std::io;
+use std::io::{self, BufRead, BufReader};
 use std::path::{Path, PathBuf};
 use std::sync::atomic::AtomicBool;
 use std::sync::Arc;
@@ -191,9 +191,15 @@ impl UpdateLoop {
                 method,
                 format,
             } => {
-                let reader = StreamReader::new(payload);
+                let mut reader = BufReader::new(StreamReader::new(payload));
                 let (content_uuid, mut update_file) = self.update_file_store.new_update()?;
                 tokio::task::spawn_blocking(move || -> Result<_> {
+                    // check if the payload is empty, and return an error
+                    reader.fill_buf()?;
+                    if reader.buffer().is_empty() {
+                        return Err(UpdateLoopError::MissingPayload(format));
+                    }
+
                     match format {
                         DocumentAdditionFormat::Json => read_json(reader, &mut *update_file)?,
                         DocumentAdditionFormat::Csv => read_csv(reader, &mut *update_file)?,
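The empty-payload check works because BufRead::fill_buf loads the next chunk into the reader's internal buffer without consuming it: if the buffer is still empty afterwards, the stream had no bytes at all, and otherwise the peeked data remains available for the read_json/read_csv call that follows. A standalone sketch of that technique over an in-memory reader:

use std::io::{BufRead, BufReader, Cursor, Read};

// Detect an empty stream without consuming any data.
fn is_empty(reader: &mut impl BufRead) -> std::io::Result<bool> {
    // fill_buf peeks at the next chunk; it does not advance the reader.
    let buf = reader.fill_buf()?;
    Ok(buf.is_empty())
}

fn main() -> std::io::Result<()> {
    let mut empty = BufReader::new(Cursor::new(Vec::<u8>::new()));
    assert!(is_empty(&mut empty)?);

    let mut payload = BufReader::new(Cursor::new(b"{\"id\": 1}".to_vec()));
    assert!(!is_empty(&mut payload)?);

    // The peeked bytes are still there for the real parser.
    let mut contents = String::new();
    payload.read_to_string(&mut contents)?;
    assert_eq!(contents, "{\"id\": 1}");
    Ok(())
}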