route document add json

This commit is contained in:
mpostma 2020-12-23 16:12:37 +01:00
parent 1a38bfd31f
commit 02ef1d41d7
5 changed files with 71 additions and 13 deletions

View File

@ -50,7 +50,7 @@ slice-group-by = "0.2.6"
structopt = "0.3.20" structopt = "0.3.20"
tar = "0.4.29" tar = "0.4.29"
tempfile = "3.1.0" tempfile = "3.1.0"
tokio = { version = "*", features = ["full"] } tokio = { version = "0.2", features = ["full"] }
ureq = { version = "1.5.1", default-features = false, features = ["tls"] } ureq = { version = "1.5.1", default-features = false, features = ["tls"] }
walkdir = "2.3.1" walkdir = "2.3.1"
whoami = "1.0.0" whoami = "1.0.0"
@ -68,7 +68,7 @@ version = "0.18.1"
serde_url_params = "0.2.0" serde_url_params = "0.2.0"
tempdir = "0.3.7" tempdir = "0.3.7"
assert-json-diff = { branch = "master", git = "https://github.com/qdequele/assert-json-diff" } assert-json-diff = { branch = "master", git = "https://github.com/qdequele/assert-json-diff" }
tokio = { version = "0.2.18", features = ["macros", "time"] } tokio = { version = "0.2", features = ["macros", "time"] }
[features] [features]
default = ["sentry"] default = ["sentry"]

View File

@ -1,5 +1,6 @@
use std::ops::Deref; use std::ops::Deref;
use std::sync::Arc; use std::sync::Arc;
use std::fs::create_dir_all;
use async_compression::tokio_02::write::GzipEncoder; use async_compression::tokio_02::write::GzipEncoder;
use futures_util::stream::StreamExt; use futures_util::stream::StreamExt;
@ -27,7 +28,7 @@ impl Deref for Data {
#[derive(Clone)] #[derive(Clone)]
pub struct DataInner { pub struct DataInner {
pub indexes: Arc<Index>, pub indexes: Arc<Index>,
pub update_queue: UpdateQueue, pub update_queue: Arc<UpdateQueue>,
api_keys: ApiKeys, api_keys: ApiKeys,
options: Opt, options: Opt,
} }
@ -60,10 +61,11 @@ impl Data {
pub fn new(options: Opt) -> anyhow::Result<Data> { pub fn new(options: Opt) -> anyhow::Result<Data> {
let db_size = options.max_mdb_size.get_bytes() as usize; let db_size = options.max_mdb_size.get_bytes() as usize;
let path = options.db_path.join("main"); let path = options.db_path.join("main");
create_dir_all(&path)?;
let indexes = Index::new(&path, Some(db_size))?; let indexes = Index::new(&path, Some(db_size))?;
let indexes = Arc::new(indexes); let indexes = Arc::new(indexes);
let update_queue = UpdateQueue::new(&options, indexes.clone())?; let update_queue = Arc::new(UpdateQueue::new(&options, indexes.clone())?);
let mut api_keys = ApiKeys { let mut api_keys = ApiKeys {
master: options.clone().master_key, master: options.clone().master_key,
@ -89,8 +91,8 @@ impl Data {
B: Deref<Target = [u8]>, B: Deref<Target = [u8]>,
E: std::error::Error + Send + Sync + 'static, E: std::error::Error + Send + Sync + 'static,
{ {
let file = tokio::task::block_in_place(tempfile::tempfile)?; let file = tokio::task::spawn_blocking(tempfile::tempfile).await?;
let file = tokio::fs::File::from_std(file); let file = tokio::fs::File::from_std(file?);
let mut encoder = GzipEncoder::new(file); let mut encoder = GzipEncoder::new(file);
while let Some(result) = stream.next().await { while let Some(result) = stream.next().await {
@ -105,7 +107,10 @@ impl Data {
let mmap = unsafe { memmap::Mmap::map(&file)? }; let mmap = unsafe { memmap::Mmap::map(&file)? };
let meta = UpdateMeta::DocumentsAddition { method, format }; let meta = UpdateMeta::DocumentsAddition { method, format };
let update_id = tokio::task::block_in_place(|| self.update_queue.register_update(&meta, &mmap[..]))?;
let queue = self.update_queue.clone();
let meta_cloned = meta.clone();
let update_id = tokio::task::spawn_blocking(move || queue.register_update(&meta_cloned, &mmap[..])).await??;
Ok(UpdateStatus::Pending { update_id, meta }) Ok(UpdateStatus::Pending { update_id, meta })
} }

View File

@ -1,14 +1,31 @@
use actix_web::web::Payload;
use actix_web::{delete, get, post, put}; use actix_web::{delete, get, post, put};
use actix_web::{web, HttpResponse}; use actix_web::{web, HttpResponse};
use indexmap::IndexMap; use indexmap::IndexMap;
use serde_json::Value; use log::error;
use milli::update::{IndexDocumentsMethod, UpdateFormat};
use serde::Deserialize; use serde::Deserialize;
use serde_json::Value;
use crate::Data; use crate::Data;
use crate::error::ResponseError; use crate::error::ResponseError;
use crate::helpers::Authentication; use crate::helpers::Authentication;
use crate::routes::IndexParam; use crate::routes::IndexParam;
macro_rules! guard_content_type {
($fn_name:ident, $guard_value:literal) => {
fn $fn_name(head: &actix_web::dev::RequestHead) -> bool {
if let Some(content_type) = head.headers.get("Content-Type") {
content_type.to_str().map(|v| v.contains($guard_value)).unwrap_or(false)
} else {
false
}
}
};
}
guard_content_type!(guard_json, "application/json");
type Document = IndexMap<String, Value>; type Document = IndexMap<String, Value>;
#[derive(Deserialize)] #[derive(Deserialize)]
@ -21,7 +38,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
cfg.service(get_document) cfg.service(get_document)
.service(delete_document) .service(delete_document)
.service(get_all_documents) .service(get_all_documents)
.service(add_documents) .service(add_documents_json)
.service(update_documents) .service(update_documents)
.service(delete_documents) .service(delete_documents)
.service(clear_all_documents); .service(clear_all_documents);
@ -91,12 +108,46 @@ async fn update_multiple_documents(
todo!() todo!()
} }
#[post("/indexes/{index_uid}/documents", wrap = "Authentication::Private")] /// Route used when the payload type is "application/json"
async fn add_documents( #[post(
"/indexes/{index_uid}/documents",
wrap = "Authentication::Private",
guard = "guard_json"
)]
async fn add_documents_json(
data: web::Data<Data>, data: web::Data<Data>,
path: web::Path<IndexParam>,
params: web::Query<UpdateDocumentsQuery>,
body: Payload,
) -> Result<HttpResponse, ResponseError> {
let addition_result = data
.add_documents(
IndexDocumentsMethod::UpdateDocuments,
UpdateFormat::Json,
body
).await;
match addition_result {
Ok(update) => {
let value = serde_json::to_string(&update).unwrap();
let response = HttpResponse::Ok().body(value);
Ok(response)
}
Err(e) => {
error!("{}", e);
todo!()
}
}
}
/// Default route for adding documents; this should return an error and redirect to the documentation
#[post("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
async fn add_documents_default(
_data: web::Data<Data>,
_path: web::Path<IndexParam>, _path: web::Path<IndexParam>,
_params: web::Query<UpdateDocumentsQuery>, _params: web::Query<UpdateDocumentsQuery>,
body: web::Json<Vec<Document>>, _body: web::Json<Vec<Document>>,
) -> Result<HttpResponse, ResponseError> { ) -> Result<HttpResponse, ResponseError> {
todo!() todo!()
} }

View File

@ -14,7 +14,7 @@ pub mod synonym;
#[derive(Deserialize)] #[derive(Deserialize)]
pub struct IndexParam { pub struct IndexParam {
_index_uid: String, index_uid: String,
} }
#[derive(Serialize)] #[derive(Serialize)]

View File

@ -5,6 +5,7 @@ pub use settings::{Settings, Facets};
use std::io; use std::io;
use std::sync::Arc; use std::sync::Arc;
use std::ops::Deref; use std::ops::Deref;
use std::fs::create_dir_all;
use anyhow::Result; use anyhow::Result;
use flate2::read::GzDecoder; use flate2::read::GzDecoder;
@ -336,6 +337,7 @@ impl UpdateQueue {
let handler = UpdateHandler::new(&opt.indexer_options, indexes, sender)?; let handler = UpdateHandler::new(&opt.indexer_options, indexes, sender)?;
let size = opt.max_udb_size.get_bytes() as usize; let size = opt.max_udb_size.get_bytes() as usize;
let path = opt.db_path.join("updates.mdb"); let path = opt.db_path.join("updates.mdb");
create_dir_all(&path)?;
let inner = UpdateStore::open( let inner = UpdateStore::open(
Some(size), Some(size),
path, path,