mirror of https://github.com/meilisearch/meilisearch.git (synced 2024-11-29 16:45:30 +08:00)

commit 29b1f55bb0 (parent 8c0ab106c7): prepare boilerplate code for new api
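Every hunk below follows the same boilerplate pattern: route signatures, extractors, and attribute macros are kept so the crate still compiles, while each handler body is replaced with todo!() until the new API is wired in. A minimal sketch of the shape, with a hypothetical handler name and route path (actix-web, as used throughout this codebase):

#[get("/indexes/{index_uid}/example", wrap = "Authentication::Private")]
async fn example_handler(
    data: web::Data<Data>,
    path: web::Path<IndexParam>,
) -> Result<HttpResponse, ResponseError> {
    // Stubbed out: type-checks, but panics with "not yet implemented" if the route is hit.
    todo!()
}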
@@ -1,652 +0,0 @@ (whole file deleted)
use std::cmp::Ordering;
use std::collections::{HashMap, HashSet};
use std::hash::{Hash, Hasher};
use std::time::Instant;

use indexmap::IndexMap;
use log::error;
use meilisearch_core::{Filter, MainReader};
use meilisearch_core::facets::FacetFilter;
use meilisearch_core::criterion::*;
use meilisearch_core::settings::RankingRule;
use meilisearch_core::{Highlight, Index, RankedMap};
use meilisearch_schema::{FieldId, Schema};
use meilisearch_tokenizer::is_cjk;
use serde::{Deserialize, Serialize};
use serde_json::Value;
use siphasher::sip::SipHasher;
use slice_group_by::GroupBy;

use crate::error::{Error, ResponseError};

pub trait IndexSearchExt {
    fn new_search(&self, query: Option<String>) -> SearchBuilder;
}

impl IndexSearchExt for Index {
    fn new_search(&self, query: Option<String>) -> SearchBuilder {
        SearchBuilder {
            index: self,
            query,
            offset: 0,
            limit: 20,
            attributes_to_crop: None,
            attributes_to_retrieve: None,
            attributes_to_highlight: None,
            filters: None,
            matches: false,
            facet_filters: None,
            facets: None,
        }
    }
}

pub struct SearchBuilder<'a> {
    index: &'a Index,
    query: Option<String>,
    offset: usize,
    limit: usize,
    attributes_to_crop: Option<HashMap<String, usize>>,
    attributes_to_retrieve: Option<HashSet<String>>,
    attributes_to_highlight: Option<HashSet<String>>,
    filters: Option<String>,
    matches: bool,
    facet_filters: Option<FacetFilter>,
    facets: Option<Vec<(FieldId, String)>>,
}

impl<'a> SearchBuilder<'a> {
    pub fn offset(&mut self, value: usize) -> &SearchBuilder {
        self.offset = value;
        self
    }

    pub fn limit(&mut self, value: usize) -> &SearchBuilder {
        self.limit = value;
        self
    }

    pub fn attributes_to_crop(&mut self, value: HashMap<String, usize>) -> &SearchBuilder {
        self.attributes_to_crop = Some(value);
        self
    }

    pub fn attributes_to_retrieve(&mut self, value: HashSet<String>) -> &SearchBuilder {
        self.attributes_to_retrieve = Some(value);
        self
    }

    pub fn add_retrievable_field(&mut self, value: String) -> &SearchBuilder {
        let attributes_to_retrieve = self.attributes_to_retrieve.get_or_insert(HashSet::new());
        attributes_to_retrieve.insert(value);
        self
    }

    pub fn attributes_to_highlight(&mut self, value: HashSet<String>) -> &SearchBuilder {
        self.attributes_to_highlight = Some(value);
        self
    }

    pub fn add_facet_filters(&mut self, filters: FacetFilter) -> &SearchBuilder {
        self.facet_filters = Some(filters);
        self
    }

    pub fn filters(&mut self, value: String) -> &SearchBuilder {
        self.filters = Some(value);
        self
    }

    pub fn get_matches(&mut self) -> &SearchBuilder {
        self.matches = true;
        self
    }

    pub fn add_facets(&mut self, facets: Vec<(FieldId, String)>) -> &SearchBuilder {
        self.facets = Some(facets);
        self
    }

    pub fn search(self, reader: &MainReader) -> Result<SearchResult, ResponseError> {
        let schema = self
            .index
            .main
            .schema(reader)?
            .ok_or(Error::internal("missing schema"))?;

        let ranked_map = self.index.main.ranked_map(reader)?.unwrap_or_default();

        // Change criteria
        let mut query_builder = match self.get_criteria(reader, &ranked_map, &schema)? {
            Some(criteria) => self.index.query_builder_with_criteria(criteria),
            None => self.index.query_builder(),
        };

        if let Some(filter_expression) = &self.filters {
            let filter = Filter::parse(filter_expression, &schema)?;
            let index = &self.index;
            query_builder.with_filter(move |id| {
                let reader = &reader;
                let filter = &filter;
                match filter.test(reader, index, id) {
                    Ok(res) => res,
                    Err(e) => {
                        log::warn!("unexpected error during filtering: {}", e);
                        false
                    }
                }
            });
        }

        if let Some(field) = self.index.main.distinct_attribute(reader)? {
            let index = &self.index;
            query_builder.with_distinct(1, move |id| {
                match index.document_attribute_bytes(reader, id, field) {
                    Ok(Some(bytes)) => {
                        let mut s = SipHasher::new();
                        bytes.hash(&mut s);
                        Some(s.finish())
                    }
                    _ => None,
                }
            });
        }

        query_builder.set_facet_filter(self.facet_filters);
        query_builder.set_facets(self.facets);

        let start = Instant::now();
        let result = query_builder.query(reader, self.query.as_deref(), self.offset..(self.offset + self.limit));
        let search_result = result.map_err(Error::search_documents)?;
        let time_ms = start.elapsed().as_millis() as usize;

        let mut all_attributes: HashSet<&str> = HashSet::new();
        let mut all_formatted: HashSet<&str> = HashSet::new();

        match &self.attributes_to_retrieve {
            Some(to_retrieve) => {
                all_attributes.extend(to_retrieve.iter().map(String::as_str));

                if let Some(to_highlight) = &self.attributes_to_highlight {
                    all_formatted.extend(to_highlight.iter().map(String::as_str));
                }

                if let Some(to_crop) = &self.attributes_to_crop {
                    all_formatted.extend(to_crop.keys().map(String::as_str));
                }

                all_attributes.extend(&all_formatted);
            },
            None => {
                all_attributes.extend(schema.displayed_name());
                // If we specified at least one attribute to highlight or crop then
                // all available attributes will be returned in the _formatted field.
                if self.attributes_to_highlight.is_some() || self.attributes_to_crop.is_some() {
                    all_formatted.extend(all_attributes.iter().cloned());
                }
            },
        }

        let mut hits = Vec::with_capacity(self.limit);
        for doc in search_result.documents {
            let mut document: IndexMap<String, Value> = self
                .index
                .document(reader, Some(&all_attributes), doc.id)
                .map_err(|e| Error::retrieve_document(doc.id.0, e))?
                .ok_or(Error::internal(
                    "Impossible to retrieve the document; Corrupted data",
                ))?;

            let mut formatted = document.iter()
                .filter(|(key, _)| all_formatted.contains(key.as_str()))
                .map(|(k, v)| (k.clone(), v.clone()))
                .collect();

            let mut matches = doc.highlights.clone();

            // Crop fields if needed
            if let Some(fields) = &self.attributes_to_crop {
                crop_document(&mut formatted, &mut matches, &schema, fields);
            }

            // Transform to readable matches
            if let Some(attributes_to_highlight) = &self.attributes_to_highlight {
                let matches = calculate_matches(
                    &matches,
                    self.attributes_to_highlight.clone(),
                    &schema,
                );
                formatted = calculate_highlights(&formatted, &matches, attributes_to_highlight);
            }

            let matches_info = if self.matches {
                Some(calculate_matches(&matches, self.attributes_to_retrieve.clone(), &schema))
            } else {
                None
            };

            if let Some(attributes_to_retrieve) = &self.attributes_to_retrieve {
                document.retain(|key, _| attributes_to_retrieve.contains(&key.to_string()))
            }

            let hit = SearchHit {
                document,
                formatted,
                matches_info,
            };

            hits.push(hit);
        }

        let results = SearchResult {
            hits,
            offset: self.offset,
            limit: self.limit,
            nb_hits: search_result.nb_hits,
            exhaustive_nb_hits: search_result.exhaustive_nb_hit,
            processing_time_ms: time_ms,
            query: self.query.unwrap_or_default(),
            facets_distribution: search_result.facets,
            exhaustive_facets_count: search_result.exhaustive_facets_count,
        };

        Ok(results)
    }

    pub fn get_criteria(
        &self,
        reader: &MainReader,
        ranked_map: &'a RankedMap,
        schema: &Schema,
    ) -> Result<Option<Criteria<'a>>, ResponseError> {
        let ranking_rules = self.index.main.ranking_rules(reader)?;

        if let Some(ranking_rules) = ranking_rules {
            let mut builder = CriteriaBuilder::with_capacity(7 + ranking_rules.len());
            for rule in ranking_rules {
                match rule {
                    RankingRule::Typo => builder.push(Typo),
                    RankingRule::Words => builder.push(Words),
                    RankingRule::Proximity => builder.push(Proximity),
                    RankingRule::Attribute => builder.push(Attribute),
                    RankingRule::WordsPosition => builder.push(WordsPosition),
                    RankingRule::Exactness => builder.push(Exactness),
                    RankingRule::Asc(field) => {
                        match SortByAttr::lower_is_better(&ranked_map, &schema, &field) {
                            Ok(rule) => builder.push(rule),
                            Err(err) => error!("Error during criteria builder; {:?}", err),
                        }
                    }
                    RankingRule::Desc(field) => {
                        match SortByAttr::higher_is_better(&ranked_map, &schema, &field) {
                            Ok(rule) => builder.push(rule),
                            Err(err) => error!("Error during criteria builder; {:?}", err),
                        }
                    }
                }
            }
            builder.push(DocumentId);
            return Ok(Some(builder.build()));
        }

        Ok(None)
    }
}

#[derive(Debug, Clone, Eq, PartialEq, Serialize, Deserialize)]
pub struct MatchPosition {
    pub start: usize,
    pub length: usize,
}

impl PartialOrd for MatchPosition {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}

impl Ord for MatchPosition {
    fn cmp(&self, other: &Self) -> Ordering {
        match self.start.cmp(&other.start) {
            Ordering::Equal => self.length.cmp(&other.length),
            _ => self.start.cmp(&other.start),
        }
    }
}

pub type HighlightInfos = HashMap<String, Value>;
pub type MatchesInfos = HashMap<String, Vec<MatchPosition>>;
// pub type RankingInfos = HashMap<String, u64>;

#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SearchHit {
    #[serde(flatten)]
    pub document: IndexMap<String, Value>,
    #[serde(rename = "_formatted", skip_serializing_if = "IndexMap::is_empty")]
    pub formatted: IndexMap<String, Value>,
    #[serde(rename = "_matchesInfo", skip_serializing_if = "Option::is_none")]
    pub matches_info: Option<MatchesInfos>,
}

#[derive(Debug, Clone, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct SearchResult {
    pub hits: Vec<SearchHit>,
    pub offset: usize,
    pub limit: usize,
    pub nb_hits: usize,
    pub exhaustive_nb_hits: bool,
    pub processing_time_ms: usize,
    pub query: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub facets_distribution: Option<HashMap<String, HashMap<String, usize>>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub exhaustive_facets_count: Option<bool>,
}

/// Returns the start index and the length of the crop.
fn aligned_crop(text: &str, match_index: usize, context: usize) -> (usize, usize) {
    let is_word_component = |c: &char| c.is_alphanumeric() && !is_cjk(*c);

    let word_end_index = |mut index| {
        if text.chars().nth(index - 1).map_or(false, |c| is_word_component(&c)) {
            index += text.chars().skip(index).take_while(is_word_component).count();
        }
        index
    };

    if context == 0 {
        // count needs to be at least 1 for CJK queries to return something
        return (match_index, 1 + text.chars().skip(match_index).take_while(is_word_component).count());
    }
    let start = match match_index.saturating_sub(context) {
        0 => 0,
        n => {
            let word_end_index = word_end_index(n);
            // skip whitespaces if any
            word_end_index + text.chars().skip(word_end_index).take_while(char::is_ascii_whitespace).count()
        }
    };
    let end = word_end_index(match_index + context);

    (start, end - start)
}

fn crop_text(
    text: &str,
    matches: impl IntoIterator<Item = Highlight>,
    context: usize,
) -> (String, Vec<Highlight>) {
    let mut matches = matches.into_iter().peekable();

    let char_index = matches.peek().map(|m| m.char_index as usize).unwrap_or(0);
    let (start, count) = aligned_crop(text, char_index, context);

    // TODO do something about double allocation
    let text = text
        .chars()
        .skip(start)
        .take(count)
        .collect::<String>()
        .trim()
        .to_string();

    // update the matches' indexes to match the new cropped text
    let matches = matches
        .take_while(|m| (m.char_index as usize) + (m.char_length as usize) <= start + count)
        .map(|m| Highlight {
            char_index: m.char_index - start as u16,
            ..m
        })
        .collect();

    (text, matches)
}

fn crop_document(
    document: &mut IndexMap<String, Value>,
    matches: &mut Vec<Highlight>,
    schema: &Schema,
    fields: &HashMap<String, usize>,
) {
    matches.sort_unstable_by_key(|m| (m.char_index, m.char_length));

    for (field, length) in fields {
        let attribute = match schema.id(field) {
            Some(attribute) => attribute,
            None => continue,
        };

        let selected_matches = matches
            .iter()
            .filter(|m| FieldId::new(m.attribute) == attribute)
            .cloned();

        if let Some(Value::String(ref mut original_text)) = document.get_mut(field) {
            let (cropped_text, cropped_matches) =
                crop_text(original_text, selected_matches, *length);

            *original_text = cropped_text;

            matches.retain(|m| FieldId::new(m.attribute) != attribute);
            matches.extend_from_slice(&cropped_matches);
        }
    }
}

fn calculate_matches(
    matches: &[Highlight],
    attributes_to_retrieve: Option<HashSet<String>>,
    schema: &Schema,
) -> MatchesInfos {
    let mut matches_result: HashMap<String, Vec<MatchPosition>> = HashMap::new();
    for m in matches.iter() {
        if let Some(attribute) = schema.name(FieldId::new(m.attribute)) {
            if let Some(ref attributes_to_retrieve) = attributes_to_retrieve {
                if !attributes_to_retrieve.contains(attribute) {
                    continue;
                }
            }
            if !schema.displayed_name().contains(attribute) {
                continue;
            }
            if let Some(pos) = matches_result.get_mut(attribute) {
                pos.push(MatchPosition {
                    start: m.char_index as usize,
                    length: m.char_length as usize,
                });
            } else {
                let mut positions = Vec::new();
                positions.push(MatchPosition {
                    start: m.char_index as usize,
                    length: m.char_length as usize,
                });
                matches_result.insert(attribute.to_string(), positions);
            }
        }
    }
    for (_, val) in matches_result.iter_mut() {
        val.sort_unstable();
        val.dedup();
    }
    matches_result
}

fn calculate_highlights(
    document: &IndexMap<String, Value>,
    matches: &MatchesInfos,
    attributes_to_highlight: &HashSet<String>,
) -> IndexMap<String, Value> {
    let mut highlight_result = document.clone();

    for (attribute, matches) in matches.iter() {
        if attributes_to_highlight.contains(attribute) {
            if let Some(Value::String(value)) = document.get(attribute) {
                let value: Vec<_> = value.chars().collect();
                let mut highlighted_value = String::new();
                let mut index = 0;

                let longest_matches = matches
                    .linear_group_by_key(|m| m.start)
                    .map(|group| group.last().unwrap())
                    .filter(move |m| m.start >= index);

                for m in longest_matches {
                    let before = value.get(index..m.start);
                    let highlighted = value.get(m.start..(m.start + m.length));
                    if let (Some(before), Some(highlighted)) = (before, highlighted) {
                        highlighted_value.extend(before);
                        highlighted_value.push_str("<em>");
                        highlighted_value.extend(highlighted);
                        highlighted_value.push_str("</em>");
                        index = m.start + m.length;
                    } else {
                        error!("value: {:?}; index: {:?}, match: {:?}", value, index, m);
                    }
                }
                highlighted_value.extend(value[index..].iter());
                highlight_result.insert(attribute.to_string(), Value::String(highlighted_value));
            };
        }
    }
    highlight_result
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn aligned_crops() {
        let text = r#"En ce début de trentième millénaire, l'Empire n'a jamais été aussi puissant, aussi étendu à travers toute la galaxie. C'est dans sa capitale, Trantor, que l'éminent savant Hari Seldon invente la psychohistoire, une science toute nouvelle, à base de psychologie et de mathématiques, qui lui permet de prédire l'avenir... C'est-à-dire l'effondrement de l'Empire d'ici cinq siècles et au-delà, trente mille années de chaos et de ténèbres. Pour empêcher cette catastrophe et sauver la civilisation, Seldon crée la Fondation."#;

        // simple test
        let (start, length) = aligned_crop(&text, 6, 2);
        let cropped = text.chars().skip(start).take(length).collect::<String>().trim().to_string();
        assert_eq!("début", cropped);

        // first word test
        let (start, length) = aligned_crop(&text, 0, 1);
        let cropped = text.chars().skip(start).take(length).collect::<String>().trim().to_string();
        assert_eq!("En", cropped);
        // last word test
        let (start, length) = aligned_crop(&text, 510, 2);
        let cropped = text.chars().skip(start).take(length).collect::<String>().trim().to_string();
        assert_eq!("Fondation", cropped);

        // CJK tests
        let text = "this isのス foo myタイリ test";

        // mixed charset
        let (start, length) = aligned_crop(&text, 5, 3);
        let cropped = text.chars().skip(start).take(length).collect::<String>().trim().to_string();
        assert_eq!("isの", cropped);

        // split regular word / CJK word, no space
        let (start, length) = aligned_crop(&text, 7, 1);
        let cropped = text.chars().skip(start).take(length).collect::<String>().trim().to_string();
        assert_eq!("の", cropped);
    }

    #[test]
    fn calculate_matches() {
        let mut matches = Vec::new();
        matches.push(Highlight { attribute: 0, char_index: 0, char_length: 3 });
        matches.push(Highlight { attribute: 0, char_index: 0, char_length: 2 });

        let mut attributes_to_retrieve: HashSet<String> = HashSet::new();
        attributes_to_retrieve.insert("title".to_string());

        let schema = Schema::with_primary_key("title");

        let matches_result = super::calculate_matches(&matches, Some(attributes_to_retrieve), &schema);

        let mut matches_result_expected: HashMap<String, Vec<MatchPosition>> = HashMap::new();

        let mut positions = Vec::new();
        positions.push(MatchPosition {
            start: 0,
            length: 2,
        });
        positions.push(MatchPosition {
            start: 0,
            length: 3,
        });
        matches_result_expected.insert("title".to_string(), positions);

        assert_eq!(matches_result, matches_result_expected);
    }

    #[test]
    fn calculate_highlights() {
        let data = r#"{
            "title": "Fondation (Isaac ASIMOV)",
            "description": "En ce début de trentième millénaire, l'Empire n'a jamais été aussi puissant, aussi étendu à travers toute la galaxie. C'est dans sa capitale, Trantor, que l'éminent savant Hari Seldon invente la psychohistoire, une science toute nouvelle, à base de psychologie et de mathématiques, qui lui permet de prédire l'avenir... C'est-à-dire l'effondrement de l'Empire d'ici cinq siècles et au-delà, trente mille années de chaos et de ténèbres. Pour empêcher cette catastrophe et sauver la civilisation, Seldon crée la Fondation."
        }"#;

        let document: IndexMap<String, Value> = serde_json::from_str(data).unwrap();
        let mut attributes_to_highlight = HashSet::new();
        attributes_to_highlight.insert("title".to_string());
        attributes_to_highlight.insert("description".to_string());

        let mut matches = HashMap::new();

        let mut m = Vec::new();
        m.push(MatchPosition {
            start: 0,
            length: 9,
        });
        matches.insert("title".to_string(), m);

        let mut m = Vec::new();
        m.push(MatchPosition {
            start: 510,
            length: 9,
        });
        matches.insert("description".to_string(), m);
        let result = super::calculate_highlights(&document, &matches, &attributes_to_highlight);

        let mut result_expected = IndexMap::new();
        result_expected.insert(
            "title".to_string(),
            Value::String("<em>Fondation</em> (Isaac ASIMOV)".to_string()),
        );
        result_expected.insert("description".to_string(), Value::String("En ce début de trentième millénaire, l'Empire n'a jamais été aussi puissant, aussi étendu à travers toute la galaxie. C'est dans sa capitale, Trantor, que l'éminent savant Hari Seldon invente la psychohistoire, une science toute nouvelle, à base de psychologie et de mathématiques, qui lui permet de prédire l'avenir... C'est-à-dire l'effondrement de l'Empire d'ici cinq siècles et au-delà, trente mille années de chaos et de ténèbres. Pour empêcher cette catastrophe et sauver la civilisation, Seldon crée la <em>Fondation</em>.".to_string()));

        assert_eq!(result, result_expected);
    }

    #[test]
    fn highlight_longest_match() {
        let data = r#"{
            "title": "Ice"
        }"#;

        let document: IndexMap<String, Value> = serde_json::from_str(data).unwrap();
        let mut attributes_to_highlight = HashSet::new();
        attributes_to_highlight.insert("title".to_string());

        let mut matches = HashMap::new();

        let mut m = Vec::new();
        m.push(MatchPosition {
            start: 0,
            length: 2,
        });
        m.push(MatchPosition {
            start: 0,
            length: 3,
        });
        matches.insert("title".to_string(), m);

        let result = super::calculate_highlights(&document, &matches, &attributes_to_highlight);

        let mut result_expected = IndexMap::new();
        result_expected.insert(
            "title".to_string(),
            Value::String("<em>Ice</em>".to_string()),
        );

        assert_eq!(result, result_expected);
    }
}
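For context on what this deletion removes: the module above defined the SearchBuilder driven by the HTTP layer. A sketch of how it was used, based on the methods it exposes (the reader and index handles come from the surrounding database code; illustrative only, not part of the commit):

// let reader = db.main_read_txn()?;                    // a main read transaction, as in the handlers below
// let mut builder = index.new_search(Some("fondation".to_string()));
// builder.offset(0);
// builder.limit(10);                                   // defaults to 20 when not set
// builder.filters("genre = scifi".to_string());        // hypothetical filter expression
// builder.get_matches();                               // request _matchesInfo in each hit
// let result: SearchResult = builder.search(&reader)?;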
@@ -1,5 +1,4 @@
 pub mod authentication;
-pub mod meilisearch;
 pub mod normalize_path;
 pub mod compression;
 
@@ -1 +0,0 @@ (whole file deleted)
pub mod update_operation;
@@ -1,33 +0,0 @@ (whole file deleted)
use std::fmt;

#[allow(dead_code)]
#[derive(Debug)]
pub enum UpdateOperation {
    ClearAllDocuments,
    DocumentsAddition,
    DocumentsDeletion,
    SynonymsUpdate,
    SynonymsDeletion,
    StopWordsAddition,
    StopWordsDeletion,
    Schema,
    Config,
}

impl fmt::Display for UpdateOperation {
    fn fmt(&self, f: &mut fmt::Formatter) -> std::fmt::Result {
        use UpdateOperation::*;

        match self {
            ClearAllDocuments => write!(f, "ClearAllDocuments"),
            DocumentsAddition => write!(f, "DocumentsAddition"),
            DocumentsDeletion => write!(f, "DocumentsDeletion"),
            SynonymsUpdate => write!(f, "SynonymsUpdate"),
            SynonymsDeletion => write!(f, "SynonymsDeletion"),
            StopWordsAddition => write!(f, "StopWordsAddition"),
            StopWordsDeletion => write!(f, "StopWordsDeletion"),
            Schema => write!(f, "Schema"),
            Config => write!(f, "Config"),
        }
    }
}
@@ -38,23 +38,7 @@ async fn get_document(
     data: web::Data<Data>,
     path: web::Path<DocumentParam>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let reader = data.db.main_read_txn()?;
-
-    let internal_id = index
-        .main
-        .external_to_internal_docid(&reader, &path.document_id)?
-        .ok_or(Error::document_not_found(&path.document_id))?;
-
-    let document: Document = index
-        .document(&reader, None, internal_id)?
-        .ok_or(Error::document_not_found(&path.document_id))?;
-
-    Ok(HttpResponse::Ok().json(document))
 }
 
 #[delete(
@@ -65,17 +49,7 @@ async fn delete_document(
     data: web::Data<Data>,
     path: web::Path<DocumentParam>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let mut documents_deletion = index.documents_deletion();
-    documents_deletion.delete_document_by_external_docid(path.document_id.clone());
-
-    let update_id = data.db.update_write(|w| documents_deletion.finalize(w))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
 
 #[derive(Deserialize)]
@@ -94,32 +68,7 @@ pub fn get_all_documents_sync(
     limit: usize,
     attributes_to_retrieve: Option<&String>
 ) -> Result<Vec<Document>, Error> {
+    todo!()
-    let index = data
-        .db
-        .open_index(index_uid)
-        .ok_or(Error::index_not_found(index_uid))?;
-
-
-    let documents_ids: Result<BTreeSet<_>, _> = index
-        .documents_fields_counts
-        .documents_ids(reader)?
-        .skip(offset)
-        .take(limit)
-        .collect();
-
-    let attributes: Option<HashSet<&str>> = attributes_to_retrieve
-        .map(|a| a.split(',').collect());
-
-    let mut documents = Vec::new();
-    for document_id in documents_ids? {
-        if let Ok(Some(document)) =
-            index.document::<Document>(reader, attributes.as_ref(), document_id)
-        {
-            documents.push(document);
-        }
-    }
-
-    Ok(documents)
 }
 
 #[get("/indexes/{index_uid}/documents", wrap = "Authentication::Public")]
@@ -128,21 +77,7 @@ async fn get_all_documents(
     path: web::Path<IndexParam>,
     params: web::Query<BrowseQuery>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let offset = params.offset.unwrap_or(0);
-    let limit = params.limit.unwrap_or(20);
-    let index_uid = &path.index_uid;
-    let reader = data.db.main_read_txn()?;
-
-    let documents = get_all_documents_sync(
-        &data,
-        &reader,
-        index_uid,
-        offset,
-        limit,
-        params.attributes_to_retrieve.as_ref()
-    )?;
-
-    Ok(HttpResponse::Ok().json(documents))
 }
 
 fn find_primary_key(document: &IndexMap<String, Value>) -> Option<String> {
@@ -167,41 +102,7 @@ async fn update_multiple_documents(
     body: web::Json<Vec<Document>>,
     is_partial: bool,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let reader = data.db.main_read_txn()?;
-
-        let mut schema = index
-            .main
-            .schema(&reader)?
-            .ok_or(meilisearch_core::Error::SchemaMissing)?;
-
-        if schema.primary_key().is_none() {
-            let id = match &params.primary_key {
-                Some(id) => id.to_string(),
-                None => body
-                    .first()
-                    .and_then(find_primary_key)
-                    .ok_or(meilisearch_core::Error::MissingPrimaryKey)?,
-            };
-
-            schema.set_primary_key(&id).map_err(Error::bad_request)?;
-
-            data.db.main_write(|w| index.main.put_schema(w, &schema))?;
-        }
-
-        let mut document_addition = if is_partial {
-            index.documents_partial_addition()
-        } else {
-            index.documents_addition()
-        };
-
-        for document in body.into_inner() {
-            document_addition.update_document(document);
-        }
-
-        Ok(data.db.update_write(|w| document_addition.finalize(w))?)
-    })?;
-
-    return Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)));
 }
 
 #[post("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
@@ -211,7 +112,7 @@ async fn add_documents(
     params: web::Query<UpdateDocumentsQuery>,
     body: web::Json<Vec<Document>>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    update_multiple_documents(data, path, params, body, false).await
 }
 
 #[put("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
@@ -233,21 +134,7 @@ async fn delete_documents(
     path: web::Path<IndexParam>,
     body: web::Json<Vec<Value>>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let mut documents_deletion = index.documents_deletion();
-
-    for document_id in body.into_inner() {
-        let document_id = update::value_to_string(&document_id);
-        documents_deletion.delete_document_by_external_docid(document_id);
-    }
-
-    let update_id = data.db.update_write(|w| documents_deletion.finalize(w))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
 
 #[delete("/indexes/{index_uid}/documents", wrap = "Authentication::Private")]
@@ -255,12 +142,5 @@ async fn clear_all_documents(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let update_id = data.db.update_write(|w| index.clear_all(w))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
 }
 
@@ -19,11 +19,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
 async fn trigger_dump(
     data: web::Data<Data>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let dumps_dir = Path::new(&data.dumps_dir);
-    match init_dump_process(&data, &dumps_dir) {
-        Ok(resume) => Ok(HttpResponse::Accepted().json(resume)),
-        Err(e) => Err(e.into())
-    }
 }
 
 #[derive(Debug, Serialize)]
@@ -42,23 +38,5 @@ async fn get_dump_status(
     data: web::Data<Data>,
     path: web::Path<DumpParam>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let dumps_dir = Path::new(&data.dumps_dir);
-    let dump_uid = &path.dump_uid;
-
-    if let Some(resume) = DumpInfo::get_current() {
-        if &resume.uid == dump_uid {
-            return Ok(HttpResponse::Ok().json(resume));
-        }
-    }
-
-    if File::open(compressed_dumps_dir(Path::new(dumps_dir), dump_uid)).is_ok() {
-        let resume = DumpInfo::new(
-            dump_uid.into(),
-            DumpStatus::Done
-        );
-
-        Ok(HttpResponse::Ok().json(resume))
-    } else {
-        Err(Error::not_found("dump does not exist").into())
-    }
 }
 
@@ -23,12 +23,7 @@ pub fn services(cfg: &mut web::ServiceConfig) {
 }
 
 fn generate_uid() -> String {
+    todo!()
-    let mut rng = rand::thread_rng();
-    let sample = b"abcdefghijklmnopqrstuvwxyz0123456789";
-    sample
-        .choose_multiple(&mut rng, 8)
-        .map(|c| *c as char)
-        .collect()
 }
 
 #[derive(Debug, Serialize, Deserialize, Clone)]
@@ -42,54 +37,7 @@ pub struct IndexResponse {
 }
 
 pub fn list_indexes_sync(data: &web::Data<Data>, reader: &MainReader) -> Result<Vec<IndexResponse>, ResponseError> {
+    todo!()
-    let mut indexes = Vec::new();
-
-    for index_uid in data.db.indexes_uids() {
-        let index = data.db.open_index(&index_uid);
-
-        match index {
-            Some(index) => {
-                let name = index.main.name(reader)?.ok_or(Error::internal(
-                    "Impossible to get the name of an index",
-                ))?;
-                let created_at = index
-                    .main
-                    .created_at(reader)?
-                    .ok_or(Error::internal(
-                        "Impossible to get the create date of an index",
-                    ))?;
-                let updated_at = index
-                    .main
-                    .updated_at(reader)?
-                    .ok_or(Error::internal(
-                        "Impossible to get the last update date of an index",
-                    ))?;
-
-                let primary_key = match index.main.schema(reader) {
-                    Ok(Some(schema)) => match schema.primary_key() {
-                        Some(primary_key) => Some(primary_key.to_owned()),
-                        None => None,
-                    },
-                    _ => None,
-                };
-
-                let index_response = IndexResponse {
-                    name,
-                    uid: index_uid,
-                    created_at,
-                    updated_at,
-                    primary_key,
-                };
-                indexes.push(index_response);
-            }
-            None => error!(
-                "Index {} is referenced in the indexes list but cannot be found",
-                index_uid
-            ),
-        }
-    }
-
-    Ok(indexes)
 }
 
 #[get("/indexes", wrap = "Authentication::Private")]
@@ -105,44 +53,7 @@ async fn get_index(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let reader = data.db.main_read_txn()?;
-    let name = index.main.name(&reader)?.ok_or(Error::internal(
-        "Impossible to get the name of an index",
-    ))?;
-    let created_at = index
-        .main
-        .created_at(&reader)?
-        .ok_or(Error::internal(
-            "Impossible to get the create date of an index",
-        ))?;
-    let updated_at = index
-        .main
-        .updated_at(&reader)?
-        .ok_or(Error::internal(
-            "Impossible to get the last update date of an index",
-        ))?;
-
-    let primary_key = match index.main.schema(&reader) {
-        Ok(Some(schema)) => match schema.primary_key() {
-            Some(primary_key) => Some(primary_key.to_owned()),
-            None => None,
-        },
-        _ => None,
-    };
-    let index_response = IndexResponse {
-        name,
-        uid: path.index_uid.clone(),
-        created_at,
-        updated_at,
-        primary_key,
-    };
-
-    Ok(HttpResponse::Ok().json(index_response))
 }
 
 #[derive(Debug, Deserialize)]
@@ -160,46 +71,7 @@ pub fn create_index_sync(
     name: String,
     primary_key: Option<String>,
 ) -> Result<IndexResponse, Error> {
+    todo!()
-
-    let created_index = database
-        .create_index(&uid)
-        .map_err(|e| match e {
-            meilisearch_core::Error::IndexAlreadyExists => Error::IndexAlreadyExists(uid.clone()),
-            _ => Error::create_index(e)
-        })?;
-
-    let index_response = database.main_write::<_, _, Error>(|mut write_txn| {
-        created_index.main.put_name(&mut write_txn, &name)?;
-
-        let created_at = created_index
-            .main
-            .created_at(&write_txn)?
-            .ok_or(Error::internal("Impossible to read created at"))?;
-
-        let updated_at = created_index
-            .main
-            .updated_at(&write_txn)?
-            .ok_or(Error::internal("Impossible to read updated at"))?;
-
-        if let Some(id) = primary_key.clone() {
-            if let Some(mut schema) = created_index.main.schema(&write_txn)? {
-                schema
-                    .set_primary_key(&id)
-                    .map_err(Error::bad_request)?;
-                created_index.main.put_schema(&mut write_txn, &schema)?;
-            }
-        }
-        let index_response = IndexResponse {
-            name,
-            uid,
-            created_at,
-            updated_at,
-            primary_key,
-        };
-        Ok(index_response)
-    })?;
-
-    Ok(index_response)
 }
 
 #[post("/indexes", wrap = "Authentication::Private")]
@@ -207,36 +79,7 @@ async fn create_index(
     data: web::Data<Data>,
     body: web::Json<IndexCreateRequest>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    if let (None, None) = (body.name.clone(), body.uid.clone()) {
-        return Err(Error::bad_request(
-            "Index creation must have an uid",
-        ).into());
-    }
-
-    let uid = match &body.uid {
-        Some(uid) => {
-            if uid
-                .chars()
-                .all(|x| x.is_ascii_alphanumeric() || x == '-' || x == '_')
-            {
-                uid.to_owned()
-            } else {
-                return Err(Error::InvalidIndexUid.into());
-            }
-        }
-        None => loop {
-            let uid = generate_uid();
-            if data.db.open_index(&uid).is_none() {
-                break uid;
-            }
-        },
-    };
-
-    let name = body.name.as_ref().unwrap_or(&uid).to_string();
-
-    let index_response = create_index_sync(&data.db, uid, name, body.primary_key.clone())?;
-
-    Ok(HttpResponse::Created().json(index_response))
 }
 
 #[derive(Debug, Deserialize)]
@@ -262,60 +105,7 @@ async fn update_index(
     path: web::Path<IndexParam>,
     body: web::Json<IndexCreateRequest>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    data.db.main_write::<_, _, ResponseError>(|writer| {
-        if let Some(name) = &body.name {
-            index.main.put_name(writer, name)?;
-        }
-
-        if let Some(id) = body.primary_key.clone() {
-            if let Some(mut schema) = index.main.schema(writer)? {
-                schema.set_primary_key(&id)?;
-                index.main.put_schema(writer, &schema)?;
-            }
-        }
-        index.main.put_updated_at(writer)?;
-        Ok(())
-    })?;
-
-    let reader = data.db.main_read_txn()?;
-    let name = index.main.name(&reader)?.ok_or(Error::internal(
-        "Impossible to get the name of an index",
-    ))?;
-    let created_at = index
-        .main
-        .created_at(&reader)?
-        .ok_or(Error::internal(
-            "Impossible to get the create date of an index",
-        ))?;
-    let updated_at = index
-        .main
-        .updated_at(&reader)?
-        .ok_or(Error::internal(
-            "Impossible to get the last update date of an index",
-        ))?;
-
-    let primary_key = match index.main.schema(&reader) {
-        Ok(Some(schema)) => match schema.primary_key() {
-            Some(primary_key) => Some(primary_key.to_owned()),
-            None => None,
-        },
-        _ => None,
-    };
-
-    let index_response = IndexResponse {
-        name,
-        uid: path.index_uid.clone(),
-        created_at,
-        updated_at,
-        primary_key,
-    };
-
-    Ok(HttpResponse::Ok().json(index_response))
 }
 
 #[delete("/indexes/{index_uid}", wrap = "Authentication::Private")]
@@ -323,11 +113,7 @@ async fn delete_index(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    if data.db.delete_index(&path.index_uid)? {
-        Ok(HttpResponse::NoContent().finish())
-    } else {
-        Err(Error::index_not_found(&path.index_uid).into())
-    }
 }
 
 #[derive(Deserialize)]
@@ -344,34 +130,14 @@ async fn get_update_status(
     data: web::Data<Data>,
     path: web::Path<UpdateParam>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let reader = data.db.update_read_txn()?;
-
-    let status = index.update_status(&reader, path.update_id)?;
-
-    match status {
-        Some(status) => Ok(HttpResponse::Ok().json(status)),
-        None => Err(Error::NotFound(format!(
-            "Update {}",
-            path.update_id
-        )).into()),
-    }
 }
 pub fn get_all_updates_status_sync(
     data: &web::Data<Data>,
     reader: &UpdateReader,
     index_uid: &str,
 ) -> Result<Vec<UpdateStatus>, Error> {
+    todo!()
-    let index = data
-        .db
-        .open_index(index_uid)
-        .ok_or(Error::index_not_found(index_uid))?;
-
-    Ok(index.all_updates_status(reader)?)
 }
 
 #[get("/indexes/{index_uid}/updates", wrap = "Authentication::Private")]
@@ -379,10 +145,5 @@ async fn get_all_updates_status(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-
-    let reader = data.db.update_read_txn()?;
-
-    let response = get_all_updates_status_sync(&data, &reader, &path.index_uid)?;
-
-    Ok(HttpResponse::Ok().json(response))
 }
 
@@ -18,9 +18,5 @@ struct KeysResponse {
 
 #[get("/keys", wrap = "Authentication::Admin")]
 async fn list(data: web::Data<Data>) -> HttpResponse {
+    todo!()
-    let api_keys = data.api_keys.clone();
-    HttpResponse::Ok().json(KeysResponse {
-        private: api_keys.private,
-        public: api_keys.public,
-    })
 }
 
@@ -40,8 +40,7 @@ async fn search_with_url_query(
     path: web::Path<IndexParam>,
     params: web::Query<SearchQuery>,
 ) -> Result<HttpResponse, ResponseError> {
+    todo!()
-    let search_result = params.search(&path.index_uid, data)?;
-    Ok(HttpResponse::Ok().json(search_result))
 }
 
 #[derive(Deserialize)]
@ -95,175 +94,5 @@ impl SearchQuery {
|
|||||||
index_uid: &str,
|
index_uid: &str,
|
||||||
data: web::Data<Data>,
|
data: web::Data<Data>,
|
||||||
) -> Result<SearchResult, ResponseError> {
|
) -> Result<SearchResult, ResponseError> {
|
||||||
let index = data
|
todo!()
|
||||||
.db
|
|
||||||
.open_index(index_uid)
|
|
||||||
.ok_or(Error::index_not_found(index_uid))?;
|
|
||||||
|
|
||||||
let reader = data.db.main_read_txn()?;
|
|
||||||
let schema = index
|
|
||||||
.main
|
|
||||||
.schema(&reader)?
|
|
||||||
.ok_or(Error::internal("Impossible to retrieve the schema"))?;
|
|
||||||
|
|
||||||
let query = self
|
|
||||||
.q
|
|
||||||
.clone()
|
|
||||||
.and_then(|q| if q.is_empty() { None } else { Some(q) });
|
|
||||||
|
|
||||||
let mut search_builder = index.new_search(query);
|
|
||||||
|
|
||||||
if let Some(offset) = self.offset {
|
|
||||||
search_builder.offset(offset);
|
|
||||||
}
|
|
||||||
if let Some(limit) = self.limit {
|
|
||||||
search_builder.limit(limit);
|
|
||||||
}
|
|
||||||
|
|
||||||
let available_attributes = schema.displayed_name();
|
|
||||||
let mut restricted_attributes: HashSet<&str>;
|
|
||||||
match &self.attributes_to_retrieve {
|
|
||||||
Some(attributes_to_retrieve) => {
|
|
||||||
let attributes_to_retrieve: HashSet<&str> =
|
|
||||||
attributes_to_retrieve.split(',').collect();
|
|
||||||
if attributes_to_retrieve.contains("*") {
|
|
||||||
restricted_attributes = available_attributes.clone();
|
|
||||||
} else {
|
|
||||||
restricted_attributes = HashSet::new();
|
|
||||||
for attr in attributes_to_retrieve {
|
|
||||||
if available_attributes.contains(attr) {
|
|
||||||
restricted_attributes.insert(attr);
|
|
||||||
search_builder.add_retrievable_field(attr.to_string());
|
|
||||||
} else {
|
|
||||||
warn!("The attributes {:?} present in attributesToCrop parameter doesn't exist", attr);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
None => {
|
|
||||||
restricted_attributes = available_attributes.clone();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(ref facet_filters) = self.facet_filters {
|
|
||||||
let attrs = index
|
|
||||||
.main
|
|
||||||
.attributes_for_faceting(&reader)?
|
|
||||||
.unwrap_or_default();
|
|
||||||
search_builder.add_facet_filters(FacetFilter::from_str(
|
|
||||||
facet_filters,
|
|
||||||
&schema,
|
|
||||||
&attrs,
|
|
||||||
)?);
|
|
||||||
}
|
|
||||||
|
|
||||||
if let Some(facets) = &self.facets_distribution {
|
|
||||||
match index.main.attributes_for_faceting(&reader)? {
|
|
||||||
Some(ref attrs) => {
|
|
||||||
let field_ids = prepare_facet_list(&facets, &schema, attrs)?;
|
|
||||||
search_builder.add_facets(field_ids);
|
|
||||||
}
|
|
||||||
                None => return Err(FacetCountError::NoFacetSet.into()),
            }
        }

        if let Some(attributes_to_crop) = &self.attributes_to_crop {
            let default_length = self.crop_length.unwrap_or(200);
            let mut final_attributes: HashMap<String, usize> = HashMap::new();

            for attribute in attributes_to_crop.split(',') {
                let mut attribute = attribute.split(':');
                let attr = attribute.next();
                let length = attribute
                    .next()
                    .and_then(|s| s.parse().ok())
                    .unwrap_or(default_length);
                match attr {
                    Some("*") => {
                        for attr in &restricted_attributes {
                            final_attributes.insert(attr.to_string(), length);
                        }
                    }
                    Some(attr) => {
                        if available_attributes.contains(attr) {
                            final_attributes.insert(attr.to_string(), length);
                        } else {
                            warn!("The attribute {:?} present in the attributesToCrop parameter doesn't exist", attr);
                        }
                    }
                    None => (),
                }
            }
            search_builder.attributes_to_crop(final_attributes);
        }

        if let Some(attributes_to_highlight) = &self.attributes_to_highlight {
            let mut final_attributes: HashSet<String> = HashSet::new();
            for attribute in attributes_to_highlight.split(',') {
                if attribute == "*" {
                    for attr in &restricted_attributes {
                        final_attributes.insert(attr.to_string());
                    }
                } else if available_attributes.contains(attribute) {
                    final_attributes.insert(attribute.to_string());
                } else {
                    warn!("The attribute {:?} present in the attributesToHighlight parameter doesn't exist", attribute);
                }
            }

            search_builder.attributes_to_highlight(final_attributes);
        }

        if let Some(filters) = &self.filters {
            search_builder.filters(filters.to_string());
        }

        if let Some(matches) = self.matches {
            if matches {
                search_builder.get_matches();
            }
        }
        search_builder.search(&reader)
    }
}

/// Parses the incoming string into an array of attributes for which to return a count. It returns
/// a Vec of attribute names associated with their id.
///
/// An error is returned if the array is malformed, or if it contains attributes that are
/// nonexistent or not set as facets.
fn prepare_facet_list(
    facets: &str,
    schema: &Schema,
    facet_attrs: &[FieldId],
) -> Result<Vec<(FieldId, String)>, FacetCountError> {
    let json_array = serde_json::from_str(facets)?;
    match json_array {
        Value::Array(vals) => {
            let wildcard = Value::String("*".to_string());
            if vals.iter().any(|f| f == &wildcard) {
                let attrs = facet_attrs
                    .iter()
                    .filter_map(|&id| schema.name(id).map(|n| (id, n.to_string())))
                    .collect();
                return Ok(attrs);
            }
            let mut field_ids = Vec::with_capacity(facet_attrs.len());
            for facet in vals {
                match facet {
                    Value::String(facet) => {
                        if let Some(id) = schema.id(&facet) {
                            if !facet_attrs.contains(&id) {
                                return Err(FacetCountError::AttributeNotSet(facet));
                            }
                            field_ids.push((id, facet));
                        }
                    }
                    bad_val => return Err(FacetCountError::unexpected_token(bad_val, &["String"])),
                }
            }
            Ok(field_ids)
        }
        bad_val => Err(FacetCountError::unexpected_token(bad_val, &["[String]"])),
    }
}
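To make the contract of the deleted prepare_facet_list concrete, here is a self-contained sketch of the same wildcard-or-list parsing over plain string names. All names below are stand-ins for illustration; the real function resolves names to FieldIds through meilisearch_schema::Schema.

use serde_json::Value;

/// Stand-in for prepare_facet_list: parse a JSON array of facet names,
/// expanding "*" to every attribute registered for faceting.
fn parse_facets(raw: &str, facet_attrs: &[&str]) -> Result<Vec<String>, String> {
    match serde_json::from_str(raw).map_err(|e| e.to_string())? {
        Value::Array(vals) => {
            // A wildcard anywhere in the array selects all facet attributes.
            if vals.iter().any(|v| v == &Value::String("*".into())) {
                return Ok(facet_attrs.iter().map(|s| s.to_string()).collect());
            }
            let mut out = Vec::with_capacity(vals.len());
            for v in vals {
                match v {
                    Value::String(name) if facet_attrs.contains(&name.as_str()) => {
                        out.push(name)
                    }
                    Value::String(name) => return Err(format!("{} is not set as a facet", name)),
                    bad => return Err(format!("expected String, got {}", bad)),
                }
            }
            Ok(out)
        }
        bad => Err(format!("expected [String], got {}", bad)),
    }
}

fn main() {
    let facets = ["genre", "release_year"];
    assert_eq!(parse_facets(r#"["*"]"#, &facets).unwrap(), vec!["genre", "release_year"]);
    assert_eq!(parse_facets(r#"["genre"]"#, &facets).unwrap(), vec!["genre"]);
    assert!(parse_facets(r#"["color"]"#, &facets).is_err());
}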
@@ -32,91 +32,13 @@ pub fn services(cfg: &mut web::ServiceConfig) {
         .service(update_attributes_for_faceting);
 }
 
-pub fn update_all_settings_txn(
-    data: &web::Data<Data>,
-    settings: SettingsUpdate,
-    index_uid: &str,
-    write_txn: &mut UpdateWriter,
-) -> Result<u64, Error> {
-    let index = data
-        .db
-        .open_index(index_uid)
-        .ok_or(Error::index_not_found(index_uid))?;
-
-    let update_id = index.settings_update(write_txn, settings)?;
-    Ok(update_id)
-}
-
 #[post("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
 async fn update_all(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
     body: web::Json<Settings>,
 ) -> Result<HttpResponse, ResponseError> {
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        Ok(data.db.update_write::<_, _, ResponseError>(|writer| {
-            let settings = body.into_inner().to_update().map_err(Error::bad_request)?;
-            let update_id = index.settings_update(writer, settings)?;
-            Ok(update_id)
-        })?)
-    })?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
-}
-
-pub fn get_all_sync(data: &web::Data<Data>, reader: &MainReader, index_uid: &str) -> Result<Settings, Error> {
-    let index = data
-        .db
-        .open_index(index_uid)
-        .ok_or(Error::index_not_found(index_uid))?;
-
-    let stop_words: BTreeSet<String> = index.main.stop_words(&reader)?.into_iter().collect();
-
-    let synonyms_list = index.main.synonyms(reader)?;
-
-    let mut synonyms = BTreeMap::new();
-    let index_synonyms = &index.synonyms;
-    for synonym in synonyms_list {
-        let list = index_synonyms.synonyms(reader, synonym.as_bytes())?;
-        synonyms.insert(synonym, list);
-    }
-
-    let ranking_rules = index
-        .main
-        .ranking_rules(reader)?
-        .unwrap_or(DEFAULT_RANKING_RULES.to_vec())
-        .into_iter()
-        .map(|r| r.to_string())
-        .collect();
-
-    let schema = index.main.schema(&reader)?;
-
-    let distinct_attribute = match (index.main.distinct_attribute(reader)?, &schema) {
-        (Some(id), Some(schema)) => schema.name(id).map(str::to_string),
-        _ => None,
-    };
-
-    let attributes_for_faceting = match (&schema, &index.main.attributes_for_faceting(&reader)?) {
-        (Some(schema), Some(attrs)) => attrs
-            .iter()
-            .filter_map(|&id| schema.name(id))
-            .map(str::to_string)
-            .collect(),
-        _ => vec![],
-    };
-
-    let searchable_attributes = schema.as_ref().map(get_indexed_attributes);
-    let displayed_attributes = schema.as_ref().map(get_displayed_attributes);
-
-    Ok(Settings {
-        ranking_rules: Some(Some(ranking_rules)),
-        distinct_attribute: Some(distinct_attribute),
-        searchable_attributes: Some(searchable_attributes),
-        displayed_attributes: Some(displayed_attributes),
-        stop_words: Some(Some(stop_words)),
-        synonyms: Some(Some(synonyms)),
-        attributes_for_faceting: Some(Some(attributes_for_faceting)),
-    })
+    todo!()
 }
 
 #[get("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
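Throughout this commit the route attribute and extractor signature of each handler are kept intact while the body is reduced to todo!(). A minimal self-contained sketch of why that compiles (the names below are illustrative, not from the patch):

// `todo!()` expands to a panic ("not yet implemented") whose type is `!`,
// so it coerces to any return type and lets every stub type-check.
fn stubbed_update() -> Result<u64, String> {
    todo!()
}

fn main() {
    // Calling the stub would panic at runtime, so we only name it here.
    let _ = stubbed_update as fn() -> Result<u64, String>;
}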
@@ -124,10 +46,7 @@ async fn get_all(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let reader = data.db.main_read_txn()?;
-    let settings = get_all_sync(&data, &reader, &path.index_uid)?;
-
-    Ok(HttpResponse::Ok().json(settings))
+    todo!()
 }
 
 #[delete("/indexes/{index_uid}/settings", wrap = "Authentication::Private")]
@@ -135,27 +54,7 @@ async fn delete_all(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = SettingsUpdate {
-        ranking_rules: UpdateState::Clear,
-        distinct_attribute: UpdateState::Clear,
-        primary_key: UpdateState::Clear,
-        searchable_attributes: UpdateState::Clear,
-        displayed_attributes: UpdateState::Clear,
-        stop_words: UpdateState::Clear,
-        synonyms: UpdateState::Clear,
-        attributes_for_faceting: UpdateState::Clear,
-    };
-
-    let update_id = data
-        .db
-        .update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[get(
@@ -166,21 +65,7 @@ async fn get_rules(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-    let reader = data.db.main_read_txn()?;
-
-    let ranking_rules = index
-        .main
-        .ranking_rules(&reader)?
-        .unwrap_or(DEFAULT_RANKING_RULES.to_vec())
-        .into_iter()
-        .map(|r| r.to_string())
-        .collect::<Vec<String>>();
-
-    Ok(HttpResponse::Ok().json(ranking_rules))
+    todo!()
 }
 
 #[post(
@@ -192,19 +77,7 @@ async fn update_rules(
     path: web::Path<IndexParam>,
     body: web::Json<Option<Vec<String>>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let settings = Settings {
-            ranking_rules: Some(body.into_inner()),
-            ..Settings::default()
-        };
-
-        let settings = settings.to_update().map_err(Error::bad_request)?;
-        Ok(data
-            .db
-            .update_write(|w| index.settings_update(w, settings))?)
-    })?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[delete(
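All the stubbed update_* handlers in this file shared one shape: wrap the request body in a partial Settings whose fields are doubled Options, convert with to_update(), and queue the write. The double Option distinguishes "field absent from this update" from "field explicitly set to null (reset it)". A stand-in sketch of that encoding, with illustrative types rather than the meilisearch_core definitions:

// Stand-in for the Settings encoding: Option<Option<T>> per field.
#[derive(Debug, Default)]
struct PartialSettings {
    ranking_rules: Option<Option<Vec<String>>>,
}

fn main() {
    // Field omitted from the update entirely: leave the setting alone.
    let untouched = PartialSettings::default();
    // Field present with a value: replace the setting.
    let set = PartialSettings { ranking_rules: Some(Some(vec!["typo".into()])) };
    // Field present but null in the JSON body: clear the setting.
    let cleared = PartialSettings { ranking_rules: Some(None) };
    println!("{:?}\n{:?}\n{:?}", untouched, set, cleared);
}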
@@ -215,21 +88,7 @@ async fn delete_rules(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = SettingsUpdate {
-        ranking_rules: UpdateState::Clear,
-        ..SettingsUpdate::default()
-    };
-
-    let update_id = data
-        .db
-        .update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[get(
@@ -240,19 +99,7 @@ async fn get_distinct(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-    let reader = data.db.main_read_txn()?;
-    let distinct_attribute_id = index.main.distinct_attribute(&reader)?;
-    let schema = index.main.schema(&reader)?;
-    let distinct_attribute = match (schema, distinct_attribute_id) {
-        (Some(schema), Some(id)) => schema.name(id).map(str::to_string),
-        _ => None,
-    };
-
-    Ok(HttpResponse::Ok().json(distinct_attribute))
+    todo!()
 }
 
 #[post(
@@ -264,19 +111,7 @@ async fn update_distinct(
     path: web::Path<IndexParam>,
     body: web::Json<Option<String>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let settings = Settings {
-            distinct_attribute: Some(body.into_inner()),
-            ..Settings::default()
-        };
-
-        let settings = settings.to_update().map_err(Error::bad_request)?;
-        Ok(data
-            .db
-            .update_write(|w| index.settings_update(w, settings))?)
-    })?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[delete(
@@ -287,21 +122,7 @@ async fn delete_distinct(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = SettingsUpdate {
-        distinct_attribute: UpdateState::Clear,
-        ..SettingsUpdate::default()
-    };
-
-    let update_id = data
-        .db
-        .update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[get(
@@ -312,15 +133,7 @@ async fn get_searchable(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-    let reader = data.db.main_read_txn()?;
-    let schema = index.main.schema(&reader)?;
-    let searchable_attributes: Option<Vec<String>> = schema.as_ref().map(get_indexed_attributes);
-
-    Ok(HttpResponse::Ok().json(searchable_attributes))
+    todo!()
 }
 
 #[post(
@@ -332,20 +145,7 @@ async fn update_searchable(
     path: web::Path<IndexParam>,
     body: web::Json<Option<Vec<String>>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let settings = Settings {
-            searchable_attributes: Some(body.into_inner()),
-            ..Settings::default()
-        };
-
-        let settings = settings.to_update().map_err(Error::bad_request)?;
-
-        Ok(data
-            .db
-            .update_write(|w| index.settings_update(w, settings))?)
-    })?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[delete(
@@ -356,21 +156,7 @@ async fn delete_searchable(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = SettingsUpdate {
-        searchable_attributes: UpdateState::Clear,
-        ..SettingsUpdate::default()
-    };
-
-    let update_id = data
-        .db
-        .update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[get(
@@ -381,17 +167,7 @@ async fn get_displayed(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-    let reader = data.db.main_read_txn()?;
-
-    let schema = index.main.schema(&reader)?;
-
-    let displayed_attributes = schema.as_ref().map(get_displayed_attributes);
-
-    Ok(HttpResponse::Ok().json(displayed_attributes))
+    todo!()
 }
 
 #[post(
@@ -403,19 +179,7 @@ async fn update_displayed(
     path: web::Path<IndexParam>,
     body: web::Json<Option<BTreeSet<String>>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let settings = Settings {
-            displayed_attributes: Some(body.into_inner()),
-            ..Settings::default()
-        };
-
-        let settings = settings.to_update().map_err(Error::bad_request)?;
-        Ok(data
-            .db
-            .update_write(|w| index.settings_update(w, settings))?)
-    })?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[delete(
@@ -426,21 +190,7 @@ async fn delete_displayed(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = SettingsUpdate {
-        displayed_attributes: UpdateState::Clear,
-        ..SettingsUpdate::default()
-    };
-
-    let update_id = data
-        .db
-        .update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[get(
@@ -451,26 +201,7 @@ async fn get_attributes_for_faceting(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let attributes_for_faceting = data.db.main_read::<_, _, ResponseError>(|reader| {
-        let schema = index.main.schema(reader)?;
-        let attrs = index.main.attributes_for_faceting(reader)?;
-        let attr_names = match (&schema, &attrs) {
-            (Some(schema), Some(attrs)) => attrs
-                .iter()
-                .filter_map(|&id| schema.name(id))
-                .map(str::to_string)
-                .collect(),
-            _ => vec![],
-        };
-        Ok(attr_names)
-    })?;
-
-    Ok(HttpResponse::Ok().json(attributes_for_faceting))
+    todo!()
 }
 
 #[post(
@@ -482,19 +213,7 @@ async fn update_attributes_for_faceting(
     path: web::Path<IndexParam>,
     body: web::Json<Option<Vec<String>>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let settings = Settings {
-            attributes_for_faceting: Some(body.into_inner()),
-            ..Settings::default()
-        };
-
-        let settings = settings.to_update().map_err(Error::bad_request)?;
-        Ok(data
-            .db
-            .update_write(|w| index.settings_update(w, settings))?)
-    })?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[delete(
@@ -505,43 +224,5 @@ async fn delete_attributes_for_faceting(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = SettingsUpdate {
-        attributes_for_faceting: UpdateState::Clear,
-        ..SettingsUpdate::default()
-    };
-
-    let update_id = data
-        .db
-        .update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
-}
-
-fn get_indexed_attributes(schema: &Schema) -> Vec<String> {
-    if schema.is_indexed_all() {
-        ["*"].iter().map(|s| s.to_string()).collect()
-    } else {
-        schema
-            .indexed_name()
-            .iter()
-            .map(|s| s.to_string())
-            .collect()
-    }
-}
-
-fn get_displayed_attributes(schema: &Schema) -> BTreeSet<String> {
-    if schema.is_displayed_all() {
-        ["*"].iter().map(|s| s.to_string()).collect()
-    } else {
-        schema
-            .displayed_name()
-            .iter()
-            .map(|s| s.to_string())
-            .collect()
-    }
-}
+    todo!()
 }
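The deleted delete_* handlers likewise shared one shape: a SettingsUpdate with a single field set to UpdateState::Clear and every other field left at its default via struct-update syntax. A self-contained sketch of that shape, with stand-in types rather than the meilisearch_core definitions:

// Stand-ins; the real UpdateState/SettingsUpdate live in meilisearch_core.
#[derive(Debug, Default)]
enum UpdateState<T> {
    #[default]
    Nothing,
    Update(T),
    Clear,
}

#[derive(Debug, Default)]
struct SettingsUpdate {
    ranking_rules: UpdateState<Vec<String>>,
    stop_words: UpdateState<Vec<String>>,
}

fn main() {
    // Clear one setting and leave the rest untouched (struct-update syntax).
    let settings = SettingsUpdate {
        ranking_rules: UpdateState::Clear,
        ..SettingsUpdate::default()
    };
    println!("{:?}", settings);
}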
@@ -32,30 +32,7 @@ async fn index_stats(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let reader = data.db.main_read_txn()?;
-
-    let number_of_documents = index.main.number_of_documents(&reader)?;
-
-    let fields_distribution = index.main.fields_distribution(&reader)?.unwrap_or_default();
-
-    let update_reader = data.db.update_read_txn()?;
-
-    let is_indexing =
-        data.db.is_indexing(&update_reader, &path.index_uid)?
-            .ok_or(Error::internal(
-                "Impossible to know if the database is indexing",
-            ))?;
-
-    Ok(HttpResponse::Ok().json(IndexStatsResponse {
-        number_of_documents,
-        is_indexing,
-        fields_distribution,
-    }))
+    todo!()
 }
 
 #[derive(Serialize)]
@@ -68,52 +45,7 @@ struct StatsResult {
 
 #[get("/stats", wrap = "Authentication::Private")]
 async fn get_stats(data: web::Data<Data>) -> Result<HttpResponse, ResponseError> {
-    let mut index_list = HashMap::new();
-
-    let reader = data.db.main_read_txn()?;
-    let update_reader = data.db.update_read_txn()?;
-
-    let indexes_set = data.db.indexes_uids();
-    for index_uid in indexes_set {
-        let index = data.db.open_index(&index_uid);
-        match index {
-            Some(index) => {
-                let number_of_documents = index.main.number_of_documents(&reader)?;
-
-                let fields_distribution = index.main.fields_distribution(&reader)?.unwrap_or_default();
-
-                let is_indexing = data.db.is_indexing(&update_reader, &index_uid)?.ok_or(
-                    Error::internal("Impossible to know if the database is indexing"),
-                )?;
-
-                let response = IndexStatsResponse {
-                    number_of_documents,
-                    is_indexing,
-                    fields_distribution,
-                };
-                index_list.insert(index_uid, response);
-            }
-            None => error!(
-                "Index {:?} is referenced in the indexes list but cannot be found",
-                index_uid
-            ),
-        }
-    }
-
-    let database_size = WalkDir::new(&data.db_path)
-        .into_iter()
-        .filter_map(|entry| entry.ok())
-        .filter_map(|entry| entry.metadata().ok())
-        .filter(|metadata| metadata.is_file())
-        .fold(0, |acc, m| acc + m.len());
-
-    let last_update = data.db.last_update(&reader)?;
-
-    Ok(HttpResponse::Ok().json(StatsResult {
-        database_size,
-        last_update,
-        indexes: index_list,
-    }))
+    todo!()
 }
 
 #[derive(Serialize)]
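The removed get_stats body computed the on-disk database size by walking data.db_path and summing file lengths. The same technique in isolation, using the walkdir crate (the path literal is a placeholder):

use walkdir::WalkDir;

fn dir_size_bytes(path: &str) -> u64 {
    WalkDir::new(path)
        .into_iter()
        .filter_map(|entry| entry.ok())            // skip unreadable entries
        .filter_map(|entry| entry.metadata().ok()) // skip metadata errors
        .filter(|metadata| metadata.is_file())     // count regular files only
        .fold(0, |acc, m| acc + m.len())           // sum file lengths
}

fn main() {
    // "./data.ms" stands in for the configured db_path.
    println!("{} bytes", dir_size_bytes("./data.ms"));
}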
@@ -126,9 +58,5 @@ struct VersionResponse {
 
 #[get("/version", wrap = "Authentication::Private")]
 async fn get_version() -> HttpResponse {
-    HttpResponse::Ok().json(VersionResponse {
-        commit_sha: env!("VERGEN_SHA").to_string(),
-        build_date: env!("VERGEN_BUILD_TIMESTAMP").to_string(),
-        pkg_version: env!("CARGO_PKG_VERSION").to_string(),
-    })
+    todo!()
 }
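The stubbed version endpoint filled its response from compile-time environment variables: CARGO_PKG_VERSION is set by Cargo itself, while the VERGEN_* values are generated by a vergen build script. The lookup in isolation:

fn main() {
    // env!() reads the variable at compile time and fails the build if it is
    // missing. CARGO_PKG_VERSION always exists under Cargo; the VERGEN_*
    // variables exist only when a vergen build script emits them.
    let pkg_version = env!("CARGO_PKG_VERSION");
    println!("pkg version: {}", pkg_version);
}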
@@ -20,14 +20,7 @@ async fn get(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-    let reader = data.db.main_read_txn()?;
-    let stop_words = index.main.stop_words(&reader)?;
-
-    Ok(HttpResponse::Ok().json(stop_words))
+    todo!()
 }
 
 #[post(
@@ -39,18 +32,7 @@ async fn update(
     path: web::Path<IndexParam>,
     body: web::Json<BTreeSet<String>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let settings = SettingsUpdate {
-            stop_words: UpdateState::Update(body.into_inner()),
-            ..SettingsUpdate::default()
-        };
-
-        Ok(data
-            .db
-            .update_write(|w| index.settings_update(w, settings))?)
-    })?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[delete(
@@ -61,19 +43,5 @@ async fn delete(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = SettingsUpdate {
-        stop_words: UpdateState::Clear,
-        ..SettingsUpdate::default()
-    };
-
-    let update_id = data
-        .db
-        .update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
@@ -22,23 +22,7 @@ async fn get(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let reader = data.db.main_read_txn()?;
-
-    let synonyms_list = index.main.synonyms(&reader)?;
-
-    let mut synonyms = IndexMap::new();
-    let index_synonyms = &index.synonyms;
-    for synonym in synonyms_list {
-        let list = index_synonyms.synonyms(&reader, synonym.as_bytes())?;
-        synonyms.insert(synonym, list);
-    }
-
-    Ok(HttpResponse::Ok().json(synonyms))
+    todo!()
 }
 
 #[post(
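Note that the removed synonyms getter accumulated its response in an indexmap::IndexMap rather than a HashMap, so the serialized JSON keeps the order in which the synonyms were read from the store. The behavior in isolation (the example entries are illustrative):

use indexmap::IndexMap;

fn main() {
    // IndexMap preserves insertion order, unlike HashMap.
    let mut synonyms: IndexMap<String, Vec<String>> = IndexMap::new();
    synonyms.insert("wolverine".into(), vec!["logan".into(), "xmen".into()]);
    synonyms.insert("logan".into(), vec!["wolverine".into()]);

    for (word, alternatives) in &synonyms {
        println!("{} => {:?}", word, alternatives);
    }
}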
@@ -50,18 +34,7 @@ async fn update(
     path: web::Path<IndexParam>,
     body: web::Json<BTreeMap<String, Vec<String>>>,
 ) -> Result<HttpResponse, ResponseError> {
-    let update_id = data.get_or_create_index(&path.index_uid, |index| {
-        let settings = SettingsUpdate {
-            synonyms: UpdateState::Update(body.into_inner()),
-            ..SettingsUpdate::default()
-        };
-
-        Ok(data
-            .db
-            .update_write(|w| index.settings_update(w, settings))?)
-    })?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }
 
 #[delete(
@@ -72,19 +45,5 @@ async fn delete(
     data: web::Data<Data>,
     path: web::Path<IndexParam>,
 ) -> Result<HttpResponse, ResponseError> {
-    let index = data
-        .db
-        .open_index(&path.index_uid)
-        .ok_or(Error::index_not_found(&path.index_uid))?;
-
-    let settings = SettingsUpdate {
-        synonyms: UpdateState::Clear,
-        ..SettingsUpdate::default()
-    };
-
-    let update_id = data
-        .db
-        .update_write(|w| index.settings_update(w, settings))?;
-
-    Ok(HttpResponse::Accepted().json(IndexUpdateResponse::with_id(update_id)))
+    todo!()
 }