Apply suggestions from code review

Co-authored-by: Clément Renault <clement@meilisearch.com>

commit a84f3a8b31
parent c81ff22c5b
@@ -695,7 +695,7 @@ async fn main() -> anyhow::Result<()> {
 struct QueryBody {
     query: Option<String>,
     filters: Option<String>,
-    sorters: Option<String>,
+    sort: Option<String>,
     facet_filters: Option<Vec<UntaggedEither<Vec<String>, String>>>,
     facet_distribution: Option<bool>,
     limit: Option<usize>,
@@ -755,7 +755,7 @@ async fn main() -> anyhow::Result<()> {
                 search.limit(limit);
             }

-            if let Some(sort) = query.sorters {
+            if let Some(sort) = query.sort {
                 search.sort_criteria(vec![sort.parse().unwrap()]);
             }

@@ -84,12 +84,16 @@ impl FromStr for Member {
                 text.strip_prefix("_geoPoint(")
                     .and_then(|point| point.strip_suffix(")"))
                     .ok_or_else(|| UserError::InvalidRankingRuleName { name: text.to_string() })?;
-            let point = point
-                .split(',')
-                .map(|el| el.trim().parse())
-                .collect::<Result<Vec<f64>, _>>()
-                .map_err(|_| UserError::InvalidRankingRuleName { name: text.to_string() })?;
-            Ok(Member::Geo([point[0], point[1]]))
+            let (lat, long) = point
+                .split_once(',')
+                .ok_or_else(|| UserError::InvalidRankingRuleName { name: text.to_string() })
+                .and_then(|(lat, long)| {
+                    lat.trim()
+                        .parse()
+                        .and_then(|lat| long.trim().parse().map(|long| (lat, long)))
+                        .map_err(|_| UserError::InvalidRankingRuleName { name: text.to_string() })
+                })?;
+            Ok(Member::Geo([lat, long]))
         } else {
             Ok(Member::Field(text.to_string()))
         }
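Note on the hunk above (illustrative, not part of the patch): the rewritten parser relies on `str::split_once`, which splits at the first `,` and returns `Option<(&str, &str)>`, so both halves can be trimmed and parsed without collecting into a `Vec`. A minimal standalone sketch of the same idea, using a hypothetical `parse_geo_point` helper that returns `Option` instead of milli's `UserError`:

fn parse_geo_point(text: &str) -> Option<[f64; 2]> {
    // "_geoPoint(lat, lng)" -> "lat, lng" -> split at the first comma.
    let point = text.strip_prefix("_geoPoint(")?.strip_suffix(")")?;
    let (lat, long) = point.split_once(',')?;
    Some([lat.trim().parse().ok()?, long.trim().parse().ok()?])
}

fn main() {
    assert_eq!(parse_geo_point("_geoPoint(48.86, 2.35)"), Some([48.86, 2.35]));
    assert_eq!(parse_geo_point("_geoPoint(48.86)"), None);
}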
@@ -99,7 +103,7 @@ impl FromStr for Member {
 impl fmt::Display for Member {
     fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
         match self {
-            Member::Field(name) => write!(f, "{}", name),
+            Member::Field(name) => f.write_str(name),
             Member::Geo([lat, lng]) => write!(f, "_geoPoint({}, {})", lat, lng),
         }
     }
@@ -12,10 +12,8 @@ use crate::{DocumentId, FieldId};

 pub type Object = Map<String, Value>;

-const RESERVED_KEYWORD: &[&'static str] = &["_geo", "_geoDistance"];
-
 pub fn is_reserved_keyword(keyword: &str) -> bool {
-    RESERVED_KEYWORD.contains(&keyword)
+    ["_geo", "_geoDistance"].contains(&keyword)
 }

 #[derive(Debug)]
@@ -299,6 +299,7 @@ impl Index {

     /* geo rtree */

+    /// Writes the provided `rtree` which associates coordinates to documents ids.
     pub(crate) fn put_geo_rtree(
         &self,
         wtxn: &mut RwTxn,
@@ -307,10 +308,12 @@ impl Index {
         self.main.put::<_, Str, SerdeBincode<RTree<GeoPoint>>>(wtxn, main_key::GEO_RTREE_KEY, rtree)
     }

+    /// Delete the `rtree` which associates coordinates to documents ids.
     pub(crate) fn delete_geo_rtree(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
         self.main.delete::<_, Str>(wtxn, main_key::GEO_RTREE_KEY)
     }

+    /// Returns the `rtree` which associates coordinates to documents ids.
     pub fn geo_rtree<'t>(&self, rtxn: &'t RoTxn) -> Result<Option<RTree<GeoPoint>>> {
         match self
             .main
@@ -323,7 +326,7 @@ impl Index {

     /* geo faceted */

-    /// Writes the documents ids that are faceted with a _geo field
+    /// Writes the documents ids that are faceted with a _geo field.
     pub(crate) fn put_geo_faceted_documents_ids(
         &self,
         wtxn: &mut RwTxn,
@@ -336,16 +339,12 @@ impl Index {
         )
     }

-    /// Delete the documents ids that are faceted with a _geo field
-    pub(crate) fn delete_geo_faceted_documents_ids(&self, wtxn: &mut RwTxn) -> heed::Result<()> {
-        self.main.put::<_, Str, RoaringBitmapCodec>(
-            wtxn,
-            main_key::GEO_FACETED_DOCUMENTS_IDS_KEY,
-            &RoaringBitmap::new(),
-        )
+    /// Delete the documents ids that are faceted with a _geo field.
+    pub(crate) fn delete_geo_faceted_documents_ids(&self, wtxn: &mut RwTxn) -> heed::Result<bool> {
+        self.main.delete::<_, Str>(wtxn, main_key::GEO_FACETED_DOCUMENTS_IDS_KEY)
     }

-    /// Retrieve all the documents ids that faceted with a _geo field
+    /// Retrieve all the documents ids that faceted with a _geo field.
     pub fn geo_faceted_documents_ids(&self, rtxn: &RoTxn) -> heed::Result<RoaringBitmap> {
         match self
             .main
@@ -142,7 +142,7 @@ where
     Some((head, tail))
 }

-/// Return the distance between two points in meters
+/// Return the distance between two points in meters.
 fn distance_between_two_points(a: &[f64; 2], b: &[f64; 2]) -> f64 {
     let a = haversine::Location { latitude: a[0], longitude: a[1] };
     let b = haversine::Location { latitude: b[0], longitude: b[1] };
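Note (illustrative, not part of the patch): the hunk above only shows the first lines of `distance_between_two_points`. Assuming the `haversine` crate's `distance(Location, Location, Units)` API, a self-contained sketch of such a meters-returning helper could look like this; the example coordinates are made up:

use haversine::{distance, Location, Units};

/// Great-circle distance between two `[lat, lng]` points, in meters.
fn distance_between_two_points(a: &[f64; 2], b: &[f64; 2]) -> f64 {
    let a = Location { latitude: a[0], longitude: a[1] };
    let b = Location { latitude: b[0], longitude: b[1] };
    // The crate reports kilometers; convert to meters.
    distance(a, b, Units::Kilometers) * 1000.0
}

fn main() {
    // Paris to London is roughly 340 km.
    let meters = distance_between_two_points(&[48.8566, 2.3522], &[51.5074, -0.1278]);
    println!("{meters:.0} m");
}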
@@ -1,3 +1,5 @@
+use std::iter;
+
 use roaring::RoaringBitmap;
 use rstar::RTree;

@@ -23,7 +25,7 @@ impl<'t> Geo<'t> {
         parent: Box<dyn Criterion + 't>,
         point: [f64; 2],
     ) -> Result<Self> {
-        let candidates = Box::new(std::iter::empty());
+        let candidates = Box::new(iter::empty());
         let allowed_candidates = index.geo_faceted_documents_ids(rtxn)?;
         let bucket_candidates = RoaringBitmap::new();
         let rtree = index.geo_rtree(rtxn)?;
@@ -41,7 +43,7 @@ impl<'t> Geo<'t> {
     }
 }

-impl<'t> Criterion for Geo<'t> {
+impl Criterion for Geo<'_> {
     fn next(&mut self, params: &mut CriterionParameters) -> Result<Option<CriterionResult>> {
         // if there is no rtree we have nothing to returns
         let rtree = match self.rtree.as_ref() {
@@ -108,7 +110,7 @@ fn geo_point(
     let results = rtree
         .nearest_neighbor_iter(&point)
         .filter_map(move |point| candidates.contains(point.data).then(|| point.data))
-        .map(|id| std::iter::once(id).collect::<RoaringBitmap>())
+        .map(|id| iter::once(id).collect::<RoaringBitmap>())
         .collect::<Vec<_>>();

     Box::new(results.into_iter())
@@ -311,7 +311,9 @@ impl<'t> CriteriaBuilder<'t> {
                     point.clone(),
                 )?),
                 AscDescName::Desc(Member::Geo(_point)) => {
-                    return Err(UserError::InvalidSortName { name: "Sorting in descending order is currently not supported for the geosearch".to_string() })?
+                    return Err(UserError::InvalidSortName {
+                        name: "Sorting in descending order is currently not supported for the geosearch".to_string(),
+                    })?
                 }
             };
         }
@@ -21,7 +21,9 @@ use crate::error::UserError;
 use crate::heed_codec::facet::{
     FacetLevelValueF64Codec, FacetStringLevelZeroCodec, FacetStringLevelZeroValueCodec,
 };
-use crate::{CboRoaringBitmapCodec, FieldId, FieldsIdsMap, Index, Result};
+use crate::{
+    distance_between_two_points, CboRoaringBitmapCodec, FieldId, FieldsIdsMap, Index, Result,
+};

 #[derive(Debug, Clone, PartialEq)]
 pub enum Operator {
@@ -505,19 +507,18 @@ impl FilterCondition {
             LowerThanOrEqual(val) => (Included(f64::MIN), Included(*val)),
             Between(left, right) => (Included(*left), Included(*right)),
             GeoLowerThan(base_point, distance) => {
-                let mut result = RoaringBitmap::new();
                 let rtree = match index.geo_rtree(rtxn)? {
                     Some(rtree) => rtree,
-                    None => return Ok(result),
+                    None => return Ok(RoaringBitmap::new()),
                 };

-                rtree
+                let result = rtree
                     .nearest_neighbor_iter(base_point)
                     .take_while(|point| {
-                        dbg!(crate::distance_between_two_points(base_point, point.geom()))
-                            < *distance
+                        distance_between_two_points(base_point, point.geom()) < *distance
                     })
-                    .for_each(|point| drop(result.insert(point.data)));
+                    .map(|point| point.data)
+                    .collect();

                 return Ok(result);
             }
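Note (illustrative, not part of the patch): the rewritten `GeoLowerThan` arm can drop the mutable accumulator because `RoaringBitmap` implements `FromIterator<u32>`, so the matching document ids are collected directly. A small sketch of that pattern with made-up (docid, distance) pairs; it uses `filter` instead of `take_while` since the data here is not distance-ordered:

use roaring::RoaringBitmap;

fn main() {
    // Pretend these are (docid, distance in meters) pairs produced by an rtree scan.
    let hits = vec![(1u32, 120.0), (7, 450.0), (9, 2_300.0), (12, 80.0)];
    let max_distance = 500.0;

    let within_range: RoaringBitmap = hits
        .iter()
        .filter(|(_, distance)| *distance < max_distance)
        .map(|(docid, _)| *docid)
        .collect();

    assert_eq!(within_range.len(), 3);
}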
@@ -602,7 +603,8 @@ fn field_id(
         return Err(PestError::new_from_span(
             ErrorVariant::CustomError {
                 message: format!(
-                    "`{}` is a reserved keyword and thus can't be used as a filter expression. Available filterable attributes are: {}",
+                    "`{}` is a reserved keyword and therefore can't be used as a filter expression. \
+                    Available filterable attributes are: {}",
                     key.as_str(),
                     filterable_fields.iter().join(", "),
                 ),
@@ -691,7 +693,7 @@ mod tests {
         assert!(result.is_err());
         let error = result.unwrap_err();
         assert!(error.to_string().contains(
-            "`_geo` is a reserved keyword and thus can't be used as a filter expression."
+            "`_geo` is a reserved keyword and therefore can't be used as a filter expression."
         ));
     }

@@ -383,15 +383,16 @@ impl<'t, 'u, 'i> DeleteDocuments<'t, 'u, 'i> {
         if let Some(mut rtree) = self.index.geo_rtree(self.wtxn)? {
             let mut geo_faceted_doc_ids = self.index.geo_faceted_documents_ids(self.wtxn)?;

-            let points_to_remove: Vec<_> = rtree
+            let (points_to_remove, docids_to_remove): (Vec<_>, RoaringBitmap) = rtree
                 .iter()
                 .filter(|&point| self.documents_ids.contains(point.data))
                 .cloned()
-                .collect();
+                .map(|point| (point, point.data))
+                .unzip();
             points_to_remove.iter().for_each(|point| {
                 rtree.remove(&point);
-                geo_faceted_doc_ids.remove(point.data);
             });
+            geo_faceted_doc_ids -= docids_to_remove;

             self.index.put_geo_rtree(self.wtxn, &rtree)?;
             self.index.put_geo_faceted_documents_ids(self.wtxn, &geo_faceted_doc_ids)?;
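Note (illustrative, not part of the patch): the deletion path now produces two collections in one pass with `Iterator::unzip`, which works because both `Vec` and `RoaringBitmap` implement `Default` and `Extend`; the bitmap of removed ids can then be subtracted with `-=`. A self-contained sketch of that pattern on a made-up point type (not milli's `GeoPoint`):

use roaring::RoaringBitmap;

#[derive(Clone, Debug)]
struct Point {
    coords: [f64; 2],
    data: u32, // the document id carried by the rtree point
}

fn main() {
    let stored = vec![
        Point { coords: [48.8, 2.3], data: 1 },
        Point { coords: [51.5, -0.1], data: 7 },
        Point { coords: [40.7, -74.0], data: 9 },
    ];
    let deleted_docids: RoaringBitmap = vec![7u32, 9].into_iter().collect();

    // One pass yields both the points to drop and their docids as a bitmap.
    let (points_to_remove, docids_to_remove): (Vec<_>, RoaringBitmap) = stored
        .iter()
        .filter(|point| deleted_docids.contains(point.data))
        .cloned()
        .map(|point| (point, point.data))
        .unzip();

    let mut geo_faceted_doc_ids: RoaringBitmap = vec![1u32, 7, 9].into_iter().collect();
    geo_faceted_doc_ids -= docids_to_remove;

    assert_eq!(points_to_remove.len(), 2);
    assert!(geo_faceted_doc_ids.contains(1) && geo_faceted_doc_ids.len() == 1);
}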
@@ -22,11 +22,10 @@ pub fn extract_geo_points<R: io::Read>(

     while let Some((docid_bytes, value)) = obkv_documents.next()? {
         let obkv = obkv::KvReader::new(value);
-        let point = match obkv.get(geo_field_id) {
-            Some(point) => point,
+        let point: Value = match obkv.get(geo_field_id) {
+            Some(point) => serde_json::from_slice(point).map_err(InternalError::SerdeJson)?,
             None => continue,
         };
-        let point: Value = serde_json::from_slice(point).map_err(InternalError::SerdeJson)?;

         if let Some((lat, lng)) = point["lat"].as_f64().zip(point["lng"].as_f64()) {
             // this will create an array of 16 bytes (two 8 bytes floats)
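Note (illustrative, not part of the patch): after the change, the raw `_geo` bytes are deserialized into a `serde_json::Value` inside the match, and `lat`/`lng` are then read as `f64`s before being packed into 16 bytes (two 8-byte floats). A standalone sketch of that extraction and packing, using a hypothetical `pack_geo_point` helper and made-up input rather than milli's obkv reader:

use serde_json::{json, Value};

/// Pack a `_geo` object's lat/lng into 16 bytes (two 8-byte floats), if both are present.
fn pack_geo_point(point: &Value) -> Option<[u8; 16]> {
    let (lat, lng) = point["lat"].as_f64().zip(point["lng"].as_f64())?;
    let mut bytes = [0u8; 16];
    bytes[..8].copy_from_slice(&lat.to_ne_bytes());
    bytes[8..].copy_from_slice(&lng.to_ne_bytes());
    Some(bytes)
}

fn main() {
    let point = json!({ "lat": 48.8566, "lng": 2.3522 });
    let packed = pack_geo_point(&point).expect("both lat and lng are present");
    println!("{} bytes: {:?}", packed.len(), packed);
}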
@@ -189,12 +189,9 @@ fn extract_documents_data(
         let documents_chunk_cloned = documents_chunk.clone();
         let lmdb_writer_sx_cloned = lmdb_writer_sx.clone();
         rayon::spawn(move || {
-            let _ = match extract_geo_points(
-                documents_chunk_cloned,
-                indexer,
-                primary_key_id,
-                geo_field_id,
-            ) {
+            let result =
+                extract_geo_points(documents_chunk_cloned, indexer, primary_key_id, geo_field_id);
+            let _ = match result {
                 Ok(geo_points) => lmdb_writer_sx_cloned.send(Ok(TypedChunk::GeoPoints(geo_points))),
                 Err(error) => lmdb_writer_sx_cloned.send(Err(error)),
             };
@@ -237,12 +237,17 @@ impl<'t, 'u, 'i, 'a> IndexDocuments<'t, 'u, 'i, 'a> {
         // get filterable fields for facet databases
         let faceted_fields = self.index.faceted_fields_ids(self.wtxn)?;
         // get the fid of the `_geo` field.
-        let geo_field_id = if let Some(gfid) = self.index.fields_ids_map(self.wtxn)?.id("_geo") {
-            (self.index.sortable_fields_ids(self.wtxn)?.contains(&gfid)
-                || self.index.filterable_fields_ids(self.wtxn)?.contains(&gfid))
-            .then(|| gfid)
+        let geo_field_id = match self.index.fields_ids_map(self.wtxn)?.id("_geo") {
+            Some(gfid) => {
+                let is_sortable = self.index.sortable_fields_ids(self.wtxn)?.contains(&gfid);
+                let is_filterable = self.index.filterable_fields_ids(self.wtxn)?.contains(&gfid);
+                if is_sortable || is_filterable {
+                    Some(gfid)
                 } else {
                     None
+                }
+            }
+            None => None,
         };

         let stop_words = self.index.stop_words(self.wtxn)?;
@@ -1,4 +1,5 @@
 use std::borrow::Cow;
+use std::convert::TryInto;
 use std::fs::File;

 use heed::types::ByteSlice;
@@ -11,7 +12,7 @@ use super::helpers::{
 };
 use crate::heed_codec::facet::{decode_prefix_string, encode_prefix_string};
 use crate::update::index_documents::helpers::into_clonable_grenad;
-use crate::{BoRoaringBitmapCodec, CboRoaringBitmapCodec, GeoPoint, Index, Result};
+use crate::{BoRoaringBitmapCodec, CboRoaringBitmapCodec, DocumentId, GeoPoint, Index, Result};

 pub(crate) enum TypedChunk {
     DocidWordPositions(grenad::Reader<CursorClonableMmap>),
@@ -180,24 +181,22 @@ pub(crate) fn write_typed_chunk_into_index(
             is_merged_database = true;
         }
         TypedChunk::GeoPoints(mut geo_points) => {
-            // TODO: we should create the rtree with the `RTree::bulk_load` function
             let mut rtree = index.geo_rtree(wtxn)?.unwrap_or_default();
-            let mut doc_ids = index.geo_faceted_documents_ids(wtxn)?;
+            let mut geo_faceted_docids = index.geo_faceted_documents_ids(wtxn)?;

             while let Some((key, value)) = geo_points.next()? {
                 // convert the key back to a u32 (4 bytes)
-                let (key, _) = helpers::try_split_array_at::<u8, 4>(key).unwrap();
-                let key = u32::from_be_bytes(key);
+                let docid = key.try_into().map(DocumentId::from_be_bytes).unwrap();

                 // convert the latitude and longitude back to a f64 (8 bytes)
                 let (lat, tail) = helpers::try_split_array_at::<u8, 8>(value).unwrap();
                 let (lng, _) = helpers::try_split_array_at::<u8, 8>(tail).unwrap();
                 let point = [f64::from_ne_bytes(lat), f64::from_ne_bytes(lng)];
-                rtree.insert(GeoPoint::new(point, key));
-                doc_ids.insert(key);
+                rtree.insert(GeoPoint::new(point, docid));
+                geo_faceted_docids.insert(docid);
             }
             index.put_geo_rtree(wtxn, &rtree)?;
-            index.put_geo_faceted_documents_ids(wtxn, &doc_ids)?;
+            index.put_geo_faceted_documents_ids(wtxn, &geo_faceted_docids)?;
         }
     }
