4043: Bring back hotfixes from v1.3.3 into v1.4.0 r=Kerollmops a=curquiza



Co-authored-by: curquiza <curquiza@users.noreply.github.com>
Co-authored-by: meili-bors[bot] <89034592+meili-bors[bot]@users.noreply.github.com>
Co-authored-by: Kerollmops <clement@meilisearch.com>
Co-authored-by: curquiza <clementine@meilisearch.com>
commit 487d493f49 (by meili-bors[bot], 2023-09-11 12:27:34 +00:00, committed via GitHub)
7 changed files with 110 additions and 35 deletions

Cargo.lock (generated)

@@ -1444,6 +1444,7 @@ dependencies = [
  "insta",
  "nom",
  "nom_locate",
+ "unescaper",
 ]

 [[package]]
@@ -4180,6 +4181,15 @@ version = "0.1.6"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "ed646292ffc8188ef8ea4d1e0e0150fb15a5c2e12ad9b8fc191ae7a8a7f3c4b9"

+[[package]]
+name = "unescaper"
+version = "0.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "a96a44ae11e25afb520af4534fd7b0bd8cd613e35a78def813b8cf41631fa3c8"
+dependencies = [
+ "thiserror",
+]
+
 [[package]]
 name = "unicase"
 version = "2.6.0"

filter-parser/Cargo.toml

@@ -14,6 +14,7 @@ license.workspace = true
 [dependencies]
 nom = "7.1.3"
 nom_locate = "4.1.0"
+unescaper = "0.1.2"

 [dev-dependencies]
 insta = "1.29.0"

filter-parser/src/error.rs

@@ -62,6 +62,7 @@ pub enum ErrorKind<'a> {
     MisusedGeoRadius,
     MisusedGeoBoundingBox,
     InvalidPrimary,
+    InvalidEscapedNumber,
     ExpectedEof,
     ExpectedValue(ExpectedValueKind),
     MalformedValue,
@@ -147,6 +148,9 @@ impl<'a> Display for Error<'a> {
             let text = if input.trim().is_empty() { "but instead got nothing.".to_string() } else { format!("at `{}`.", escaped_input) };
             writeln!(f, "Was expecting an operation `=`, `!=`, `>=`, `>`, `<=`, `<`, `IN`, `NOT IN`, `TO`, `EXISTS`, `NOT EXISTS`, `IS NULL`, `IS NOT NULL`, `IS EMPTY`, `IS NOT EMPTY`, `_geoRadius`, or `_geoBoundingBox` {}", text)?
         }
+        ErrorKind::InvalidEscapedNumber => {
+            writeln!(f, "Found an invalid escaped sequence number: `{}`.", escaped_input)?
+        }
         ErrorKind::ExpectedEof => {
             writeln!(f, "Found unexpected characters at the end of the filter: `{}`. You probably forgot an `OR` or an `AND` rule.", escaped_input)?
         }
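For context on when the new variant fires: `parse_value` (see the value.rs diff below) maps `unescaper::Error::ParseIntError` to `InvalidEscapedNumber`, i.e. a numeric escape whose digits do not parse. A minimal sketch of that mapping, assuming unescaper = "0.1.2" and assuming invalid hex digits in a `\x..` escape surface as `ParseIntError`; `describe` is a hypothetical helper, not part of the filter-parser API:

    // Sketch only: maps unescaper errors to the user-facing message added above.
    // Assumption: invalid hex digits in `\x..` surface as `ParseIntError`.
    fn describe(raw: &str) -> String {
        match unescaper::unescape(raw) {
            Ok(content) => format!("unescaped to `{content}`"),
            Err(unescaper::Error::ParseIntError { .. }) => {
                format!("Found an invalid escaped sequence number: `{raw}`.")
            }
            Err(other) => format!("malformed value: {other:?}"),
        }
    }

    fn main() {
        println!("{}", describe(r"foo\x20bar")); // unescaped to `foo bar`
        println!("{}", describe(r"foo\xgg")); // invalid escaped sequence number
    }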

filter-parser/src/lib.rs

@@ -545,6 +545,8 @@ impl<'a> std::fmt::Display for Token<'a> {
 #[cfg(test)]
 pub mod tests {
+    use FilterCondition as Fc;
+
     use super::*;

     /// Create a raw [Token]. You must specify the string that appear BEFORE your element followed by your element
@@ -556,14 +558,22 @@ pub mod tests {
         unsafe { Span::new_from_raw_offset(offset, lines as u32, value, "") }.into()
     }

-    #[test]
-    fn parse() {
-        use FilterCondition as Fc;
-
     fn p(s: &str) -> impl std::fmt::Display + '_ {
         Fc::parse(s).unwrap().unwrap()
     }

+    #[test]
+    fn parse_escaped() {
+        insta::assert_display_snapshot!(p(r#"title = 'foo\\'"#), @r#"{title} = {foo\}"#);
+        insta::assert_display_snapshot!(p(r#"title = 'foo\\\\'"#), @r#"{title} = {foo\\}"#);
+        insta::assert_display_snapshot!(p(r#"title = 'foo\\\\\\'"#), @r#"{title} = {foo\\\}"#);
+        insta::assert_display_snapshot!(p(r#"title = 'foo\\\\\\\\'"#), @r#"{title} = {foo\\\\}"#);
+        // but it also works with other sequencies
+        insta::assert_display_snapshot!(p(r#"title = 'foo\x20\n\t\"\'"'"#), @"{title} = {foo \n\t\"\'\"}");
+    }
+
+    #[test]
+    fn parse() {
         // Test equal
         insta::assert_display_snapshot!(p("channel = Ponce"), @"{channel} = {Ponce}");
         insta::assert_display_snapshot!(p("subscribers = 12"), @"{subscribers} = {12}");

filter-parser/src/value.rs

@@ -171,8 +171,25 @@ pub fn parse_value(input: Span) -> IResult<Token> {
         })
     })?;

-    Ok((input, value))
+    match unescaper::unescape(value.value()) {
+        Ok(content) => {
+            if content.len() != value.value().len() {
+                Ok((input, Token::new(value.original_span(), Some(content))))
+            } else {
+                Ok((input, value))
+            }
+        }
+        Err(unescaper::Error::IncompleteStr(_)) => Err(nom::Err::Incomplete(nom::Needed::Unknown)),
+        Err(unescaper::Error::ParseIntError { .. }) => Err(nom::Err::Error(Error::new_from_kind(
+            value.original_span(),
+            ErrorKind::InvalidEscapedNumber,
+        ))),
+        Err(unescaper::Error::InvalidChar { .. }) => Err(nom::Err::Error(Error::new_from_kind(
+            value.original_span(),
+            ErrorKind::MalformedValue,
+        ))),
+    }
 }

 fn is_value_component(c: char) -> bool {
     c.is_alphanumeric() || ['_', '-', '.'].contains(&c)
@@ -318,17 +335,17 @@ pub mod test {
         ("\"cha'nnel\"", "cha'nnel", false),
         ("I'm tamo", "I", false),
         // escaped thing but not quote
-        (r#""\\""#, r#"\\"#, false),
-        (r#""\\\\\\""#, r#"\\\\\\"#, false),
-        (r#""aa\\aa""#, r#"aa\\aa"#, false),
+        (r#""\\""#, r#"\"#, true),
+        (r#""\\\\\\""#, r#"\\\"#, true),
+        (r#""aa\\aa""#, r#"aa\aa"#, true),
         // with double quote
         (r#""Hello \"world\"""#, r#"Hello "world""#, true),
-        (r#""Hello \\\"world\\\"""#, r#"Hello \\"world\\""#, true),
+        (r#""Hello \\\"world\\\"""#, r#"Hello \"world\""#, true),
         (r#""I'm \"super\" tamo""#, r#"I'm "super" tamo"#, true),
         (r#""\"\"""#, r#""""#, true),
         // with simple quote
         (r#"'Hello \'world\''"#, r#"Hello 'world'"#, true),
-        (r#"'Hello \\\'world\\\''"#, r#"Hello \\'world\\'"#, true),
+        (r#"'Hello \\\'world\\\''"#, r#"Hello \'world\'"#, true),
         (r#"'I\'m "super" tamo'"#, r#"I'm "super" tamo"#, true),
         (r#"'\'\''"#, r#"''"#, true),
     ];
@@ -350,7 +367,14 @@ pub mod test {
             "Filter `{}` was not supposed to be escaped",
             input
         );
-        assert_eq!(token.value(), expected, "Filter `{}` failed.", input);
+        assert_eq!(
+            token.value(),
+            expected,
+            "Filter `{}` failed by giving `{}` instead of `{}`.",
+            input,
+            token.value(),
+            expected
+        );
     }
 }
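The new match re-wraps the token only when unescaping actually shortened the value; otherwise the original Token is returned untouched, avoiding a needless allocation. A standalone sketch of that length check, assuming unescaper = "0.1.2" (`maybe_unescape` is a hypothetical helper; the Token/Span plumbing is omitted):

    // Sketch: return the unescaped string only when something was unescaped.
    // Mirrors the `content.len() != value.value().len()` test in parse_value.
    fn maybe_unescape(raw: &str) -> Result<Option<String>, unescaper::Error> {
        let content = unescaper::unescape(raw)?;
        if content.len() != raw.len() {
            Ok(Some(content)) // at least one escape sequence was resolved
        } else {
            Ok(None) // nothing to do; the caller keeps the original token
        }
    }

    fn main() {
        assert_eq!(maybe_unescape(r"foo\\").unwrap(), Some(r"foo\".to_string()));
        assert_eq!(maybe_unescape("foo").unwrap(), None);
    }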

milli/src/update/index_documents/extract/mod.rs

@@ -59,7 +59,13 @@ pub(crate) fn data_from_obkv_documents(
     original_obkv_chunks
         .par_bridge()
         .map(|original_documents_chunk| {
-            send_original_documents_data(original_documents_chunk, lmdb_writer_sx.clone())
+            send_original_documents_data(
+                original_documents_chunk,
+                indexer,
+                lmdb_writer_sx.clone(),
+                vectors_field_id,
+                primary_key_id,
+            )
         })
         .collect::<Result<()>>()?;

@@ -76,7 +82,6 @@ pub(crate) fn data_from_obkv_documents(
             &faceted_fields,
             primary_key_id,
             geo_fields_ids,
-            vectors_field_id,
             &stop_words,
             &allowed_separators,
             &dictionary,
@@ -265,11 +270,33 @@ fn spawn_extraction_task<FE, FS, M>(
 /// - documents
 fn send_original_documents_data(
     original_documents_chunk: Result<grenad::Reader<File>>,
+    indexer: GrenadParameters,
     lmdb_writer_sx: Sender<Result<TypedChunk>>,
+    vectors_field_id: Option<FieldId>,
+    primary_key_id: FieldId,
 ) -> Result<()> {
     let original_documents_chunk =
         original_documents_chunk.and_then(|c| unsafe { as_cloneable_grenad(&c) })?;

+    if let Some(vectors_field_id) = vectors_field_id {
+        let documents_chunk_cloned = original_documents_chunk.clone();
+        let lmdb_writer_sx_cloned = lmdb_writer_sx.clone();
+        rayon::spawn(move || {
+            let result = extract_vector_points(
+                documents_chunk_cloned,
+                indexer,
+                primary_key_id,
+                vectors_field_id,
+            );
+            let _ = match result {
+                Ok(vector_points) => {
+                    lmdb_writer_sx_cloned.send(Ok(TypedChunk::VectorPoints(vector_points)))
+                }
+                Err(error) => lmdb_writer_sx_cloned.send(Err(error)),
+            };
+        });
+    }
+
     // TODO: create a custom internal error
     lmdb_writer_sx.send(Ok(TypedChunk::Documents(original_documents_chunk))).unwrap();
     Ok(())
@@ -291,7 +318,6 @@ fn send_and_extract_flattened_documents_data(
     faceted_fields: &HashSet<FieldId>,
     primary_key_id: FieldId,
     geo_fields_ids: Option<(FieldId, FieldId)>,
-    vectors_field_id: Option<FieldId>,
     stop_words: &Option<fst::Set<&[u8]>>,
     allowed_separators: &Option<&[&str]>,
     dictionary: &Option<&[&str]>,
@@ -322,25 +348,6 @@ fn send_and_extract_flattened_documents_data(
         });
     }

-    if let Some(vectors_field_id) = vectors_field_id {
-        let documents_chunk_cloned = flattened_documents_chunk.clone();
-        let lmdb_writer_sx_cloned = lmdb_writer_sx.clone();
-        rayon::spawn(move || {
-            let result = extract_vector_points(
-                documents_chunk_cloned,
-                indexer,
-                primary_key_id,
-                vectors_field_id,
-            );
-            let _ = match result {
-                Ok(vector_points) => {
-                    lmdb_writer_sx_cloned.send(Ok(TypedChunk::VectorPoints(vector_points)))
-                }
-                Err(error) => lmdb_writer_sx_cloned.send(Err(error)),
-            };
-        });
-    }
-
     let (docid_word_positions_chunk, docid_fid_facet_values_chunks): (Result<_>, Result<_>) =
         rayon::join(
             || {
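This hotfix moves vector-point extraction from the flattened-documents path into send_original_documents_data, keeping the same surrounding pattern: clone the chunk, spawn the work on rayon, and forward the result over the writer channel whether it succeeded or failed. A minimal sketch of that pattern, assuming rayon = "1" and crossbeam-channel = "0.5" (`extract` and `spawn_extraction` are hypothetical stand-ins; the real channel carries TypedChunk values, not byte counts):

    use crossbeam_channel::{unbounded, Sender};

    // Stand-in for extract_vector_points: any fallible, CPU-bound extraction.
    fn extract(data: Vec<u8>) -> Result<usize, String> {
        Ok(data.len())
    }

    // Clone-and-spawn pattern from send_original_documents_data: the spawned
    // task owns its own copies and reports through the channel, success or not.
    fn spawn_extraction(data: Vec<u8>, tx: Sender<Result<usize, String>>) {
        rayon::spawn(move || {
            // Ignore a closed channel, as the original `let _ = ...send(...)` does.
            let _ = match extract(data) {
                Ok(points) => tx.send(Ok(points)),
                Err(error) => tx.send(Err(error)),
            };
        });
    }

    fn main() {
        let (tx, rx) = unbounded();
        spawn_extraction(vec![1, 2, 3], tx); // tx moves in; rx keeps the channel open
        assert_eq!(rx.recv().unwrap().unwrap(), 3);
    }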

milli/src/update/index_documents/mod.rs

@@ -2550,6 +2550,25 @@ mod tests {
         db_snap!(index, word_position_docids, 3, @"74f556b91d161d997a89468b4da1cb8f");
     }

+    /// Index multiple different number of vectors in documents.
+    /// Vectors must be of the same length.
+    #[test]
+    fn test_multiple_vectors() {
+        let index = TempIndex::new();
+
+        index.add_documents(documents!([{"id": 0, "_vectors": [[0, 1, 2], [3, 4, 5]] }])).unwrap();
+        index.add_documents(documents!([{"id": 1, "_vectors": [6, 7, 8] }])).unwrap();
+        index
+            .add_documents(
+                documents!([{"id": 2, "_vectors": [[9, 10, 11], [12, 13, 14], [15, 16, 17]] }]),
+            )
+            .unwrap();
+
+        let rtxn = index.read_txn().unwrap();
+        let res = index.search(&rtxn).vector([0.0, 1.0, 2.0]).execute().unwrap();
+        assert_eq!(res.documents_ids.len(), 3);
+    }
+
     #[test]
     fn reproduce_the_bug() {
         /*
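The property test_multiple_vectors pins down: a document may embed one vector or several, and a query vector can reach a document through any of them, so all three documents stay retrievable. A toy brute-force illustration of that scoring rule (plain Rust; this assumes nothing about milli's actual vector index, which answers such queries with an approximate-nearest-neighbor structure rather than a scan):

    // Toy model: each document keeps one or more vectors and is ranked by the
    // distance of its closest vector to the query.
    fn euclidean(a: &[f32], b: &[f32]) -> f32 {
        a.iter().zip(b).map(|(x, y)| (x - y).powi(2)).sum::<f32>().sqrt()
    }

    fn main() {
        // id -> vectors, mirroring the three documents indexed in the test
        let docs: Vec<(u32, Vec<Vec<f32>>)> = vec![
            (0, vec![vec![0., 1., 2.], vec![3., 4., 5.]]),
            (1, vec![vec![6., 7., 8.]]),
            (2, vec![vec![9., 10., 11.], vec![12., 13., 14.], vec![15., 16., 17.]]),
        ];
        let query = [0.0, 1.0, 2.0];

        // every document is a candidate, scored by its closest vector
        let mut ranked: Vec<_> = docs
            .iter()
            .map(|(id, vs)| {
                let d = vs.iter().map(|v| euclidean(v, &query)).fold(f32::INFINITY, f32::min);
                (*id, d)
            })
            .collect();
        ranked.sort_by(|a, b| a.1.total_cmp(&b.1));

        assert_eq!(ranked.len(), 3); // all three documents are returned
        assert_eq!(ranked[0].0, 0); // doc 0 holds the exact query vector
    }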