// TODO: make the raptor binary expose multiple subcommands
//       so that we only need to ship a single binary
extern crate raptor;
extern crate rocksdb;
extern crate serde_json;
#[macro_use] extern crate serde_derive;
extern crate unidecode;

use std::path::Path;
use std::collections::HashSet;
use std::fs::{self, File};
use std::io::{self, BufReader, BufRead};
use std::iter;

use raptor::{MetadataBuilder, Metadata, DocIndex};
use rocksdb::{DB, WriteBatch, Writable};
use serde_json::from_str;
use unidecode::unidecode;

#[derive(Debug, Deserialize)]
struct Product {
    title: String,
    product_id: u64,
    ft: String,
}
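
// A hypothetical example of a single `products.json_lines` line that would
// deserialize into the `Product` struct above (the values are made up; only
// the field names come from the struct):
//
// {"title": "Clavier sans fil", "product_id": 42, "ft": "Un clavier AZERTY sans fil avec pavé numérique"}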

fn set_readonly<P>(path: P, readonly: bool) -> io::Result<()>
where P: AsRef<Path>
{
    let mut perms = fs::metadata(&path)?.permissions();
    perms.set_readonly(readonly);
    fs::set_permissions(&path, perms)
}

fn is_readonly<P>(path: P) -> io::Result<bool>
where P: AsRef<Path>
{
    fs::metadata(&path).map(|m| m.permissions().readonly())
}

fn main() {
    let data = File::open("products.json_lines").unwrap();
    let data = BufReader::new(data);
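
    // Load the French stop words (whitespace separated), falling back to an
    // empty set when the file cannot be opened.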
    let common_words = {
        match File::open("fr.stopwords.txt") {
            Ok(file) => {
                let file = BufReader::new(file);
                let mut set = HashSet::new();
                for line in file.lines().filter_map(|l| l.ok()) {
                    for word in line.split_whitespace() {
                        set.insert(word.to_owned());
                    }
                }
                set
            },
            Err(e) => {
                eprintln!("{:?}", e);
                HashSet::new()
            },
        }
    };

    let map_file = "map.meta";
    let indexes_file = "indexes.meta";
    let rocksdb_file = "rocksdb/storage";
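
    // Refuse to run if any of the output files was already marked read-only
    // by a previous successful run; a missing file is not an error.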
    for file in &[map_file, indexes_file, rocksdb_file] {
        match is_readonly(file) {
            Ok(true) => panic!("the {:?} file is readonly, please make it writable", file),
            Err(ref e) if e.kind() == io::ErrorKind::NotFound => (),
            Err(e) => panic!("{:?}", e),
            _ => (),
        }
    }
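
    // Open (or create) the RocksDB database that stores the raw documents.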
    let db = DB::open_default(rocksdb_file).unwrap();
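
    // The MetadataBuilder accumulates the (word, DocIndex) pairs inserted
    // below and writes them out to the map and indexes files.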
    let map = File::create(map_file).unwrap();
    let indexes = File::create(indexes_file).unwrap();
    let mut builder = MetadataBuilder::new(map, indexes);

    for line in data.lines() {
        let line = line.unwrap();

        let product: Product = from_str(&line).unwrap();
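
        // Attribute 0 is the title and attribute 1 is the description (`ft`).
        // Stop words are skipped, and `enumerate` records the position of each
        // kept word inside its attribute.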
        let title = iter::repeat(0).zip(product.title.split_whitespace()).filter(|&(_, w)| !common_words.contains(w)).enumerate();
        let description = iter::repeat(1).zip(product.ft.split_whitespace()).filter(|&(_, w)| !common_words.contains(w)).enumerate();
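
        // Store the raw title and description in RocksDB under the
        // "<product_id>-title" and "<product_id>-description" keys.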
        let mut batch = WriteBatch::new();

        let title_key = format!("{}-title", product.product_id);
        let _ = batch.put(title_key.as_bytes(), product.title.as_bytes());

        let description_key = format!("{}-description", product.product_id);
        let _ = batch.put(description_key.as_bytes(), product.ft.as_bytes());

        db.write(batch).unwrap();
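
        // Index every kept word of both attributes.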
        let words = title.chain(description);
        for (i, (attr, word)) in words {
            let doc_index = DocIndex {
                document: product.product_id,
                attribute: attr,
                attribute_index: i as u32,
            };

            // insert the exact representation
            let word_lower = word.to_lowercase();

            // and the unidecoded lowercased version
            let word_unidecoded = unidecode(word).to_lowercase();
            if word_lower != word_unidecoded {
                builder.insert(word_unidecoded, doc_index);
            }

            builder.insert(word_lower, doc_index);
        }
    }

    builder.finish().unwrap();
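
    // Mark the output files read-only so that a future run refuses to
    // overwrite them (see the check at the top of main).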
    set_readonly(map_file, true).unwrap();
    set_readonly(indexes_file, true).unwrap();
    set_readonly(rocksdb_file, true).unwrap();

    println!("Checking the dump consistency...");
    unsafe { Metadata::from_paths(map_file, indexes_file).unwrap() };
}