Create api keys

commit 3eb6f4b56f (parent 49f976c8d8)
@@ -26,18 +26,37 @@ pub struct AuthController {
 }
 
 impl AuthController {
-    pub fn new(
+    pub async fn new(
         db_path: impl AsRef<Path>,
         master_key: &Option<String>,
         zk: Option<zk::Client>,
     ) -> Result<Self> {
         let store = HeedAuthStore::new(db_path)?;
+        let controller = Self { store: Arc::new(store), master_key: master_key.clone(), zk };
 
-        if store.is_empty()? {
-            generate_default_keys(&store)?;
+        match controller.zk {
+            // setup the auth zk environment, the `auth` node
+            Some(ref zk) => {
+                let options =
+                    zk::CreateOptions::new(zk::CreateMode::Persistent, zk::Acl::anyone_all());
+                // TODO: we should catch the potential unexpected errors here https://docs.rs/zookeeper-client/latest/zookeeper_client/struct.Client.html#method.create
+                // for the moment we consider that `create` only returns Error::NodeExists.
+                if zk.create("/auth", &[], &options).await.is_ok() {
+                    // TODO: if the store is not empty, should we push the locally stored keys in zk or should we erase the local keys?
+                    if controller.store.is_empty()? {
+                        generate_default_keys(&controller).await?;
+                    }
+                }
+                // TODO: If `auth` node already exist, we should synchronize the local data with zk.
+            }
+            None => {
+                if controller.store.is_empty()? {
+                    generate_default_keys(&controller).await?;
+                }
+            }
         }
 
-        Ok(Self { store: Arc::new(store), master_key: master_key.clone(), zk })
+        Ok(controller)
     }
 
     /// Return `Ok(())` if the auth controller is able to access one of its database.
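The first TODO above leaves unexpected ZooKeeper errors swallowed by `is_ok()`. As a rough illustration only, here is a hypothetical helper, not part of the commit, sketching how that check could distinguish a fresh cluster from an already-bootstrapped one; the `zk::Error::NodeExists` variant is taken from the TODO comment and assumed to match the pinned zookeeper-client version.

use zookeeper_client as zk;

// Hypothetical helper (not in this commit): returns true when this instance
// created `/auth` and should therefore seed the default keys, false when the
// node already existed, and surfaces every other error instead of hiding it.
async fn bootstrap_auth_node(client: &zk::Client) -> Result<bool, zk::Error> {
    let options = zk::CreateOptions::new(zk::CreateMode::Persistent, zk::Acl::anyone_all());
    match client.create("/auth", &[], &options).await {
        // The node did not exist: this instance bootstraps the cluster.
        Ok(_) => Ok(true),
        // Another instance already bootstrapped it: synchronize instead.
        Err(zk::Error::NodeExists) => Ok(false),
        // Anything else (connection loss, auth failure, ...) is a real error.
        Err(e) => Err(e),
    }
}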
@@ -59,23 +78,22 @@ impl AuthController {
     pub async fn create_key(&self, create_key: CreateApiKey) -> Result<Key> {
         match self.store.get_api_key(create_key.uid)? {
             Some(_) => Err(AuthControllerError::ApiKeyAlreadyExists(create_key.uid.to_string())),
-            None => {
-                let store = self.store.clone();
-                let key =
-                    tokio::task::spawn_blocking(move || store.put_api_key(create_key.to_key()))
-                        .await??;
-                if let Some(ref zk) = self.zk {
-                    zk.create(
-                        &format!("/auth/{}", key.uid),
-                        &serde_json::to_vec_pretty(&key)?,
-                        &zk::CreateOptions::new(zk::CreateMode::Persistent, zk::Acl::anyone_all()),
-                    )
-                    .await?;
-                }
-                Ok(key)
-            }
+            None => self.put_key(create_key.to_key()).await,
         }
     }
 
+    pub async fn put_key(&self, key: Key) -> Result<Key> {
+        let store = self.store.clone();
+        // TODO: we may commit only after zk persisted the keys
+        let key = tokio::task::spawn_blocking(move || store.put_api_key(key)).await??;
+        if let Some(ref zk) = self.zk {
+            let options = zk::CreateOptions::new(zk::CreateMode::Persistent, zk::Acl::anyone_all());
+
+            zk.create(&format!("/auth/{}", key.uid), &serde_json::to_vec_pretty(&key)?, &options)
+                .await?;
+        }
+        Ok(key)
+    }
+
     pub fn update_key(&self, uid: Uuid, patch: PatchApiKey) -> Result<Key> {
         let mut key = self.get_key(uid)?;
@@ -324,9 +342,9 @@ pub struct IndexSearchRules {
     pub filter: Option<serde_json::Value>,
 }
 
-fn generate_default_keys(store: &HeedAuthStore) -> Result<()> {
-    store.put_api_key(Key::default_admin())?;
-    store.put_api_key(Key::default_search())?;
+async fn generate_default_keys(controller: &AuthController) -> Result<()> {
+    controller.put_key(Key::default_admin()).await?;
+    controller.put_key(Key::default_search()).await?;
 
     Ok(())
 }
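To make the resulting znode layout concrete, here is a minimal standalone sketch, separate from the commit, that reproduces it with the same zookeeper-client calls shown in the diff (`Client::connect`, `create`, `CreateOptions::new(CreateMode::Persistent, Acl::anyone_all())`). The server address, uid, and JSON payload are placeholders, and tokio with the `macros` feature is assumed since the crate is already used elsewhere in this change.

use zookeeper_client as zk;

#[tokio::main]
async fn main() {
    // Connect the same way main.rs does in this commit; the address is a placeholder.
    let client = zk::Client::connect("localhost:2181").await.unwrap();

    let options = zk::CreateOptions::new(zk::CreateMode::Persistent, zk::Acl::anyone_all());

    // Parent node under which every API key lives. NodeExists is expected when
    // the cluster has already been bootstrapped, hence the ignored result.
    let _ = client.create("/auth", &[], &options).await;

    // Each key is mirrored as `/auth/{uid}` with the JSON-serialized key as payload.
    let payload: &[u8] = br#"{"uid":"00000000-0000-0000-0000-000000000000","name":"example"}"#;
    client
        .create("/auth/00000000-0000-0000-0000-000000000000", payload, &options)
        .await
        .unwrap();
}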
@@ -137,7 +137,7 @@ enum OnFailure {
     KeepDb,
 }
 
-pub fn setup_meilisearch(
+pub async fn setup_meilisearch(
     opt: &Opt,
     zk: Option<zk::Client>,
 ) -> anyhow::Result<(Arc<IndexScheduler>, Arc<AuthController>)> {
@@ -147,7 +147,7 @@ pub fn setup_meilisearch(
         // the db is empty and the snapshot exists, import it
         if empty_db && snapshot_path_exists {
             match compression::from_tar_gz(snapshot_path, &opt.db_path) {
-                Ok(()) => open_or_create_database_unchecked(opt, OnFailure::RemoveDb, zk)?,
+                Ok(()) => open_or_create_database_unchecked(opt, OnFailure::RemoveDb, zk).await?,
                 Err(e) => {
                     std::fs::remove_dir_all(&opt.db_path)?;
                     return Err(e);
@@ -164,14 +164,14 @@ pub fn setup_meilisearch(
                 bail!("snapshot doesn't exist at {}", snapshot_path.display())
             // the snapshot and the db exist, and we can ignore the snapshot because of the ignore_snapshot_if_db_exists flag
             } else {
-                open_or_create_database(opt, empty_db, zk)?
+                open_or_create_database(opt, empty_db, zk).await?
             }
         } else if let Some(ref path) = opt.import_dump {
            let src_path_exists = path.exists();
            // the db is empty and the dump exists, import it
            if empty_db && src_path_exists {
                let (mut index_scheduler, mut auth_controller) =
-                    open_or_create_database_unchecked(opt, OnFailure::RemoveDb, zk)?;
+                    open_or_create_database_unchecked(opt, OnFailure::RemoveDb, zk).await?;
                match import_dump(&opt.db_path, path, &mut index_scheduler, &mut auth_controller) {
                    Ok(()) => (index_scheduler, auth_controller),
                    Err(e) => {
@@ -191,10 +191,10 @@ pub fn setup_meilisearch(
            // the dump and the db exist and we can ignore the dump because of the ignore_dump_if_db_exists flag
            // or, the dump is missing but we can ignore that because of the ignore_missing_dump flag
            } else {
-                open_or_create_database(opt, empty_db, zk)?
+                open_or_create_database(opt, empty_db, zk).await?
            }
        } else {
-            open_or_create_database(opt, empty_db, zk)?
+            open_or_create_database(opt, empty_db, zk).await?
        };
 
    // We create a loop in a thread that registers snapshotCreation tasks
@@ -218,7 +218,7 @@ pub fn setup_meilisearch(
 }
 
 /// Try to start the IndexScheduler and AuthController without checking the VERSION file or anything.
-fn open_or_create_database_unchecked(
+async fn open_or_create_database_unchecked(
     opt: &Opt,
     on_failure: OnFailure,
     zk: Option<zk::Client>,
@@ -250,7 +250,7 @@ fn open_or_create_database_unchecked(
 
     match (
         index_scheduler_builder(),
-        auth_controller.map_err(anyhow::Error::from),
+        auth_controller.await.map_err(anyhow::Error::from),
         create_version_file(&opt.db_path).map_err(anyhow::Error::from),
     ) {
         (Ok(i), Ok(a), Ok(())) => Ok((i, a)),
@@ -264,7 +264,7 @@ fn open_or_create_database_unchecked(
 }
 
 /// Ensure you're in a valid state and open the IndexScheduler + AuthController for you.
-fn open_or_create_database(
+async fn open_or_create_database(
     opt: &Opt,
     empty_db: bool,
     zk: Option<zk::Client>,
@@ -273,7 +273,7 @@ fn open_or_create_database(
         check_version_file(&opt.db_path)?;
     }
 
-    open_or_create_database_unchecked(opt, OnFailure::KeepDb, zk)
+    open_or_create_database_unchecked(opt, OnFailure::KeepDb, zk).await
 }
 
 fn import_dump(
@@ -68,7 +68,7 @@ async fn main() -> anyhow::Result<()> {
         Some(ref url) => Some(zk::Client::connect(url).await.unwrap()),
         None => None,
     };
-    let (index_scheduler, auth_controller) = setup_meilisearch(&opt, zk)?;
+    let (index_scheduler, auth_controller) = setup_meilisearch(&opt, zk).await?;
 
     #[cfg(all(not(debug_assertions), feature = "analytics"))]
     let analytics = if !opt.no_analytics {