Uncomment static default values and fix typo

mlemesle 2022-09-21 16:31:16 +02:00
parent d406fe901b
commit 56d72d4493
3 changed files with 26 additions and 30 deletions

View File

@@ -1,16 +1,16 @@
 # This file shows the default configuration of Meilisearch.
 # All variables are defined here https://docs.meilisearch.com/learn/configuration/instance_options.html#environment-variables
-# db_path = "./data.ms"
+db_path = "./data.ms"
 # The destination where the database must be created.
-# env = "development" # Possible values: [development, production]
+env = "development" # Possible values: [development, production]
 # This environment variable must be set to `production` if you are running in production.
 # More logs wiil be displayed if the server is running in development mode. Setting the master
 # key is optional; hence no security on the updates routes. This
 # is useful to debug when integrating the engine with another service.
-# http_addr = "127.0.0.1:7700"
+http_addr = "127.0.0.1:7700"
 # The address on which the HTTP server will listen.
 # master_key = "MASTER_KEY"
@@ -19,40 +19,38 @@
 # no_analytics = false
 # Do not send analytics to Meilisearch.
-# disable_auto_batching = false
+disable_auto_batching = false
 # The engine will disable task auto-batching, and will sequencialy compute each task one by one.
 ### DUMP
-# dumps_dir = "dumps/"
+dumps_dir = "dumps/"
 # Folder where dumps are created when the dump route is called.
 # import_dump = "./path/to/my/file.dump"
 # Import a dump from the specified path, must be a `.dump` file.
-# ignore_missing_dump = false
+ignore_missing_dump = false
 # If the dump doesn't exist, load or create the database specified by `db_path` instead.
-# ignore_dump_if_db_exists = false
+ignore_dump_if_db_exists = false
 # Ignore the dump if a database already exists, and load that database instead.
 ###
-# log_level = "INFO" # Possible values: [ERROR, WARN, INFO, DEBUG, TRACE]
+log_level = "INFO" # Possible values: [ERROR, WARN, INFO, DEBUG, TRACE]
 # Set the log level.
 ### INDEX
-# max_index_size = "100 GiB"
+max_index_size = "100 GiB"
 # The maximum size, in bytes, of the main LMDB database directory.
 # max_indexing_memory = "2 GiB"
-# The maximum amount of memory the indexer will use. It defaults to 2/3 of the available
-# memory. It is recommended to use something like 80%-90% of the available memory, no
-# more.
+# The maximum amount of memory the indexer will use.
 #
 # In case the engine is unable to retrieve the available memory the engine will try to use
 # the memory it needs but without real limit, this can lead to Out-Of-Memory issues and it
@@ -70,33 +68,33 @@
 ###
-# max_task_db_size = "100 GiB"
+max_task_db_size = "100 GiB"
 # The maximum size, in bytes, of the update LMDB database directory.
-# http_payload_size_limit = 100000000
+http_payload_size_limit = "100 MB"
 # The maximum size, in bytes, of accepted JSON payloads.
 ### SNAPSHOT
-# schedule_snapshot = false
+schedule_snapshot = false
 # Activate snapshot scheduling.
-# snapshot_dir = "snapshots/"
+snapshot_dir = "snapshots/"
 # Defines the directory path where Meilisearch will create a snapshot each snapshot_interval_sec.
-# snapshot_interval_sec = 86400
+snapshot_interval_sec = 86400
 # Defines time interval, in seconds, between each snapshot creation.
-# import_snapshot = false
+# import_snapshot = "./path/to/my/snapshot"
 # Defines the path of the snapshot file to import. This option will, by default, stop the
-# process if a database already exist, or if no snapshot exists at the given path. If this
+# process if a database already exists, or if no snapshot exists at the given path. If this
 # option is not specified, no snapshot is imported.
-# ignore_missing_snapshot = false
+ignore_missing_snapshot = false
 # The engine will ignore a missing snapshot and not return an error in such a case.
-# ignore_snapshot_if_db_exists = false
+ignore_snapshot_if_db_exists = false
 # The engine will skip snapshot importation and not return an error in such a case.
 ###
@@ -119,13 +117,13 @@
 # ssl_ocsp_path = "./path/to/OCSPFILE"
 # Read DER-encoded OCSP response from OCSPFILE and staple to certificate. Optional.
-# ssl_require_auth = false
+ssl_require_auth = false
 # Send a fatal alert if the client does not complete client authentication.
-# ssl_resumption = false
+ssl_resumption = false
 # SSL support session resumption.
-# ssl_tickets = false
+ssl_tickets = false
 # SSL support tickets.
 ###
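The keys uncommented above only restate the engine's built-in defaults (the same values appear as DEFAULT_* constants in the Rust file below), so a file with every key still commented out should behave the same as this one. The actual loader is not part of this diff; the sketch below is a minimal, hypothetical serde-based reader for a handful of these keys, where the DemoConfig name, the field subset, and the use of the toml crate are assumptions rather than the real Meilisearch options type.

use serde::Deserialize;

// Hypothetical subset of the keys shown in config.toml; not the real options struct.
#[derive(Debug, Deserialize)]
#[serde(default)]
struct DemoConfig {
    db_path: String,
    env: String,
    http_addr: String,
    http_payload_size_limit: String,
    schedule_snapshot: bool,
    snapshot_interval_sec: u64,
}

// The defaults mirror the values written in the file above.
impl Default for DemoConfig {
    fn default() -> Self {
        Self {
            db_path: "./data.ms".into(),
            env: "development".into(),
            http_addr: "127.0.0.1:7700".into(),
            http_payload_size_limit: "100 MB".into(),
            schedule_snapshot: false,
            snapshot_interval_sec: 86400,
        }
    }
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Keys left commented out in the file simply fall back to the Default impl.
    let raw = std::fs::read_to_string("config.toml")?;
    let config: DemoConfig = toml::from_str(&raw)?;
    println!("{config:#?}");
    Ok(())
}

Parsed against the file above, every uncommented value deserializes to exactly what the Default impl would have produced, which is what "static default values" in the commit title refers to.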

View File

@@ -60,7 +60,7 @@ const DEFAULT_HTTP_PAYLOAD_SIZE_LIMIT: &str = "100 MB";
 const DEFAULT_SNAPSHOT_DIR: &str = "snapshots/";
 const DEFAULT_SNAPSHOT_INTERVAL_SEC: u64 = 86400;
 const DEFAULT_DUMPS_DIR: &str = "dumps/";
-const DEFAULT_LOG_LEVEL: &str = "info";
+const DEFAULT_LOG_LEVEL: &str = "INFO";
 #[derive(Debug, Clone, Parser, Serialize, Deserialize)]
 #[clap(version)]
@@ -126,8 +126,8 @@ pub struct Opt {
 /// Enable client authentication, and accept certificates
 /// signed by those roots provided in CERTFILE.
-#[clap(long, env = MEILI_SSL_AUTH_PATH, parse(from_os_str))]
 #[serde(skip_serializing)]
+#[clap(long, env = MEILI_SSL_AUTH_PATH, parse(from_os_str))]
 pub ssl_auth_path: Option<PathBuf>,
 /// Read DER-encoded OCSP response from OCSPFILE and staple to certificate.
@@ -152,7 +152,7 @@ pub struct Opt {
 pub ssl_tickets: bool,
 /// Defines the path of the snapshot file to import.
-/// This option will, by default, stop the process if a database already exists or if no snapshot exists at
+/// This option will, by default, stop the process if a database already exists, or if no snapshot exists at
 /// the given path. If this option is not specified, no snapshot is imported.
 #[clap(long, env = MEILI_IMPORT_SNAPSHOT)]
 pub import_snapshot: Option<PathBuf>,
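Each field in the struct above is wired to a CLI flag and an environment variable through the clap derive, while the Serialize/Deserialize derives let the same struct be filled from a file such as the config.toml shown earlier; #[serde(skip_serializing)] keeps a sensitive path like ssl_auth_path out of any serialized output. A minimal, self-contained sketch of that pattern follows, assuming clap 3 with the derive and env features; the DemoOpt name and the DEMO_* variables are placeholders, not the MEILI_* constants used by Meilisearch.

use std::path::PathBuf;
use clap::Parser;
use serde::{Deserialize, Serialize};

// Illustrative stand-in for the Opt struct above (clap 3 derive syntax).
#[derive(Debug, Clone, Parser, Serialize, Deserialize)]
#[clap(version)]
struct DemoOpt {
    /// Defines the path of the snapshot file to import.
    #[clap(long, env = "DEMO_IMPORT_SNAPSHOT")]
    import_snapshot: Option<PathBuf>,

    /// Enable client authentication, and accept certificates signed by the given roots.
    /// Skipped on serialization so the path never ends up in serialized config output.
    #[serde(skip_serializing)]
    #[clap(long, env = "DEMO_SSL_AUTH_PATH", parse(from_os_str))]
    ssl_auth_path: Option<PathBuf>,
}

fn main() {
    // `--import-snapshot ./snap` on the CLI or DEMO_IMPORT_SNAPSHOT in the
    // environment both populate the same field.
    let opt = DemoOpt::parse();
    println!("{opt:?}");
}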

View File

@@ -28,9 +28,7 @@ pub struct IndexerOpts {
 #[clap(long, hide = true)]
 pub max_nb_chunks: Option<usize>,
-/// The maximum amount of memory the indexer will use. It defaults to 2/3
-/// of the available memory. It is recommended to use something like 80%-90%
-/// of the available memory, no more.
+/// The maximum amount of memory the indexer will use.
 ///
 /// In case the engine is unable to retrieve the available memory the engine will
 /// try to use the memory it needs but without real limit, this can lead to