From 5a7b6a40e2e7996ecbcd98f105fe58bd84302889 Mon Sep 17 00:00:00 2001 From: Kirill Fedoseev Date: Mon, 13 Jan 2025 16:36:31 +0400 Subject: [PATCH 1/3] feat: db connect options --- libs/blockscout-service-launcher/Cargo.toml | 4 +- .../src/database.rs | 157 ++++++++++++++---- libs/env-collector/Cargo.toml | 2 +- libs/env-collector/src/lib.rs | 102 ++++++++++-- .../{{project-name}}-server/src/server.rs | 7 +- 5 files changed, 219 insertions(+), 53 deletions(-) diff --git a/libs/blockscout-service-launcher/Cargo.toml b/libs/blockscout-service-launcher/Cargo.toml index 578b5d83f..3cb4f74eb 100644 --- a/libs/blockscout-service-launcher/Cargo.toml +++ b/libs/blockscout-service-launcher/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "blockscout-service-launcher" -version = "0.15.0" +version = "0.16.0" description = "Allows to launch blazingly fast blockscout rust services" license = "MIT" repository = "https://github.com/blockscout/blockscout-rs" @@ -26,6 +26,7 @@ prometheus = { version = "0.13", optional = true } reqwest = { version = "0.11", features = ["json"], optional = true } serde = { version = "1.0", features = ["derive"], optional = true } serde_json = {version = "1", optional = true } +serde_with = {version = "3", optional = true } tokio = { version = "1", optional = true } tonic = { version = "0.8", optional = true } tracing = { version = "0.1", optional = true } @@ -90,6 +91,7 @@ database = [ "dep:anyhow", "dep:cfg-if", "dep:serde", + "dep:serde_with", "dep:tracing", "dep:url", ] diff --git a/libs/blockscout-service-launcher/src/database.rs b/libs/blockscout-service-launcher/src/database.rs index e95fc689c..06a806543 100644 --- a/libs/blockscout-service-launcher/src/database.rs +++ b/libs/blockscout-service-launcher/src/database.rs @@ -1,6 +1,7 @@ use anyhow::Context; use serde::{Deserialize, Serialize}; -use std::str::FromStr; +use serde_with::serde_as; +use std::{str::FromStr, time::Duration}; cfg_if::cfg_if! { if #[cfg(feature = "database-1_0")] { @@ -17,7 +18,7 @@ cfg_if::cfg_if! { pub use sea_orm_migration_0_10::MigratorTrait; } else { compile_error!( - "one of the features ['database-0_12', 'database-0_11', 'database-0_10'] \ + "one of the features ['database-1_0', 'database-0_12', 'database-0_11', 'database-0_10'] \ must be enabled" ); } @@ -26,14 +27,14 @@ cfg_if::cfg_if! 
{
 const DEFAULT_DB: &str = "postgres";
 
 pub async fn initialize_postgres<Migrator: MigratorTrait>(
-    connect_options: impl Into<ConnectOptions>,
-    create_database: bool,
-    run_migrations: bool,
+    settings: &DatabaseSettings,
 ) -> anyhow::Result<DatabaseConnection> {
-    let connect_options = connect_options.into();
+    let db_url = settings.connect.clone().url();
+    let connect_options = ConnectOptions::new(db_url);
+    let connect_options = settings.connect_options.apply_to(connect_options);
 
     // Create database if not exists
-    if create_database {
+    if settings.create_database {
         let db_url = connect_options.get_url();
         let (db_base_url, db_name) = {
             let mut db_url = url::Url::from_str(db_url).context("invalid database url")?;
@@ -51,7 +52,8 @@ pub async fn initialize_postgres<Migrator: MigratorTrait>(
         tracing::info!("creating database '{db_name}'");
         let db_base_url = format!("{db_base_url}/{DEFAULT_DB}");
 
-        let create_database_options = with_connect_options(db_base_url, &connect_options);
+        let create_database_options = ConnectOptions::new(db_base_url);
+        let create_database_options = settings.connect_options.apply_to(create_database_options);
         let db = Database::connect(create_database_options).await?;
 
         let result = db
@@ -75,43 +77,20 @@ pub async fn initialize_postgres<Migrator: MigratorTrait>(
     }
 
     let db = Database::connect(connect_options).await?;
-    if run_migrations {
+    if settings.run_migrations {
         Migrator::up(&db, None).await?;
     }
     Ok(db)
 }
 
-fn with_connect_options(url: impl Into<String>, source_options: &ConnectOptions) -> ConnectOptions {
-    let mut options = ConnectOptions::new(url.into());
-    if let Some(value) = source_options.get_max_connections() {
-        options.max_connections(value);
-    }
-    if let Some(value) = source_options.get_min_connections() {
-        options.min_connections(value);
-    }
-    if let Some(value) = source_options.get_connect_timeout() {
-        options.connect_timeout(value);
-    }
-    if let Some(value) = source_options.get_idle_timeout() {
-        options.idle_timeout(value);
-    }
-    if let Some(value) = source_options.get_acquire_timeout() {
-        options.acquire_timeout(value);
-    }
-    if let Some(value) = source_options.get_max_lifetime() {
-        options.max_lifetime(value);
-    }
-    options.sqlx_logging(source_options.get_sqlx_logging());
-    options.sqlx_logging_level(source_options.get_sqlx_logging_level());
-    options
-}
-
 #[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
 #[serde(deny_unknown_fields)]
 pub struct DatabaseSettings {
     pub connect: DatabaseConnectSettings,
     #[serde(default)]
+    pub connect_options: DatabaseConnectOptionsSettings,
+    #[serde(default)]
     pub create_database: bool,
     #[serde(default)]
     pub run_migrations: bool,
@@ -162,3 +141,113 @@ impl DatabaseKvConnection {
         )
     }
 }
+
+#[serde_as]
+#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(default, deny_unknown_fields)]
+pub struct DatabaseConnectOptionsSettings {
+    /// Maximum number of connections for a pool
+    pub max_connections: Option<u32>,
+    /// Minimum number of connections for a pool
+    pub min_connections: Option<u32>,
+    /// The connection timeout for a packet connection
+    pub connect_timeout: Option<Duration>,
+    /// Maximum idle time for a particular connection to prevent
+    /// network resource exhaustion
+    pub idle_timeout: Option<Duration>,
+    /// Set the maximum amount of time to spend waiting for acquiring a connection
+    pub acquire_timeout: Option<Duration>,
+    /// Set the maximum lifetime of individual connections
+    pub max_lifetime: Option<Duration>,
+    /// Enable SQLx statement logging
+    pub sqlx_logging: bool,
+    /// SQLx statement logging level (ignored if `sqlx_logging` is false)
+    pub sqlx_logging_level: LevelFilter,
+    #[cfg(feature = "database-1_0")]
+    /// SQLx slow statements logging level (ignored if `sqlx_logging` is false)
+    pub sqlx_slow_statements_logging_level: LevelFilter,
+    #[cfg(feature = "database-1_0")]
+    #[serde_as(as = "serde_with::DurationSeconds")]
+    /// SQLx slow statements duration threshold (ignored if `sqlx_logging` is false)
+    pub sqlx_slow_statements_logging_threshold: Duration,
+}
+
+#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
+#[serde(deny_unknown_fields, rename_all = "lowercase")]
+pub enum LevelFilter {
+    /// A level lower than all log levels.
+    Off,
+    /// Corresponds to the `Error` log level.
+    Error,
+    /// Corresponds to the `Warn` log level.
+    Warn,
+    /// Corresponds to the `Info` log level.
+    Info,
+    /// Corresponds to the `Debug` log level.
+    Debug,
+    /// Corresponds to the `Trace` log level.
+    Trace,
+}
+
+impl From<LevelFilter> for tracing::log::LevelFilter {
+    fn from(value: LevelFilter) -> Self {
+        match value {
+            LevelFilter::Off => Self::Off,
+            LevelFilter::Error => Self::Error,
+            LevelFilter::Warn => Self::Warn,
+            LevelFilter::Info => Self::Info,
+            LevelFilter::Debug => Self::Debug,
+            LevelFilter::Trace => Self::Trace,
+        }
+    }
+}
+
+impl Default for DatabaseConnectOptionsSettings {
+    fn default() -> Self {
+        Self {
+            max_connections: None,
+            min_connections: None,
+            connect_timeout: None,
+            idle_timeout: None,
+            acquire_timeout: None,
+            max_lifetime: None,
+            sqlx_logging: true,
+            sqlx_logging_level: LevelFilter::Debug,
+            #[cfg(feature = "database-1_0")]
+            sqlx_slow_statements_logging_level: LevelFilter::Off,
+            #[cfg(feature = "database-1_0")]
+            sqlx_slow_statements_logging_threshold: Duration::from_secs(1),
+        }
+    }
+}
+
+impl DatabaseConnectOptionsSettings {
+    fn apply_to(&self, mut options: ConnectOptions) -> ConnectOptions {
+        if let Some(value) = self.max_connections {
+            options.max_connections(value);
+        }
+        if let Some(value) = self.min_connections {
+            options.min_connections(value);
+        }
+        if let Some(value) = self.connect_timeout {
+            options.connect_timeout(value);
+        }
+        if let Some(value) = self.idle_timeout {
+            options.idle_timeout(value);
+        }
+        if let Some(value) = self.acquire_timeout {
+            options.acquire_timeout(value);
+        }
+        if let Some(value) = self.max_lifetime {
+            options.max_lifetime(value);
+        }
+        options.sqlx_logging(self.sqlx_logging);
+        options.sqlx_logging_level(self.sqlx_logging_level.into());
+        #[cfg(feature = "database-1_0")]
+        options.sqlx_slow_statements_logging_settings(
+            self.sqlx_slow_statements_logging_level.into(),
+            self.sqlx_slow_statements_logging_threshold,
+        );
+        options
+    }
+}
diff --git a/libs/env-collector/Cargo.toml b/libs/env-collector/Cargo.toml
index fb0ff6489..cb364fd91 100644
--- a/libs/env-collector/Cargo.toml
+++ b/libs/env-collector/Cargo.toml
@@ -13,6 +13,6 @@ serde = { version = "1.0.203", features = ["derive"] }
 serde_json = "1.0.117"
 
 [dev-dependencies]
-blockscout-service-launcher = { path = "../blockscout-service-launcher", features = ["database-0_12"] }
+blockscout-service-launcher = { path = "../blockscout-service-launcher", features = ["database-1_0"] }
 pretty_assertions = "1.4.0"
 tempfile = "3.10.1"
\ No newline at end of file
diff --git a/libs/env-collector/src/lib.rs b/libs/env-collector/src/lib.rs
index b1578b1ca..ab30957b0 100644
--- a/libs/env-collector/src/lib.rs
+++ b/libs/env-collector/src/lib.rs
@@ -651,6 +651,66 @@ mod tests {
            true,
            "e.g. 
`test-url`", ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MAX_CONNECTIONS", + Some("`null`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MIN_CONNECTIONS", + Some("`null`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__CONNECT_TIMEOUT", + Some("`null`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__IDLE_TIMEOUT", + Some("`null`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__ACQUIRE_TIMEOUT", + Some("`null`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MAX_LIFETIME", + Some("`null`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_LOGGING", + Some("`true`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_LOGGING_LEVEL", + Some("`debug`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_SLOW_STATEMENTS_LOGGING_LEVEL", + Some("`off`"), + false, + "", + ), + var( + "TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_SLOW_STATEMENTS_LOGGING_THRESHOLD", + Some("`1`"), + false, + "", + ), ])) } @@ -662,17 +722,27 @@ mod tests { [anchor]: <> (anchors.envs.start.cool_postfix) -| Variable | Required | Description | Default Value | -|-------------------------------------------|-------------|------------------|---------------| -| `TEST_SERVICE__TEST` | true | e.g. `value` | | -| `TEST_SERVICE__DATABASE__CREATE_DATABASE` | false | | `false` | -| `TEST_SERVICE__DATABASE__RUN_MIGRATIONS` | false | | `false` | -| `TEST_SERVICE__TEST2` | false | e.g. `123` | `1000` | -| `TEST_SERVICE__TEST3_SET` | false | e.g. `false` | `null` | -| `TEST_SERVICE__TEST4_NOT_SET` | false | | `null` | -| `TEST_SERVICE__TEST5_WITH_UNICODE` | false | | `false` | -| `TEST_SERVICE__STRING_WITH_DEFAULT` | false | | `kekek` | -| `TEST_SERVICE__DATABASE__CONNECT__URL` | true | e.g. `test-url` | | +| Variable | Required | Description | Default Value | +|---------------------------------------------------------------|-------------|------------------|---------------| +| `TEST_SERVICE__TEST` | true | e.g. `value` | | +| `TEST_SERVICE__DATABASE__CREATE_DATABASE` | false | | `false` | +| `TEST_SERVICE__DATABASE__RUN_MIGRATIONS` | false | | `false` | +| `TEST_SERVICE__TEST2` | false | e.g. `123` | `1000` | +| `TEST_SERVICE__TEST3_SET` | false | e.g. `false` | `null` | +| `TEST_SERVICE__TEST4_NOT_SET` | false | | `null` | +| `TEST_SERVICE__TEST5_WITH_UNICODE` | false | | `false` | +| `TEST_SERVICE__STRING_WITH_DEFAULT` | false | | `kekek` | +| `TEST_SERVICE__DATABASE__CONNECT__URL` | true | e.g. 
`test-url` | | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__ACQUIRE_TIMEOUT` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__CONNECT_TIMEOUT` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__IDLE_TIMEOUT` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MAX_CONNECTIONS` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MAX_LIFETIME` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MIN_CONNECTIONS` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_LOGGING` | | | `true` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_LOGGING_LEVEL` | | | `debug` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_SLOW_STATEMENTS_LOGGING_LEVEL` | | | `off` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_SLOW_STATEMENTS_LOGGING_THRESHOLD` | | | `1` | [anchor]: <> (anchors.envs.end.cool_postfix) "# } @@ -769,6 +839,16 @@ mod tests { | `SOME_EXTRA_VARS2` | true | | `example_value2` | | `TEST_SERVICE__DATABASE__CONNECT__URL` | true | e.g. `test-url` | | | `TEST_SERVICE__TEST` | true | e.g. `value` | | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__ACQUIRE_TIMEOUT` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__CONNECT_TIMEOUT` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__IDLE_TIMEOUT` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MAX_CONNECTIONS` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MAX_LIFETIME` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__MIN_CONNECTIONS` | | | `null` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_LOGGING` | | | `true` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_LOGGING_LEVEL` | | | `debug` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_SLOW_STATEMENTS_LOGGING_LEVEL` | | | `off` | +| `TEST_SERVICE__DATABASE__CONNECT_OPTIONS__SQLX_SLOW_STATEMENTS_LOGGING_THRESHOLD` | | | `1` | | `TEST_SERVICE__DATABASE__CREATE_DATABASE` | | | `false` | | `TEST_SERVICE__DATABASE__RUN_MIGRATIONS` | | | `false` | | `TEST_SERVICE__STRING_WITH_DEFAULT` | | | `kekek` | diff --git a/service-template/{{project-name}}-server/src/server.rs b/service-template/{{project-name}}-server/src/server.rs index 88677a340..558401f14 100644 --- a/service-template/{{project-name}}-server/src/server.rs +++ b/service-template/{{project-name}}-server/src/server.rs @@ -44,12 +44,7 @@ pub async fn run(settings: Settings) -> Result<(), anyhow::Error> { let health = Arc::new(HealthService::default()); {% if database and migrations %} - let _db_connection = database::initialize_postgres::( - &settings.database.connect.url(), - settings.database.create_database, - settings.database.run_migrations, - ) - .await?; + let _db_connection = database::initialize_postgres::(&settings.database).await?; {% endif %} // TODO: init services here From 081a63a9ddaae0eb52d1de9e54d0dfb592d9a9a4 Mon Sep 17 00:00:00 2001 From: Kirill Fedoseev Date: Mon, 13 Jan 2025 17:52:04 +0400 Subject: [PATCH 2/3] chore: remove duplicated LevelFilter --- .../src/database.rs | 60 +++++++++---------- 1 file changed, 27 insertions(+), 33 deletions(-) diff --git a/libs/blockscout-service-launcher/src/database.rs b/libs/blockscout-service-launcher/src/database.rs index 06a806543..a2ab554bf 100644 --- a/libs/blockscout-service-launcher/src/database.rs +++ b/libs/blockscout-service-launcher/src/database.rs @@ -1,7 +1,8 @@ use anyhow::Context; -use serde::{Deserialize, Serialize}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; use serde_with::serde_as; use std::{str::FromStr, time::Duration}; 
+use tracing::log::LevelFilter;
 
 cfg_if::cfg_if! {
     if #[cfg(feature = "database-1_0")] {
@@ -162,9 +162,17 @@ pub struct DatabaseConnectOptionsSettings {
     pub max_lifetime: Option<Duration>,
     /// Enable SQLx statement logging
     pub sqlx_logging: bool,
+    #[serde(
+        deserialize_with = "string_to_level_filter",
+        serialize_with = "level_filter_to_string"
+    )]
     /// SQLx statement logging level (ignored if `sqlx_logging` is false)
     pub sqlx_logging_level: LevelFilter,
     #[cfg(feature = "database-1_0")]
+    #[serde(
+        deserialize_with = "string_to_level_filter",
+        serialize_with = "level_filter_to_string"
+    )]
     /// SQLx slow statements logging level (ignored if `sqlx_logging` is false)
     pub sqlx_slow_statements_logging_level: LevelFilter,
     #[cfg(feature = "database-1_0")]
@@ -172,36 +181,6 @@ pub struct DatabaseConnectOptionsSettings {
     pub sqlx_slow_statements_logging_threshold: Duration,
 }
 
-#[derive(Debug, Clone, Copy, Serialize, Deserialize, PartialEq, Eq)]
-#[serde(deny_unknown_fields, rename_all = "lowercase")]
-pub enum LevelFilter {
-    /// A level lower than all log levels.
-    Off,
-    /// Corresponds to the `Error` log level.
-    Error,
-    /// Corresponds to the `Warn` log level.
-    Warn,
-    /// Corresponds to the `Info` log level.
-    Info,
-    /// Corresponds to the `Debug` log level.
-    Debug,
-    /// Corresponds to the `Trace` log level.
-    Trace,
-}
-
-impl From<LevelFilter> for tracing::log::LevelFilter {
-    fn from(value: LevelFilter) -> Self {
-        match value {
-            LevelFilter::Off => Self::Off,
-            LevelFilter::Error => Self::Error,
-            LevelFilter::Warn => Self::Warn,
-            LevelFilter::Info => Self::Info,
-            LevelFilter::Debug => Self::Debug,
-            LevelFilter::Trace => Self::Trace,
-        }
-    }
-}
-
 impl Default for DatabaseConnectOptionsSettings {
     fn default() -> Self {
         Self {
@@ -242,12 +221,27 @@ impl DatabaseConnectOptionsSettings {
             options.max_lifetime(value);
         }
         options.sqlx_logging(self.sqlx_logging);
-        options.sqlx_logging_level(self.sqlx_logging_level.into());
+        options.sqlx_logging_level(self.sqlx_logging_level);
         #[cfg(feature = "database-1_0")]
         options.sqlx_slow_statements_logging_settings(
-            self.sqlx_slow_statements_logging_level.into(),
+            self.sqlx_slow_statements_logging_level,
             self.sqlx_slow_statements_logging_threshold,
         );
         options
     }
 }
+
+fn string_to_level_filter<'de, D>(deserializer: D) -> Result<LevelFilter, D::Error>
+where
+    D: Deserializer<'de>,
+{
+    let string = String::deserialize(deserializer)?;
+    LevelFilter::from_str(&string).map_err(<D::Error as serde::de::Error>::custom)
+}
+
+pub fn level_filter_to_string<S>(x: &LevelFilter, s: S) -> Result<S::Ok, S::Error>
+where
+    S: Serializer,
+{
+    s.serialize_str(x.as_str().to_lowercase().as_str())
+}

From 8920cd9bb07cc41f56117a70fdf389fa2014f53e Mon Sep 17 00:00:00 2001
From: Kirill Fedoseev
Date: Mon, 13 Jan 2025 18:15:13 +0400
Subject: [PATCH 3/3] chore: minor refactor

---
 libs/blockscout-service-launcher/src/database.rs | 9 +++------
 1 file changed, 3 insertions(+), 6 deletions(-)

diff --git a/libs/blockscout-service-launcher/src/database.rs b/libs/blockscout-service-launcher/src/database.rs
index a2ab554bf..fa880b3f2 100644
--- a/libs/blockscout-service-launcher/src/database.rs
+++ b/libs/blockscout-service-launcher/src/database.rs
@@ -31,14 +31,11 @@ pub async fn initialize_postgres<Migrator: MigratorTrait>(
     settings: &DatabaseSettings,
 ) -> anyhow::Result<DatabaseConnection> {
     let db_url = settings.connect.clone().url();
-    let connect_options = ConnectOptions::new(db_url);
-    let connect_options = settings.connect_options.apply_to(connect_options);
 
     // Create database if not exists
     if settings.create_database {
-        let db_url = connect_options.get_url();
         let (db_base_url, db_name) = {
-            let mut db_url = url::Url::from_str(db_url).context("invalid database url")?;
+            let mut db_url: url::Url = db_url.parse().context("invalid database url")?;
             let db_name = db_url
                 .path_segments()
                 .and_then(|mut segments| segments.next())
@@ -53,8 +50,7 @@ pub async fn initialize_postgres<Migrator: MigratorTrait>(
         tracing::info!("creating database '{db_name}'");
         let db_base_url = format!("{db_base_url}/{DEFAULT_DB}");
 
-        let create_database_options = ConnectOptions::new(db_base_url);
-        let create_database_options = settings.connect_options.apply_to(create_database_options);
+        let create_database_options = settings.connect_options.apply_to(db_base_url.into());
         let db = Database::connect(create_database_options).await?;
 
         let result = db
@@ -77,6 +73,7 @@ pub async fn initialize_postgres<Migrator: MigratorTrait>(
     }
 
+    let connect_options = settings.connect_options.apply_to(db_url.into());
     let db = Database::connect(connect_options).await?;
     if settings.run_migrations {
         Migrator::up(&db, None).await?;