diff --git a/zk_toolbox/README.md b/zk_toolbox/README.md index 469e36a65f6..a3b44fa98b3 100644 --- a/zk_toolbox/README.md +++ b/zk_toolbox/README.md @@ -247,6 +247,53 @@ Run the external node: zk_inception en run ``` +### Portal + +Once you have at least one chain initialized, you can run the [portal](https://github.com/matter-labs/dapp-portal) - a +web-app to bridge tokens between L1 and L2 and more: + +```bash +zk_inception portal +``` + +This command will start the dockerized portal app using the configuration from the `apps/portal.config.json` file inside your +ecosystem directory. You can edit this file to configure the portal app if needed. By default, the portal starts on +`http://localhost:3030`; you can configure the port in the `apps.yaml` file. + +### Explorer + +For a better understanding of the blockchain data, you can use the +[explorer](https://github.com/matter-labs/block-explorer) - a web-app to view and inspect transactions, blocks, +contracts and more. + +First, initialize the explorer for each chain: + +```bash +zk_inception explorer init +``` + +This command creates a database to store explorer data and generates a docker compose file with the explorer services +(`explorer-docker-compose.yml`). + +Next, for each chain you want to explore, start its backend services: + +```bash +zk_inception explorer backend --chain <chain_name> +``` + +This command uses the previously created docker compose file to start the services (api, data fetcher, worker) required for +the explorer. + +Finally, you can run the explorer app: + +```bash +zk_inception explorer run +``` + +This command will start the dockerized explorer app using the configuration from the `apps/explorer.config.json` file inside +your ecosystem directory. You can edit this file to configure the app if needed. By default, the explorer starts on +`http://localhost:3010`; you can configure the port in the `apps.yaml` file. + ### Update To update your node: diff --git a/zk_toolbox/crates/common/src/docker.rs b/zk_toolbox/crates/common/src/docker.rs index 0ca31383f9c..a5731808814 100644 --- a/zk_toolbox/crates/common/src/docker.rs +++ b/zk_toolbox/crates/common/src/docker.rs @@ -1,26 +1,33 @@ -use std::collections::HashMap; - +use url::Url; use xshell::{cmd, Shell}; use crate::cmd::Cmd; -pub fn up(shell: &Shell, docker_compose_file: &str) -> anyhow::Result<()> { - Ok(Cmd::new(cmd!(shell, "docker compose -f {docker_compose_file} up -d")).run()?) +pub fn up(shell: &Shell, docker_compose_file: &str, detach: bool) -> anyhow::Result<()> { + let args = if detach { vec!["-d"] } else { vec![] }; + let mut cmd = Cmd::new(cmd!( + shell, + "docker compose -f {docker_compose_file} up {args...}" + )); + cmd = if !detach { cmd.with_force_run() } else { cmd }; + Ok(cmd.run()?) } pub fn down(shell: &Shell, docker_compose_file: &str) -> anyhow::Result<()> { Ok(Cmd::new(cmd!(shell, "docker compose -f {docker_compose_file} down")).run()?) } -pub fn run( - shell: &Shell, - docker_image: &str, - docker_args: HashMap<String, String>, -) -> anyhow::Result<()> { - let mut args = vec![]; - for (key, value) in docker_args.iter() { - args.push(key); - args.push(value); +pub fn run(shell: &Shell, docker_image: &str, docker_args: Vec<String>) -> anyhow::Result<()> { Ok(Cmd::new(cmd!(shell, "docker run {docker_args...} {docker_image}")).run()?)
+} + +pub fn adjust_localhost_for_docker(mut url: Url) -> anyhow::Result { + if let Some(host) = url.host_str() { + if host == "localhost" || host == "127.0.0.1" { + url.set_host(Some("host.docker.internal"))?; + } + } else { + anyhow::bail!("Failed to parse: no host"); } - Ok(Cmd::new(cmd!(shell, "docker run {args...} {docker_image}")).run()?) + Ok(url) } diff --git a/zk_toolbox/crates/config/src/apps.rs b/zk_toolbox/crates/config/src/apps.rs new file mode 100644 index 00000000000..697b35b0851 --- /dev/null +++ b/zk_toolbox/crates/config/src/apps.rs @@ -0,0 +1,59 @@ +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; +use xshell::Shell; + +use crate::{ + consts::{APPS_CONFIG_FILE, DEFAULT_EXPLORER_PORT, DEFAULT_PORTAL_PORT, LOCAL_CONFIGS_PATH}, + traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig, ZkToolboxConfig}, +}; + +/// Ecosystem level configuration for the apps (portal and explorer). +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct AppsEcosystemConfig { + pub portal: AppEcosystemConfig, + pub explorer: AppEcosystemConfig, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct AppEcosystemConfig { + pub http_port: u16, +} + +impl ZkToolboxConfig for AppsEcosystemConfig {} +impl FileConfigWithDefaultName for AppsEcosystemConfig { + const FILE_NAME: &'static str = APPS_CONFIG_FILE; +} + +impl AppsEcosystemConfig { + pub fn get_config_path(ecosystem_base_path: &Path) -> PathBuf { + ecosystem_base_path + .join(LOCAL_CONFIGS_PATH) + .join(APPS_CONFIG_FILE) + } + + pub fn read_or_create_default(shell: &Shell) -> anyhow::Result { + let config_path = Self::get_config_path(&shell.current_dir()); + match Self::read(shell, &config_path) { + Ok(config) => Ok(config), + Err(_) => { + let config = Self::default(); + config.save(shell, &config_path)?; + Ok(config) + } + } + } +} + +impl Default for AppsEcosystemConfig { + fn default() -> Self { + AppsEcosystemConfig { + portal: AppEcosystemConfig { + http_port: DEFAULT_PORTAL_PORT, + }, + explorer: AppEcosystemConfig { + http_port: DEFAULT_EXPLORER_PORT, + }, + } + } +} diff --git a/zk_toolbox/crates/config/src/consts.rs b/zk_toolbox/crates/config/src/consts.rs index b4bbbdffbe2..1e1c0998f00 100644 --- a/zk_toolbox/crates/config/src/consts.rs +++ b/zk_toolbox/crates/config/src/consts.rs @@ -30,12 +30,43 @@ pub const ERA_OBSERVABILITY_COMPOSE_FILE: &str = "era-observability/docker-compo pub const ERA_OBSERBAVILITY_DIR: &str = "era-observability"; /// Era observability repo link pub const ERA_OBSERBAVILITY_GIT_REPO: &str = "https://github.com/matter-labs/era-observability"; +pub(crate) const LOCAL_APPS_PATH: &str = "apps/"; +pub(crate) const LOCAL_CHAINS_PATH: &str = "chains/"; pub(crate) const LOCAL_CONFIGS_PATH: &str = "configs/"; +pub(crate) const LOCAL_GENERATED_PATH: &str = ".generated/"; pub(crate) const LOCAL_DB_PATH: &str = "db/"; pub(crate) const LOCAL_ARTIFACTS_PATH: &str = "artifacts/"; -/// Name of portal config file -pub const PORTAL_CONFIG_FILE: &str = "portal.config.js"; +/// Name of apps config file +pub const APPS_CONFIG_FILE: &str = "apps.yaml"; +/// Name of portal runtime config file (auto-generated) +pub const PORTAL_JS_CONFIG_FILE: &str = "portal.config.js"; +/// Name of portal config JSON file +pub const PORTAL_CONFIG_FILE: &str = "portal.config.json"; +/// Name of explorer runtime config file (auto-generated) +pub const EXPLORER_JS_CONFIG_FILE: &str = "explorer.config.js"; +/// Name of explorer config JSON file +pub const EXPLORER_CONFIG_FILE: &str = "explorer.config.json"; 
+/// Name of explorer docker compose file +pub const EXPLORER_DOCKER_COMPOSE_FILE: &str = "explorer-docker-compose.yml"; + +/// Default port for the explorer app +pub const DEFAULT_EXPLORER_PORT: u16 = 3010; +/// Default port for the portal app +pub const DEFAULT_PORTAL_PORT: u16 = 3030; +/// Default port for the explorer worker service +pub const DEFAULT_EXPLORER_WORKER_PORT: u16 = 3001; +/// Default port for the explorer API service +pub const DEFAULT_EXPLORER_API_PORT: u16 = 3002; +/// Default port for the explorer data fetcher service +pub const DEFAULT_EXPLORER_DATA_FETCHER_PORT: u16 = 3040; + +pub const EXPLORER_API_DOCKER_IMAGE: &str = "matterlabs/block-explorer-api"; +pub const EXPLORER_DATA_FETCHER_DOCKER_IMAGE: &str = "matterlabs/block-explorer-data-fetcher"; +pub const EXPLORER_WORKER_DOCKER_IMAGE: &str = "matterlabs/block-explorer-worker"; + +/// Interval (in milliseconds) for polling new batches to process in explorer app +pub const EXPLORER_BATCHES_PROCESSING_POLLING_INTERVAL: u64 = 1000; /// Path to ecosystem contacts pub(crate) const ECOSYSTEM_PATH: &str = "etc/env/ecosystems"; diff --git a/zk_toolbox/crates/config/src/docker_compose.rs b/zk_toolbox/crates/config/src/docker_compose.rs new file mode 100644 index 00000000000..05c6e73eaea --- /dev/null +++ b/zk_toolbox/crates/config/src/docker_compose.rs @@ -0,0 +1,43 @@ +use std::collections::HashMap; + +use serde::{Deserialize, Serialize}; + +use crate::traits::ZkToolboxConfig; + +#[derive(Debug, Default, Serialize, Deserialize, Clone)] +pub struct DockerComposeConfig { + pub services: HashMap, + #[serde(skip_serializing_if = "Option::is_none")] + pub name: Option, + #[serde(flatten)] + pub other: serde_json::Value, +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct DockerComposeService { + pub image: String, + #[serde(skip_serializing_if = "Option::is_none")] + pub platform: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub ports: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub environment: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub volumes: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub depends_on: Option>, + #[serde(skip_serializing_if = "Option::is_none")] + pub restart: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub extra_hosts: Option>, + #[serde(flatten)] + pub other: serde_json::Value, +} + +impl ZkToolboxConfig for DockerComposeConfig {} + +impl DockerComposeConfig { + pub fn add_service(&mut self, name: &str, service: DockerComposeService) { + self.services.insert(name.to_string(), service); + } +} diff --git a/zk_toolbox/crates/config/src/explorer.rs b/zk_toolbox/crates/config/src/explorer.rs new file mode 100644 index 00000000000..ee7a59e5105 --- /dev/null +++ b/zk_toolbox/crates/config/src/explorer.rs @@ -0,0 +1,147 @@ +use std::path::{Path, PathBuf}; + +use serde::{Deserialize, Serialize}; +use xshell::Shell; + +use crate::{ + consts::{ + EXPLORER_CONFIG_FILE, EXPLORER_JS_CONFIG_FILE, LOCAL_APPS_PATH, LOCAL_CONFIGS_PATH, + LOCAL_GENERATED_PATH, + }, + traits::{ReadConfig, SaveConfig, ZkToolboxConfig}, +}; + +/// Explorer JSON configuration file. This file contains configuration for the explorer app. 
+#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct ExplorerConfig { + pub app_environment: String, + pub environment_config: EnvironmentConfig, + #[serde(flatten)] + pub other: serde_json::Value, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +pub struct EnvironmentConfig { + pub networks: Vec, +} + +#[derive(Serialize, Deserialize, Debug, Clone)] +#[serde(rename_all = "camelCase")] +pub struct ExplorerChainConfig { + pub name: String, // L2 network chain name (the one used during the chain initialization) + pub l2_network_name: String, // How the network is displayed in the app dropdown + pub l2_chain_id: u64, + pub rpc_url: String, // L2 RPC URL + pub api_url: String, // L2 API URL + pub base_token_address: String, // L2 base token address (currently always 0x800A) + pub hostnames: Vec, // Custom domain to use when switched to this chain in the app + pub icon: String, // Icon to show in the explorer dropdown + pub maintenance: bool, // Maintenance warning + pub published: bool, // If false, the chain will not be shown in the explorer dropdown + #[serde(skip_serializing_if = "Option::is_none")] + pub bridge_url: Option, // Link to the portal bridge + #[serde(skip_serializing_if = "Option::is_none")] + pub l1_explorer_url: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub verification_api_url: Option, // L2 verification API URL + #[serde(flatten)] + pub other: serde_json::Value, +} + +impl ExplorerConfig { + /// Returns the path to the explorer configuration file. + pub fn get_config_path(ecosystem_base_path: &Path) -> PathBuf { + ecosystem_base_path + .join(LOCAL_CONFIGS_PATH) + .join(LOCAL_APPS_PATH) + .join(EXPLORER_CONFIG_FILE) + } + + /// Reads the existing config or creates a default one if it doesn't exist. + pub fn read_or_create_default(shell: &Shell) -> anyhow::Result { + let config_path = Self::get_config_path(&shell.current_dir()); + match Self::read(shell, &config_path) { + Ok(config) => Ok(config), + Err(_) => { + let config = Self::default(); + config.save(shell, &config_path)?; + Ok(config) + } + } + } + + /// Adds or updates a given chain configuration. + pub fn add_chain_config(&mut self, config: &ExplorerChainConfig) { + // Replace if config with the same network name already exists + if let Some(index) = self + .environment_config + .networks + .iter() + .position(|c| c.name == config.name) + { + self.environment_config.networks[index] = config.clone(); + return; + } + self.environment_config.networks.push(config.clone()); + } + + /// Retains only the chains whose names are present in the given vector. + pub fn filter(&mut self, chain_names: &[String]) { + self.environment_config + .networks + .retain(|config| chain_names.contains(&config.name)); + } + + /// Hides all chains except those specified in the given vector. + pub fn hide_except(&mut self, chain_names: &[String]) { + for network in &mut self.environment_config.networks { + network.published = chain_names.contains(&network.name); + } + } + + /// Checks if a chain with the given name exists in the configuration. + pub fn contains(&self, chain_name: &String) -> bool { + self.environment_config + .networks + .iter() + .any(|config| &config.name == chain_name) + } + + pub fn is_empty(&self) -> bool { + self.environment_config.networks.is_empty() + } + + pub fn save_as_js(&self, shell: &Shell) -> anyhow::Result { + // The block-explorer-app is served as a pre-built static app in a Docker image. 
+ // It uses a JavaScript file (config.js) that injects the configuration at runtime + // by overwriting the '##runtimeConfig' property of the window object. + // This file will be mounted to the Docker image when it runs. + let path = Self::get_generated_js_config_path(&shell.current_dir()); + let json = serde_json::to_string_pretty(&self)?; + let config_js_content = format!("window['##runtimeConfig'] = {};", json); + shell.write_file(path.clone(), config_js_content.as_bytes())?; + Ok(path) + } + + fn get_generated_js_config_path(ecosystem_base_path: &Path) -> PathBuf { + ecosystem_base_path + .join(LOCAL_CONFIGS_PATH) + .join(LOCAL_GENERATED_PATH) + .join(EXPLORER_JS_CONFIG_FILE) + } +} + +impl Default for ExplorerConfig { + fn default() -> Self { + ExplorerConfig { + app_environment: "default".to_string(), + environment_config: EnvironmentConfig { + networks: Vec::new(), + }, + other: serde_json::Value::Null, + } + } +} + +impl ZkToolboxConfig for ExplorerConfig {} diff --git a/zk_toolbox/crates/config/src/explorer_compose.rs b/zk_toolbox/crates/config/src/explorer_compose.rs new file mode 100644 index 00000000000..ca9abc1e3e2 --- /dev/null +++ b/zk_toolbox/crates/config/src/explorer_compose.rs @@ -0,0 +1,214 @@ +use std::{ + collections::HashMap, + path::{Path, PathBuf}, +}; + +use anyhow::Context; +use common::{db, docker::adjust_localhost_for_docker}; +use serde::{Deserialize, Serialize}; +use url::Url; + +use crate::{ + consts::{ + DEFAULT_EXPLORER_API_PORT, DEFAULT_EXPLORER_DATA_FETCHER_PORT, + DEFAULT_EXPLORER_WORKER_PORT, EXPLORER_API_DOCKER_IMAGE, + EXPLORER_DATA_FETCHER_DOCKER_IMAGE, EXPLORER_DOCKER_COMPOSE_FILE, + EXPLORER_WORKER_DOCKER_IMAGE, LOCAL_CHAINS_PATH, LOCAL_CONFIGS_PATH, + }, + docker_compose::{DockerComposeConfig, DockerComposeService}, + traits::ZkToolboxConfig, + EXPLORER_BATCHES_PROCESSING_POLLING_INTERVAL, +}; + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ExplorerBackendPorts { + pub api_http_port: u16, + pub data_fetcher_http_port: u16, + pub worker_http_port: u16, +} + +impl ExplorerBackendPorts { + pub fn with_offset(&self, offset: u16) -> Self { + ExplorerBackendPorts { + api_http_port: self.api_http_port + offset, + data_fetcher_http_port: self.data_fetcher_http_port + offset, + worker_http_port: self.worker_http_port + offset, + } + } +} + +impl Default for ExplorerBackendPorts { + fn default() -> Self { + ExplorerBackendPorts { + api_http_port: DEFAULT_EXPLORER_API_PORT, + data_fetcher_http_port: DEFAULT_EXPLORER_DATA_FETCHER_PORT, + worker_http_port: DEFAULT_EXPLORER_WORKER_PORT, + } + } +} + +#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ExplorerBackendConfig { + pub database_url: Url, + pub ports: ExplorerBackendPorts, + pub batches_processing_polling_interval: u64, +} + +impl ExplorerBackendConfig { + pub fn new(database_url: Url, ports: &ExplorerBackendPorts) -> Self { + ExplorerBackendConfig { + database_url, + ports: ports.clone(), + batches_processing_polling_interval: EXPLORER_BATCHES_PROCESSING_POLLING_INTERVAL, + } + } +} + +/// Chain-level explorer backend docker compose file. +/// It contains configuration for api, data fetcher, and worker services. 
+#[derive(Debug, Serialize, Deserialize, Clone)] +pub struct ExplorerBackendComposeConfig { + #[serde(flatten)] + pub docker_compose: DockerComposeConfig, +} + +impl ZkToolboxConfig for ExplorerBackendComposeConfig {} + +impl ExplorerBackendComposeConfig { + const API_NAME: &'static str = "api"; + const DATA_FETCHER_NAME: &'static str = "data-fetcher"; + const WORKER_NAME: &'static str = "worker"; + + pub fn new( + chain_name: &str, + l2_rpc_url: Url, + config: &ExplorerBackendConfig, + ) -> anyhow::Result { + let db_url = adjust_localhost_for_docker(config.database_url.clone())?; + let l2_rpc_url = adjust_localhost_for_docker(l2_rpc_url)?; + + let mut services: HashMap = HashMap::new(); + services.insert( + Self::API_NAME.to_string(), + Self::create_api_service(config.ports.api_http_port, db_url.as_ref()), + ); + services.insert( + Self::DATA_FETCHER_NAME.to_string(), + Self::create_data_fetcher_service( + config.ports.data_fetcher_http_port, + l2_rpc_url.as_ref(), + ), + ); + + let worker = Self::create_worker_service( + config.ports.worker_http_port, + config.ports.data_fetcher_http_port, + l2_rpc_url.as_ref(), + &db_url, + config.batches_processing_polling_interval, + ) + .context("Failed to create worker service")?; + services.insert(Self::WORKER_NAME.to_string(), worker); + + Ok(Self { + docker_compose: DockerComposeConfig { + name: Some(format!("{chain_name}-explorer")), + services, + other: serde_json::Value::Null, + }, + }) + } + + fn create_api_service(port: u16, db_url: &str) -> DockerComposeService { + DockerComposeService { + image: EXPLORER_API_DOCKER_IMAGE.to_string(), + platform: Some("linux/amd64".to_string()), + ports: Some(vec![format!("{}:{}", port, port)]), + volumes: None, + depends_on: Some(vec![Self::WORKER_NAME.to_string()]), + restart: None, + environment: Some(HashMap::from([ + ("PORT".to_string(), port.to_string()), + ("LOG_LEVEL".to_string(), "verbose".to_string()), + ("NODE_ENV".to_string(), "development".to_string()), + ("DATABASE_URL".to_string(), db_url.to_string()), + ])), + extra_hosts: Some(vec!["host.docker.internal:host-gateway".to_string()]), + other: serde_json::Value::Null, + } + } + + fn create_data_fetcher_service(port: u16, l2_rpc_url: &str) -> DockerComposeService { + DockerComposeService { + image: EXPLORER_DATA_FETCHER_DOCKER_IMAGE.to_string(), + platform: Some("linux/amd64".to_string()), + ports: Some(vec![format!("{}:{}", port, port)]), + volumes: None, + depends_on: None, + restart: None, + environment: Some(HashMap::from([ + ("PORT".to_string(), port.to_string()), + ("LOG_LEVEL".to_string(), "verbose".to_string()), + ("NODE_ENV".to_string(), "development".to_string()), + ("BLOCKCHAIN_RPC_URL".to_string(), l2_rpc_url.to_string()), + ])), + extra_hosts: Some(vec!["host.docker.internal:host-gateway".to_string()]), + other: serde_json::Value::Null, + } + } + + fn create_worker_service( + port: u16, + data_fetcher_port: u16, + l2_rpc_url: &str, + db_url: &Url, + batches_processing_polling_interval: u64, + ) -> anyhow::Result { + let data_fetcher_url = format!("http://{}:{}", Self::DATA_FETCHER_NAME, data_fetcher_port); + + // Parse database URL + let db_config = db::DatabaseConfig::from_url(db_url)?; + let db_user = db_url.username().to_string(); + let db_password = db_url.password().unwrap_or(""); + let db_port = db_url.port().unwrap_or(5432); + let db_host = db_url + .host_str() + .context("Failed to parse database host")? 
+ .to_string(); + + Ok(DockerComposeService { + image: EXPLORER_WORKER_DOCKER_IMAGE.to_string(), + platform: Some("linux/amd64".to_string()), + ports: None, + volumes: None, + depends_on: None, + restart: None, + environment: Some(HashMap::from([ + ("PORT".to_string(), port.to_string()), + ("LOG_LEVEL".to_string(), "verbose".to_string()), + ("NODE_ENV".to_string(), "development".to_string()), + ("DATABASE_HOST".to_string(), db_host.to_string()), + ("DATABASE_PORT".to_string(), db_port.to_string()), + ("DATABASE_USER".to_string(), db_user.to_string()), + ("DATABASE_PASSWORD".to_string(), db_password.to_string()), + ("DATABASE_NAME".to_string(), db_config.name.to_string()), + ("BLOCKCHAIN_RPC_URL".to_string(), l2_rpc_url.to_string()), + ("DATA_FETCHER_URL".to_string(), data_fetcher_url), + ( + "BATCHES_PROCESSING_POLLING_INTERVAL".to_string(), + batches_processing_polling_interval.to_string(), + ), + ])), + extra_hosts: Some(vec!["host.docker.internal:host-gateway".to_string()]), + other: serde_json::Value::Null, + }) + } + + pub fn get_config_path(ecosystem_base_path: &Path, chain_name: &str) -> PathBuf { + ecosystem_base_path + .join(LOCAL_CHAINS_PATH) + .join(chain_name) + .join(LOCAL_CONFIGS_PATH) + .join(EXPLORER_DOCKER_COMPOSE_FILE) + } +} diff --git a/zk_toolbox/crates/config/src/general.rs b/zk_toolbox/crates/config/src/general.rs index bcbe69e4719..41c2e4c33cf 100644 --- a/zk_toolbox/crates/config/src/general.rs +++ b/zk_toolbox/crates/config/src/general.rs @@ -9,7 +9,7 @@ use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; use crate::{ consts::GENERAL_FILE, - traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, + traits::{ConfigWithL2RpcUrl, FileConfigWithDefaultName, ReadConfig, SaveConfig}, }; pub struct RocksDbs { @@ -211,3 +211,14 @@ impl ReadConfig for GeneralConfig { decode_yaml_repr::(&path, false) } } + +impl ConfigWithL2RpcUrl for GeneralConfig { + fn get_l2_rpc_url(&self) -> anyhow::Result { + self.api_config + .as_ref() + .map(|api_config| &api_config.web3_json_rpc.http_url) + .context("API config is missing")? + .parse() + .context("Failed to parse L2 RPC URL") + } +} diff --git a/zk_toolbox/crates/config/src/lib.rs b/zk_toolbox/crates/config/src/lib.rs index 4e00962229b..3c7443f2449 100644 --- a/zk_toolbox/crates/config/src/lib.rs +++ b/zk_toolbox/crates/config/src/lib.rs @@ -1,3 +1,4 @@ +pub use apps::*; pub use chain::*; pub use consts::*; pub use contracts::*; @@ -11,6 +12,7 @@ pub use wallet_creation::*; pub use wallets::*; pub use zksync_protobuf_config::{decode_yaml_repr, encode_yaml_repr}; +mod apps; mod chain; mod consts; mod contracts; @@ -23,6 +25,9 @@ mod secrets; mod wallet_creation; mod wallets; +pub mod docker_compose; +pub mod explorer; +pub mod explorer_compose; pub mod external_node; pub mod forge_interface; pub mod portal; diff --git a/zk_toolbox/crates/config/src/portal.rs b/zk_toolbox/crates/config/src/portal.rs index 4b68d5744cd..c787c6cc702 100644 --- a/zk_toolbox/crates/config/src/portal.rs +++ b/zk_toolbox/crates/config/src/portal.rs @@ -5,28 +5,25 @@ use types::TokenInfo; use xshell::Shell; use crate::{ - consts::{LOCAL_CONFIGS_PATH, PORTAL_CONFIG_FILE}, - traits::{FileConfigWithDefaultName, ReadConfig, SaveConfig}, + consts::{ + LOCAL_APPS_PATH, LOCAL_CONFIGS_PATH, LOCAL_GENERATED_PATH, PORTAL_CONFIG_FILE, + PORTAL_JS_CONFIG_FILE, + }, + traits::{ReadConfig, SaveConfig, ZkToolboxConfig}, }; +/// Portal JSON configuration file. This file contains configuration for the portal app. 
#[derive(Serialize, Deserialize, Debug, Clone)] #[serde(rename_all = "camelCase")] -pub struct PortalRuntimeConfig { +pub struct PortalConfig { pub node_type: String, - pub hyperchains_config: HyperchainsConfig, + pub hyperchains_config: Vec, + #[serde(flatten)] + pub other: serde_json::Value, } #[derive(Serialize, Deserialize, Debug, Clone)] -pub struct HyperchainsConfig(pub Vec); - -impl HyperchainsConfig { - pub fn is_empty(&self) -> bool { - self.0.is_empty() - } -} - -#[derive(Serialize, Deserialize, Debug, Clone)] -pub struct HyperchainConfig { +pub struct PortalChainConfig { pub network: NetworkConfig, pub tokens: Vec, } @@ -35,10 +32,12 @@ pub struct HyperchainConfig { #[serde(rename_all = "camelCase")] pub struct NetworkConfig { pub id: u64, // L2 Network ID - pub key: String, // L2 Network key - pub name: String, // L2 Network name + pub key: String, // L2 Network key (chain name used during the initialization) + pub name: String, // L2 Network name (displayed in the app dropdown) pub rpc_url: String, // L2 RPC URL #[serde(skip_serializing_if = "Option::is_none")] + pub hidden: Option, // If true, the chain will not be shown in the app dropdown + #[serde(skip_serializing_if = "Option::is_none")] pub block_explorer_url: Option, // L2 Block Explorer URL #[serde(skip_serializing_if = "Option::is_none")] pub block_explorer_api: Option, // L2 Block Explorer API @@ -46,6 +45,8 @@ pub struct NetworkConfig { pub public_l1_network_id: Option, // Ethereum Mainnet or Ethereum Sepolia Testnet ID #[serde(skip_serializing_if = "Option::is_none")] pub l1_network: Option, + #[serde(flatten)] + pub other: serde_json::Value, } #[derive(Serialize, Deserialize, Debug, Clone)] @@ -81,44 +82,94 @@ pub struct TokenConfig { pub name: Option, } -impl PortalRuntimeConfig { +impl PortalConfig { + /// Returns the path to the portal configuration file. pub fn get_config_path(ecosystem_base_path: &Path) -> PathBuf { ecosystem_base_path .join(LOCAL_CONFIGS_PATH) + .join(LOCAL_APPS_PATH) .join(PORTAL_CONFIG_FILE) } -} -impl FileConfigWithDefaultName for PortalRuntimeConfig { - const FILE_NAME: &'static str = PORTAL_CONFIG_FILE; -} + /// Reads the existing config or creates a default one if it doesn't exist. + pub fn read_or_create_default(shell: &Shell) -> anyhow::Result { + let config_path = Self::get_config_path(&shell.current_dir()); + match Self::read(shell, &config_path) { + Ok(config) => Ok(config), + Err(_) => { + let config = Self::default(); + config.save(shell, &config_path)?; + Ok(config) + } + } + } + + /// Adds or updates a given chain configuration. + pub fn add_chain_config(&mut self, config: &PortalChainConfig) { + // Replace if config with the same network key already exists + if let Some(index) = self + .hyperchains_config + .iter() + .position(|c| c.network.key == config.network.key) + { + self.hyperchains_config[index] = config.clone(); + return; + } + self.hyperchains_config.push(config.clone()); + } + + /// Retains only the chains whose names are present in the given vector. + pub fn filter(&mut self, chain_names: &[String]) { + self.hyperchains_config + .retain(|config| chain_names.contains(&config.network.key)); + } + + /// Hides all chains except those specified in the given vector. 
+ pub fn hide_except(&mut self, chain_names: &[String]) { + for config in &mut self.hyperchains_config { + config.network.hidden = Some(!chain_names.contains(&config.network.key)); + } + } -impl SaveConfig for PortalRuntimeConfig { - fn save(&self, shell: &Shell, path: impl AsRef) -> anyhow::Result<()> { + /// Checks if a chain with the given name exists in the configuration. + pub fn contains(&self, chain_name: &String) -> bool { + self.hyperchains_config + .iter() + .any(|config| &config.network.key == chain_name) + } + + pub fn is_empty(&self) -> bool { + self.hyperchains_config.is_empty() + } + + pub fn save_as_js(&self, shell: &Shell) -> anyhow::Result { // The dapp-portal is served as a pre-built static app in a Docker image. // It uses a JavaScript file (config.js) that injects the configuration at runtime // by overwriting the '##runtimeConfig' property of the window object. - // Therefore, we generate a JavaScript file instead of a JSON file. // This file will be mounted to the Docker image when it runs. + let path = Self::get_generated_js_config_path(&shell.current_dir()); let json = serde_json::to_string_pretty(&self)?; let config_js_content = format!("window['##runtimeConfig'] = {};", json); - Ok(shell.write_file(path, config_js_content.as_bytes())?) + shell.write_file(path.clone(), config_js_content.as_bytes())?; + Ok(path) + } + + fn get_generated_js_config_path(ecosystem_base_path: &Path) -> PathBuf { + ecosystem_base_path + .join(LOCAL_CONFIGS_PATH) + .join(LOCAL_GENERATED_PATH) + .join(PORTAL_JS_CONFIG_FILE) } } -impl ReadConfig for PortalRuntimeConfig { - fn read(shell: &Shell, path: impl AsRef) -> anyhow::Result { - let config_js_content = shell.read_file(path)?; - // Extract the JSON part from the JavaScript file - let json_start = config_js_content - .find('{') - .ok_or_else(|| anyhow::anyhow!("Invalid config file format"))?; - let json_end = config_js_content - .rfind('}') - .ok_or_else(|| anyhow::anyhow!("Invalid config file format"))?; - let json_str = &config_js_content[json_start..=json_end]; - // Parse the JSON into PortalRuntimeConfig - let config: PortalRuntimeConfig = serde_json::from_str(json_str)?; - Ok(config) +impl Default for PortalConfig { + fn default() -> Self { + PortalConfig { + node_type: "hyperchain".to_string(), + hyperchains_config: Vec::new(), + other: serde_json::Value::Null, + } } } + +impl ZkToolboxConfig for PortalConfig {} diff --git a/zk_toolbox/crates/config/src/traits.rs b/zk_toolbox/crates/config/src/traits.rs index 1f00b39b040..bb0722762e3 100644 --- a/zk_toolbox/crates/config/src/traits.rs +++ b/zk_toolbox/crates/config/src/traits.rs @@ -5,6 +5,7 @@ use common::files::{ read_json_file, read_toml_file, read_yaml_file, save_json_file, save_toml_file, save_yaml_file, }; use serde::{de::DeserializeOwned, Serialize}; +use url::Url; use xshell::Shell; // Configs that we use only inside zk toolbox, we don't have protobuf implementation for them. 
@@ -156,3 +157,7 @@ fn save_with_comment( } Ok(()) } + +pub trait ConfigWithL2RpcUrl { + fn get_l2_rpc_url(&self) -> anyhow::Result; +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/args/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/args/mod.rs index a27b653edf5..d18b05c910e 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/args/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/args/mod.rs @@ -1,9 +1,7 @@ pub use containers::*; -pub use portal::*; pub use run_server::*; pub use update::*; mod containers; -mod portal; mod run_server; mod update; diff --git a/zk_toolbox/crates/zk_inception/src/commands/args/portal.rs b/zk_toolbox/crates/zk_inception/src/commands/args/portal.rs deleted file mode 100644 index e31058aad5d..00000000000 --- a/zk_toolbox/crates/zk_inception/src/commands/args/portal.rs +++ /dev/null @@ -1,12 +0,0 @@ -use clap::Parser; -use serde::{Deserialize, Serialize}; - -#[derive(Debug, Serialize, Deserialize, Parser)] -pub struct PortalArgs { - #[clap( - long, - default_value = "3030", - help = "The port number for the portal app" - )] - pub port: u16, -} diff --git a/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs b/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs index 921eeaa98af..793fbbf31ae 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/chain/init.rs @@ -28,7 +28,7 @@ use crate::{ genesis::genesis, set_token_multiplier_setter::set_token_multiplier_setter, }, - portal::create_and_save_portal_config, + portal::update_portal_config, }, consts::AMOUNT_FOR_DISTRIBUTION_TO_WALLETS, messages::{ @@ -154,7 +154,7 @@ pub async fn init( .await .context(MSG_GENESIS_DATABASE_ERR)?; - create_and_save_portal_config(ecosystem_config, shell) + update_portal_config(shell, chain_config) .await .context(MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR)?; diff --git a/zk_toolbox/crates/zk_inception/src/commands/containers.rs b/zk_toolbox/crates/zk_inception/src/commands/containers.rs index 17c32c04bc2..81d7970df83 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/containers.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/containers.rs @@ -40,7 +40,7 @@ pub fn initialize_docker(shell: &Shell, ecosystem: &EcosystemConfig) -> anyhow:: } fn start_container(shell: &Shell, compose_file: &str, retry_msg: &str) -> anyhow::Result<()> { - while let Err(err) = docker::up(shell, compose_file) { + while let Err(err) = docker::up(shell, compose_file, true) { logger::error(err.to_string()); if !common::PromptConfirm::new(retry_msg).default(true).ask() { return Err(err); diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create.rs index f9940c8a979..356b5322980 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create.rs @@ -15,7 +15,10 @@ use crate::{ containers::{initialize_docker, start_containers}, ecosystem::{ args::create::EcosystemCreateArgs, - create_configs::{create_erc20_deployment_config, create_initial_deployments_config}, + create_configs::{ + create_apps_config, create_erc20_deployment_config, + create_initial_deployments_config, + }, }, }, messages::{ @@ -75,6 +78,7 @@ fn create(args: EcosystemCreateArgs, shell: &Shell) -> anyhow::Result<()> { create_initial_deployments_config(shell, &configs_path)?; create_erc20_deployment_config(shell, &configs_path)?; + create_apps_config(shell, &configs_path)?; let 
ecosystem_config = EcosystemConfig { name: ecosystem_name.clone(), diff --git a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create_configs.rs b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create_configs.rs index b4f42313e3d..38358355ff9 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create_configs.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/ecosystem/create_configs.rs @@ -2,7 +2,8 @@ use std::path::Path; use config::{ forge_interface::deploy_ecosystem::input::{Erc20DeploymentConfig, InitialDeploymentConfig}, - traits::SaveConfigWithCommentAndBasePath, + traits::{SaveConfigWithBasePath, SaveConfigWithCommentAndBasePath}, + AppsEcosystemConfig, }; use xshell::Shell; @@ -33,3 +34,12 @@ pub fn create_erc20_deployment_config( )?; Ok(config) } + +pub fn create_apps_config( + shell: &Shell, + ecosystem_configs_path: &Path, +) -> anyhow::Result { + let config = AppsEcosystemConfig::default(); + config.save_with_base_path(shell, ecosystem_configs_path)?; + Ok(config) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/explorer/backend.rs b/zk_toolbox/crates/zk_inception/src/commands/explorer/backend.rs new file mode 100644 index 00000000000..6fdd3faa980 --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/explorer/backend.rs @@ -0,0 +1,39 @@ +use std::path::Path; + +use anyhow::Context; +use common::{config::global_config, docker}; +use config::{explorer_compose::ExplorerBackendComposeConfig, EcosystemConfig}; +use xshell::Shell; + +use crate::messages::{ + msg_explorer_chain_not_initialized, MSG_CHAIN_NOT_FOUND_ERR, + MSG_EXPLORER_FAILED_TO_RUN_DOCKER_SERVICES_ERR, +}; + +pub(crate) fn run(shell: &Shell) -> anyhow::Result<()> { + let ecosystem_config = EcosystemConfig::from_file(shell)?; + let chain_config = ecosystem_config + .load_chain(global_config().chain_name.clone()) + .context(MSG_CHAIN_NOT_FOUND_ERR)?; + let chain_name = chain_config.name.clone(); + // Read chain-level explorer backend docker compose file + let ecosystem_path = shell.current_dir(); + let backend_config_path = + ExplorerBackendComposeConfig::get_config_path(&ecosystem_path, &chain_config.name); + if !backend_config_path.exists() { + anyhow::bail!(msg_explorer_chain_not_initialized(&chain_name)); + } + // Run docker compose + run_backend(shell, &backend_config_path)?; + Ok(()) +} + +fn run_backend(shell: &Shell, explorer_compose_config_path: &Path) -> anyhow::Result<()> { + if let Some(docker_compose_file) = explorer_compose_config_path.to_str() { + docker::up(shell, docker_compose_file, false) + .context(MSG_EXPLORER_FAILED_TO_RUN_DOCKER_SERVICES_ERR)?; + } else { + anyhow::bail!("Invalid docker compose file"); + } + Ok(()) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/explorer/init.rs b/zk_toolbox/crates/zk_inception/src/commands/explorer/init.rs new file mode 100644 index 00000000000..43700d91a0d --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/explorer/init.rs @@ -0,0 +1,135 @@ +use anyhow::Context; +use common::{config::global_config, db, logger, Prompt}; +use config::{ + explorer::{ExplorerChainConfig, ExplorerConfig}, + explorer_compose::{ExplorerBackendComposeConfig, ExplorerBackendConfig, ExplorerBackendPorts}, + traits::{ConfigWithL2RpcUrl, SaveConfig}, + ChainConfig, EcosystemConfig, +}; +use slugify_rs::slugify; +use url::Url; +use xshell::Shell; + +use crate::{ + commands::chain::args::init::PortOffset, + consts::L2_BASE_TOKEN_ADDRESS, + defaults::{generate_explorer_db_name, DATABASE_EXPLORER_URL}, + messages::{ + 
msg_chain_load_err, msg_explorer_db_name_prompt, msg_explorer_db_url_prompt, + msg_explorer_initializing_database_for, MSG_EXPLORER_FAILED_TO_DROP_DATABASE_ERR, + MSG_EXPLORER_INITIALIZED, + }, +}; + +pub(crate) async fn run(shell: &Shell) -> anyhow::Result<()> { + let ecosystem_config = EcosystemConfig::from_file(shell)?; + // If specific chain is provided, initialize only that chain; otherwise, initialize all chains + let chains_enabled = match global_config().chain_name { + Some(ref chain_name) => vec![chain_name.clone()], + None => ecosystem_config.list_of_chains(), + }; + // Initialize chains one by one + let mut explorer_config = ExplorerConfig::read_or_create_default(shell)?; + for chain_name in chains_enabled.iter() { + // Load chain config + let chain_config = ecosystem_config + .load_chain(Some(chain_name.clone())) + .context(msg_chain_load_err(chain_name))?; + // Build backend config - parameters required to create explorer backend services + let backend_config = build_backend_config(&chain_config); + // Initialize explorer database + initialize_explorer_database(&backend_config.database_url).await?; + // Create explorer backend docker compose file + let l2_rpc_url = chain_config.get_general_config()?.get_l2_rpc_url()?; + let backend_compose_config = + ExplorerBackendComposeConfig::new(chain_name, l2_rpc_url, &backend_config)?; + let backend_compose_config_path = + ExplorerBackendComposeConfig::get_config_path(&shell.current_dir(), chain_name); + backend_compose_config.save(shell, &backend_compose_config_path)?; + // Add chain to explorer.json + let explorer_chain_config = build_explorer_chain_config(&chain_config, &backend_config)?; + explorer_config.add_chain_config(&explorer_chain_config); + } + // Save explorer config + let config_path = ExplorerConfig::get_config_path(&shell.current_dir()); + explorer_config.save(shell, config_path)?; + + logger::outro(MSG_EXPLORER_INITIALIZED); + Ok(()) +} + +fn build_backend_config(chain_config: &ChainConfig) -> ExplorerBackendConfig { + // Prompt explorer database name + logger::info(msg_explorer_initializing_database_for(&chain_config.name)); + let db_config = fill_database_values_with_prompt(chain_config); + + // Allocate ports for backend services + let backend_ports = allocate_explorer_services_ports(chain_config); + + // Build explorer backend config + ExplorerBackendConfig::new(db_config.full_url(), &backend_ports) +} + +async fn initialize_explorer_database(db_url: &Url) -> anyhow::Result<()> { + let db_config = db::DatabaseConfig::from_url(db_url)?; + db::drop_db_if_exists(&db_config) + .await + .context(MSG_EXPLORER_FAILED_TO_DROP_DATABASE_ERR)?; + db::init_db(&db_config).await?; + Ok(()) +} + +fn fill_database_values_with_prompt(config: &ChainConfig) -> db::DatabaseConfig { + let defaul_db_name: String = generate_explorer_db_name(config); + let chain_name = config.name.clone(); + let explorer_db_url = Prompt::new(&msg_explorer_db_url_prompt(&chain_name)) + .default(DATABASE_EXPLORER_URL.as_str()) + .ask(); + let explorer_db_name: String = Prompt::new(&msg_explorer_db_name_prompt(&chain_name)) + .default(&defaul_db_name) + .ask(); + let explorer_db_name = slugify!(&explorer_db_name, separator = "_"); + db::DatabaseConfig::new(explorer_db_url, explorer_db_name) +} + +fn allocate_explorer_services_ports(chain_config: &ChainConfig) -> ExplorerBackendPorts { + // Try to allocate intuitive ports with an offset from the defaults + let offset: u16 = PortOffset::from_chain_id(chain_config.id as u16).into(); + 
ExplorerBackendPorts::default().with_offset(offset) +} + +fn build_explorer_chain_config( + chain_config: &ChainConfig, + backend_config: &ExplorerBackendConfig, +) -> anyhow::Result { + let general_config = chain_config.get_general_config()?; + // Get L2 RPC URL from general config + let l2_rpc_url = general_config.get_l2_rpc_url()?; + // Get Verification API URL from general config + let verification_api_url = general_config + .contract_verifier + .as_ref() + .map(|verifier| &verifier.url) + .context("verification_url")?; + // Build API URL + let api_port = backend_config.ports.api_http_port; + let api_url = format!("http://127.0.0.1:{}", api_port); + + // Build explorer chain config + Ok(ExplorerChainConfig { + name: chain_config.name.clone(), + l2_network_name: chain_config.name.clone(), + l2_chain_id: chain_config.chain_id.as_u64(), + rpc_url: l2_rpc_url.to_string(), + api_url: api_url.to_string(), + base_token_address: L2_BASE_TOKEN_ADDRESS.to_string(), + hostnames: Vec::new(), + icon: "/images/icons/zksync-arrows.svg".to_string(), + maintenance: false, + published: true, + bridge_url: None, + l1_explorer_url: None, + verification_api_url: Some(verification_api_url.to_string()), + other: serde_json::Value::Null, + }) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/explorer/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/explorer/mod.rs new file mode 100644 index 00000000000..4b66d49598c --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/explorer/mod.rs @@ -0,0 +1,27 @@ +use clap::Subcommand; +use xshell::Shell; + +mod backend; +mod init; +mod run; + +#[derive(Subcommand, Debug)] +pub enum ExplorerCommands { + /// Initialize explorer (create database to store explorer data and generate docker + /// compose file with explorer services). Runs for all chains, unless --chain is passed + Init, + /// Start explorer backend services (api, data_fetcher, worker) for a given chain. 
+ /// Uses default chain, unless --chain is passed + #[command(alias = "backend")] + RunBackend, + /// Run explorer app + Run, +} + +pub(crate) async fn run(shell: &Shell, args: ExplorerCommands) -> anyhow::Result<()> { + match args { + ExplorerCommands::Init => init::run(shell).await, + ExplorerCommands::Run => run::run(shell), + ExplorerCommands::RunBackend => backend::run(shell), + } +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/explorer/run.rs b/zk_toolbox/crates/zk_inception/src/commands/explorer/run.rs new file mode 100644 index 00000000000..a6519f62edb --- /dev/null +++ b/zk_toolbox/crates/zk_inception/src/commands/explorer/run.rs @@ -0,0 +1,98 @@ +use std::path::Path; + +use anyhow::Context; +use common::{config::global_config, docker, logger}; +use config::{explorer::*, traits::SaveConfig, AppsEcosystemConfig, EcosystemConfig}; +use xshell::Shell; + +use crate::{ + consts::{EXPLORER_APP_DOCKER_CONFIG_PATH, EXPLORER_APP_DOCKER_IMAGE}, + messages::{ + msg_explorer_running_with_config, msg_explorer_starting_on, + MSG_EXPLORER_FAILED_TO_CREATE_CONFIG_ERR, MSG_EXPLORER_FAILED_TO_FIND_ANY_CHAIN_ERR, + MSG_EXPLORER_FAILED_TO_RUN_DOCKER_ERR, + }, +}; + +pub(crate) fn run(shell: &Shell) -> anyhow::Result<()> { + let ecosystem_config = EcosystemConfig::from_file(shell)?; + let ecosystem_path = shell.current_dir(); + // Get ecosystem level apps.yaml config + let apps_config = AppsEcosystemConfig::read_or_create_default(shell)?; + // If specific_chain is provided, run only with that chain; otherwise, run with all chains + let chains_enabled = match global_config().chain_name { + Some(ref chain_name) => vec![chain_name.clone()], + None => ecosystem_config.list_of_chains(), + }; + + // Read explorer config + let config_path = ExplorerConfig::get_config_path(&ecosystem_path); + let mut explorer_config = ExplorerConfig::read_or_create_default(shell) + .context(MSG_EXPLORER_FAILED_TO_CREATE_CONFIG_ERR)?; + + // Validate and update explorer config + explorer_config.filter(&ecosystem_config.list_of_chains()); + explorer_config.hide_except(&chains_enabled); + if explorer_config.is_empty() { + anyhow::bail!(MSG_EXPLORER_FAILED_TO_FIND_ANY_CHAIN_ERR); + } + + // Save explorer config + explorer_config.save(shell, &config_path)?; + + let config_js_path = explorer_config + .save_as_js(shell) + .context(MSG_EXPLORER_FAILED_TO_CREATE_CONFIG_ERR)?; + + logger::info(msg_explorer_running_with_config(&config_path)); + logger::info(msg_explorer_starting_on( + "127.0.0.1", + apps_config.explorer.http_port, + )); + let name = explorer_app_name(&ecosystem_config.name); + run_explorer( + shell, + &config_js_path, + &name, + apps_config.explorer.http_port, + )?; + Ok(()) +} + +fn run_explorer( + shell: &Shell, + config_file_path: &Path, + name: &str, + port: u16, +) -> anyhow::Result<()> { + let port_mapping = format!("{}:{}", port, port); + let volume_mapping = format!( + "{}:{}", + config_file_path.display(), + EXPLORER_APP_DOCKER_CONFIG_PATH + ); + + let docker_args: Vec = vec![ + "--platform".to_string(), + "linux/amd64".to_string(), + "--name".to_string(), + name.to_string(), + "-p".to_string(), + port_mapping, + "-v".to_string(), + volume_mapping, + "-e".to_string(), + format!("PORT={}", port), + "--rm".to_string(), + ]; + + docker::run(shell, EXPLORER_APP_DOCKER_IMAGE, docker_args) + .with_context(|| MSG_EXPLORER_FAILED_TO_RUN_DOCKER_ERR)?; + Ok(()) +} + +/// Generates a name for the explorer app Docker container. +/// Will be passed as `--name` argument to `docker run`. 
+fn explorer_app_name(ecosystem_name: &str) -> String { + format!("{}-explorer-app", ecosystem_name) +} diff --git a/zk_toolbox/crates/zk_inception/src/commands/mod.rs b/zk_toolbox/crates/zk_inception/src/commands/mod.rs index 0ac363beb2d..523faea0478 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/mod.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/mod.rs @@ -3,6 +3,7 @@ pub mod chain; pub mod containers; pub mod contract_verifier; pub mod ecosystem; +pub mod explorer; pub mod external_node; pub mod portal; pub mod prover; diff --git a/zk_toolbox/crates/zk_inception/src/commands/portal.rs b/zk_toolbox/crates/zk_inception/src/commands/portal.rs index cc939f3fb3e..5bf21121177 100644 --- a/zk_toolbox/crates/zk_inception/src/commands/portal.rs +++ b/zk_toolbox/crates/zk_inception/src/commands/portal.rs @@ -1,33 +1,30 @@ -use std::{collections::HashMap, path::Path}; +use std::path::Path; -use anyhow::{anyhow, Context}; -use common::{docker, ethereum, logger}; +use anyhow::Context; +use common::{config::global_config, docker, ethereum, logger}; use config::{ portal::*, - traits::{ReadConfig, SaveConfig}, - ChainConfig, EcosystemConfig, + traits::{ConfigWithL2RpcUrl, SaveConfig}, + AppsEcosystemConfig, ChainConfig, EcosystemConfig, }; use ethers::types::Address; use types::{BaseToken, TokenInfo}; use xshell::Shell; use crate::{ - commands::args::PortalArgs, - consts::{L2_BASE_TOKEN_ADDRESS, PORTAL_DOCKER_CONTAINER_PORT, PORTAL_DOCKER_IMAGE}, + consts::{L2_BASE_TOKEN_ADDRESS, PORTAL_DOCKER_CONFIG_PATH, PORTAL_DOCKER_IMAGE}, messages::{ - msg_portal_starting_on, MSG_PORTAL_CONFIG_IS_EMPTY_ERR, - MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR, MSG_PORTAL_FAILED_TO_RUN_DOCKER_ERR, + msg_portal_running_with_config, msg_portal_starting_on, + MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR, MSG_PORTAL_FAILED_TO_FIND_ANY_CHAIN_ERR, + MSG_PORTAL_FAILED_TO_RUN_DOCKER_ERR, }, }; -async fn create_hyperchain_config(chain_config: &ChainConfig) -> anyhow::Result { +async fn build_portal_chain_config( + chain_config: &ChainConfig, +) -> anyhow::Result { // Get L2 RPC URL from general config - let general_config = chain_config.get_general_config()?; - let rpc_url = general_config - .api_config - .as_ref() - .map(|api_config| &api_config.web3_json_rpc.http_url) - .context("api_config")?; + let l2_rpc_url = chain_config.get_general_config()?.get_l2_rpc_url()?; // Get L1 RPC URL from secrects config let secrets_config = chain_config.get_secrets_config()?; let l1_rpc_url = secrets_config @@ -68,97 +65,126 @@ async fn create_hyperchain_config(chain_config: &ChainConfig) -> anyhow::Result< name: Some(base_token_info.name.to_string()), }]; // Build hyperchain config - Ok(HyperchainConfig { + Ok(PortalChainConfig { network: NetworkConfig { id: chain_config.chain_id.as_u64(), key: chain_config.name.clone(), name: chain_config.name.clone(), - rpc_url: rpc_url.to_string(), + rpc_url: l2_rpc_url.to_string(), l1_network, public_l1_network_id: None, block_explorer_url: None, block_explorer_api: None, + hidden: None, + other: serde_json::Value::Null, }, tokens, }) } -async fn create_hyperchains_config( - chain_configs: &[ChainConfig], -) -> anyhow::Result { - let mut hyperchain_configs = Vec::new(); - for chain_config in chain_configs { - if let Ok(config) = create_hyperchain_config(chain_config).await { - hyperchain_configs.push(config) - } - } - Ok(HyperchainsConfig(hyperchain_configs)) +pub async fn update_portal_config( + shell: &Shell, + chain_config: &ChainConfig, +) -> anyhow::Result { + // Build and append portal chain 
config to the portal config + let portal_chain_config = build_portal_chain_config(chain_config).await?; + let mut portal_config = PortalConfig::read_or_create_default(shell)?; + portal_config.add_chain_config(&portal_chain_config); + // Save portal config + let config_path = PortalConfig::get_config_path(&shell.current_dir()); + portal_config.save(shell, config_path)?; + Ok(portal_config) } -pub async fn create_portal_config( +/// Validates portal config - appends missing chains and removes unknown chains +async fn validate_portal_config( + portal_config: &mut PortalConfig, ecosystem_config: &EcosystemConfig, -) -> anyhow::Result { - let chains: Vec = ecosystem_config.list_of_chains(); - let mut chain_configs = Vec::new(); - for chain in chains { - if let Some(chain_config) = ecosystem_config.load_chain(Some(chain.clone())) { - chain_configs.push(chain_config) +) -> anyhow::Result<()> { + let chain_names = ecosystem_config.list_of_chains(); + for chain_name in &chain_names { + if portal_config.contains(chain_name) { + continue; + } + // Append missing chain, chain might not be initialized, so ignoring errors + if let Some(chain_config) = ecosystem_config.load_chain(Some(chain_name.clone())) { + if let Ok(portal_chain_config) = build_portal_chain_config(&chain_config).await { + portal_config.add_chain_config(&portal_chain_config); + } } } - let hyperchains_config = create_hyperchains_config(&chain_configs).await?; - if hyperchains_config.is_empty() { - anyhow::bail!("Failed to create any valid hyperchain config") - } - let runtime_config = PortalRuntimeConfig { - node_type: "hyperchain".to_string(), - hyperchains_config, - }; - Ok(runtime_config) -} - -pub async fn create_and_save_portal_config( - ecosystem_config: &EcosystemConfig, - shell: &Shell, -) -> anyhow::Result { - let portal_config = create_portal_config(ecosystem_config).await?; - let config_path = PortalRuntimeConfig::get_config_path(&shell.current_dir()); - portal_config.save(shell, config_path)?; - Ok(portal_config) + portal_config.filter(&chain_names); + Ok(()) } -pub async fn run(shell: &Shell, args: PortalArgs) -> anyhow::Result<()> { +pub async fn run(shell: &Shell) -> anyhow::Result<()> { let ecosystem_config: EcosystemConfig = EcosystemConfig::from_file(shell)?; - let config_path = PortalRuntimeConfig::get_config_path(&shell.current_dir()); - logger::info(format!( - "Using portal config file at {}", - config_path.display() - )); - - let portal_config = match PortalRuntimeConfig::read(shell, &config_path) { - Ok(config) => config, - Err(_) => create_and_save_portal_config(&ecosystem_config, shell) - .await - .context(MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR)?, + // Get ecosystem level apps.yaml config + let apps_config = AppsEcosystemConfig::read_or_create_default(shell)?; + // Display all chains, unless --chain is passed + let chains_enabled = match global_config().chain_name { + Some(ref chain_name) => vec![chain_name.clone()], + None => ecosystem_config.list_of_chains(), }; - if portal_config.hyperchains_config.is_empty() { - return Err(anyhow!(MSG_PORTAL_CONFIG_IS_EMPTY_ERR)); + + // Read portal config + let config_path = PortalConfig::get_config_path(&shell.current_dir()); + let mut portal_config = PortalConfig::read_or_create_default(shell) + .context(MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR)?; + + // Validate and update portal config + validate_portal_config(&mut portal_config, &ecosystem_config).await?; + portal_config.hide_except(&chains_enabled); + if portal_config.is_empty() { + 
anyhow::bail!(MSG_PORTAL_FAILED_TO_FIND_ANY_CHAIN_ERR); } - logger::info(msg_portal_starting_on("127.0.0.1", args.port)); - run_portal(shell, &config_path, args.port)?; + // Save portal config + portal_config.save(shell, &config_path)?; + + let config_js_path = portal_config + .save_as_js(shell) + .context(MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR)?; + + logger::info(msg_portal_running_with_config(&config_path)); + logger::info(msg_portal_starting_on( + "127.0.0.1", + apps_config.portal.http_port, + )); + let name = portal_app_name(&ecosystem_config.name); + run_portal(shell, &config_js_path, &name, apps_config.portal.http_port)?; Ok(()) } -fn run_portal(shell: &Shell, config_file_path: &Path, port: u16) -> anyhow::Result<()> { - let port_mapping = format!("{}:{}", port, PORTAL_DOCKER_CONTAINER_PORT); - let volume_mapping = format!("{}:/usr/src/app/dist/config.js", config_file_path.display()); +fn run_portal(shell: &Shell, config_file_path: &Path, name: &str, port: u16) -> anyhow::Result<()> { + let port_mapping = format!("{}:{}", port, port); + let volume_mapping = format!( + "{}:{}", + config_file_path.display(), + PORTAL_DOCKER_CONFIG_PATH + ); - let mut docker_args: HashMap = HashMap::new(); - docker_args.insert("--platform".to_string(), "linux/amd64".to_string()); - docker_args.insert("-p".to_string(), port_mapping); - docker_args.insert("-v".to_string(), volume_mapping); + let docker_args: Vec = vec![ + "--platform".to_string(), + "linux/amd64".to_string(), + "--name".to_string(), + name.to_string(), + "-p".to_string(), + port_mapping, + "-v".to_string(), + volume_mapping, + "-e".to_string(), + format!("PORT={}", port), + "--rm".to_string(), + ]; docker::run(shell, PORTAL_DOCKER_IMAGE, docker_args) .with_context(|| MSG_PORTAL_FAILED_TO_RUN_DOCKER_ERR)?; Ok(()) } + +/// Generates a name for the portal app Docker container. +/// Will be passed as `--name` argument to `docker run`. +fn portal_app_name(ecosystem_name: &str) -> String { + format!("{}-portal-app", ecosystem_name) +} diff --git a/zk_toolbox/crates/zk_inception/src/consts.rs b/zk_toolbox/crates/zk_inception/src/consts.rs index 7463dc28570..7db976c6103 100644 --- a/zk_toolbox/crates/zk_inception/src/consts.rs +++ b/zk_toolbox/crates/zk_inception/src/consts.rs @@ -8,5 +8,10 @@ pub const DEFAULT_CREDENTIALS_FILE: &str = "~/.config/gcloud/application_default pub const DEFAULT_PROOF_STORE_DIR: &str = "artifacts"; pub const BELLMAN_CUDA_DIR: &str = "era-bellman-cuda"; pub const L2_BASE_TOKEN_ADDRESS: &str = "0x000000000000000000000000000000000000800A"; + +/// Path to the JS runtime config for the block-explorer-app docker container to be mounted to +pub const EXPLORER_APP_DOCKER_CONFIG_PATH: &str = "/usr/src/app/packages/app/dist/config.js"; +pub const EXPLORER_APP_DOCKER_IMAGE: &str = "matterlabs/block-explorer-app"; +/// Path to the JS runtime config for the dapp-portal docker container to be mounted to +pub const PORTAL_DOCKER_CONFIG_PATH: &str = "/usr/src/app/dist/config.js"; pub const PORTAL_DOCKER_IMAGE: &str = "matterlabs/dapp-portal"; -pub const PORTAL_DOCKER_CONTAINER_PORT: u16 = 3000; diff --git a/zk_toolbox/crates/zk_inception/src/defaults.rs b/zk_toolbox/crates/zk_inception/src/defaults.rs index 34b0eeae419..544e2837740 100644 --- a/zk_toolbox/crates/zk_inception/src/defaults.rs +++ b/zk_toolbox/crates/zk_inception/src/defaults.rs @@ -7,6 +7,8 @@ lazy_static! 
{ Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); pub static ref DATABASE_PROVER_URL: Url = Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); + pub static ref DATABASE_EXPLORER_URL: Url = + Url::parse("postgres://postgres:notsecurepassword@localhost:5432").unwrap(); } pub const ROCKS_DB_STATE_KEEPER: &str = "state_keeper"; @@ -40,6 +42,14 @@ pub fn generate_db_names(config: &ChainConfig) -> DBNames { } } +pub fn generate_explorer_db_name(config: &ChainConfig) -> String { + format!( + "zksync_explorer_{}_{}", + config.l1_network.to_string().to_ascii_lowercase(), + config.name + ) +} + pub fn generate_external_node_db_name(config: &ChainConfig) -> String { format!( "external_node_{}_{}", diff --git a/zk_toolbox/crates/zk_inception/src/main.rs b/zk_toolbox/crates/zk_inception/src/main.rs index cb1b5388196..f6f7d83dede 100644 --- a/zk_toolbox/crates/zk_inception/src/main.rs +++ b/zk_toolbox/crates/zk_inception/src/main.rs @@ -13,11 +13,8 @@ use config::EcosystemConfig; use xshell::Shell; use crate::commands::{ - args::{PortalArgs, RunServerArgs}, - chain::ChainCommands, - ecosystem::EcosystemCommands, - external_node::ExternalNodeCommands, - prover::ProverCommands, + args::RunServerArgs, chain::ChainCommands, ecosystem::EcosystemCommands, + explorer::ExplorerCommands, external_node::ExternalNodeCommands, prover::ProverCommands, }; pub mod accept_ownership; @@ -60,7 +57,10 @@ pub enum InceptionSubcommands { #[command(subcommand)] ContractVerifier(ContractVerifierCommands), /// Run dapp-portal - Portal(PortalArgs), + Portal, + /// Run block-explorer + #[command(subcommand)] + Explorer(ExplorerCommands), /// Update ZKsync #[command(alias = "u")] Update(UpdateArgs), @@ -123,7 +123,8 @@ async fn run_subcommand(inception_args: Inception, shell: &Shell) -> anyhow::Res InceptionSubcommands::ContractVerifier(args) => { commands::contract_verifier::run(shell, args).await? 
} - InceptionSubcommands::Portal(args) => commands::portal::run(shell, args).await?, + InceptionSubcommands::Explorer(args) => commands::explorer::run(shell, args).await?, + InceptionSubcommands::Portal => commands::portal::run(shell).await?, InceptionSubcommands::Update(args) => commands::update::run(shell, args)?, InceptionSubcommands::Markdown => { clap_markdown::print_help_markdown::(); diff --git a/zk_toolbox/crates/zk_inception/src/messages.rs b/zk_toolbox/crates/zk_inception/src/messages.rs index 25933d39db3..cca3e3b549b 100644 --- a/zk_toolbox/crates/zk_inception/src/messages.rs +++ b/zk_toolbox/crates/zk_inception/src/messages.rs @@ -117,6 +117,9 @@ pub(super) fn msg_chain_doesnt_exist_err(chain_name: &str, chains: &Vec) chain_name, chains ) } +pub(super) fn msg_chain_load_err(chain_name: &str) -> String { + format!("Failed to load chain config for {chain_name}") +} /// Chain create related messages pub(super) const MSG_PROVER_MODE_HELP: &str = "Prover options"; @@ -199,6 +202,14 @@ pub(super) fn msg_server_db_name_prompt(chain_name: &str) -> String { format!("Please provide server database name for chain {chain_name}") } +pub(super) fn msg_explorer_db_url_prompt(chain_name: &str) -> String { + format!("Please provide explorer database url for chain {chain_name}") +} + +pub(super) fn msg_explorer_db_name_prompt(chain_name: &str) -> String { + format!("Please provide explorer database name for chain {chain_name}") +} + /// Chain initialize bridges related messages pub(super) const MSG_DEPLOYING_L2_CONTRACT_SPINNER: &str = "Deploying l2 contracts"; @@ -231,14 +242,46 @@ pub(super) const MSG_FAILED_TO_RUN_SERVER_ERR: &str = "Failed to start server"; pub(super) const MSG_PREPARING_EN_CONFIGS: &str = "Preparing External Node config"; /// Portal related messages -pub(super) const MSG_PORTAL_CONFIG_IS_EMPTY_ERR: &str = "Hyperchains config is empty"; +pub(super) const MSG_PORTAL_FAILED_TO_FIND_ANY_CHAIN_ERR: &str = + "Failed to find any valid chain to run portal for"; pub(super) const MSG_PORTAL_FAILED_TO_CREATE_CONFIG_ERR: &str = "Failed to create portal config"; pub(super) const MSG_PORTAL_FAILED_TO_RUN_DOCKER_ERR: &str = "Failed to run portal docker container"; +pub(super) fn msg_portal_running_with_config(path: &Path) -> String { + format!("Running portal with configuration from: {}", path.display()) +} pub(super) fn msg_portal_starting_on(host: &str, port: u16) -> String { format!("Starting portal on http://{host}:{port}") } +/// Explorer related messages +pub(super) const MSG_EXPLORER_FAILED_TO_DROP_DATABASE_ERR: &str = + "Failed to drop explorer database"; +pub(super) const MSG_EXPLORER_FAILED_TO_RUN_DOCKER_SERVICES_ERR: &str = + "Failed to run docker compose with explorer services"; +pub(super) const MSG_EXPLORER_FAILED_TO_RUN_DOCKER_ERR: &str = + "Failed to run explorer docker container"; +pub(super) const MSG_EXPLORER_FAILED_TO_CREATE_CONFIG_ERR: &str = + "Failed to create explorer config"; +pub(super) const MSG_EXPLORER_FAILED_TO_FIND_ANY_CHAIN_ERR: &str = + "Failed to find any valid chain to run explorer for. 
Did you run `zk_inception explorer init`?"; +pub(super) const MSG_EXPLORER_INITIALIZED: &str = "Explorer has been initialized successfully"; +pub(super) fn msg_explorer_initializing_database_for(chain: &str) -> String { + format!("Initializing explorer database for {chain} chain") +} +pub(super) fn msg_explorer_running_with_config(path: &Path) -> String { + format!( + "Running explorer with configuration from: {}", + path.display() + ) +} +pub(super) fn msg_explorer_starting_on(host: &str, port: u16) -> String { + format!("Starting explorer on http://{host}:{port}") +} +pub(super) fn msg_explorer_chain_not_initialized(chain: &str) -> String { + format!("Chain {chain} is not initialized for explorer: run `zk_inception explorer init --chain {chain}` first") +} + /// Forge utils related messages pub(super) const MSG_DEPLOYER_PK_NOT_SET_ERR: &str = "Deployer private key is not set";
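For reference, the ecosystem-level `apps.yaml` mentioned in the README section above is created by `zk_inception ecosystem create` (via `create_apps_config`) and read back by the `portal` and `explorer` commands through `AppsEcosystemConfig::read_or_create_default`. A minimal sketch of its default contents, assuming the straightforward YAML serialization of `AppsEcosystemConfig::default()` with `DEFAULT_PORTAL_PORT` and `DEFAULT_EXPLORER_PORT` from this patch:

```yaml
# configs/apps.yaml - sketch of the generated defaults (layout follows AppsEcosystemConfig)
portal:
  http_port: 3030
explorer:
  http_port: 3010
```

Per the path constants in this patch, the other generated files land nearby: `configs/apps/portal.config.json` and `configs/apps/explorer.config.json` hold the editable JSON configs, `configs/.generated/` holds the auto-generated `*.config.js` runtime files mounted into the Docker containers, and each chain gets its own `chains/<chain_name>/configs/explorer-docker-compose.yml`.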