From b69695a020ba4d2850e069e7a0d53a03c5d92ac2 Mon Sep 17 00:00:00 2001
From: Karrq
Date: Mon, 27 Jan 2025 15:28:56 +0100
Subject: [PATCH] feat(zk): zksolc linking (#800)

* feat(zk): zksolc linking
* fix(zk:libs): calculate addresses w/ proper nonce
  chore: identify action to deploy libs in EraVM
* fix: don't always assume DCC is present
* feat(executor): `deploy_library` in strategy
* fix(zk): create address computation
* chore: cleanup unused imports
* test(zk): deploy-time linking (script/test)
* chore: default zksolc to 1.5.8
* chore: lints
* refactor: allow multiple lib deployments
* refactor(link): move to executor strategy
  fix(strategy): remove get_mut for `DualCompiledContracts`
* fix: compilation
* feat(strategy:link): pass config
* feat(zk:link): dedicated linker module
  fix(zk:link): use version from config
  refactor(zk:config): extract config -> zksolc compiler logic into function
  chore: lints
* chore: more lints
  chore: remove accidental file re-inclusion
* feat(zk:link): version check
* chore: lints & fmt
* chore: more formatting
* fix(zk:link): retrieve factory dep hash
  refactor(zk:link): avoid attempting to link fully-linked contracts
* fix(zk:compilers): remove dead `libraries` module
* feat(link:zk): create2 linking
  feat(script:zk): link with zksolc
* chore: formatting
* feat(compiler:zk): `factory_dependencies_unlinked`
  test(zk:link): use 1.5.9 for deploy-time linking
  feat(zk:link): detect factory dependencies as link references for library lookup
  fix(zk:link): require 1.5.9 for deploy-time-linking
* refactor: dual compiled contracts as map
* feat(link:zk): recursive factory deps libs lookup
  fix(link:zk): consistent artifact id prefixes
  chore: formatting
  refactor(link:zk): improve link targets filtering
* fix(link:zk): invert bool
* fix(artifacts:zk): `is_unlinked` logic
  fix(link:zk): don't preemptively strip file prefixes
  fix(link:zk): better factory dep detection
  chore: formatting
* feat(strategy): `deploy_library` CREATE2 mode
  feat(strategy): return "broadcastable" tx
  feat(link:zk): populate newly linked factory deps
  feat(script): use `deploy_library`
* fix(zk:transact): detect direct deployments
* feat(script:zk): match CREATE/CREATE2 with EVM
* fix(zk:libs): encode extra metadata
* refactor(executors): `DeployLibResult`
  fix(lib:zk): remove EVM lib deployments from broadcastable txs
* fix(artifact:zk): avoid underflow
  refactor(artifact:zk): `all_factory_deps` method
  chore: formatting
* fix(test:zk): proper stripping during link
  feat(link:script): register target & deps
* chore: fix spelling & docs
  fix(test:zk): default to zksolc 1.5.9
* fix(test:script): avoid expecting create2 output
  fix(script): avoid unpacking create2 result
  fix(script): avoid marking create2 result address as persistent
* chore: clippy
* chore: formatting
* chore: codespell
* fix(artifacts:zk): `is_unlinked` underflow
* feat(compiler:zk): `objectFormat`
* fix(script:link:zk): skip version check if no libs
* fix(test:link:zk): avoid version check w/o libs
* chore: clippy
* fix(compiler:zk): optional object_format
* fix(link:zk): ignore target version for lookup
* fix(link:zk): proper EVM deployed_bc/bc
* chore: fmt
* chore: fmt
* test(zk): `DualCompiledContracts::find` units
* fix: clippy
* refactor(strategy): dedicated linking module
* fix: forgot to commit the new files
* chore: fmt
* fix(zk:link): new nonce types
* test(zk): use default zksolc version normally
* docs: add notes on diverging sections
* fix(zk): remove duplicate code from merge
* refactor(script:zk): move linking to own module
* chore: fmt
* chore: clippy
---
 Cargo.lock | 77 +--
 crates/config/src/zksync.rs | 54 +-
 crates/evm/evm/Cargo.toml | 1 +
 crates/evm/evm/src/executors/mod.rs | 19 +-
 crates/evm/evm/src/executors/strategy.rs | 81 ++-
 .../evm/src/executors/strategy/libraries.rs | 157 ++++++
 crates/forge/bin/cmd/create.rs | 12 +-
 crates/forge/src/multi_runner.rs | 115 +----
 crates/forge/src/runner.rs | 38 +-
 .../forge/tests/fixtures/zk/Libraries.s.sol | 13 +-
 crates/forge/tests/it/test_helpers.rs | 1 +
 crates/forge/tests/it/zk/linking.rs | 81 ++-
 crates/linking/Cargo.toml | 5 +
 crates/linking/src/lib.rs | 4 +
 crates/linking/src/zksync.rs | 466 ++++++++++++++++++
 crates/script/src/build.rs | 128 +++--
 crates/script/src/build/zksync.rs | 209 ++++++++
 crates/script/src/runner.rs | 89 ++--
 crates/strategy/zksync/Cargo.toml | 2 +
 .../strategy/zksync/src/cheatcode/context.rs | 47 +-
 .../cheatcode/runner/cheatcode_handlers.rs | 17 +-
 .../zksync/src/cheatcode/runner/mod.rs | 19 +-
 .../strategy/zksync/src/executor/context.rs | 7 +-
 crates/strategy/zksync/src/executor/runner.rs | 91 +++-
 .../zksync/src/executor/runner/libraries.rs | 295 +++++++++++
 crates/zksync/compilers/Cargo.toml | 1 -
 .../compilers/src/artifacts/contract.rs | 17 +-
 .../src/compilers/artifact_output/zk.rs | 26 +-
 .../compilers/src/compilers/zksolc/mod.rs | 5 +
 .../compilers/src/dual_compiled_contracts.rs | 406 ++++++++++++---
 crates/zksync/compilers/src/lib.rs | 2 +-
 crates/zksync/compilers/src/libraries.rs | 129 -----
 crates/zksync/compilers/src/link.rs | 137 +++++
 crates/zksync/core/src/lib.rs | 37 +-
 crates/zksync/core/src/vm/runner.rs | 9 +-
 testdata/zk/WithLibraries.sol | 2 +-
 testdata/zk/WithLibraries.t.sol | 17 +
 37 files changed, 2252 insertions(+), 564 deletions(-)
 create mode 100644 crates/evm/evm/src/executors/strategy/libraries.rs
 create mode 100644 crates/linking/src/zksync.rs
 create mode 100644 crates/script/src/build/zksync.rs
 create mode 100644 crates/strategy/zksync/src/executor/runner/libraries.rs
 delete mode 100644 crates/zksync/compilers/src/libraries.rs
 create mode 100644 crates/zksync/compilers/src/link.rs
 create mode 100644 testdata/zk/WithLibraries.t.sol

diff --git a/Cargo.lock b/Cargo.lock
index 0a00b550e..406961469 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -1146,7 +1146,7 @@ dependencies = [

 [[package]]
 name = "anvil"
-version = "0.0.3"
+version = "0.0.4"
 dependencies = [
  "alloy-chains",
  "alloy-consensus 0.8.0",
@@ -1217,7 +1217,7 @@ dependencies = [

 [[package]]
 name = "anvil-core"
-version = "0.0.3"
+version = "0.0.4"
 dependencies = [
  "alloy-consensus 0.8.0",
  "alloy-dyn-abi",
@@ -1241,7 +1241,7 @@ dependencies = [

 [[package]]
 name = "anvil-rpc"
-version = "0.0.3"
+version = "0.0.4"
 dependencies = [
  "serde",
  "serde_json",
@@ -1249,7 +1249,7 @@ dependencies = [

 [[package]]
 name = "anvil-server"
-version = "0.0.3"
+version = "0.0.4"
 dependencies = [
  "anvil-rpc",
  "async-trait",
@@ -2517,7 +2517,7 @@ checksum = "df8670b8c7b9dae1793364eafadf7239c40d669904660c5960d74cfd80b46a53"

 [[package]]
 name = "cast"
-version = "0.0.3"
+version = "0.0.4"
 dependencies = [
  "alloy-chains",
  "alloy-consensus 0.8.0",
@@ -2633,7 +2633,7 @@ checksum = "613afe47fcd5fac7ccf1db93babcb082c5994d996f20b8b159f2ad1658eb5724"

 [[package]]
 name = "chisel"
-version = "0.0.3"
+version = "0.0.4"
 dependencies = [
  "alloy-dyn-abi",
  "alloy-json-abi",
@@ -4332,7 +4332,7 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b"

 [[package]]
 name = "forge"
-version = "0.0.3"
+version = "0.0.4"
 dependencies = [
  "alloy-chains",
  "alloy-consensus 0.8.0",
@@ -4431,7 +4431,7 @@ dependencies = [ [[package]] name = "forge-doc" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "derive_more", @@ -4454,7 +4454,7 @@ dependencies = [ [[package]] name = "forge-fmt" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "ariadne", @@ -4470,7 +4470,7 @@ dependencies = [ [[package]] name = "forge-script" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-chains", "alloy-consensus 0.8.0", @@ -4519,7 +4519,7 @@ dependencies = [ [[package]] name = "forge-script-sequence" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-network 0.8.0", "alloy-primitives", @@ -4538,7 +4538,7 @@ dependencies = [ [[package]] name = "forge-sol-macro-gen" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-json-abi", "alloy-sol-macro-expander", @@ -4554,7 +4554,7 @@ dependencies = [ [[package]] name = "forge-verify" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", @@ -4616,7 +4616,7 @@ dependencies = [ [[package]] name = "foundry-cheatcodes" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-consensus 0.8.0", "alloy-dyn-abi", @@ -4668,7 +4668,7 @@ dependencies = [ [[package]] name = "foundry-cheatcodes-common" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "revm", @@ -4676,7 +4676,7 @@ dependencies = [ [[package]] name = "foundry-cheatcodes-spec" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-sol-types", "foundry-macros", @@ -4687,7 +4687,7 @@ dependencies = [ [[package]] name = "foundry-cli" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-chains", "alloy-dyn-abi", @@ -4730,7 +4730,7 @@ dependencies = [ [[package]] name = "foundry-common" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-consensus 0.8.0", "alloy-contract", @@ -4782,7 +4782,7 @@ dependencies = [ [[package]] name = "foundry-common-fmt" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-consensus 0.8.0", "alloy-dyn-abi", @@ -4910,7 +4910,7 @@ dependencies = [ [[package]] name = "foundry-config" -version = "0.0.3" +version = "0.0.4" dependencies = [ "Inflector", "alloy-chains", @@ -4949,7 +4949,7 @@ dependencies = [ [[package]] name = "foundry-debugger" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "crossterm", @@ -4967,7 +4967,7 @@ dependencies = [ [[package]] name = "foundry-evm" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-dyn-abi", "alloy-json-abi", @@ -4983,6 +4983,7 @@ dependencies = [ "foundry-evm-coverage", "foundry-evm-fuzz", "foundry-evm-traces", + "foundry-linking", "foundry-zksync-compilers", "foundry-zksync-core", "foundry-zksync-inspectors", @@ -4998,7 +4999,7 @@ dependencies = [ [[package]] name = "foundry-evm-abi" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "alloy-sol-types", @@ -5011,7 +5012,7 @@ dependencies = [ [[package]] name = "foundry-evm-core" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-consensus 0.8.0", "alloy-dyn-abi", @@ -5047,7 +5048,7 @@ dependencies = [ [[package]] name = "foundry-evm-coverage" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "eyre", @@ -5062,7 +5063,7 @@ dependencies = [ [[package]] name = "foundry-evm-fuzz" -version = "0.0.3" +version = "0.0.4" dependencies = [ "ahash", "alloy-dyn-abi", @@ -5089,7 +5090,7 @@ dependencies = [ [[package]] name = "foundry-evm-traces" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-dyn-abi", 
"alloy-json-abi", @@ -5142,17 +5143,20 @@ dependencies = [ [[package]] name = "foundry-linking" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "foundry-compilers", + "foundry-zksync-compilers", + "foundry-zksync-core", "semver 1.0.23", "thiserror 2.0.6", + "tracing", ] [[package]] name = "foundry-macros" -version = "0.0.3" +version = "0.0.4" dependencies = [ "proc-macro-error", "proc-macro2", @@ -5162,7 +5166,7 @@ dependencies = [ [[package]] name = "foundry-strategy-zksync" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-json-abi", "alloy-primitives", @@ -5172,9 +5176,11 @@ dependencies = [ "eyre", "foundry-cheatcodes", "foundry-common", + "foundry-compilers", "foundry-config", "foundry-evm", "foundry-evm-core", + "foundry-linking", "foundry-zksync-compilers", "foundry-zksync-core", "itertools 0.13.0", @@ -5189,7 +5195,7 @@ dependencies = [ [[package]] name = "foundry-test-utils" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "alloy-provider", @@ -5217,7 +5223,7 @@ dependencies = [ [[package]] name = "foundry-wallets" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-consensus 0.8.0", "alloy-dyn-abi", @@ -5248,13 +5254,12 @@ dependencies = [ [[package]] name = "foundry-zksync-compilers" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-json-abi", "alloy-primitives", "dirs 5.0.1", "era-solc", - "eyre", "fd-lock", "foundry-compilers", "foundry-compilers-artifacts-solc", @@ -5276,7 +5281,7 @@ dependencies = [ [[package]] name = "foundry-zksync-core" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-network 0.8.0", "alloy-primitives", @@ -5305,7 +5310,7 @@ dependencies = [ [[package]] name = "foundry-zksync-inspectors" -version = "0.0.3" +version = "0.0.4" dependencies = [ "alloy-primitives", "foundry-evm-core", diff --git a/crates/config/src/zksync.rs b/crates/config/src/zksync.rs index 12db3a329..e5d8c1188 100644 --- a/crates/config/src/zksync.rs +++ b/crates/config/src/zksync.rs @@ -166,6 +166,30 @@ pub fn config_zksolc_settings(config: &Config) -> Result Result { + let zksolc = if let Some(zksolc) = + config_ensure_zksolc(config.zksync.zksolc.as_ref(), config.offline)? + { + zksolc + } else if !config.offline { + let default_version = semver::Version::new(1, 5, 10); + let mut zksolc = ZkSolc::find_installed_version(&default_version)?; + if zksolc.is_none() { + ZkSolc::blocking_install(&default_version)?; + zksolc = ZkSolc::find_installed_version(&default_version)?; + } + zksolc.unwrap_or_else(|| panic!("Could not install zksolc v{default_version}")) + } else { + "zksolc".into() + }; + + Ok(ZkSolcCompiler { zksolc, solc: config_solc_compiler(config)? }) +} + /// Create a new zkSync project pub fn config_create_project( config: &Config, @@ -193,23 +217,7 @@ pub fn config_create_project( builder = builder.sparse_output(filter); } - let zksolc = if let Some(zksolc) = - config_ensure_zksolc(config.zksync.zksolc.as_ref(), config.offline)? - { - zksolc - } else if !config.offline { - let default_version = semver::Version::new(1, 5, 10); - let mut zksolc = ZkSolc::find_installed_version(&default_version)?; - if zksolc.is_none() { - ZkSolc::blocking_install(&default_version)?; - zksolc = ZkSolc::find_installed_version(&default_version)?; - } - zksolc.unwrap_or_else(|| panic!("Could not install zksolc v{default_version}")) - } else { - "zksolc".into() - }; - - let zksolc_compiler = ZkSolcCompiler { zksolc, solc: config_solc_compiler(config)? 
}; + let zksolc_compiler = config_zksolc_compiler(config)?; let project = builder.build(zksolc_compiler)?; @@ -229,12 +237,12 @@ pub fn config_create_project( fn config_solc_compiler(config: &Config) -> Result { if let Some(path) = &config.zksync.solc_path { if !path.is_file() { - return Err(SolcError::msg(format!("`solc` {} does not exist", path.display()))) + return Err(SolcError::msg(format!("`solc` {} does not exist", path.display()))); } let version = get_solc_version_info(path)?.version; let solc = Solc::new_with_version(path, Version::new(version.major, version.minor, version.patch)); - return Ok(SolcCompiler::Specific(solc)) + return Ok(SolcCompiler::Specific(solc)); } if let Some(ref solc) = config.solc { @@ -256,7 +264,7 @@ fn config_solc_compiler(config: &Config) -> Result { } SolcReq::Local(path) => { if !path.is_file() { - return Err(SolcError::msg(format!("`solc` {} does not exist", path.display()))) + return Err(SolcError::msg(format!("`solc` {} does not exist", path.display()))); } let version = get_solc_version_info(path)?.version; Solc::new_with_version( @@ -307,7 +315,7 @@ pub fn config_ensure_zksolc( if offline { return Err(SolcError::msg(format!( "can't install missing zksolc {version} in offline mode" - ))) + ))); } ZkSolc::blocking_install(version)?; zksolc = ZkSolc::find_installed_version(version)?; @@ -319,12 +327,12 @@ pub fn config_ensure_zksolc( return Err(SolcError::msg(format!( "`zksolc` {} does not exist", zksolc.display() - ))) + ))); } Some(zksolc.clone()) } }; - return Ok(zksolc) + return Ok(zksolc); } Ok(None) diff --git a/crates/evm/evm/Cargo.toml b/crates/evm/evm/Cargo.toml index e870512bd..d626b32e7 100644 --- a/crates/evm/evm/Cargo.toml +++ b/crates/evm/evm/Cargo.toml @@ -22,6 +22,7 @@ foundry-evm-core.workspace = true foundry-evm-coverage.workspace = true foundry-evm-fuzz.workspace = true foundry-evm-traces.workspace = true +foundry-linking.workspace = true foundry-zksync-core.workspace = true foundry-zksync-compilers.workspace = true foundry-zksync-inspectors.workspace = true diff --git a/crates/evm/evm/src/executors/mod.rs b/crates/evm/evm/src/executors/mod.rs index 158fdac8d..d34603737 100644 --- a/crates/evm/evm/src/executors/mod.rs +++ b/crates/evm/evm/src/executors/mod.rs @@ -40,7 +40,7 @@ use std::{ borrow::Cow, time::{Duration, Instant}, }; -use strategy::ExecutorStrategy; +use strategy::{DeployLibKind, DeployLibResult, ExecutorStrategy}; mod builder; pub use builder::ExecutorBuilder; @@ -303,6 +303,23 @@ impl Executor { self.deploy_with_env(env, rd) } + /// Deploys a library contract and commits the new state to the underlying database. + /// + /// Executes a `deploy_kind` transaction with the provided parameters + /// and persistent database state modifications. + /// + /// Will return a list of deployment results and transaction requests + /// Will also ensure nonce is increased for the sender + pub fn deploy_library( + &mut self, + from: Address, + kind: DeployLibKind, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result, EvmError> { + self.strategy.runner.deploy_library(self, from, kind, value, rd) + } + /// Deploys a contract using the given `env` and commits the new state to the underlying /// database. 
/// diff --git a/crates/evm/evm/src/executors/strategy.rs b/crates/evm/evm/src/executors/strategy.rs index a719e841a..d29f76985 100644 --- a/crates/evm/evm/src/executors/strategy.rs +++ b/crates/evm/evm/src/executors/strategy.rs @@ -1,4 +1,4 @@ -use std::{any::Any, fmt::Debug}; +use std::{any::Any, fmt::Debug, path::Path}; use alloy_primitives::{Address, U256}; use alloy_serde::OtherFields; @@ -6,8 +6,17 @@ use eyre::Result; use foundry_cheatcodes::strategy::{ CheatcodeInspectorStrategy, EvmCheatcodeInspectorStrategyRunner, }; -use foundry_evm_core::backend::{strategy::BackendStrategy, Backend, BackendResult, CowBackend}; -use foundry_zksync_compilers::dual_compiled_contracts::DualCompiledContracts; +use foundry_compilers::ProjectCompileOutput; +use foundry_config::Config; +use foundry_evm_core::{ + backend::{strategy::BackendStrategy, Backend, BackendResult, CowBackend}, + decode::RevertDecoder, +}; +use foundry_linking::LinkerError; +use foundry_zksync_compilers::{ + compilers::{artifact_output::zk::ZkArtifactOutput, zksolc::ZkSolcCompiler}, + dual_compiled_contracts::DualCompiledContracts, +}; use revm::{ primitives::{Env, EnvWithHandlerCfg, ResultAndState}, DatabaseRef, @@ -15,7 +24,10 @@ use revm::{ use crate::inspectors::InspectorStack; -use super::Executor; +use super::{EvmError, Executor}; + +mod libraries; +pub use libraries::*; pub trait ExecutorStrategyContext: Debug + Send + Sync + Any { /// Clone the strategy context. @@ -68,9 +80,32 @@ pub trait ExecutorStrategyRunner: Debug + Send + Sync + ExecutorStrategyExt { amount: U256, ) -> BackendResult<()>; + fn get_balance(&self, executor: &mut Executor, address: Address) -> BackendResult; + fn set_nonce(&self, executor: &mut Executor, address: Address, nonce: u64) -> BackendResult<()>; + fn get_nonce(&self, executor: &mut Executor, address: Address) -> BackendResult; + + fn link( + &self, + ctx: &mut dyn ExecutorStrategyContext, + config: &Config, + root: &Path, + input: &ProjectCompileOutput, + deployer: Address, + ) -> Result; + + /// Deploys a library, applying state changes + fn deploy_library( + &self, + executor: &mut Executor, + from: Address, + input: DeployLibKind, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result, EvmError>; + /// Execute a transaction and *WITHOUT* applying state changes. fn call( &self, @@ -110,6 +145,13 @@ pub trait ExecutorStrategyExt { ) { } + fn zksync_set_compilation_output( + &self, + _ctx: &mut dyn ExecutorStrategyContext, + _output: ProjectCompileOutput, + ) { + } + /// Set the fork environment on the context. 
fn zksync_set_fork_env( &self, @@ -153,6 +195,10 @@ impl ExecutorStrategyRunner for EvmExecutorStrategyRunner { Ok(()) } + fn get_balance(&self, executor: &mut Executor, address: Address) -> BackendResult { + executor.get_balance(address) + } + fn set_nonce( &self, executor: &mut Executor, @@ -166,6 +212,33 @@ impl ExecutorStrategyRunner for EvmExecutorStrategyRunner { Ok(()) } + fn get_nonce(&self, executor: &mut Executor, address: Address) -> BackendResult { + executor.get_nonce(address) + } + + fn link( + &self, + _: &mut dyn ExecutorStrategyContext, + _: &Config, + root: &Path, + input: &ProjectCompileOutput, + deployer: Address, + ) -> Result { + self.link_impl(root, input, deployer) + } + + /// Deploys a library, applying state changes + fn deploy_library( + &self, + executor: &mut Executor, + from: Address, + kind: DeployLibKind, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result, EvmError> { + self.deploy_library_impl(executor, from, kind, value, rd) + } + fn call( &self, _ctx: &dyn ExecutorStrategyContext, diff --git a/crates/evm/evm/src/executors/strategy/libraries.rs b/crates/evm/evm/src/executors/strategy/libraries.rs new file mode 100644 index 000000000..f98ccdad1 --- /dev/null +++ b/crates/evm/evm/src/executors/strategy/libraries.rs @@ -0,0 +1,157 @@ +//! Contains various definitions and items related to deploy-time linking + +use std::{borrow::Borrow, collections::BTreeMap, path::Path}; + +use alloy_json_abi::JsonAbi; +use alloy_primitives::{Address, Bytes, TxKind, B256, U256}; +use eyre::Context; +use foundry_common::{ContractsByArtifact, TestFunctionExt, TransactionMaybeSigned}; +use foundry_compilers::{ + artifacts::Libraries, contracts::ArtifactContracts, Artifact, ArtifactId, ProjectCompileOutput, +}; +use foundry_evm_core::decode::RevertDecoder; +use foundry_linking::{Linker, LinkerError}; + +use crate::executors::{DeployResult, EvmError, Executor}; + +use super::{EvmExecutorStrategyRunner, ExecutorStrategyRunner}; + +pub struct LinkOutput { + pub deployable_contracts: BTreeMap, + pub revert_decoder: RevertDecoder, + pub linked_contracts: ArtifactContracts, + pub known_contracts: ContractsByArtifact, + pub libs_to_deploy: Vec, + pub libraries: Libraries, +} + +/// Type of library deployment +#[derive(Debug, Clone)] +pub enum DeployLibKind { + /// CREATE(bytecode) + Create(Bytes), + + /// CREATE2(salt, bytecode) + Create2(B256, Bytes), +} + +/// Represents the result of a library deployment +#[derive(Debug)] +pub struct DeployLibResult { + /// Result of the deployment + pub result: DeployResult, + /// Equivalent transaction to deploy the given library + pub tx: Option, +} + +impl EvmExecutorStrategyRunner { + pub(super) fn link_impl( + &self, + root: &Path, + input: &ProjectCompileOutput, + deployer: Address, + ) -> Result { + let contracts = + input.artifact_ids().map(|(id, v)| (id.with_stripped_file_prefixes(root), v)).collect(); + let linker = Linker::new(root, contracts); + + // Build revert decoder from ABIs of all artifacts. 
+ let abis = linker + .contracts + .iter() + .filter_map(|(_, contract)| contract.abi.as_ref().map(|abi| abi.borrow())); + let revert_decoder = RevertDecoder::new().with_abis(abis); + + let foundry_linking::LinkOutput { libraries, libs_to_deploy } = linker + .link_with_nonce_or_address(Default::default(), deployer, 0, linker.contracts.keys())?; + + let linked_contracts = linker.get_linked_artifacts(&libraries)?; + + // Create a mapping of name => (abi, deployment code, Vec) + let mut deployable_contracts = BTreeMap::default(); + for (id, contract) in linked_contracts.iter() { + let Some(abi) = &contract.abi else { continue }; + + // if it's a test, link it and add to deployable contracts + if abi.constructor.as_ref().map(|c| c.inputs.is_empty()).unwrap_or(true) && + abi.functions().any(|func| func.name.is_any_test()) + { + let Some(bytecode) = + contract.get_bytecode_bytes().map(|b| b.into_owned()).filter(|b| !b.is_empty()) + else { + continue; + }; + + deployable_contracts.insert(id.clone(), (abi.clone(), bytecode)); + } + } + + let known_contracts = ContractsByArtifact::new(linked_contracts.clone()); + + Ok(LinkOutput { + deployable_contracts, + revert_decoder, + linked_contracts, + known_contracts, + libs_to_deploy, + libraries, + }) + } + + pub(super) fn deploy_library_impl( + &self, + executor: &mut Executor, + from: Address, + kind: DeployLibKind, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result, EvmError> { + let nonce = self.get_nonce(executor, from).context("retrieving sender nonce")?; + + match kind { + DeployLibKind::Create(code) => { + executor.deploy(from, code.clone(), value, rd).map(|dr| { + let mut request = TransactionMaybeSigned::new(Default::default()); + let unsigned = request.as_unsigned_mut().unwrap(); + unsigned.from = Some(from); + unsigned.input = code.into(); + unsigned.nonce = Some(nonce); + + vec![DeployLibResult { result: dr, tx: Some(request) }] + }) + } + DeployLibKind::Create2(salt, code) => { + let create2_deployer = executor.create2_deployer(); + + let calldata: Bytes = [salt.as_ref(), code.as_ref()].concat().into(); + let result = + executor.transact_raw(from, create2_deployer, calldata.clone(), value)?; + let result = result.into_result(rd)?; + + let address = result + .out + .as_ref() + .and_then(|out| out.address().cloned()) + .unwrap_or_else(|| create2_deployer.create2_from_code(salt, code.as_ref())); + debug!(%address, "deployed contract with create2"); + + let mut request = TransactionMaybeSigned::new(Default::default()); + let unsigned = request.as_unsigned_mut().unwrap(); + unsigned.from = Some(from); + unsigned.input = calldata.into(); + unsigned.nonce = Some(nonce); + unsigned.to = Some(TxKind::Call(create2_deployer)); + + // manually increase nonce when performing CALLs + executor + .set_nonce(from, nonce + 1) + .context("increasing nonce after CREATE2 deployment")?; + + Ok(vec![DeployLibResult { + result: DeployResult { raw: result, address }, + tx: Some(request), + }]) + } + } + } +} diff --git a/crates/forge/bin/cmd/create.rs b/crates/forge/bin/cmd/create.rs index d15a88f8f..91a06fd7b 100644 --- a/crates/forge/bin/cmd/create.rs +++ b/crates/forge/bin/cmd/create.rs @@ -174,10 +174,10 @@ impl CreateArgs { let (artifact, id) = remove_zk_contract(&mut zk_output, &target_path, &self.contract.name)?; - let ZkContractArtifact { bytecode, factory_dependencies, abi, .. } = artifact; + let ZkContractArtifact { bytecode, abi, factory_dependencies, .. 
} = &artifact; - let abi = abi.expect("Abi not found"); - let bin = bytecode.expect("Bytecode not found"); + let abi = abi.clone().expect("Abi not found"); + let bin = bytecode.as_ref().expect("Bytecode not found"); let bytecode = match bin.object() { BytecodeObject::Bytecode(bytes) => bytes.to_vec(), @@ -224,7 +224,7 @@ impl CreateArgs { let factory_deps: Vec> = { let factory_dependencies_map = - factory_dependencies.expect("factory deps not found"); + factory_dependencies.as_ref().expect("factory deps not found"); let mut visited_paths = HashSet::new(); let mut visited_bytecodes = HashSet::new(); let mut queue = VecDeque::new(); @@ -252,12 +252,12 @@ impl CreateArgs { ) }); let fdep_fdeps_map = - fdep_art.factory_dependencies.clone().expect("factory deps not found"); + fdep_art.factory_dependencies.as_ref().expect("factory deps not found"); for dep in fdep_fdeps_map.values() { queue.push_back(dep.clone()) } - // TODO(zk): ensure factory deps are also linked + // NOTE(zk): unlinked factory deps don't show up in `factory_dependencies` let fdep_bytecode = fdep_art .bytecode .clone() diff --git a/crates/forge/src/multi_runner.rs b/crates/forge/src/multi_runner.rs index 1e2ca9854..09c931c1a 100644 --- a/crates/forge/src/multi_runner.rs +++ b/crates/forge/src/multi_runner.rs @@ -9,29 +9,28 @@ use alloy_primitives::{Address, Bytes, U256}; use eyre::Result; use foundry_common::{get_contract_name, shell::verbosity, ContractsByArtifact, TestFunctionExt}; use foundry_compilers::{ - artifacts::{ - CompactBytecode, CompactContractBytecode, CompactDeployedBytecode, Contract, Libraries, - }, + artifacts::{Contract, Libraries}, compilers::Compiler, - Artifact, ArtifactId, ProjectCompileOutput, + ArtifactId, ProjectCompileOutput, }; use foundry_config::{Config, InlineConfig}; use foundry_evm::{ backend::Backend, decode::RevertDecoder, - executors::{strategy::ExecutorStrategy, Executor, ExecutorBuilder}, + executors::{ + strategy::{ExecutorStrategy, LinkOutput}, + Executor, ExecutorBuilder, + }, fork::CreateFork, inspectors::CheatsConfig, opts::EvmOpts, revm, traces::{InternalTraceMode, TraceMode}, }; -use foundry_linking::{LinkOutput, Linker}; use rayon::prelude::*; use revm::primitives::SpecId; use std::{ - borrow::Borrow, collections::BTreeMap, fmt::Debug, path::Path, @@ -486,94 +485,34 @@ impl MultiContractRunnerBuilder { zk_output: Option>, env: revm::primitives::Env, evm_opts: EvmOpts, - strategy: ExecutorStrategy, + mut strategy: ExecutorStrategy, ) -> Result { - let contracts = output - .artifact_ids() - .map(|(id, v)| (id.with_stripped_file_prefixes(root), v)) - .collect(); - let linker = Linker::new(root, contracts); - - // Build revert decoder from ABIs of all artifacts. 
- let abis = linker - .contracts - .iter() - .filter_map(|(_, contract)| contract.abi.as_ref().map(|abi| abi.borrow())); - let revert_decoder = RevertDecoder::new().with_abis(abis); + if let Some(zk_output) = zk_output { + strategy.runner.zksync_set_compilation_output(strategy.context.as_mut(), zk_output); + } - let LinkOutput { libraries, libs_to_deploy } = linker.link_with_nonce_or_address( - Default::default(), + let LinkOutput { + deployable_contracts, + revert_decoder, + linked_contracts: _, + known_contracts, + libs_to_deploy, + libraries, + } = strategy.runner.link( + strategy.context.as_mut(), + &self.config, + root, + output, LIBRARY_DEPLOYER, - 0, - linker.contracts.keys(), )?; - let linked_contracts = linker.get_linked_artifacts(&libraries)?; - - // Create a mapping of name => (abi, deployment code, Vec) - let mut deployable_contracts = DeployableContracts::default(); - - for (id, contract) in linked_contracts.iter() { - let Some(abi) = &contract.abi else { continue }; - - // if it's a test, link it and add to deployable contracts - if abi.constructor.as_ref().map(|c| c.inputs.is_empty()).unwrap_or(true) && - abi.functions().any(|func| func.name.is_any_test()) - { - let Some(bytecode) = - contract.get_bytecode_bytes().map(|b| b.into_owned()).filter(|b| !b.is_empty()) - else { - continue; - }; - - deployable_contracts - .insert(id.clone(), TestContract { abi: abi.clone(), bytecode }); - } - } - - let mut known_contracts = ContractsByArtifact::default(); - if zk_output.is_none() { - known_contracts = ContractsByArtifact::new(linked_contracts); - } else if let Some(zk_output) = zk_output { - let zk_contracts = zk_output.with_stripped_file_prefixes(root).into_artifacts(); - let mut zk_contracts_map = BTreeMap::new(); - - for (id, contract) in zk_contracts { - if let Some(abi) = contract.abi { - let bytecode = contract.bytecode.as_ref(); - - // TODO(zk): retrieve link_references - if let Some(bytecode_object) = bytecode.map(|b| b.object()) { - let compact_bytecode = CompactBytecode { - object: bytecode_object.clone(), - source_map: None, - link_references: BTreeMap::new(), - }; - let compact_contract = CompactContractBytecode { - abi: Some(abi), - bytecode: Some(compact_bytecode.clone()), - deployed_bytecode: Some(CompactDeployedBytecode { - bytecode: Some(compact_bytecode), - immutable_references: BTreeMap::new(), - }), - }; - zk_contracts_map.insert(id.clone(), compact_contract); - } - } else { - warn!("Abi not found for contract {}", id.identifier()); - } - } - - // Extend zk contracts with solc contracts as well. This is required for traces to - // accurately detect contract names deployed in EVM mode, and when using - // `vm.zkVmSkip()` cheatcode. 
- zk_contracts_map.extend(linked_contracts); - - known_contracts = ContractsByArtifact::new(zk_contracts_map); - } + let contracts = deployable_contracts + .into_iter() + .map(|(id, (abi, bytecode))| (id, TestContract { abi, bytecode })) + .collect(); Ok(MultiContractRunner { - contracts: deployable_contracts, + contracts, revert_decoder, known_contracts, libs_to_deploy, diff --git a/crates/forge/src/runner.rs b/crates/forge/src/runner.rs index 496cda0ca..cd40ce7df 100644 --- a/crates/forge/src/runner.rs +++ b/crates/forge/src/runner.rs @@ -21,6 +21,7 @@ use foundry_evm::{ invariant::{ check_sequence, replay_error, replay_run, InvariantExecutor, InvariantFuzzError, }, + strategy::DeployLibKind, CallResult, EvmError, Executor, ITest, RawCallResult, }, fuzz::{ @@ -124,23 +125,32 @@ impl<'a> ContractRunner<'a> { let mut result = TestSetup::default(); for code in self.mcr.libs_to_deploy.iter() { - let deploy_result = self.executor.deploy( + let deploy_result = self.executor.deploy_library( LIBRARY_DEPLOYER, - code.clone(), + DeployLibKind::Create(code.clone()), U256::ZERO, Some(&self.mcr.revert_decoder), ); - // Record deployed library address. - if let Ok(deployed) = &deploy_result { - result.deployed_libs.push(deployed.address); - } + let deployments = match deploy_result { + Err(err) => vec![Err(err)], + Ok(deployments) => deployments.into_iter().map(Ok).collect(), + }; + + for deploy_result in + deployments.into_iter().map(|result| result.map(|deployment| deployment.result)) + { + // Record deployed library address. + if let Ok(deployed) = &deploy_result { + result.deployed_libs.push(deployed.address); + } - let (raw, reason) = RawCallResult::from_evm_result(deploy_result.map(Into::into))?; - result.extend(raw, TraceKind::Deployment); - if reason.is_some() { - result.reason = reason; - return Ok(result); + let (raw, reason) = RawCallResult::from_evm_result(deploy_result.map(Into::into))?; + result.extend(raw, TraceKind::Deployment); + if reason.is_some() { + result.reason = reason; + return Ok(result); + } } } @@ -307,7 +317,7 @@ impl<'a> ContractRunner<'a> { [("setUp()".to_string(), TestResult::fail("multiple setUp functions".to_string()))] .into(), warnings, - ) + ); } // Check if `afterInvariant` function with valid signature declared. 
@@ -323,7 +333,7 @@ impl<'a> ContractRunner<'a> { )] .into(), warnings, - ) + ); } let call_after_invariant = after_invariant_fns.first().is_some_and(|after_invariant_fn| { let match_sig = after_invariant_fn.name == "afterInvariant"; @@ -357,7 +367,7 @@ impl<'a> ContractRunner<'a> { start.elapsed(), [("setUp()".to_string(), TestResult::setup_result(setup))].into(), warnings, - ) + ); } // Filter out functions sequentially since it's very fast and there is no need to do it diff --git a/crates/forge/tests/fixtures/zk/Libraries.s.sol b/crates/forge/tests/fixtures/zk/Libraries.s.sol index 195d4dd7b..25d6bd4bc 100644 --- a/crates/forge/tests/fixtures/zk/Libraries.s.sol +++ b/crates/forge/tests/fixtures/zk/Libraries.s.sol @@ -5,9 +5,18 @@ pragma solidity >=0.8.7 <0.9.0; import {UsesFoo} from "../src/WithLibraries.sol"; import "forge-std/Script.sol"; -contract DeployUsesFoo is Script { - function run () external { +contract GetCodeUnlinked is Script { + function run() external { // should fail because `UsesFoo` is unlinked bytes memory _code = vm.getCode("UsesFoo"); } } + +contract DeployTimeLinking is Script { + function run() external { + vm.broadcast(); + UsesFoo user = new UsesFoo(); + + assert(user.number() == 42); + } +} diff --git a/crates/forge/tests/it/test_helpers.rs b/crates/forge/tests/it/test_helpers.rs index f05855ffa..2428b88d2 100644 --- a/crates/forge/tests/it/test_helpers.rs +++ b/crates/forge/tests/it/test_helpers.rs @@ -612,6 +612,7 @@ pub async fn run_zk_script_test( let content = foundry_common::fs::read_to_string(run_latest).unwrap(); let json: serde_json::Value = serde_json::from_str(&content).unwrap(); + assert_eq!( json["transactions"].as_array().expect("broadcastable txs").len(), expected_broadcastable_txs diff --git a/crates/forge/tests/it/zk/linking.rs b/crates/forge/tests/it/zk/linking.rs index 76758efcd..40f5daff9 100644 --- a/crates/forge/tests/it/zk/linking.rs +++ b/crates/forge/tests/it/zk/linking.rs @@ -1,21 +1,73 @@ -use foundry_test_utils::{forgetest_async, util, TestProject}; +use forge::revm::primitives::SpecId; +use foundry_test_utils::{forgetest_async, util, Filter, TestCommand, TestProject}; +use semver::Version; -use crate::test_helpers::{deploy_zk_contract, run_zk_script_test}; +use crate::{ + config::TestConfig, + test_helpers::{deploy_zk_contract, run_zk_script_test, TEST_DATA_DEFAULT}, +}; -// TODO(zk): add test that actually does the deployment -// of the unlinked contract via script, once recursive linking is supported -// and once we also support doing deploy-time linking +const ZKSOLC_MIN_LINKING_VERSION: Version = Version::new(1, 5, 9); + +#[tokio::test(flavor = "multi_thread")] +async fn test_zk_deploy_time_linking() { + let runner = TEST_DATA_DEFAULT.runner_zksync(); + let filter = Filter::new(".*", "DeployTimeLinking", ".*"); + + TestConfig::with_filter(runner, filter).spec_id(SpecId::SHANGHAI).run().await; +} + +// TODO(zk): add equivalent test for `GetCodeUnlinked` +// would probably need to split in separate file (and skip other file) +// as tests look for _all_ lib deps and deploy them for every test forgetest_async!( #[should_panic = "no bytecode for contract; is it abstract or unlinked?"] - script_using_unlinked_fails, + script_zk_fails_indirect_reference_to_unlinked, + |prj, cmd| { + setup_libs_prj(&mut prj, &mut cmd, None); + run_zk_script_test( + prj.root(), + &mut cmd, + "./script/Libraries.s.sol", + "GetCodeUnlinked", + None, + 1, + Some(&["-vvvvv"]), + ) + .await; + } +); + +forgetest_async!(script_zk_deploy_time_linking, |prj, 
cmd| { + setup_libs_prj(&mut prj, &mut cmd, None); + run_zk_script_test( + prj.root(), + &mut cmd, + "./script/Libraries.s.sol", + "DeployTimeLinking", + None, + // lib `Foo` + `UsesFoo` deployment + 2, + Some(&["-vvvvv", "--broadcast"]), + ) + .await; +}); + +forgetest_async!( + #[ignore] + #[should_panic = "deploy-time linking not supported"] + script_zk_deploy_time_linking_fails_older_version, |prj, cmd| { - setup_libs_prj(&mut prj); + let mut version = ZKSOLC_MIN_LINKING_VERSION; + version.patch -= 1; + + setup_libs_prj(&mut prj, &mut cmd, Some(version)); run_zk_script_test( prj.root(), &mut cmd, "./script/Libraries.s.sol", - "DeployUsesFoo", + "DeployTimeLinking", None, 1, Some(&["-vvvvv"]), @@ -26,9 +78,9 @@ forgetest_async!( forgetest_async!( #[should_panic = "Dynamic linking not supported"] - create_using_unlinked_fails, + create_zk_using_unlinked_fails, |prj, cmd| { - setup_libs_prj(&mut prj); + setup_libs_prj(&mut prj, &mut cmd, None); // we don't really connect to the rpc because // we expect to fail before that point @@ -45,8 +97,15 @@ forgetest_async!( } ); -fn setup_libs_prj(prj: &mut TestProject) { +fn setup_libs_prj(prj: &mut TestProject, cmd: &mut TestCommand, zksolc: Option) { util::initialize(prj.root()); + + let mut config = cmd.config(); + if let Some(zksolc) = zksolc { + config.zksync.zksolc.replace(foundry_config::SolcReq::Version(zksolc)); + } + prj.write_config(config); + prj.add_script("Libraries.s.sol", include_str!("../../fixtures/zk/Libraries.s.sol")).unwrap(); prj.add_source( "WithLibraries.sol", diff --git a/crates/linking/Cargo.toml b/crates/linking/Cargo.toml index 15d0d113b..f369c62d6 100644 --- a/crates/linking/Cargo.toml +++ b/crates/linking/Cargo.toml @@ -18,3 +18,8 @@ foundry-compilers = { workspace = true, features = ["full"] } semver.workspace = true alloy-primitives = { workspace = true, features = ["rlp"] } thiserror.workspace = true +tracing.workspace = true + +# zk linking utils +foundry-zksync-core.workspace = true +foundry-zksync-compilers.workspace = true diff --git a/crates/linking/src/lib.rs b/crates/linking/src/lib.rs index e44ee7748..aaf40285c 100644 --- a/crates/linking/src/lib.rs +++ b/crates/linking/src/lib.rs @@ -18,6 +18,9 @@ use std::{ str::FromStr, }; +mod zksync; +pub use zksync::*; + /// Errors that can occur during linking. #[derive(Debug, thiserror::Error)] pub enum LinkerError { @@ -31,6 +34,7 @@ pub enum LinkerError { CyclicDependency, } +#[derive(Debug)] pub struct Linker<'a> { /// Root of the project, used to determine whether artifact/library path can be stripped. 
pub root: PathBuf, diff --git a/crates/linking/src/zksync.rs b/crates/linking/src/zksync.rs new file mode 100644 index 000000000..5afa522e1 --- /dev/null +++ b/crates/linking/src/zksync.rs @@ -0,0 +1,466 @@ +use std::{ + collections::{BTreeMap, BTreeSet, VecDeque}, + path::{Path, PathBuf}, +}; + +use alloy_primitives::{ + hex::FromHex, + map::{HashMap, HashSet}, + Address, B256, +}; +use foundry_compilers::{ + artifacts::{ + BytecodeObject, CompactBytecode, CompactContractBytecode, CompactContractBytecodeCow, + CompactDeployedBytecode, Libraries, + }, + contracts::ArtifactContracts, + Artifact, ArtifactId, ProjectCompileOutput, +}; +use foundry_zksync_compilers::{ + compilers::{ + artifact_output::zk::{ZkArtifactOutput, ZkContractArtifact}, + zksolc::ZkSolcCompiler, + }, + link::{self as zk_link, MissingLibrary}, +}; +use foundry_zksync_core::hash_bytecode; +use semver::Version; + +use crate::{LinkOutput, Linker, LinkerError}; + +/// Errors that can occur during linking. +#[derive(Debug, thiserror::Error)] +pub enum ZkLinkerError { + #[error(transparent)] + Inner(#[from] LinkerError), + #[error("unable to fully link due to missing libraries")] + MissingLibraries(BTreeSet), + #[error("unable to fully link due to unlinked factory dependencies")] + MissingFactoryDeps(BTreeSet), +} + +pub const DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION: Version = Version::new(1, 5, 9); + +#[derive(Debug)] +pub struct ZkLinker<'a> { + pub linker: Linker<'a>, + pub compiler: ZkSolcCompiler, + pub compiler_output: &'a ProjectCompileOutput, +} + +impl<'a> ZkLinker<'a> { + fn zk_artifacts(&'a self) -> impl Iterator + 'a { + self.compiler_output.artifact_ids() + } + + /// Construct a new `ZkLinker` + pub fn new( + root: impl Into, + contracts: ArtifactContracts>, + compiler: ZkSolcCompiler, + compiler_output: &'a ProjectCompileOutput, + ) -> Self { + Self { linker: Linker::new(root, contracts), compiler, compiler_output } + } + + /// Collect the factory dependencies of the `target` artifact + /// + /// Will call itself recursively for nested dependencies + fn zk_collect_factory_deps( + &'a self, + target: &'a ArtifactId, + factory_deps: &mut BTreeSet<&'a ArtifactId>, + ) -> Result<(), LinkerError> { + let (_, artifact) = self + .zk_artifacts() + .find(|(id, _)| id.source == target.source && id.name == target.name) + .ok_or(LinkerError::MissingTargetArtifact)?; + + let already_linked = artifact + .factory_dependencies + .as_ref() + .iter() + .flat_map(|map| map.values()) + .collect::>(); + + let unlinked_deps_of_target = artifact + .factory_dependencies_unlinked + .iter() + .flatten() + // remove already linked deps + .filter(|dep| !already_linked.contains(dep)) + .map(|dep| { + let mut split = dep.split(':'); + let path = split.next().expect("malformed factory dep path"); + let name = split.next().expect("malformed factory dep name"); + + (path.to_string(), name.to_string()) + }); + + for (file, name) in unlinked_deps_of_target { + let id = self + .linker + .find_artifact_id_by_library_path(&file, &name, None) + .ok_or(LinkerError::MissingLibraryArtifact { file, name })?; + + if factory_deps.insert(id) { + self.zk_collect_factory_deps(id, factory_deps)?; + } + } + + Ok(()) + } + + /// Performs DFS on the graph of link references, and populates `deps` with all found libraries, + /// including ones of factory deps. 
+ pub fn zk_collect_dependencies( + &'a self, + target: &'a ArtifactId, + libraries: &mut BTreeSet<&'a ArtifactId>, + factory_deps: Option<&mut BTreeSet<&'a ArtifactId>>, + ) -> Result<(), LinkerError> { + let (_, artifact) = self + .zk_artifacts() + .find(|(id, _)| id.source == target.source && id.name == target.name) + .ok_or(LinkerError::MissingTargetArtifact)?; + + let mut references = BTreeMap::new(); + if let Some(bytecode) = &artifact.bytecode { + references.extend(bytecode.link_references()); + } + + // find all nested factory deps's link references + let mut fdeps_default = BTreeSet::new(); + let factory_deps = factory_deps.unwrap_or(&mut fdeps_default); + self.zk_collect_factory_deps(target, factory_deps)?; + + for (_, fdep) in factory_deps.iter().filter_map(|target| { + self.zk_artifacts().find(|(id, _)| id.source == target.source && id.name == target.name) + }) { + if let Some(bytecode) = &fdep.bytecode { + references.extend(bytecode.link_references()); + } + } + + for (file, libs) in &references { + for contract in libs.keys() { + let id = self + .linker + .find_artifact_id_by_library_path(file, contract, None) + .ok_or_else(|| LinkerError::MissingLibraryArtifact { + file: file.to_string(), + name: contract.to_string(), + })?; + if libraries.insert(id) { + self.zk_collect_dependencies(id, libraries, Some(factory_deps))?; + } + } + } + + Ok(()) + } + + /// Links given artifact with either given library addresses or address computed from sender and + /// nonce. + /// + /// Each key in `libraries` should either be a global path or relative to project root. All + /// remappings should be resolved. + /// + /// When calling for `target` being an external library itself, you should check that `target` + /// does not appear in `libs_to_deploy` to avoid deploying it twice. It may happen in cases + /// when there is a dependency cycle including `target`. + pub fn zk_link_with_nonce_or_address( + &'a self, + libraries: Libraries, + sender: Address, + mut nonce: u64, + targets: impl IntoIterator, + ) -> Result { + // Library paths in `link_references` keys are always stripped, so we have to strip + // user-provided paths to be able to match them correctly. + let mut libraries = libraries.with_stripped_file_prefixes(self.linker.root.as_path()); + + let mut needed_libraries = BTreeSet::new(); + for target in targets { + self.zk_collect_dependencies(target, &mut needed_libraries, None)?; + } + + let mut libs_to_deploy = Vec::new(); + + // If `libraries` does not contain needed dependency, compute its address and add to + // `libs_to_deploy`. + for id in needed_libraries { + let (lib_path, lib_name) = self.linker.convert_artifact_id_to_lib_path(id); + + libraries.libs.entry(lib_path).or_default().entry(lib_name).or_insert_with(|| { + let address = foundry_zksync_core::compute_create_address(sender, nonce); + libs_to_deploy.push((id, address)); + nonce += 1; + + address.to_checksum(None) + }); + } + + // Link and collect bytecodes for `libs_to_deploy`. + let libs_to_deploy = self + .zk_get_linked_artifacts(libs_to_deploy.into_iter().map(|(id, _)| id), &libraries)? 
+ .into_iter() + .map(|(_, linked)| linked.get_bytecode_bytes().unwrap().into_owned()) + .collect(); + + Ok(LinkOutput { libraries, libs_to_deploy }) + } + + pub fn zk_link_with_create2( + &'a self, + libraries: Libraries, + sender: Address, + salt: B256, + target: &'a ArtifactId, + ) -> Result { + // Library paths in `link_references` keys are always stripped, so we have to strip + // user-provided paths to be able to match them correctly. + let mut libraries = libraries.with_stripped_file_prefixes(self.linker.root.as_path()); + + let mut contracts = self.linker.contracts.clone(); + + let mut needed_libraries = BTreeSet::new(); + self.zk_collect_dependencies(target, &mut needed_libraries, None)?; + + let attempt_link = |contracts: &mut ArtifactContracts>, + id, + libraries: &Libraries, + zksolc| { + let original = contracts.get(id).expect("library present in list of contracts"); + // Link library with provided libs and extract bytecode object (possibly unlinked). + match Self::zk_link(contracts, id, libraries, zksolc) { + Ok(linked) => { + // persist linked contract for successive iterations + *contracts.entry(id.clone()).or_default() = linked.clone(); + linked.bytecode.expect("library should have bytecode") + } + // the library remains unlinked at this time + Err(_) => original.bytecode.as_ref().expect("library should have bytecode").clone(), + } + }; + + let mut needed_libraries = needed_libraries + .into_iter() + .filter(|id| { + // Filter out already provided libraries. + let (file, name) = self.linker.convert_artifact_id_to_lib_path(id); + !libraries.libs.contains_key(&file) || !libraries.libs[&file].contains_key(&name) + }) + .map(|id| (id, attempt_link(&mut contracts, id, &libraries, &self.compiler))) + .collect::>(); + + let mut libs_to_deploy = Vec::new(); + + // Iteratively compute addresses and link libraries until we have no unlinked libraries + // left. + while !needed_libraries.is_empty() { + // Find any library which is fully linked. + let deployable = needed_libraries + .iter() + .enumerate() + .find(|(_, (_, bytecode))| !bytecode.object.is_unlinked()); + + // If we haven't found any deployable library, it means we have a cyclic dependency. + let Some((index, &(id, _))) = deployable else { + return Err(LinkerError::CyclicDependency); + }; + let (_, library_bytecode) = needed_libraries.swap_remove(index); + + let code = library_bytecode.bytes().expect("fully linked bytecode"); + let bytecode_hash = hash_bytecode(code); + + let address = + foundry_zksync_core::compute_create2_address(sender, bytecode_hash, salt, &[]); + + let (file, name) = self.linker.convert_artifact_id_to_lib_path(id); + + // NOTE(zk): doesn't really matter since we use the EVM + // bytecode to determine what EraVM bytecode to deploy + libs_to_deploy.push(code.clone()); + libraries.libs.entry(file).or_default().insert(name, address.to_checksum(None)); + + for (id, bytecode) in &mut needed_libraries { + *bytecode = attempt_link(&mut contracts, id, &libraries, &self.compiler) + } + } + + Ok(LinkOutput { libraries, libs_to_deploy }) + } + + /// Links given artifact with given libraries. 
+ // TODO(zk): improve interface to reflect batching operation (all bytecodes in all bytecodes + // out) + pub fn zk_link( + contracts: &ArtifactContracts>, + target: &ArtifactId, + libraries: &Libraries, + zksolc: &ZkSolcCompiler, + ) -> Result, ZkLinkerError> { + let artifact_to_link_id = |id: &ArtifactId| format!("{}:{}", id.source.display(), id.name); + + // collect bytecodes & libraries for input to zksolc_link + let bytecodes = contracts + .iter() + .filter_map(|(id, bytecode)| { + let link_id = artifact_to_link_id(id); + let object = bytecode.bytecode.as_ref().map(|bc| bc.object.clone())?; + + let bytes = match object { + BytecodeObject::Bytecode(bytes) => bytes, + BytecodeObject::Unlinked(unlinked) => alloy_primitives::hex::decode(unlinked) + .expect("malformed unlinked bytecode object") + .into(), + }; + + Some((link_id, bytes)) + }) + .collect::>(); + + let libraries = libraries + .libs + .iter() + .flat_map(|(file, libs)| { + libs.iter() + .map(|(name, address)| (file.to_string_lossy(), name.clone(), address.clone())) + }) + .map(|(filename, name, address)| zk_link::Library { + filename: filename.into_owned(), + name, + address: Address::from_hex(address).unwrap(), + }) + .collect::>(); + + let mut link_output = + zk_link::zksolc_link(zksolc, zk_link::LinkJsonInput { bytecodes, libraries }) + .expect("able to call zksolc --link"); // TODO(zk): proper error check + + let link_id = &artifact_to_link_id(target); + + let mut contract = contracts.get(target).ok_or(LinkerError::MissingTargetArtifact)?.clone(); + + if let Some(unlinked) = link_output.unlinked.remove(link_id) { + tracing::error!(factory_dependencies = ?unlinked.factory_dependencies, libraries = ?unlinked.linker_symbols, "unmet linking dependencies"); + + if !unlinked.linker_symbols.is_empty() { + return Err(ZkLinkerError::MissingLibraries( + unlinked.linker_symbols.into_iter().collect(), + )); + } + return Err(ZkLinkerError::MissingFactoryDeps( + unlinked.factory_dependencies.into_iter().collect(), + )); + } + + let linked_output = + link_output.linked.remove(link_id).or_else(|| link_output.ignored.remove(link_id)); + + // NOTE(zk): covers intermittent issue where fully linked bytecode was + // not being returned in `ignored` (or `linked`). + // The check above should catch if the bytecode remains unlinked + let Some(linked) = linked_output else { + return Ok(contract); + }; + + let mut compact_bytecode = CompactBytecode::empty(); + compact_bytecode.object = BytecodeObject::Bytecode( + alloy_primitives::hex::decode(&linked.bytecode) + .expect("malformed unlinked bytecode object") + .into(), + ); + + let mut compact_deployed_bytecode = CompactDeployedBytecode::empty(); + compact_deployed_bytecode.bytecode.replace(compact_bytecode.clone()); + + // TODO(zk): maybe return bytecode hash? 
+ contract.bytecode.replace(std::borrow::Cow::Owned(compact_bytecode)); + contract.deployed_bytecode.replace(std::borrow::Cow::Owned(compact_deployed_bytecode)); + + Ok(contract) + } + + pub fn zk_get_linked_artifacts<'b>( + &self, + targets: impl IntoIterator, + libraries: &Libraries, + ) -> Result { + let mut contracts = self + .linker + .contracts + .clone() + .into_iter() + // we strip these here because the file references are also relative + // and the linker wouldn't be able to properly detect matching factory deps + // (libraries are given separately and already stripped) + .map(|(id, v)| (id.with_stripped_file_prefixes(&self.linker.root), v)) + .collect(); + let mut targets = targets.into_iter().cloned().collect::>(); + let mut linked_artifacts = vec![]; + + // explanation below + while let Some(id) = targets.pop_front() { + if linked_artifacts.iter().any(|(linked, _)| linked == &id) { + // skip already linked + continue; + } + + match Self::zk_link( + &contracts, + // we strip here _only_ so that the target matches what's in `contracts` + // but we want to return the full id in the `linked_artifacts` + &id.clone().with_stripped_file_prefixes(&self.linker.root), + libraries, + &self.compiler, + ) { + Ok(linked) => { + *contracts.entry(id.clone()).or_default() = linked.clone(); + + // persist linked contract for successive iterations + linked_artifacts.push((id, CompactContractBytecode::from(linked))); + } + // contract was ignored, no need to add it to the list of linked contracts + Err(ZkLinkerError::MissingFactoryDeps(fdeps)) => { + // attempt linking again if some factory dep remains unlinked + // this is just in the case where a previously unlinked factory dep + // is linked with the same run as `id` would be linked + // and instead `id` remains unlinked + // TODO(zk): might be unnecessary, observed when paths were wrong + let mut ids = fdeps + .iter() + .flat_map(|fdep| { + contracts.iter().find(|(id, _)| { + // strip here to match against the fdep which is stripped + let id = + (*id).clone().with_stripped_file_prefixes(&self.linker.root); + id.source.as_path() == Path::new(fdep.filename.as_str()) && + id.name == fdep.library + }) + }) + // we want to keep the non-stripped + .map(|(id, _)| id.clone()) + .peekable(); + + // if we have no dep ids then we avoid + // queueing our own id to avoid infinite loop + // TODO(zk): find a better way to avoid issues later + if let Some(sample_dep) = ids.peek() { + // ensure that the sample_dep is in `contracts` + if contracts.get(sample_dep).is_none() { + return Err(ZkLinkerError::MissingFactoryDeps(fdeps)); + } + + targets.extend(ids); // queue factory deps for linking + targets.push_back(id); // requeue original target + } + } + Err(err) => return Err(err), + } + } + + Ok(linked_artifacts.into_iter().collect()) + } +} diff --git a/crates/script/src/build.rs b/crates/script/src/build.rs index ba89cbfd0..dfdde1f73 100644 --- a/crates/script/src/build.rs +++ b/crates/script/src/build.rs @@ -4,17 +4,14 @@ use crate::{ }; use alloy_primitives::{Bytes, B256}; use alloy_provider::Provider; -use eyre::{OptionExt, Result}; +use eyre::{Context, OptionExt, Result}; use forge_script_sequence::ScriptSequence; use foundry_cheatcodes::Wallets; use foundry_common::{ compile::ProjectCompiler, provider::try_get_http_provider, ContractData, ContractsByArtifact, }; use foundry_compilers::{ - artifacts::{ - BytecodeObject, CompactBytecode, CompactContractBytecode, CompactDeployedBytecode, - Libraries, - }, + artifacts::{BytecodeObject, Libraries}, 
compilers::{multi::MultiCompilerLanguage, Language}, info::ContractInfo, solc::SolcLanguage, @@ -27,7 +24,9 @@ use foundry_zksync_compilers::{ compilers::{artifact_output::zk::ZkArtifactOutput, zksolc::ZkSolcCompiler}, dual_compiled_contracts::DualCompiledContracts, }; -use std::{collections::BTreeMap, path::PathBuf, str::FromStr, sync::Arc}; +use std::{path::PathBuf, str::FromStr, sync::Arc}; + +mod zksync; /// Container for the compiled contracts. #[derive(Debug)] @@ -50,7 +49,7 @@ impl BuildData { /// Links contracts. Uses CREATE2 linking when possible, otherwise falls back to /// default linking with sender nonce and address. - pub async fn link(self, script_config: &ScriptConfig) -> Result { + pub async fn link(mut self, script_config: &ScriptConfig) -> Result { let create2_deployer = script_config.evm_opts.create2_deployer; let can_use_create2 = if let Some(fork_url) = &script_config.evm_opts.fork_url { let provider = try_get_http_provider(fork_url)?; @@ -64,6 +63,8 @@ impl BuildData { let known_libraries = script_config.config.libraries_with_remappings()?; + // TODO(zk): evaluate using strategies here as well + let maybe_create2_link_output = can_use_create2 .then(|| { self.get_linker() @@ -77,32 +78,59 @@ impl BuildData { }) .flatten(); - let (libraries, predeploy_libs) = if let Some(output) = maybe_create2_link_output { - ( - output.libraries, - ScriptPredeployLibraries::Create2( - output.libs_to_deploy, - script_config.config.create2_library_salt, - ), - ) - } else { - let output = self.get_linker().link_with_nonce_or_address( - known_libraries, - script_config.evm_opts.sender, - script_config.sender_nonce, - [&self.target], - )?; - - (output.libraries, ScriptPredeployLibraries::Default(output.libs_to_deploy)) - }; + let (libraries, predeploy_libs, uses_create2) = + if let Some(output) = maybe_create2_link_output { + ( + output.libraries, + ScriptPredeployLibraries::Create2( + output.libs_to_deploy, + script_config.config.create2_library_salt, + ), + true, + ) + } else { + let output = self.get_linker().link_with_nonce_or_address( + known_libraries.clone(), + script_config.evm_opts.sender, + script_config.sender_nonce, + [&self.target], + )?; - LinkedBuildData::new(libraries, predeploy_libs, self) + (output.libraries, ScriptPredeployLibraries::Default(output.libs_to_deploy), false) + }; + + let known_contracts = self + .get_linker() + .get_linked_artifacts(&libraries) + .context("retrieving fully linked artifacts")?; + let known_contracts = + self.zk_link(script_config, known_libraries, known_contracts, uses_create2).await?; + + LinkedBuildData::new( + libraries, + predeploy_libs, + ContractsByArtifact::new(known_contracts), + self, + ) } /// Links the build data with the given libraries. Expects supplied libraries set being enough /// to fully link target contract. 
- pub fn link_with_libraries(self, libraries: Libraries) -> Result { - LinkedBuildData::new(libraries, ScriptPredeployLibraries::Default(Vec::new()), self) + pub async fn link_with_libraries( + mut self, + script_config: &ScriptConfig, + libraries: Libraries, + ) -> Result { + let known_contracts = self.get_linker().get_linked_artifacts(&libraries)?; + let known_contracts = + self.zk_link(script_config, libraries.clone(), known_contracts, false).await?; + + LinkedBuildData::new( + libraries, + ScriptPredeployLibraries::Default(Vec::new()), + ContractsByArtifact::new(known_contracts), + self, + ) } } @@ -140,7 +168,8 @@ impl LinkedBuildData { pub fn new( libraries: Libraries, predeploy_libraries: ScriptPredeployLibraries, - mut build_data: BuildData, + known_contracts: ContractsByArtifact, + build_data: BuildData, ) -> Result { let sources = ContractSources::from_project_output( &build_data.output, @@ -148,40 +177,6 @@ impl LinkedBuildData { Some(&libraries), )?; - let mut known_artifacts = build_data.get_linker().get_linked_artifacts(&libraries)?; - // Extend known_artifacts with zk artifacts if available - if let Some(zk_output) = build_data.zk_output.take() { - let zk_contracts = - zk_output.with_stripped_file_prefixes(&build_data.project_root).into_artifacts(); - - for (id, contract) in zk_contracts { - if let Some(abi) = contract.abi { - let bytecode = contract.bytecode.as_ref(); - // TODO(zk): retrieve link_references - if let Some(bytecode_object) = bytecode.map(|b| b.object()) { - let compact_bytecode = CompactBytecode { - object: bytecode_object.clone(), - source_map: None, - link_references: BTreeMap::new(), - }; - let compact_contract = CompactContractBytecode { - abi: Some(abi), - bytecode: Some(compact_bytecode.clone()), - deployed_bytecode: Some(CompactDeployedBytecode { - bytecode: Some(compact_bytecode), - immutable_references: BTreeMap::new(), - }), - }; - known_artifacts.insert(id.clone(), compact_contract); - } - } else { - warn!("Abi not found for contract {}", id.identifier()); - } - } - } - - let known_contracts = ContractsByArtifact::new(known_artifacts); - Ok(Self { build_data, known_contracts, libraries, predeploy_libraries, sources }) } @@ -250,7 +245,7 @@ impl PreprocessedState { zk_output = Some(zk_compiler.zksync_compile(&zk_project)?); Some(DualCompiledContracts::new( &output, - &zk_output.clone().unwrap(), + zk_output.as_ref().unwrap(), &project.paths, &zk_project.paths, )) @@ -392,7 +387,10 @@ impl CompiledState { ScriptSequenceKind::Multi(_) => Libraries::default(), }; - let linked_build_data = build_data.link_with_libraries(libraries)?; + // NOTE(zk): we added `script_config` to be able + // to retrieve the appropriate `zksolc` compiler version + // from the config to be used during linking + let linked_build_data = build_data.link_with_libraries(&script_config, libraries).await?; Ok(BundledState { args, diff --git a/crates/script/src/build/zksync.rs b/crates/script/src/build/zksync.rs new file mode 100644 index 000000000..43d5245a3 --- /dev/null +++ b/crates/script/src/build/zksync.rs @@ -0,0 +1,209 @@ +use std::collections::HashMap; + +use alloy_primitives::{keccak256, B256}; +use eyre::{Context, Result}; +use foundry_compilers::{ + artifacts::{CompactContractBytecode, CompactContractBytecodeCow, Libraries}, + contracts::ArtifactContracts, + info::ContractInfo, + Artifact, +}; +use foundry_linking::{ZkLinker, DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION}; +use foundry_zksync_compilers::dual_compiled_contracts::DualCompiledContract; +use 
foundry_zksync_core::{hash_bytecode, DEFAULT_CREATE2_DEPLOYER_ZKSYNC};
+
+use crate::ScriptConfig;
+
+use super::BuildData;
+
+impl BuildData {
+    fn get_zk_linker(&self, script_config: &ScriptConfig) -> Result<ZkLinker<'_>> {
+        let zksolc = foundry_config::zksync::config_zksolc_compiler(&script_config.config)
+            .context("retrieving zksolc compiler to be used for linking")?;
+        let version = zksolc.version().context("trying to determine zksolc version")?;
+
+        let Some(input) = self.zk_output.as_ref() else {
+            eyre::bail!("unable to link zk artifacts if no zk compilation output is provided")
+        };
+
+        let linker =
+            ZkLinker::new(self.project_root.clone(), input.artifact_ids().collect(), zksolc, input);
+
+        let mut libs = Default::default();
+        linker.zk_collect_dependencies(&self.target, &mut libs, None)?;
+
+        // if there are no libs, no linking will happen
+        // so we can skip the version check
+        if !libs.is_empty() && version < DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION {
+            eyre::bail!(
+                "deploy-time linking not supported. minimum: {}, given: {}",
+                DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION,
+                &version
+            );
+        }
+
+        Ok(linker)
+    }
+
+    /// Attempts linking via `zksolc`.
+    ///
+    /// Linking is first attempted with the CREATE2 deployer when possible,
+    /// otherwise with plain CREATE.
+    /// After linking, the list of `DualCompiledContracts` is updated with
+    /// the newly linked contracts (and their EVM equivalents).
+    /// Finally, the list of known contracts is returned.
+    ///
+    /// If compilation for zksync is not enabled, the given EVM-linked
+    /// artifacts are returned unchanged.
+    pub(super) async fn zk_link(
+        &mut self,
+        script_config: &ScriptConfig,
+        known_libraries: Libraries,
+        evm_linked_contracts: ArtifactContracts,
+        use_create2: bool,
+    ) -> Result<ArtifactContracts> {
+        if !script_config.config.zksync.should_compile() {
+            return Ok(evm_linked_contracts);
+        }
+
+        let Some(input) = self.zk_output.as_ref() else {
+            eyre::bail!("unable to link zk artifacts if no zk compilation output is provided");
+        };
+
+        let mut dual_compiled_contracts = self.dual_compiled_contracts.take().unwrap_or_default();
+
+        // NOTE(zk): translate the solc ArtifactId to its zksolc counterpart, otherwise
+        // we won't be able to find it in the zksolc output
+        let Some(target) = input
+            .artifact_ids()
+            .map(|(id, _)| id)
+            .find(|id| id.source == self.target.source && id.name == self.target.name)
+        else {
+            eyre::bail!("unable to find zk target artifact for linking");
+        };
+        let target = &target;
+
+        let linker = self.get_zk_linker(script_config)?;
+
+        let create2_deployer = DEFAULT_CREATE2_DEPLOYER_ZKSYNC;
+        let maybe_create2_link_output = use_create2
+            .then(|| {
+                linker
+                    .zk_link_with_create2(
+                        known_libraries.clone(),
+                        create2_deployer,
+                        script_config.config.create2_library_salt,
+                        target,
+                    )
+                    .ok()
+            })
+            .flatten();
+
+        let libraries = if let Some(output) = maybe_create2_link_output {
+            output.libraries
+        } else {
+            let output = linker.zk_link_with_nonce_or_address(
+                known_libraries,
+                script_config.evm_opts.sender,
+                script_config.sender_nonce,
+                [target],
+            )?;
+
+            output.libraries
+        };
+
+        let mut factory_deps = Default::default();
+        let mut libs = Default::default();
+        linker
+            .zk_collect_dependencies(target, &mut libs, Some(&mut factory_deps))
+            .expect("able to enumerate all deps");
+
+        let linked_contracts = linker
+            .zk_get_linked_artifacts(
+                // only retrieve target and its deps
+                factory_deps.into_iter().chain(libs.into_iter()).chain([target]),
+                &libraries,
+            )
+            .context("retrieving all fully linked contracts")?;
+
+        let
newly_linked_dual_compiled_contracts = linked_contracts + .iter() + .flat_map(|(needle, zk)| { + evm_linked_contracts + .iter() + .find(|(id, _)| id.source == needle.source && id.name == needle.name) + .map(|(_, evm)| (needle, zk, evm)) + }) + .filter(|(_, zk, evm)| zk.bytecode.is_some() && evm.bytecode.is_some()) + .map(|(id, linked_zk, evm)| { + let (_, unlinked_zk_artifact) = input + .artifact_ids() + .find(|(contract_id, _)| contract_id == id) + .expect("unable to find original (pre-linking) artifact"); + + let zk_bytecode = + linked_zk.get_bytecode_bytes().expect("no EraVM bytecode (or unlinked)"); + let zk_hash = hash_bytecode(&zk_bytecode); + let evm_deployed = evm + .get_deployed_bytecode_bytes() + .expect("no EVM deployed bytecode (or unlinked)"); + let evm_bytecode = evm.get_bytecode_bytes().expect("no EVM bytecode (or unlinked)"); + let contract_info = ContractInfo { + name: id.name.clone(), + path: Some(id.source.to_string_lossy().into_owned()), + }; + let contract = DualCompiledContract { + zk_bytecode_hash: zk_hash, + zk_deployed_bytecode: zk_bytecode.to_vec(), + // rest of factory deps is populated later + zk_factory_deps: vec![zk_bytecode.to_vec()], + evm_bytecode_hash: B256::from_slice(&keccak256(evm_deployed.as_ref())[..]), + // TODO(zk): determine if this is ok, as it's + // not really used in dual compiled contracts + evm_deployed_bytecode: evm_deployed.to_vec(), + evm_bytecode: evm_bytecode.to_vec(), + }; + + let mut factory_deps = unlinked_zk_artifact.all_factory_deps().collect::>(); + factory_deps.dedup(); + + ((contract_info.clone(), contract), (contract_info, factory_deps)) + }); + + let (new_contracts, new_contracts_deps): (Vec<_>, HashMap<_, _>) = + newly_linked_dual_compiled_contracts.unzip(); + dual_compiled_contracts.extend(new_contracts); + + // now that we have an updated list of DualCompiledContracts + // retrieve all the factory deps for a given contracts and store them + new_contracts_deps.into_iter().for_each(|(info, deps)| { + deps.into_iter().for_each(|dep| { + let mut split = dep.split(':'); + let path = split.next().expect("malformed factory dep path"); + let name = split.next().expect("malformed factory dep name"); + + let bytecode = dual_compiled_contracts + .find(Some(path), Some(name)) + .next() + .expect("unknown factory dep") + .1 + .zk_deployed_bytecode + .clone(); + + dual_compiled_contracts.insert_factory_deps(&info, Some(bytecode)); + }); + }); + + self.dual_compiled_contracts.replace(dual_compiled_contracts); + + // base zksolc contracts + newly linked + evm contracts + let contracts = input + .artifact_ids() + .map(|(id, v)| (id, CompactContractBytecode::from(CompactContractBytecodeCow::from(v)))) + .chain(linked_contracts) + .chain(evm_linked_contracts) + .collect(); + + Ok(contracts) + } +} diff --git a/crates/script/src/runner.rs b/crates/script/src/runner.rs index 6fd579f10..53b315007 100644 --- a/crates/script/src/runner.rs +++ b/crates/script/src/runner.rs @@ -1,15 +1,17 @@ use super::ScriptResult; use crate::build::ScriptPredeployLibraries; use alloy_eips::eip7702::SignedAuthorization; -use alloy_primitives::{Address, Bytes, TxKind, U256}; -use alloy_rpc_types::TransactionRequest; +use alloy_primitives::{Address, Bytes, U256}; use alloy_serde::OtherFields; use eyre::Result; use foundry_cheatcodes::BroadcastableTransaction; use foundry_config::Config; use foundry_evm::{ constants::CALLER, - executors::{DeployResult, EvmError, ExecutionErr, Executor, RawCallResult}, + executors::{ + strategy::{DeployLibKind, DeployLibResult}, + 
DeployResult, EvmError, ExecutionErr, Executor, RawCallResult, + }, opts::EvmOpts, revm::interpreter::{return_ok, InstructionResult}, traces::{TraceKind, Traces}, @@ -59,29 +61,36 @@ impl ScriptRunner { let mut library_transactions = VecDeque::new(); let mut traces = Traces::default(); + // NOTE(zk): below we moved the logic into the strategy + // so we can override it in the zksync strategy + // Additionally, we have a list of results to register + // both the EVM and EraVM deployment + // Deploy libraries match libraries { ScriptPredeployLibraries::Default(libraries) => libraries.iter().for_each(|code| { - let result = self + let results = self .executor - .deploy(self.evm_opts.sender, code.clone(), U256::ZERO, None) - .expect("couldn't deploy library") - .raw; - - if let Some(deploy_traces) = result.traces { - traces.push((TraceKind::Deployment, deploy_traces)); - } + .deploy_library( + self.evm_opts.sender, + DeployLibKind::Create(code.clone()), + U256::ZERO, + None, + ) + .expect("couldn't deploy library"); + + for DeployLibResult { result, tx } in results { + if let Some(deploy_traces) = result.raw.traces { + traces.push((TraceKind::Deployment, deploy_traces)); + } - library_transactions.push_back(BroadcastableTransaction { - rpc: self.evm_opts.fork_url.clone(), - transaction: TransactionRequest { - from: Some(self.evm_opts.sender), - input: code.clone().into(), - nonce: Some(sender_nonce + library_transactions.len() as u64), - ..Default::default() + if let Some(transaction) = tx { + library_transactions.push_back(BroadcastableTransaction { + rpc: self.evm_opts.fork_url.clone(), + transaction, + }); } - .into(), - }) + } }), ScriptPredeployLibraries::Create2(libraries, salt) => { let create2_deployer = self.executor.create2_deployer(); @@ -91,40 +100,30 @@ impl ScriptRunner { if !self.executor.is_empty_code(address)? { continue; } - let calldata = [salt.as_ref(), library.as_ref()].concat(); - let result = self + + let results = self .executor - .transact_raw( + .deploy_library( self.evm_opts.sender, - create2_deployer, - calldata.clone().into(), + DeployLibKind::Create2(*salt, library.clone()), U256::from(0), + None, ) .expect("couldn't deploy library"); - if let Some(deploy_traces) = result.traces { - traces.push((TraceKind::Deployment, deploy_traces)); - } + for DeployLibResult { result, tx } in results { + if let Some(deploy_traces) = result.raw.traces { + traces.push((TraceKind::Deployment, deploy_traces)); + } - library_transactions.push_back(BroadcastableTransaction { - rpc: self.evm_opts.fork_url.clone(), - transaction: TransactionRequest { - from: Some(self.evm_opts.sender), - input: calldata.into(), - nonce: Some(sender_nonce + library_transactions.len() as u64), - to: Some(TxKind::Call(create2_deployer)), - ..Default::default() + if let Some(transaction) = tx { + library_transactions.push_back(BroadcastableTransaction { + rpc: self.evm_opts.fork_url.clone(), + transaction, + }); } - .into(), - }); + } } - - // Sender nonce is not incremented when performing CALLs. We need to manually - // increase it. 
- self.executor.set_nonce( - self.evm_opts.sender, - sender_nonce + library_transactions.len() as u64, - )?; } }; diff --git a/crates/strategy/zksync/Cargo.toml b/crates/strategy/zksync/Cargo.toml index c9b7f9e3e..3b61abaf2 100644 --- a/crates/strategy/zksync/Cargo.toml +++ b/crates/strategy/zksync/Cargo.toml @@ -17,10 +17,12 @@ alloy-sol-types.workspace = true alloy-json-abi.workspace = true alloy-zksync.workspace = true foundry-common.workspace = true +foundry-compilers.workspace = true foundry-config.workspace = true foundry-evm.workspace = true foundry-evm-core.workspace = true foundry-cheatcodes.workspace = true +foundry-linking.workspace = true foundry-zksync-core.workspace = true foundry-zksync-compilers.workspace = true diff --git a/crates/strategy/zksync/src/cheatcode/context.rs b/crates/strategy/zksync/src/cheatcode/context.rs index 08b4bf7ef..91ba84093 100644 --- a/crates/strategy/zksync/src/cheatcode/context.rs +++ b/crates/strategy/zksync/src/cheatcode/context.rs @@ -3,6 +3,7 @@ use std::collections::HashSet; use alloy_primitives::{keccak256, map::HashMap, Address, Bytes, B256}; use alloy_sol_types::SolValue; use foundry_cheatcodes::strategy::CheatcodeInspectorStrategyContext; +use foundry_compilers::info::ContractInfo; use foundry_evm_core::constants::{CHEATCODE_ADDRESS, CHEATCODE_CONTRACT_HASH}; use foundry_zksync_compilers::dual_compiled_contracts::{ DualCompiledContract, DualCompiledContracts, @@ -65,32 +66,38 @@ impl ZksyncCheatcodeInspectorStrategyContext { let zk_deployed_bytecode = foundry_zksync_core::EMPTY_CODE.to_vec(); let mut dual_compiled_contracts = dual_compiled_contracts; - dual_compiled_contracts.push(DualCompiledContract { - name: String::from("EmptyEVMBytecode"), - zk_bytecode_hash, - zk_deployed_bytecode: zk_deployed_bytecode.clone(), - zk_factory_deps: Default::default(), - evm_bytecode_hash: B256::from_slice(&keccak256(&empty_bytes)[..]), - evm_deployed_bytecode: Bytecode::new_raw(empty_bytes.clone()).bytecode().to_vec(), - evm_bytecode: Bytecode::new_raw(empty_bytes).bytecode().to_vec(), - }); + dual_compiled_contracts.insert( + ContractInfo::new("EmptyEVMBytecode"), + DualCompiledContract { + zk_bytecode_hash, + zk_deployed_bytecode: zk_deployed_bytecode.clone(), + zk_factory_deps: Default::default(), + evm_bytecode_hash: B256::from_slice(&keccak256(&empty_bytes)[..]), + evm_deployed_bytecode: Bytecode::new_raw(empty_bytes.clone()).bytecode().to_vec(), + evm_bytecode: Bytecode::new_raw(empty_bytes).bytecode().to_vec(), + }, + ); let cheatcodes_bytecode = { let mut bytecode = CHEATCODE_ADDRESS.abi_encode_packed(); bytecode.append(&mut [0; 12].to_vec()); Bytes::from(bytecode) }; - dual_compiled_contracts.push(DualCompiledContract { - name: String::from("CheatcodeBytecode"), - // we put a different bytecode hash here so when importing back to EVM - // we avoid collision with EmptyEVMBytecode for the cheatcodes - zk_bytecode_hash: foundry_zksync_core::hash_bytecode(CHEATCODE_CONTRACT_HASH.as_ref()), - zk_deployed_bytecode: cheatcodes_bytecode.to_vec(), - zk_factory_deps: Default::default(), - evm_bytecode_hash: CHEATCODE_CONTRACT_HASH, - evm_deployed_bytecode: cheatcodes_bytecode.to_vec(), - evm_bytecode: cheatcodes_bytecode.to_vec(), - }); + dual_compiled_contracts.insert( + ContractInfo::new("CheatcodeBytecode"), + DualCompiledContract { + // we put a different bytecode hash here so when importing back to EVM + // we avoid collision with EmptyEVMBytecode for the cheatcodes + zk_bytecode_hash: foundry_zksync_core::hash_bytecode( + 
CHEATCODE_CONTRACT_HASH.as_ref(), + ), + zk_deployed_bytecode: cheatcodes_bytecode.to_vec(), + zk_factory_deps: Default::default(), + evm_bytecode_hash: CHEATCODE_CONTRACT_HASH, + evm_deployed_bytecode: cheatcodes_bytecode.to_vec(), + evm_bytecode: cheatcodes_bytecode.to_vec(), + }, + ); let mut persisted_factory_deps = HashMap::new(); persisted_factory_deps.insert(zk_bytecode_hash, zk_deployed_bytecode); diff --git a/crates/strategy/zksync/src/cheatcode/runner/cheatcode_handlers.rs b/crates/strategy/zksync/src/cheatcode/runner/cheatcode_handlers.rs index ea274ab30..a1295d1cf 100644 --- a/crates/strategy/zksync/src/cheatcode/runner/cheatcode_handlers.rs +++ b/crates/strategy/zksync/src/cheatcode/runner/cheatcode_handlers.rs @@ -13,6 +13,7 @@ use foundry_cheatcodes::{ zkVmCall, zkVmSkipCall, }, }; +use foundry_compilers::info::ContractInfo; use foundry_evm::backend::LocalForkId; use foundry_zksync_compilers::dual_compiled_contracts::DualCompiledContract; use foundry_zksync_core::{ZkPaymasterData, H256}; @@ -213,8 +214,8 @@ impl ZksyncCheatcodeInspectorStrategyRunner { let ctx = get_context(ccx.state.strategy.context.as_mut()); let zk_factory_deps = vec![]; //TODO: add argument to cheatcode + let new_contract_info = ContractInfo::new(name); let new_contract = DualCompiledContract { - name: name.clone(), zk_bytecode_hash: H256(zkBytecodeHash.0), zk_deployed_bytecode: zkDeployedBytecode.to_vec(), zk_factory_deps, @@ -223,18 +224,20 @@ impl ZksyncCheatcodeInspectorStrategyRunner { evm_bytecode: evmBytecode.to_vec(), }; - if let Some(existing) = ctx.dual_compiled_contracts.iter().find(|contract| { - contract.evm_bytecode_hash == new_contract.evm_bytecode_hash && - contract.zk_bytecode_hash == new_contract.zk_bytecode_hash - }) { + if let Some((existing, _)) = + ctx.dual_compiled_contracts.iter().find(|(_, contract)| { + contract.evm_bytecode_hash == new_contract.evm_bytecode_hash && + contract.zk_bytecode_hash == new_contract.zk_bytecode_hash + }) + { warn!( name = existing.name, "contract already exists with the given bytecode hashes" ); - return Ok(Default::default()) + return Ok(Default::default()); } - ctx.dual_compiled_contracts.push(new_contract); + ctx.dual_compiled_contracts.insert(new_contract_info, new_contract); Ok(Default::default()) } diff --git a/crates/strategy/zksync/src/cheatcode/runner/mod.rs b/crates/strategy/zksync/src/cheatcode/runner/mod.rs index 6ef9e685f..2ae0e7a36 100644 --- a/crates/strategy/zksync/src/cheatcode/runner/mod.rs +++ b/crates/strategy/zksync/src/cheatcode/runner/mod.rs @@ -456,7 +456,7 @@ impl CheatcodeInspectorStrategyExt for ZksyncCheatcodeInspectorStrategyRunner { ctx.skip_zk_vm = false; // handled the skip, reset flag ctx.record_next_create_address = true; info!("running create in EVM, instead of zkEVM (skipped)"); - return None + return None; } if let Some(CreateScheme::Create) = input.scheme() { @@ -471,14 +471,14 @@ impl CheatcodeInspectorStrategyExt for ZksyncCheatcodeInspectorStrategyRunner { let address = caller.create(nonce); if ecx.db.get_test_contract_address().map(|addr| address == addr).unwrap_or_default() { info!("running create in EVM, instead of zkEVM (Test Contract) {:#?}", address); - return None + return None; } } let init_code = input.init_code(); if init_code.0 == DEFAULT_CREATE2_DEPLOYER_CODE { info!("running create in EVM, instead of zkEVM (DEFAULT_CREATE2_DEPLOYER_CODE)"); - return None + return None; } info!("running create in zkEVM"); @@ -489,6 +489,7 @@ impl CheatcodeInspectorStrategyExt for 
ZksyncCheatcodeInspectorStrategyRunner { .unwrap_or_else(|| panic!("failed finding contract for {init_code:?}")); let constructor_args = find_contract.constructor_args(); + let info = find_contract.info(); let contract = find_contract.contract(); let zk_create_input = foundry_zksync_core::encode_create_params( @@ -522,7 +523,7 @@ impl CheatcodeInspectorStrategyExt for ZksyncCheatcodeInspectorStrategyRunner { // NOTE(zk): Clear injected factory deps so that they are not sent on further transactions ctx.zk_use_factory_deps.clear(); - tracing::debug!(contract = contract.name, "using dual compiled contract"); + tracing::debug!(contract = info.name, "using dual compiled contract"); let ccx = foundry_zksync_core::vm::CheatcodeTracerContext { mocked_calls: state.mocked_calls.clone(), @@ -697,7 +698,7 @@ impl CheatcodeInspectorStrategyExt for ZksyncCheatcodeInspectorStrategyRunner { "running call in EVM, instead of zkEVM (Test Contract) {:#?}", call.bytecode_address ); - return None + return None; } info!("running call in zkEVM {:#?}", call); @@ -849,7 +850,7 @@ impl ZksyncCheatcodeInspectorStrategyRunner { ) { if !ctx.using_zk_vm { tracing::info!("already in EVM"); - return + return; } tracing::info!("switching to EVM"); @@ -892,7 +893,7 @@ impl ZksyncCheatcodeInspectorStrategyRunner { .and_then(|zk_bytecode_hash| { ctx.dual_compiled_contracts .find_by_zk_bytecode_hash(zk_bytecode_hash.to_h256()) - .map(|contract| { + .map(|(_, contract)| { ( contract.evm_bytecode_hash, Some(Bytecode::new_raw(Bytes::from( @@ -926,7 +927,7 @@ impl ZksyncCheatcodeInspectorStrategyRunner { ) { if ctx.using_zk_vm { tracing::info!("already in ZK-VM"); - return + return; } tracing::info!("switching to ZK-VM"); @@ -968,7 +969,7 @@ impl ZksyncCheatcodeInspectorStrategyRunner { continue; } - if let Some(contract) = ctx.dual_compiled_contracts.iter().find(|contract| { + if let Some((_, contract)) = ctx.dual_compiled_contracts.iter().find(|(_, contract)| { info.code_hash != KECCAK_EMPTY && info.code_hash == contract.evm_bytecode_hash }) { account_code_storage.insert( diff --git a/crates/strategy/zksync/src/executor/context.rs b/crates/strategy/zksync/src/executor/context.rs index f3d902bb3..8d3b1acd0 100644 --- a/crates/strategy/zksync/src/executor/context.rs +++ b/crates/strategy/zksync/src/executor/context.rs @@ -1,11 +1,16 @@ +use foundry_compilers::ProjectCompileOutput; use foundry_evm::executors::strategy::ExecutorStrategyContext; -use foundry_zksync_compilers::dual_compiled_contracts::DualCompiledContracts; +use foundry_zksync_compilers::{ + compilers::{artifact_output::zk::ZkArtifactOutput, zksolc::ZkSolcCompiler}, + dual_compiled_contracts::DualCompiledContracts, +}; use foundry_zksync_core::{vm::ZkEnv, ZkTransactionMetadata}; /// Defines the context for [ZksyncExecutorStrategyRunner]. 
#[derive(Debug, Default, Clone)] pub struct ZksyncExecutorStrategyContext { pub(super) transaction_context: Option, + pub(super) compilation_output: Option>, pub(super) dual_compiled_contracts: DualCompiledContracts, pub(super) zk_env: ZkEnv, } diff --git a/crates/strategy/zksync/src/executor/runner.rs b/crates/strategy/zksync/src/executor/runner.rs index 03231d880..ed891d13e 100644 --- a/crates/strategy/zksync/src/executor/runner.rs +++ b/crates/strategy/zksync/src/executor/runner.rs @@ -1,25 +1,34 @@ +use std::path::Path; + use alloy_primitives::{Address, U256}; use alloy_rpc_types::serde_helpers::OtherFields; use alloy_zksync::provider::{zksync_provider, ZksyncProvider}; use eyre::Result; +use foundry_linking::LinkerError; +use revm::{ + primitives::{Env, EnvWithHandlerCfg, ResultAndState}, + Database, +}; +use foundry_compilers::ProjectCompileOutput; +use foundry_config::Config; use foundry_evm::{ backend::{Backend, BackendResult, CowBackend}, + decode::RevertDecoder, executors::{ strategy::{ - EvmExecutorStrategyRunner, ExecutorStrategyContext, ExecutorStrategyExt, - ExecutorStrategyRunner, + DeployLibKind, DeployLibResult, EvmExecutorStrategyRunner, ExecutorStrategyContext, + ExecutorStrategyExt, ExecutorStrategyRunner, LinkOutput, }, - Executor, + EvmError, Executor, }, inspectors::InspectorStack, }; -use foundry_zksync_compilers::dual_compiled_contracts::DualCompiledContracts; -use foundry_zksync_core::vm::ZkEnv; -use revm::{ - primitives::{Env, EnvWithHandlerCfg, ResultAndState}, - Database, +use foundry_zksync_compilers::{ + compilers::{artifact_output::zk::ZkArtifactOutput, zksolc::ZkSolcCompiler}, + dual_compiled_contracts::DualCompiledContracts, }; +use foundry_zksync_core::vm::ZkEnv; use crate::{ backend::{ZksyncBackendStrategyBuilder, ZksyncInspectContext}, @@ -27,10 +36,30 @@ use crate::{ executor::{try_get_zksync_transaction_metadata, ZksyncExecutorStrategyContext}, }; +mod libraries; + /// Defines the [ExecutorStrategyRunner] strategy for ZKsync. 
#[derive(Debug, Default, Clone)] pub struct ZksyncExecutorStrategyRunner; +impl ZksyncExecutorStrategyRunner { + fn set_deployment_nonce( + executor: &mut Executor, + address: Address, + nonce: u64, + ) -> BackendResult<()> { + let (address, slot) = foundry_zksync_core::state::get_nonce_storage(address); + // fetch the full nonce to preserve account's tx nonce + let full_nonce = executor.backend.storage(address, slot)?; + let full_nonce = foundry_zksync_core::state::parse_full_nonce(full_nonce); + let new_full_nonce = + foundry_zksync_core::state::new_full_nonce(full_nonce.tx_nonce, nonce as u128); + executor.backend.insert_account_storage(address, slot, new_full_nonce)?; + + Ok(()) + } +} + fn get_context_ref(ctx: &dyn ExecutorStrategyContext) -> &ZksyncExecutorStrategyContext { ctx.as_any_ref().downcast_ref().expect("expected ZksyncExecutorStrategyContext") } @@ -54,6 +83,13 @@ impl ExecutorStrategyRunner for ZksyncExecutorStrategyRunner { Ok(()) } + fn get_balance(&self, executor: &mut Executor, address: Address) -> BackendResult { + let (address, slot) = foundry_zksync_core::state::get_balance_storage(address); + let balance = executor.backend.storage(address, slot)?; + + Ok(balance) + } + fn set_nonce( &self, executor: &mut Executor, @@ -73,6 +109,36 @@ impl ExecutorStrategyRunner for ZksyncExecutorStrategyRunner { Ok(()) } + fn get_nonce(&self, executor: &mut Executor, address: Address) -> BackendResult { + let (address, slot) = foundry_zksync_core::state::get_nonce_storage(address); + let full_nonce = executor.backend.storage(address, slot)?; + let full_nonce = foundry_zksync_core::state::parse_full_nonce(full_nonce); + + Ok(full_nonce.tx_nonce as u64) + } + + fn link( + &self, + ctx: &mut dyn ExecutorStrategyContext, + config: &Config, + root: &Path, + input: &ProjectCompileOutput, + deployer: Address, + ) -> Result { + self.link_impl(ctx, config, root, input, deployer) + } + + fn deploy_library( + &self, + executor: &mut Executor, + from: Address, + kind: DeployLibKind, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result, EvmError> { + self.deploy_library_impl(executor, from, kind, value, rd) + } + fn new_backend_strategy(&self) -> foundry_evm_core::backend::strategy::BackendStrategy { foundry_evm_core::backend::strategy::BackendStrategy::new_zksync() } @@ -154,6 +220,15 @@ impl ExecutorStrategyExt for ZksyncExecutorStrategyRunner { ctx.dual_compiled_contracts = dual_compiled_contracts; } + fn zksync_set_compilation_output( + &self, + ctx: &mut dyn ExecutorStrategyContext, + output: ProjectCompileOutput, + ) { + let ctx = get_context(ctx); + ctx.compilation_output.replace(output); + } + fn zksync_set_fork_env( &self, ctx: &mut dyn ExecutorStrategyContext, diff --git a/crates/strategy/zksync/src/executor/runner/libraries.rs b/crates/strategy/zksync/src/executor/runner/libraries.rs new file mode 100644 index 000000000..6ef500412 --- /dev/null +++ b/crates/strategy/zksync/src/executor/runner/libraries.rs @@ -0,0 +1,295 @@ +//! Contains various definitions and items related to deploy-time linking +//! 
for zksync + +use std::{collections::HashMap, path::Path}; + +use alloy_primitives::{keccak256, Address, Bytes, TxKind, B256, U256}; +use alloy_zksync::contracts::l2::contract_deployer::CONTRACT_DEPLOYER_ADDRESS; +use foundry_common::{ContractsByArtifact, TransactionMaybeSigned}; +use foundry_compilers::{ + artifacts::CompactContractBytecodeCow, contracts::ArtifactContracts, info::ContractInfo, + Artifact, ProjectCompileOutput, +}; +use foundry_config::Config; +use foundry_evm::{ + backend::DatabaseExt, + decode::RevertDecoder, + executors::{ + strategy::{ + DeployLibKind, DeployLibResult, EvmExecutorStrategyRunner, ExecutorStrategyContext, + ExecutorStrategyRunner, LinkOutput, + }, + DeployResult, EvmError, Executor, + }, +}; +use foundry_linking::{ + LinkerError, ZkLinker, ZkLinkerError, DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION, +}; +use foundry_zksync_compilers::dual_compiled_contracts::DualCompiledContract; +use foundry_zksync_core::{ + encode_create_params, hash_bytecode, ZkTransactionMetadata, DEFAULT_CREATE2_DEPLOYER_ZKSYNC, + ZKSYNC_TRANSACTION_OTHER_FIELDS_KEY, +}; +use revm::primitives::{CreateScheme, Output}; + +use super::{get_context, ZksyncExecutorStrategyRunner}; + +impl ZksyncExecutorStrategyRunner { + pub(super) fn link_impl( + &self, + ctx: &mut dyn ExecutorStrategyContext, + config: &Config, + root: &Path, + input: &ProjectCompileOutput, + deployer: Address, + ) -> Result { + let evm_link = EvmExecutorStrategyRunner.link(ctx, config, root, input, deployer)?; + + let ctx = get_context(ctx); + let Some(input) = ctx.compilation_output.as_ref() else { + return Err(LinkerError::MissingTargetArtifact); + }; + + // we don't strip here unlinke upstream due to + // `input` being used later during linking + // and that is unstripped + let contracts: ArtifactContracts> = + input.artifact_ids().collect(); + + let Ok(zksolc) = foundry_config::zksync::config_zksolc_compiler(config) else { + tracing::error!("unable to determine zksolc compiler to be used for linking"); + // TODO(zk): better error + return Err(LinkerError::CyclicDependency); + }; + let version = zksolc.version().map_err(|_| LinkerError::CyclicDependency)?; + + let linker = ZkLinker::new(root, contracts.clone(), zksolc, input); + + let zk_linker_error_to_linker = |zk_error| match zk_error { + ZkLinkerError::Inner(err) => err, + // TODO(zk): better error value + ZkLinkerError::MissingLibraries(libs) => LinkerError::MissingLibraryArtifact { + file: "libraries".to_string(), + name: libs.len().to_string(), + }, + ZkLinkerError::MissingFactoryDeps(libs) => LinkerError::MissingLibraryArtifact { + file: "factoryDeps".to_string(), + name: libs.len().to_string(), + }, + }; + + let foundry_linking::LinkOutput { libraries, libs_to_deploy: _ } = linker + .zk_link_with_nonce_or_address( + Default::default(), + deployer, + // NOTE(zk): match with EVM nonces as we will be doing a duplex deployment for + // the libs + 0, + linker.linker.contracts.keys(), // link everything + ) + .map_err(zk_linker_error_to_linker)?; + + // if we have no libraries then no linking will happen + // so we can skip the version check + if !libraries.is_empty() { + // TODO(zk): better error + if version < DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION { + tracing::error!( + %version, + minimum_version = %DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION, + "deploy-time linking not supported" + ); + return Err(LinkerError::CyclicDependency); + } + } + + let linked_contracts = linker + .zk_get_linked_artifacts(linker.linker.contracts.keys(), &libraries) + 
.map_err(zk_linker_error_to_linker)?; + + let newly_linked_dual_compiled_contracts = linked_contracts + .iter() + .flat_map(|(needle, zk)| { + // match EVM linking's prefix stripping + let stripped = needle.clone().with_stripped_file_prefixes(root); + evm_link + .linked_contracts + .iter() + .find(|(id, _)| id.source == stripped.source && id.name == stripped.name) + .map(|(_, evm)| (needle, stripped, zk, evm)) + }) + .filter(|(_, _, zk, evm)| zk.bytecode.is_some() && evm.bytecode.is_some()) + .map(|(unstripped_id, id, linked_zk, evm)| { + let (_, unlinked_zk_artifact) = input + .artifact_ids() + .find(|(contract_id, _)| contract_id == unstripped_id) + .expect("unable to find original (pre-linking) artifact"); + let zk_bytecode = + linked_zk.get_bytecode_bytes().expect("no EraVM bytecode (or unlinked)"); + let zk_hash = hash_bytecode(&zk_bytecode); + let evm_deployed = + evm.get_deployed_bytecode_bytes().expect("no EVM bytecode (or unlinked)"); + let evm_bytecode = evm.get_bytecode_bytes().expect("no EVM bytecode (or unlinked)"); + let contract_info = ContractInfo { + name: id.name.clone(), + path: Some(id.source.to_string_lossy().into_owned()), + }; + let contract = DualCompiledContract { + zk_bytecode_hash: zk_hash, + zk_deployed_bytecode: zk_bytecode.to_vec(), + // rest of factory deps is populated later + zk_factory_deps: vec![zk_bytecode.to_vec()], + evm_bytecode_hash: B256::from_slice(&keccak256(evm_deployed.as_ref())[..]), + // TODO(zk): determine if this is ok, as it's + // not really used in dual compiled contracts + evm_deployed_bytecode: evm_deployed.to_vec(), + evm_bytecode: evm_bytecode.to_vec(), + }; + + let mut factory_deps = unlinked_zk_artifact.all_factory_deps().collect::>(); + factory_deps.dedup(); + + ((contract_info.clone(), contract), (contract_info, factory_deps)) + }); + + let (new_contracts, new_contracts_deps): (Vec<_>, HashMap<_, _>) = + newly_linked_dual_compiled_contracts.unzip(); + ctx.dual_compiled_contracts.extend(new_contracts); + + // now that we have an updated list of DualCompiledContracts + // retrieve all the factory deps for a given contracts and store them + new_contracts_deps.into_iter().for_each(|(info, deps)| { + deps.into_iter().for_each(|dep| { + let mut split = dep.split(':'); + let path = split.next().expect("malformed factory dep path"); + let name = split.next().expect("malformed factory dep name"); + + let bytecode = ctx + .dual_compiled_contracts + .find(Some(path), Some(name)) + .next() + .expect("unknown factory dep") + .1 + .zk_deployed_bytecode + .clone(); + + ctx.dual_compiled_contracts.insert_factory_deps(&info, Some(bytecode)); + }); + }); + + let linked_contracts: ArtifactContracts = contracts + .into_iter() + .map(|(id, art)| (id, foundry_compilers::artifacts::CompactContractBytecode::from(art))) + // Extend original zk contracts with newly linked ones + .chain(linked_contracts) + // Extend zk contracts with solc contracts as well. This is required for traces to + // accurately detect contract names deployed in EVM mode, and when using + // `vm.zkVmSkip()` cheatcode. 
+ .chain(evm_link.linked_contracts) + .collect(); + + Ok(LinkOutput { + deployable_contracts: evm_link.deployable_contracts, + revert_decoder: evm_link.revert_decoder, + known_contracts: ContractsByArtifact::new(linked_contracts.clone()), + linked_contracts, + libs_to_deploy: evm_link.libs_to_deploy, + libraries: evm_link.libraries, + }) + } + + pub(super) fn deploy_library_impl( + &self, + executor: &mut Executor, + from: Address, + kind: DeployLibKind, + value: U256, + rd: Option<&RevertDecoder>, + ) -> Result, EvmError> { + // sync deployer account info + let nonce = EvmExecutorStrategyRunner.get_nonce(executor, from).expect("deployer to exist"); + let balance = + EvmExecutorStrategyRunner.get_balance(executor, from).expect("deployer to exist"); + + Self::set_deployment_nonce(executor, from, nonce).map_err(|err| eyre::eyre!(err))?; + self.set_balance(executor, from, balance).map_err(|err| eyre::eyre!(err))?; + tracing::debug!(?nonce, ?balance, sender = ?from, "deploying lib in EraVM"); + + let mut evm_deployment = + EvmExecutorStrategyRunner.deploy_library(executor, from, kind.clone(), value, rd)?; + + let ctx = get_context(executor.strategy.context.as_mut()); + + let (code, create_scheme, to) = match kind { + DeployLibKind::Create(bytes) => { + (bytes, CreateScheme::Create, CONTRACT_DEPLOYER_ADDRESS) + } + DeployLibKind::Create2(salt, bytes) => ( + bytes, + CreateScheme::Create2 { salt: salt.into() }, + DEFAULT_CREATE2_DEPLOYER_ZKSYNC, + ), + }; + + // lookup dual compiled contract based on EVM bytecode + let Some((_, dual_contract)) = + ctx.dual_compiled_contracts.find_by_evm_bytecode(code.as_ref()) + else { + // we don't know what the equivalent zk contract would be + return Ok(evm_deployment); + }; + + // no need for constructor args as it's a lib + let create_params: Bytes = + encode_create_params(&create_scheme, dual_contract.zk_bytecode_hash, vec![]).into(); + + // populate ctx.transaction_context with factory deps + // we also populate the ctx so the deployment is executed + // entirely in EraVM + let factory_deps = ctx.dual_compiled_contracts.fetch_all_factory_deps(dual_contract); + tracing::debug!(n_fdeps = factory_deps.len()); + + // persist existing paymaster data (TODO(zk): is this needed?) 
+ let paymaster_data = + ctx.transaction_context.take().and_then(|metadata| metadata.paymaster_data); + let metadata = ZkTransactionMetadata { factory_deps, paymaster_data }; + ctx.transaction_context = Some(metadata.clone()); + + let result = executor.transact_raw(from, to, create_params.clone(), value)?; + let result = result.into_result(rd)?; + + let Some(Output::Create(_, Some(address))) = result.out else { + return Err(eyre::eyre!( + "Deployment succeeded, but no address was returned: {result:#?}" + ) + .into()); + }; + + // also mark this library as persistent, this will ensure that the state of the library is + // persistent across fork swaps in forking mode + executor.backend_mut().add_persistent_account(address); + tracing::debug!(%address, "deployed contract"); + + let mut request = TransactionMaybeSigned::new(Default::default()); + let unsigned = request.as_unsigned_mut().unwrap(); + unsigned.from = Some(from); + unsigned.input = create_params.into(); + unsigned.nonce = Some(nonce); + // we use the deployer here for consistency with linking + unsigned.to = Some(TxKind::Call(to)); + unsigned.other.insert( + ZKSYNC_TRANSACTION_OTHER_FIELDS_KEY.to_string(), + serde_json::to_value(metadata).expect("failed encoding json"), + ); + + // ignore all EVM broadcastables + evm_deployment.iter_mut().for_each(|result| { + result.tx.take(); + }); + evm_deployment.push(DeployLibResult { + result: DeployResult { raw: result, address }, + tx: Some(request), + }); + Ok(evm_deployment) + } +} diff --git a/crates/zksync/compilers/Cargo.toml b/crates/zksync/compilers/Cargo.toml index 75168a002..0cc213ec5 100644 --- a/crates/zksync/compilers/Cargo.toml +++ b/crates/zksync/compilers/Cargo.toml @@ -21,7 +21,6 @@ serde_json.workspace = true serde.workspace = true semver.workspace = true itertools.workspace = true -eyre.workspace = true walkdir.workspace = true reqwest.workspace = true yansi.workspace = true diff --git a/crates/zksync/compilers/src/artifacts/contract.rs b/crates/zksync/compilers/src/artifacts/contract.rs index dc1623c4e..44296c31c 100644 --- a/crates/zksync/compilers/src/artifacts/contract.rs +++ b/crates/zksync/compilers/src/artifacts/contract.rs @@ -6,7 +6,10 @@ use foundry_compilers_artifacts_solc::{ CompactContractRef, CompactDeployedBytecode, DevDoc, Evm, Offsets, StorageLayout, UserDoc, }; use serde::{Deserialize, Serialize}; -use std::{borrow::Cow, collections::BTreeMap}; +use std::{ + borrow::Cow, + collections::{BTreeMap, HashSet}, +}; /// zksolc: Binary object format. #[derive(Debug, Default, Clone, Serialize, Deserialize, PartialEq, Eq)] @@ -41,7 +44,7 @@ impl TryFrom for ObjectFormat { } impl ObjectFormat { - /// Returns `true` if the bytecode is unlinked + /// Returns true if the object format is considered `unlinked` pub fn is_unlinked(&self) -> bool { matches!(self, Self::Elf) } @@ -71,9 +74,17 @@ pub struct Contract { /// The contract EraVM bytecode hash. #[serde(default, skip_serializing_if = "Option::is_none")] pub hash: Option, - /// The contract factory dependencies. 
+ /// Map of factory dependencies, encoded as => : + /// + /// Only contains fully linked factory dependencies, as + /// unlinked factory dependencies do not have a bytecode hash #[serde(default, skip_serializing_if = "Option::is_none")] pub factory_dependencies: Option>, + /// Complete list of factory dependencies, encoded as : + /// + /// Contains both linked and unlinked factory dependencies + #[serde(default, skip_serializing_if = "Option::is_none")] + pub factory_dependencies_unlinked: Option>, /// EraVM-related outputs #[serde(default, skip_serializing_if = "Option::is_none")] pub eravm: Option, diff --git a/crates/zksync/compilers/src/compilers/artifact_output/zk.rs b/crates/zksync/compilers/src/compilers/artifact_output/zk.rs index 6dcfd1142..c2e700035 100644 --- a/crates/zksync/compilers/src/compilers/artifact_output/zk.rs +++ b/crates/zksync/compilers/src/compilers/artifact_output/zk.rs @@ -11,7 +11,11 @@ use foundry_compilers_artifacts_solc::{ CompactDeployedBytecode, }; use serde::{Deserialize, Serialize}; -use std::{borrow::Cow, collections::BTreeMap, path::Path}; +use std::{ + borrow::Cow, + collections::{BTreeMap, HashSet}, + path::Path, +}; mod bytecode; pub use bytecode::ZkArtifactBytecode; @@ -46,15 +50,31 @@ pub struct ZkContractArtifact { /// contract hash #[serde(default, skip_serializing_if = "Option::is_none")] pub hash: Option, - /// contract factory dependencies + /// List of factory dependencies, encoded as => : + /// + /// Only contains fully linked factory dependencies #[serde(default, skip_serializing_if = "Option::is_none")] pub factory_dependencies: Option>, + /// Complete list of factory dependencies, encoded as : + /// + /// Contains both linked and unlinked factory dependencies + #[serde(default, skip_serializing_if = "Option::is_none")] + pub factory_dependencies_unlinked: Option>, /// The identifier of the source file #[serde(default, skip_serializing_if = "Option::is_none")] pub id: Option, } impl ZkContractArtifact { + /// Returns a list of _all_ factory deps, by : + /// + /// Will return unlinked as well as linked factory deps (might contain duplicates) + pub fn all_factory_deps(&self) -> impl Iterator { + let linked = self.factory_dependencies.iter().flatten().map(|(_, dep)| dep); + let unlinked = self.factory_dependencies_unlinked.iter().flatten(); + linked.chain(unlinked) + } + /// Get contract missing libraries pub fn missing_libraries(&self) -> Option<&Vec> { self.bytecode.as_ref().map(|bc| &bc.missing_libraries) @@ -134,6 +154,7 @@ impl ArtifactOutput for ZkArtifactOutput { ir_optimized, hash, factory_dependencies, + factory_dependencies_unlinked, missing_libraries, object_format, } = contract; @@ -151,6 +172,7 @@ impl ArtifactOutput for ZkArtifactOutput { abi, hash, factory_dependencies, + factory_dependencies_unlinked, storage_layout: Some(storage_layout), bytecode, assembly, diff --git a/crates/zksync/compilers/src/compilers/zksolc/mod.rs b/crates/zksync/compilers/src/compilers/zksolc/mod.rs index 113abb301..615991943 100644 --- a/crates/zksync/compilers/src/compilers/zksolc/mod.rs +++ b/crates/zksync/compilers/src/compilers/zksolc/mod.rs @@ -244,6 +244,11 @@ impl ZkSolcCompiler { Ok(zksolc) } + + /// Retrieve the version of the specified `zksolc` + pub fn version(&self) -> Result { + ZkSolc::get_version_for_path(self.zksolc.as_ref()) + } } /// Version metadata. Will include `zksync_version` if compiler is zksync solc. 
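The `version()` accessor added above is what the script build and the executor strategy use to gate deploy-time linking against `DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION`. A minimal, self-contained sketch of that guard follows; it is illustrative only, assumes 1.5.9 as the minimum version, and uses plain `semver` types rather than the actual `ZkSolcCompiler`/linker APIs:

use semver::Version;

/// Illustrative guard: mirrors the check done before deploy-time linking.
/// The 1.5.9 minimum is an assumption here; the real code compares against
/// `DEPLOY_TIME_LINKING_ZKSOLC_MIN_VERSION`.
fn ensure_deploy_time_linking_supported(
    zksolc_version: &Version,
    has_unlinked_libs: bool,
) -> Result<(), String> {
    let min = Version::new(1, 5, 9);
    // the check only matters when there is actually something to link
    if has_unlinked_libs && *zksolc_version < min {
        return Err(format!(
            "deploy-time linking not supported. minimum: {min}, given: {zksolc_version}"
        ));
    }
    Ok(())
}

As in the script and executor paths above, the check is skipped entirely when no libraries need linking, since no linking invocation will occur in that case.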
diff --git a/crates/zksync/compilers/src/dual_compiled_contracts.rs b/crates/zksync/compilers/src/dual_compiled_contracts.rs index 6830f1de9..00571e58b 100644 --- a/crates/zksync/compilers/src/dual_compiled_contracts.rs +++ b/crates/zksync/compilers/src/dual_compiled_contracts.rs @@ -6,8 +6,8 @@ use std::{ }; use foundry_compilers::{ - solc::SolcLanguage, Artifact, ArtifactId, ArtifactOutput, ConfigurableArtifacts, - ProjectCompileOutput, ProjectPathsConfig, + info::ContractInfo, solc::SolcLanguage, Artifact, ArtifactId, ProjectCompileOutput, + ProjectPathsConfig, }; use alloy_primitives::{keccak256, B256}; @@ -28,13 +28,11 @@ pub enum ContractType { /// Defines a contract that has been dual compiled with both zksolc and solc #[derive(Debug, Default, Clone)] pub struct DualCompiledContract { - /// Contract name - pub name: String, /// Deployed bytecode with zksolc pub zk_bytecode_hash: H256, /// Deployed bytecode hash with zksolc pub zk_deployed_bytecode: Vec, - /// Deployed bytecode factory deps + /// Bytecodes of the factory deps for zksolc's deployed bytecode pub zk_factory_deps: Vec>, /// Deployed bytecode hash with solc pub evm_bytecode_hash: B256, @@ -44,14 +42,31 @@ pub struct DualCompiledContract { pub evm_bytecode: Vec, } +/// Indicates the type of match from a `find` search +#[derive(Debug, Clone, Copy, PartialEq, Eq)] +pub enum FindMatchType { + /// The result matched both path and name + FullMatch, + /// The result only matched the path + Path, + /// The result only matched the name + Name, +} + /// Couple contract type with contract and init code pub struct FindBytecodeResult<'a> { r#type: ContractType, + info: &'a ContractInfo, contract: &'a DualCompiledContract, init_code: &'a [u8], } impl<'a> FindBytecodeResult<'a> { + /// Retrieve the found contract's info + pub fn info(&self) -> &'a ContractInfo { + self.info + } + /// Retrieve the found contract pub fn contract(self) -> &'a DualCompiledContract { self.contract @@ -69,7 +84,7 @@ impl<'a> FindBytecodeResult<'a> { /// A collection of `[DualCompiledContract]`s #[derive(Debug, Default, Clone)] pub struct DualCompiledContracts { - contracts: Vec, + contracts: HashMap, /// ZKvm artifacts path pub zk_artifact_path: PathBuf, /// EVM artifacts path @@ -84,37 +99,29 @@ impl DualCompiledContracts { layout: &ProjectPathsConfig, zk_layout: &ProjectPathsConfig, ) -> Self { - let mut dual_compiled_contracts = vec![]; + let mut dual_compiled_contracts = HashMap::new(); let mut solc_bytecodes = HashMap::new(); - let output_artifacts = output - .cached_artifacts() - .artifact_files() - .chain(output.compiled_artifacts().artifact_files()) - .filter_map(|artifact| { - ConfigurableArtifacts::contract_name(&artifact.file) - .map(|name| (name, (&artifact.file, &artifact.artifact))) - }); - let zk_output_artifacts = zk_output - .cached_artifacts() - .artifact_files() - .chain(zk_output.compiled_artifacts().artifact_files()) - .filter_map(|artifact| { - ConfigurableArtifacts::contract_name(&artifact.file) - .map(|name| (name, (&artifact.file, &artifact.artifact))) - }); - - for (_contract_name, (artifact_path, artifact)) in output_artifacts { - let contract_file = artifact_path - .strip_prefix(&layout.artifacts) - .unwrap_or_else(|_| { - panic!( - "failed stripping solc artifact path '{:?}' from '{:?}'", - layout.artifacts, artifact_path - ) - }) - .to_path_buf(); - + let output_artifacts = output.artifact_ids().map(|(id, artifact)| { + ( + ContractInfo { + name: id.name, + path: Some(id.source.to_string_lossy().into_owned()), + }, + artifact, + 
) + }); + let zk_output_artifacts = zk_output.artifact_ids().map(|(id, artifact)| { + ( + ContractInfo { + name: id.name, + path: Some(id.source.to_string_lossy().into_owned()), + }, + artifact, + ) + }); + + for (contract_info, artifact) in output_artifacts { let deployed_bytecode = artifact.get_deployed_bytecode(); let deployed_bytecode = deployed_bytecode .as_ref() @@ -122,7 +129,7 @@ impl DualCompiledContracts { let bytecode = artifact.get_bytecode().and_then(|b| b.object.as_bytes().cloned()); if let Some(bytecode) = bytecode { if let Some(deployed_bytecode) = deployed_bytecode { - solc_bytecodes.insert(contract_file, (bytecode, deployed_bytecode.clone())); + solc_bytecodes.insert(contract_info, (bytecode, deployed_bytecode.clone())); } } } @@ -133,26 +140,14 @@ impl DualCompiledContracts { let mut zksolc_all_bytecodes: HashMap> = Default::default(); for (_, zk_artifact) in zk_output.artifacts() { if let (Some(hash), Some(bytecode)) = (&zk_artifact.hash, &zk_artifact.bytecode) { - // TODO: we can do this because no bytecode object could be unlinked - // at this stage for zksolc, and BytecodeObject as ref will get the bytecode bytes. - // We should be careful however and check/handle errors in - // case an Unlinked BytecodeObject gets here somehow + // NOTE(zk): unlinked objects are _still_ encoded as valid hex + // but the hash wouldn't be present let bytes = bytecode.object().into_bytes().unwrap(); zksolc_all_bytecodes.insert(hash.clone(), bytes.to_vec()); } } - for (contract_name, (artifact_path, artifact)) in zk_output_artifacts { - let contract_file = artifact_path - .strip_prefix(&zk_layout.artifacts) - .unwrap_or_else(|_| { - panic!( - "failed stripping zksolc artifact path '{:?}' from '{:?}'", - zk_layout.artifacts, artifact_path - ) - }) - .to_path_buf(); - + for (contract_info, artifact) in zk_output_artifacts { let maybe_bytecode = &artifact.bytecode; let maybe_hash = &artifact.hash; let maybe_factory_deps = &artifact.factory_dependencies; @@ -161,13 +156,10 @@ impl DualCompiledContracts { (maybe_bytecode, maybe_hash, maybe_factory_deps) { if let Some((solc_bytecode, solc_deployed_bytecode)) = - solc_bytecodes.get(&contract_file) + solc_bytecodes.get(&contract_info) { - // TODO: we can do this because no bytecode object could be unlinked - // at this stage for zksolc, and BytecodeObject as ref will get the bytecode - // bytes. 
However, we should check and - // handle errors in case an Unlinked BytecodeObject gets - // here somehow + // NOTE(zk): unlinked objects are _still_ encoded as valid hex + // but the hash wouldn't be present in the artifact let bytecode_vec = bytecode.object().into_bytes().unwrap().to_vec(); let mut factory_deps_vec: Vec> = factory_deps_map .keys() @@ -181,17 +173,19 @@ impl DualCompiledContracts { factory_deps_vec.push(bytecode_vec.clone()); - dual_compiled_contracts.push(DualCompiledContract { - name: contract_name, - zk_bytecode_hash: H256::from_str(hash).unwrap(), - zk_deployed_bytecode: bytecode_vec, - zk_factory_deps: factory_deps_vec, - evm_bytecode_hash: keccak256(solc_deployed_bytecode), - evm_bytecode: solc_bytecode.to_vec(), - evm_deployed_bytecode: solc_deployed_bytecode.to_vec(), - }); + dual_compiled_contracts.insert( + contract_info, + DualCompiledContract { + zk_bytecode_hash: H256::from_str(hash).unwrap(), + zk_deployed_bytecode: bytecode_vec, + zk_factory_deps: factory_deps_vec, + evm_bytecode_hash: keccak256(solc_deployed_bytecode), + evm_bytecode: solc_bytecode.to_vec(), + evm_deployed_bytecode: solc_deployed_bytecode.to_vec(), + }, + ); } else { - tracing::error!("matching solc artifact not found for {contract_file:?}"); + tracing::error!("matching solc artifact not found for {contract_info:?}"); } } } @@ -204,18 +198,29 @@ impl DualCompiledContracts { } /// Finds a contract matching the ZK deployed bytecode - pub fn find_by_zk_deployed_bytecode(&self, bytecode: &[u8]) -> Option<&DualCompiledContract> { - self.contracts.iter().find(|contract| bytecode.starts_with(&contract.zk_deployed_bytecode)) + pub fn find_by_zk_deployed_bytecode( + &self, + bytecode: &[u8], + ) -> Option<(&ContractInfo, &DualCompiledContract)> { + self.contracts + .iter() + .find(|(_, contract)| bytecode.starts_with(&contract.zk_deployed_bytecode)) } /// Finds a contract matching the EVM bytecode - pub fn find_by_evm_bytecode(&self, bytecode: &[u8]) -> Option<&DualCompiledContract> { - self.contracts.iter().find(|contract| bytecode.starts_with(&contract.evm_bytecode)) + pub fn find_by_evm_bytecode( + &self, + bytecode: &[u8], + ) -> Option<(&ContractInfo, &DualCompiledContract)> { + self.contracts.iter().find(|(_, contract)| bytecode.starts_with(&contract.evm_bytecode)) } /// Finds a contract matching the ZK bytecode hash - pub fn find_by_zk_bytecode_hash(&self, code_hash: H256) -> Option<&DualCompiledContract> { - self.contracts.iter().find(|contract| code_hash == contract.zk_bytecode_hash) + pub fn find_by_zk_bytecode_hash( + &self, + code_hash: H256, + ) -> Option<(&ContractInfo, &DualCompiledContract)> { + self.contracts.iter().find(|(_, contract)| code_hash == contract.zk_bytecode_hash) } /// Find a contract matching the given bytecode, whether it's EVM or ZK. 
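The `find_by_*` helpers above identify a contract by checking whether the observed bytecode starts with the stored creation (or deployed) bytecode, which works because constructor arguments are appended after the creation code. A rough, self-contained sketch of that lookup over the new `HashMap`-keyed collection, using simplified stand-in types rather than the real `ContractInfo`/`DualCompiledContract`:

use std::collections::HashMap;

// Simplified stand-ins, for illustration only.
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
struct Info {
    path: Option<String>,
    name: String,
}

struct Dual {
    evm_bytecode: Vec<u8>,
    zk_deployed_bytecode: Vec<u8>,
}

/// Prefix match against the EVM creation bytecode: init code is creation
/// bytecode followed by ABI-encoded constructor arguments, so `starts_with`
/// is enough to recover the originating contract.
fn find_by_evm_bytecode<'a>(
    contracts: &'a HashMap<Info, Dual>,
    init_code: &[u8],
) -> Option<(&'a Info, &'a Dual)> {
    contracts.iter().find(|(_, c)| init_code.starts_with(&c.evm_bytecode))
}

/// Same idea for EraVM, matching the deployed bytecode as in
/// `find_by_zk_deployed_bytecode` above.
fn find_by_zk_deployed_bytecode<'a>(
    contracts: &'a HashMap<Info, Dual>,
    bytecode: &[u8],
) -> Option<(&'a Info, &'a Dual)> {
    contracts.iter().find(|(_, c)| bytecode.starts_with(&c.zk_deployed_bytecode))
}

Keying the collection by `ContractInfo` rather than storing the name inline is what later allows `find(path, name)` to rank results as full, path-only, or name-only matches.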
@@ -229,15 +234,26 @@ impl DualCompiledContracts { let zk = self.find_by_zk_deployed_bytecode(init_code).map(|evm| (ContractType::ZK, evm)); match (&evm, &zk) { - (Some((_, evm)), Some((_, zk))) => { + (Some((_, (evm_info, evm))), Some((_, (zk_info, zk)))) => { if zk.zk_deployed_bytecode.len() >= evm.evm_bytecode.len() { - Some(FindBytecodeResult { r#type: ContractType::ZK, contract: zk, init_code }) + Some(FindBytecodeResult { + r#type: ContractType::ZK, + contract: zk, + init_code, + info: zk_info, + }) } else { - Some(FindBytecodeResult { r#type: ContractType::EVM, contract: zk, init_code }) + Some(FindBytecodeResult { + r#type: ContractType::EVM, + contract: zk, + init_code, + info: evm_info, + }) } } - _ => evm.or(zk).map(|(r#type, contract)| FindBytecodeResult { + _ => evm.or(zk).map(|(r#type, (info, contract))| FindBytecodeResult { r#type, + info, contract, init_code, }), @@ -256,9 +272,9 @@ impl DualCompiledContracts { while let Some(dep) = queue.pop_front() { // try to insert in the list of visited, if it's already present, skip if visited.insert(dep) { - if let Some(contract) = self.find_by_zk_deployed_bytecode(dep) { + if let Some((info, contract)) = self.find_by_zk_deployed_bytecode(dep) { debug!( - name = contract.name, + name = info.name, deps = contract.zk_factory_deps.len(), "new factory dependency" ); @@ -292,13 +308,68 @@ impl DualCompiledContracts { } /// Returns an iterator over all `[DualCompiledContract]`s in the collection - pub fn iter(&self) -> impl Iterator { + pub fn iter(&self) -> impl Iterator { self.contracts.iter() } /// Adds a new `[DualCompiledContract]` to the collection - pub fn push(&mut self, contract: DualCompiledContract) { - self.contracts.push(contract); + /// + /// Will replace any contract with matching `info` + pub fn insert(&mut self, info: ContractInfo, contract: DualCompiledContract) { + self.contracts.insert(info, contract); + } + + /// Attempt reading an existing `[DualCompiledContract]` + pub fn get(&self, info: &ContractInfo) -> Option<&DualCompiledContract> { + self.contracts.get(info) + } + + /// Search for matching contracts in the collection + /// + /// Contracts are ordered in descending best-fit order + pub fn find<'a: 'b, 'b>( + &'a self, + path: Option<&'b str>, + name: Option<&'b str>, + ) -> impl Iterator + 'b { + let full_matches = self + .contracts + .iter() + .filter(move |(info, _)| { + // if user provides a path we should check that it matches + // we check using `ends_with` to account for prefixes + path.is_some_and(|needle| + info.path.as_ref() + .is_some_and( + |contract_path| contract_path.ends_with(needle))) + // if user provides a name we should check that it matches + && name.is_some_and(|name| name == info.name.as_str()) + }) + .map(|(_, contract)| (FindMatchType::FullMatch, contract)); + + let path_matches = self + .contracts + .iter() + .filter(move |(info, _)| { + // if a path is provided, check that it matches + // if no path is provided, don't match it + path.is_some_and(|needle| { + info.path.as_ref().is_some_and(|contract_path| contract_path.ends_with(needle)) + }) + }) + .map(|(_, contract)| (FindMatchType::Path, contract)); + + let name_matches = self + .contracts + .iter() + .filter(move |(info, _)| { + // if name is provided, check that it matches + // if no name is provided, don't match it + name.map(|name| name == info.name.as_str()).unwrap_or(false) + }) + .map(|(_, contract)| (FindMatchType::Name, contract)); + + full_matches.chain(path_matches).chain(name_matches) } /// Retrieves the length of the 
collection. @@ -310,4 +381,175 @@ impl DualCompiledContracts { pub fn is_empty(&self) -> bool { self.contracts.is_empty() } + + /// Extend the inner set of contracts with the given iterator + pub fn extend(&mut self, iter: impl IntoIterator) { + self.contracts.extend(iter); + } + + /// Populate the target's factory deps based on the new list + pub fn extend_factory_deps_by_hash( + &self, + mut target: DualCompiledContract, + factory_deps: impl IntoIterator, + ) -> DualCompiledContract { + let deps_bytecodes = factory_deps + .into_iter() + .flat_map(|hash| self.find_by_zk_bytecode_hash(hash)) + .map(|(_, contract)| contract.zk_deployed_bytecode.clone()); + + target.zk_factory_deps.extend(deps_bytecodes); + target + } + + /// Populate the target's factory deps based on the new list + /// + /// Will return `None` if no matching `target` exists + /// Will not override existing factory deps + pub fn insert_factory_deps( + &mut self, + target: &ContractInfo, + factory_deps: impl IntoIterator>, + ) -> Option<&DualCompiledContract> { + self.contracts.get_mut(target).map(|contract| { + contract.zk_factory_deps.extend(factory_deps); + &*contract + }) + } +} + +#[cfg(test)] +mod tests { + use alloy_primitives::Bytes; + use zksync_types::bytecode::BytecodeHash; + + use super::*; + + fn find_sample() -> DualCompiledContracts { + let evm_empty_bytes = Bytes::from_static(&[0]).to_vec(); + let zk_empty_bytes = vec![0u8; 32]; + + let zk_bytecode_hash = BytecodeHash::for_bytecode(&zk_empty_bytes).value(); + + let sample_contract = DualCompiledContract { + zk_bytecode_hash, + zk_deployed_bytecode: zk_empty_bytes, + zk_factory_deps: Default::default(), + evm_bytecode_hash: B256::from_slice(&keccak256(&evm_empty_bytes)[..]), + evm_deployed_bytecode: evm_empty_bytes.clone(), + evm_bytecode: evm_empty_bytes, + }; + + let infos = [ + ContractInfo::new("src/Foo.sol:Foo"), + ContractInfo::new("src/Foo.sol:DoubleFoo"), + ContractInfo::new("test/Foo.t.sol:FooTest"), + ContractInfo::new("Bar"), + ContractInfo::new("BarScript"), + ContractInfo::new("script/Qux.sol:Foo"), + ContractInfo::new("script/Qux.sol:QuxScript"), + ]; + + let contracts = infos.into_iter().map(|info| (info, sample_contract.clone())); + DualCompiledContracts { + contracts: contracts.collect(), + zk_artifact_path: PathBuf::from("zkout"), + evm_artifact_path: PathBuf::from("out"), + } + } + + #[track_caller] + fn assert_find_results<'a>( + results: impl Iterator, + assertions: Vec, + ) { + let results = results.collect::>(); + + let num_assertions = assertions.len(); + let num_results = results.len(); + assert!( + num_assertions == num_results, + "unexpected number of results! Expected: {num_assertions}, got: {num_results}" + ); + + for (i, (assertion, (result, _))) in assertions.into_iter().zip(results).enumerate() { + assert!( + assertion == result, + "assertion failed for match #{i}! 
diff --git a/crates/zksync/compilers/src/lib.rs b/crates/zksync/compilers/src/lib.rs
index 4316c45df..4fd584b52 100644
--- a/crates/zksync/compilers/src/lib.rs
+++ b/crates/zksync/compilers/src/lib.rs
@@ -6,7 +6,7 @@
 pub mod artifacts;
 pub mod compilers;
 pub mod dual_compiled_contracts;
-pub mod libraries;
+pub mod link;
 
 // TODO: Used in integration tests.
 // find out why cargo complains about unused dev_dependency for these cases
diff --git a/crates/zksync/compilers/src/libraries.rs b/crates/zksync/compilers/src/libraries.rs
deleted file mode 100644
index b3db8823c..000000000
--- a/crates/zksync/compilers/src/libraries.rs
+++ /dev/null
@@ -1,129 +0,0 @@
-//! Handles resolution and storage of missing libraries emitted by zksolc
-
-use std::{
-    fs,
-    io::Write,
-    path::{Path, PathBuf},
-};
-
-use serde::{Deserialize, Serialize};
-use tracing::{trace, warn};
-
-use foundry_compilers::info::ContractInfo;
-
-/// Missing Library entry
-#[derive(Debug, Clone, Deserialize, Serialize)]
-pub struct ZkMissingLibrary {
-    /// Contract name
-    pub contract_name: String,
-    /// Contract path
-    pub contract_path: String,
-    /// Missing Libraries
-    pub missing_libraries: Vec<String>,
-}
-
-/// Return the missing libraries cache path
-pub(crate) fn get_missing_libraries_cache_path(project_root: impl AsRef<Path>) -> PathBuf {
-    project_root.as_ref().join(".zksolc-libraries-cache/missing_library_dependencies.json")
-}
-
-/// Add libraries to missing libraries cache
-pub fn add_dependencies_to_missing_libraries_cache(
-    project_root: impl AsRef<Path>,
-    libraries: &[ZkMissingLibrary],
-) -> eyre::Result<()> {
-    let file_path = get_missing_libraries_cache_path(project_root);
-    fs::create_dir_all(file_path.parent().unwrap()).unwrap();
-    fs::File::create(file_path)?
-        .write_all(serde_json::to_string_pretty(libraries).unwrap().as_bytes())?;
-    Ok(())
-}
-
-/// Returns the detected missing libraries from previous compilation
-pub fn get_detected_missing_libraries(
-    project_root: impl AsRef<Path>,
-) -> eyre::Result<Vec<ZkMissingLibrary>> {
-    let library_paths = get_missing_libraries_cache_path(project_root);
-    if !library_paths.exists() {
-        eyre::bail!("No missing libraries found");
-    }
-
-    Ok(serde_json::from_reader(fs::File::open(&library_paths)?)?)
-}
-
-/// Performs cleanup of cached missing libraries
-pub fn cleanup_detected_missing_libraries(project_root: impl AsRef<Path>) -> eyre::Result<()> {
-    fs::remove_file(get_missing_libraries_cache_path(project_root))?;
-    Ok(())
-}
-
-/// Retrieve ordered list of libraries to deploy
-///
-/// Libraries are grouped in batches, where the next batch
-/// may have dependencies on the previous one, thus
-/// it's recommended to build & deploy one batch before moving onto the next
-pub fn resolve_libraries(
-    mut missing_libraries: Vec<ZkMissingLibrary>,
-    already_deployed_libraries: &[ContractInfo],
-) -> eyre::Result<Vec<Vec<ContractInfo>>> {
-    trace!(?missing_libraries, ?already_deployed_libraries, "filtering out missing libraries");
-    missing_libraries.retain(|lib| {
-        !already_deployed_libraries.contains(&ContractInfo {
-            path: Some(lib.contract_path.to_string()),
-            name: lib.contract_name.to_string(),
-        })
-    });
-
-    let mut batches = Vec::new();
-    loop {
-        if missing_libraries.is_empty() {
-            break Ok(batches);
-        }
-
-        let mut batch = Vec::new();
-        loop {
-            // find library with no further dependencies
-            let Some(next_lib) = missing_libraries
-                .iter()
-                .enumerate()
-                .find(|(_, lib)| lib.missing_libraries.is_empty())
-                .map(|(i, _)| i)
-                .map(|i| missing_libraries.remove(i))
-            else {
-                // no such library, and we didn't collect any library already
-                if batch.is_empty() {
-                    warn!(
-                        ?missing_libraries,
-                        ?batches,
-                        "unable to find library ready to be deployed"
-                    );
-                    //TODO: determine if this error message is accurate
-                    eyre::bail!("Library dependency cycle detected");
-                }
-
-                break;
-            };
-
-            let info =
-                ContractInfo { path: Some(next_lib.contract_path), name: next_lib.contract_name };
-            batch.push(info);
-        }
-
-        // remove this batch from each library's missing_library if listed as dependency
-        // this potentially allows more libraries to be included in the next batch
-        for lib in &mut missing_libraries {
-            lib.missing_libraries.retain(|maybe_missing_lib| {
-                let mut split = maybe_missing_lib.split(':');
-                let lib_path = split.next().unwrap();
-                let lib_name = split.next().unwrap();
-
-                !batch.contains(&ContractInfo {
-                    path: Some(lib_path.to_string()),
-                    name: lib_name.to_string(),
-                })
-            })
-        }
-
-        batches.push(batch);
-    }
-}
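Note: for reference, the removed `resolve_libraries` helper grouped missing libraries into deploy batches, dependency-free libraries first. A hedged sketch of that behavior (which zksolc deploy-time linking now replaces); the library names are made up and the import path assumes the module as it existed before this patch.

    use foundry_zksync_compilers::libraries::{resolve_libraries, ZkMissingLibrary};

    fn example_batches() -> eyre::Result<()> {
        // `MathLib` has no dependencies; `PricingLib` depends on it.
        let missing = vec![
            ZkMissingLibrary {
                contract_name: "MathLib".into(),
                contract_path: "src/MathLib.sol".into(),
                missing_libraries: vec![],
            },
            ZkMissingLibrary {
                contract_name: "PricingLib".into(),
                contract_path: "src/PricingLib.sol".into(),
                missing_libraries: vec!["src/MathLib.sol:MathLib".into()],
            },
        ];

        // Dependency-free libraries land in batch 0; their dependents in batch 1.
        let batches = resolve_libraries(missing, &[])?;
        assert_eq!(batches.len(), 2);
        Ok(())
    }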
diff --git a/crates/zksync/compilers/src/link.rs b/crates/zksync/compilers/src/link.rs
new file mode 100644
index 000000000..05d84c5e7
--- /dev/null
+++ b/crates/zksync/compilers/src/link.rs
@@ -0,0 +1,137 @@
+//! Contains items and functions to link via zksolc
+
+use std::{
+    path::Path,
+    process::{Command, Stdio},
+};
+
+use alloy_primitives::{
+    map::{HashMap, HashSet},
+    Address, Bytes,
+};
+use foundry_compilers::error::SolcError;
+use serde::{Deserialize, Serialize};
+
+use crate::compilers::zksolc::ZkSolcCompiler;
+
+type LinkId = String;
+
+/// A library that zksolc will link against
+#[derive(Debug, Clone, Serialize, PartialEq, Eq, Hash)]
+#[serde(into = "String")]
+pub struct Library {
+    /// Path to the library source
+    pub filename: String,
+    /// Name of the library
+    pub name: String,
+    /// Address of the library
+    pub address: Address,
+}
+
+impl From<Library> for String {
+    fn from(val: Library) -> Self {
+        format!("{}:{}={}", val.filename, val.name, val.address)
+    }
+}
+
+#[derive(Debug, Clone, Serialize)]
+/// JSON Input for `zksolc link`
+pub struct LinkJsonInput {
+    /// List of input bytecodes (linked or unlinked)
+    pub bytecodes: HashMap<LinkId, Bytes>,
+    /// List of libraries to link against
+    pub libraries: HashSet<Library>,
+}
+
+/// Representation of a linked object given by zksolc
+#[derive(Debug, Clone, Deserialize)]
+pub struct LinkedObject {
+    // TODO(zk): obtain factoryDeps from output
+    // might come in handy to have the libraries used as well
+    /// Fully linked bytecode
+    pub bytecode: String,
+    /// Bytecode hash of the fully linked object
+    pub hash: String,
+}
+
+/// Representation of an unlinked object given by zksolc
+#[derive(Debug, Clone, Deserialize)]
+pub struct UnlinkedObject {
+    /// List of unlinked libraries
+    pub linker_symbols: HashSet<MissingLibrary>,
+    /// List of factory dependencies missing from input
+    pub factory_dependencies: HashSet<MissingLibrary>,
+}
+
+/// Represent a missing library returned by the compiler
+///
+/// Deserialized from: "<path>:<name>"
+#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Deserialize)]
+#[serde(try_from = "String")]
+pub struct MissingLibrary {
+    /// Source path of the contract
+    pub filename: String,
+    /// Name of the contract
+    pub library: String,
+}
+
+impl TryFrom<String> for MissingLibrary {
+    type Error = &'static str;
+
+    fn try_from(value: String) -> Result<Self, Self::Error> {
+        let mut split = value.split(':');
+        let path = split.next().ok_or("failed to parse unlinked library filename")?.to_string();
+        let name = split.next().ok_or("failed to parse unlinked library name")?.to_string();
+
+        Ok(Self { filename: path, library: name })
+    }
+}
+
+/// JSON Output for `zksolc link`
+#[derive(Debug, Clone, Deserialize)]
+pub struct LinkJsonOutput {
+    /// Fully linked bytecodes resulting from given input
+    #[serde(default)]
+    pub linked: HashMap<LinkId, LinkedObject>,
+    /// Not fully linked bytecodes
+    #[serde(default)]
+    pub unlinked: HashMap<LinkId, UnlinkedObject>,
+    /// List of fully linked bytecodes in input
+    #[serde(default)]
+    pub ignored: HashMap<LinkId, LinkedObject>,
+}
+
+// taken from compilers
+fn map_io_err(zksolc_path: &Path) -> impl FnOnce(std::io::Error) -> SolcError + '_ {
+    move |err| SolcError::io(err, zksolc_path)
+}
+
+/// Invoke `zksolc link` given the `zksolc` binary and json input to use
+#[tracing::instrument(level = tracing::Level::TRACE, ret)]
+pub fn zksolc_link(
+    zksolc: &ZkSolcCompiler,
+    input: LinkJsonInput,
+) -> Result<LinkJsonOutput, SolcError> {
+    let zksolc = &zksolc.zksolc;
+    let mut cmd = Command::new(zksolc);
+
+    cmd.arg("--standard-json")
+        .arg("--link")
+        .stdin(Stdio::piped())
+        .stderr(Stdio::piped())
+        .stdout(Stdio::piped());
+
+    let mut child = cmd.spawn().map_err(map_io_err(zksolc))?;
+
+    let stdin = child.stdin.as_mut().unwrap();
+    let _ = serde_json::to_writer(stdin, &input);
+
+    let output = child.wait_with_output().map_err(map_io_err(zksolc))?;
+    tracing::trace!(?output);
+
+    if output.status.success() {
+        serde_json::from_slice(&output.stdout).map_err(Into::into)
+    } else {
+        Err(SolcError::solc_output(&output))
+    }
+}
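Note: a sketch of how a `zksolc link` request is assembled from the types above. The contract id, library path, address, and bytecode are placeholders, and the crate path is an assumption; each `Library` serializes to the `filename:name=address` form implemented by `From<Library> for String`.

    use alloy_primitives::{address, map::{HashMap, HashSet}, Bytes};
    use foundry_zksync_compilers::link::{Library, LinkJsonInput};

    fn build_link_input(unlinked_bytecode: Bytes) -> LinkJsonInput {
        // Serializes as "src/MathLib.sol:MathLib=<0x-prefixed address>".
        let math_lib = Library {
            filename: "src/MathLib.sol".to_string(),
            name: "MathLib".to_string(),
            address: address!("000000000000000000000000000000000000beef"),
        };

        let mut bytecodes = HashMap::default();
        // Keyed by a caller-chosen link id, e.g. the artifact's "path:name".
        bytecodes.insert("src/UsesMath.sol:UsesMath".to_string(), unlinked_bytecode);

        let mut libraries = HashSet::default();
        libraries.insert(math_lib);

        LinkJsonInput { bytecodes, libraries }
    }

The resulting input is what `zksolc_link` writes to the compiler's stdin; fully linked results come back under `linked`, while anything still missing linker symbols or factory dependencies is reported under `unlinked`.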
diff --git a/crates/zksync/core/src/lib.rs b/crates/zksync/core/src/lib.rs
index 9aee67260..8d8eb0696 100644
--- a/crates/zksync/core/src/lib.rs
+++ b/crates/zksync/core/src/lib.rs
@@ -19,7 +19,7 @@ pub mod vm;
 pub mod state;
 
 use alloy_network::TransactionBuilder;
-use alloy_primitives::{address, hex, keccak256, Address, Bytes, U256 as rU256};
+use alloy_primitives::{address, hex, keccak256, Address, Bytes, B256, U256 as rU256};
 use alloy_transport::Transport;
 use alloy_zksync::{
     network::transaction_request::TransactionRequest as ZkTransactionRequest,
@@ -30,7 +30,8 @@ use eyre::eyre;
 use revm::{Database, InnerEvmContext};
 use serde::{Deserialize, Serialize};
 use std::fmt::Debug;
-use zksync_types::bytecode::BytecodeHash;
+use zksync_multivm::vm_m6::test_utils::get_create_zksync_address;
+use zksync_types::{bytecode::BytecodeHash, Nonce};
 
 pub use utils::{fix_l2_gas_limit, fix_l2_gas_price};
 pub use vm::{balance, deploy_nonce, encode_create_params, tx_nonce};
@@ -212,6 +213,38 @@ pub fn try_decode_create2(data: &[u8]) -> Result<(H256, H256, Vec<u8>)> {
     Ok((H256(salt.0), H256(bytecode_hash.0), constructor_args.to_vec()))
 }
 
+/// Compute a CREATE address according to zksync
+pub fn compute_create_address(sender: Address, nonce: u64) -> Address {
+    get_create_zksync_address(sender.to_h160(), Nonce(nonce as u32)).to_address()
+}
+
+/// Compute a CREATE2 address according to zksync
+pub fn compute_create2_address(
+    sender: Address,
+    bytecode_hash: H256,
+    salt: B256,
+    constructor_input: &[u8],
+) -> Address {
+    const CREATE2_PREFIX: &[u8] = b"zksyncCreate2";
+    let prefix = keccak256(CREATE2_PREFIX);
+    let sender = sender.to_h256();
+    let constructor_input_hash = keccak256(constructor_input);
+
+    let payload = [
+        prefix.as_slice(),
+        sender.0.as_slice(),
+        salt.0.as_slice(),
+        bytecode_hash.0.as_slice(),
+        constructor_input_hash.as_slice(),
+    ]
+    .concat();
+    let hash = keccak256(payload);
+
+    let address = &hash[12..];
+
+    Address::from_slice(address)
+}
+
 /// Try decoding the provided transaction data into create parameters.
 pub fn try_decode_create(data: &[u8]) -> Result<(H256, Vec<u8>)> {
     let decoded_calldata =
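Note: a short usage sketch for the new address helpers. The crate path and the placeholder inputs are assumptions; the derivation itself is the one implemented above, where the CREATE2 digest covers the `zksyncCreate2` prefix, sender, salt, bytecode hash, and the hash of the constructor input.

    use alloy_primitives::{Address, B256};
    use foundry_zksync_core::{compute_create2_address, compute_create_address};
    use zksync_types::H256;

    fn predicted_addresses(
        sender: Address,
        deploy_nonce: u64,
        bytecode_hash: H256,
    ) -> (Address, Address) {
        // CREATE: derived from the sender and its deployment nonce, zkSync-style.
        let create = compute_create_address(sender, deploy_nonce);
        // CREATE2: zero salt and empty constructor input used as placeholders.
        let create2 = compute_create2_address(sender, bytecode_hash, B256::ZERO, &[]);
        (create, create2)
    }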
diff --git a/crates/zksync/core/src/vm/runner.rs b/crates/zksync/core/src/vm/runner.rs
index cf60a3174..f611b4755 100644
--- a/crates/zksync/core/src/vm/runner.rs
+++ b/crates/zksync/core/src/vm/runner.rs
@@ -9,7 +9,7 @@ use tracing::{debug, info};
 use zksync_basic_types::H256;
 use zksync_types::{
     ethabi, fee::Fee, l2::L2Tx, transaction_request::PaymasterParams, CONTRACT_DEPLOYER_ADDRESS,
-    U256,
+    CREATE2_FACTORY_ADDRESS, U256,
 };
 
 use core::convert::Into;
@@ -52,7 +52,10 @@ where
     let caller = env.tx.caller;
     let nonce = ZKVMData::new(&mut ecx).get_tx_nonce(caller);
     let (transact_to, is_create) = match env.tx.transact_to {
-        TransactTo::Call(to) => (to.to_h160(), false),
+        TransactTo::Call(to) => {
+            let to = to.to_h160();
+            (to, to == CONTRACT_DEPLOYER_ADDRESS || to == CREATE2_FACTORY_ADDRESS)
+        }
         TransactTo::Create => (CONTRACT_DEPLOYER_ADDRESS, true),
     };
 
@@ -329,7 +332,7 @@ fn get_historical_block_hashes<DB: Database>(ecx: &mut EvmContext<DB>) -> HashMap
         let (block_number, overflow) =
             ecx.env.block.number.overflowing_sub(alloy_primitives::U256::from(i));
         if overflow {
-            break
+            break;
         }
         match ecx.block_hash(block_number.to_u256().as_u64()) {
             Ok(block_hash) => {
diff --git a/testdata/zk/WithLibraries.sol b/testdata/zk/WithLibraries.sol
index 51ca18879..bffda5a2b 100644
--- a/testdata/zk/WithLibraries.sol
+++ b/testdata/zk/WithLibraries.sol
@@ -9,7 +9,7 @@ library Foo {
 }
 
 contract UsesFoo {
-    uint256 number;
+    uint256 public number;
 
     constructor() {
         number = Foo.add(42, 0);
diff --git a/testdata/zk/WithLibraries.t.sol b/testdata/zk/WithLibraries.t.sol
new file mode 100644
index 000000000..34e81313d
--- /dev/null
+++ b/testdata/zk/WithLibraries.t.sol
@@ -0,0 +1,17 @@
+// SPDX-License-Identifier: UNLICENSED
+
+pragma solidity >=0.8.7;
+
+import "ds-test/test.sol";
+import "../cheats/Vm.sol";
+
+import {UsesFoo} from "./WithLibraries.sol";
+
+contract DeployTimeLinking is DSTest {
+    function testUseUnlinkedContract() external {
+        // we check that `UsesFoo` is fully linked
+        // and that the inner library is usable
+        UsesFoo user = new UsesFoo();
+        assertEq(user.number(), 42);
+    }
+}