From 032640de20f30ea64a7d34faa5ed5d7269d0b1f7 Mon Sep 17 00:00:00 2001 From: Wodann Date: Sun, 30 Oct 2022 18:04:46 +0100 Subject: [PATCH 001/406] feat: calls to revm with database in typescript (#3182) Co-authored-by: Franco Victorio --- .cargo/config | 2 + .gitattributes | 2 +- .github/workflows/rethnet-ci.yml | 132 + Cargo.toml | 11 + crates/README.md | 49 + crates/eth_execution_api/Cargo.toml | 9 + crates/eth_execution_api/src/lib.rs | 14 + crates/rethnet/Cargo.toml | 14 + crates/rethnet/src/lib.rs | 46 + crates/rethnet/src/main.rs | 13 + crates/rethnet_evm/Cargo.toml | 14 + crates/rethnet_evm/src/db.rs | 1 + crates/rethnet_evm/src/db/layered_db.rs | 328 ++ crates/rethnet_evm/src/debug.rs | 103 + crates/rethnet_evm/src/inspector.rs | 33 + crates/rethnet_evm/src/lib.rs | 24 + crates/rethnet_evm/src/sync.rs | 113 + crates/rethnet_evm/src/sync/client.rs | 305 ++ crates/rethnet_evm/src/sync/request.rs | 207 ++ crates/rethnet_evm_napi/.mocharc.json | 4 + crates/rethnet_evm_napi/Cargo.toml | 22 + crates/rethnet_evm_napi/build.rs | 3 + crates/rethnet_evm_napi/package.json | 23 + crates/rethnet_evm_napi/src/cast.rs | 71 + crates/rethnet_evm_napi/src/db.rs | 27 + crates/rethnet_evm_napi/src/db/debug.rs | 516 ++++ crates/rethnet_evm_napi/src/db/immutable.rs | 190 ++ crates/rethnet_evm_napi/src/db/mutable.rs | 105 + crates/rethnet_evm_napi/src/lib.rs | 659 ++++ crates/rethnet_evm_napi/src/sync.rs | 84 + .../src/threadsafe_function.rs | 302 ++ crates/rethnet_evm_napi/test/evm/RethnetDb.ts | 99 + crates/rethnet_evm_napi/tsconfig.json | 17 + crates/rethnet_evm_napi/yarn.lock | 2189 +++++++++++++ crates/tools/Cargo.toml | 13 + crates/tools/src/execution_api.rs | 96 + crates/tools/src/main.rs | 26 + crates/tools/src/update.rs | 54 + package.json | 9 +- packages/e2e/package.json | 1 + packages/hardhat-core/package.json | 8 +- .../provider/fork/rpcToTxData.ts | 9 +- .../hardhat-network/provider/modules/eth.ts | 11 +- .../internal/hardhat-network/provider/node.ts | 85 +- .../provider/utils/assertions.ts | 109 + .../provider/utils/convertToRethnet.ts | 184 ++ .../provider/utils/HardhatDb.ts | 127 + .../hardhat-network/stack-traces/execution.ts | 16 +- .../hardhat-network/stack-traces/test.ts | 22 +- packages/hardhat-docker/package.json | 1 - packages/hardhat-ethers/package.json | 1 + packages/hardhat-etherscan/package.json | 3 +- packages/hardhat-ganache/package.json | 1 + packages/hardhat-shorthand/package.json | 5 +- packages/hardhat-solhint/package.json | 1 + packages/hardhat-solpp/package.json | 1 + packages/hardhat-toolbox/package.json | 10 +- packages/hardhat-truffle4/package.json | 1 + packages/hardhat-truffle5/package.json | 1 + packages/hardhat-vyper/package.json | 1 + packages/hardhat-waffle/package.json | 1 + packages/hardhat-web3-legacy/package.json | 1 + packages/hardhat-web3/package.json | 1 + rust-toolchain | 1 + yarn.lock | 2735 +++++++++-------- 65 files changed, 7917 insertions(+), 1349 deletions(-) create mode 100644 .cargo/config create mode 100644 .github/workflows/rethnet-ci.yml create mode 100644 Cargo.toml create mode 100644 crates/README.md create mode 100644 crates/eth_execution_api/Cargo.toml create mode 100644 crates/eth_execution_api/src/lib.rs create mode 100644 crates/rethnet/Cargo.toml create mode 100644 crates/rethnet/src/lib.rs create mode 100644 crates/rethnet/src/main.rs create mode 100644 crates/rethnet_evm/Cargo.toml create mode 100644 crates/rethnet_evm/src/db.rs create mode 100644 crates/rethnet_evm/src/db/layered_db.rs create mode 100644 
crates/rethnet_evm/src/debug.rs create mode 100644 crates/rethnet_evm/src/inspector.rs create mode 100644 crates/rethnet_evm/src/lib.rs create mode 100644 crates/rethnet_evm/src/sync.rs create mode 100644 crates/rethnet_evm/src/sync/client.rs create mode 100644 crates/rethnet_evm/src/sync/request.rs create mode 100644 crates/rethnet_evm_napi/.mocharc.json create mode 100644 crates/rethnet_evm_napi/Cargo.toml create mode 100644 crates/rethnet_evm_napi/build.rs create mode 100644 crates/rethnet_evm_napi/package.json create mode 100644 crates/rethnet_evm_napi/src/cast.rs create mode 100644 crates/rethnet_evm_napi/src/db.rs create mode 100644 crates/rethnet_evm_napi/src/db/debug.rs create mode 100644 crates/rethnet_evm_napi/src/db/immutable.rs create mode 100644 crates/rethnet_evm_napi/src/db/mutable.rs create mode 100644 crates/rethnet_evm_napi/src/lib.rs create mode 100644 crates/rethnet_evm_napi/src/sync.rs create mode 100644 crates/rethnet_evm_napi/src/threadsafe_function.rs create mode 100644 crates/rethnet_evm_napi/test/evm/RethnetDb.ts create mode 100644 crates/rethnet_evm_napi/tsconfig.json create mode 100644 crates/rethnet_evm_napi/yarn.lock create mode 100644 crates/tools/Cargo.toml create mode 100644 crates/tools/src/execution_api.rs create mode 100644 crates/tools/src/main.rs create mode 100644 crates/tools/src/update.rs create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts create mode 100644 packages/hardhat-core/test/internal/hardhat-network/provider/utils/HardhatDb.ts create mode 100644 rust-toolchain diff --git a/.cargo/config b/.cargo/config new file mode 100644 index 0000000000..9be14551c2 --- /dev/null +++ b/.cargo/config @@ -0,0 +1,2 @@ +[alias] +gen-execution-api = "run --bin tools -- gen-execution-api" diff --git a/.gitattributes b/.gitattributes index 4583bf4050..1c64654f26 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,3 +1,3 @@ *.sol linguist-language=Solidity # prevent github actions to checkout files with crlf line endings -* -text +* text=auto diff --git a/.github/workflows/rethnet-ci.yml b/.github/workflows/rethnet-ci.yml new file mode 100644 index 0000000000..63c54a8cda --- /dev/null +++ b/.github/workflows/rethnet-ci.yml @@ -0,0 +1,132 @@ +name: rethnet CI + +on: + push: + branches: + - $default-branch + - "rethnet/main" + paths: + - "config/**" + - "crates/**" + - "Cargo.toml" + - "rust-toolchain" + pull_request: + branches: ["**"] + paths: + - "config/**" + - "crates/**" + - "Cargo.toml" + - "rust-toolchain" + +env: + RUSTFLAGS: -Dwarnings + +concurrency: + group: ${{ github.workflow }}-${{ github.ref }} + cancel-in-progress: true + +jobs: + check: + name: Check + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + + - name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + + - uses: Swatinem/rust-cache@v1 + + - name: Cargo check + uses: actions-rs/cargo@v1 + with: + command: check + args: --all-targets + + test-js: + name: Test Node.js + runs-on: ${{ matrix.os }} + needs: check + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest", "windows-latest", "macOS-latest"] + include: + - RUSTFLAGS: "-Dwarnings" + - os: "windows-latest" + RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" + defaults: + run: + working-directory: crates/rethnet_evm_napi + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-node@v2 + with: + node-version: 14 + cache: yarn + + - name: Install node dependencies + run: yarn --frozen-lockfile + + - 
name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + components: rustfmt + + - uses: Swatinem/rust-cache@v1 + + - name: Build + run: yarn build + + - name: Test + run: yarn test + + test-rs: + name: Test Rust + runs-on: ${{ matrix.os }} + needs: check + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest", "windows-latest", "macOS-latest"] + include: + - RUSTFLAGS: "-Dwarnings" + - os: "windows-latest" + RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" + steps: + - uses: actions/checkout@v3 + + - name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + components: rustfmt + + - uses: Swatinem/rust-cache@v1 + + - name: Doctests + uses: actions-rs/cargo@v1 + env: + RUSTFLAGS: ${{ matrix.RUSTFLAGS }} + with: + command: test + args: --doc --all-features + + - name: Install latest nextest release + uses: taiki-e/install-action@nextest + + - name: Test with latest nextest release + uses: actions-rs/cargo@v1 + env: + RUSTFLAGS: ${{ matrix.RUSTFLAGS }} + CARGO_INCREMENTAL: ${{ matrix.CARGO_INCREMENTAL }} + with: + command: nextest + args: run --all-features diff --git a/Cargo.toml b/Cargo.toml new file mode 100644 index 0000000000..646a5d918e --- /dev/null +++ b/Cargo.toml @@ -0,0 +1,11 @@ +[workspace] +members = [ + "crates/*", +] +resolver = "2" + +[profile.dev] +rpath = true + +[profile.release] +rpath = true diff --git a/crates/README.md b/crates/README.md new file mode 100644 index 0000000000..69fe4cc01e --- /dev/null +++ b/crates/README.md @@ -0,0 +1,49 @@ +# Rethnet + +[licence-badge]: https://img.shields.io/badge/license-MIT%20OR%20Apache--2.0-blue +[license]: COPYRIGHT + +**Rethnet** is a debugging runtime for the Ethereum Virtual Machine (or EVM). It can be consumed as a Rust or as a Node.js native module. + +## Building from Source + +Make sure you have the following dependencies installed on your machine: + +- [Rust](https://www.rust-lang.org/tools/install) + +Rethnet is part of the [Hardhat monorepo](https://github.com/NomicFoundation/hardhat). Clone the source code using ssh: + +```bash +git clone git@github.com:NomicFoundation/hardhat.git +``` + +or https: + +```bash +git clone https://github.com/NomicFoundation/hardhat.git +``` + +Use `cargo` to build a release version: + +```bash +cd hardhat +cargo build --release +``` + +## Building a Node.js native module + +Make sure you have the following dependencies installed on your machine: + +- [node.js](https://nodejs.org) + +Use `npm` (or `yarn`) to build a release version: + +```bash +cd crates/rethnet_evm_napi +npm run build +# yarn build +``` + +## Contributing + +Rethnet is still under development by [Nomic Foundation](https://github.com/NomicFoundation/). As such, progress is being merged with the `rethnet/main` branch until its first release. 
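The Rust API added under `crates/rethnet_evm` (shown below) can be exercised directly. The following is a minimal, hypothetical sketch of driving the asynchronous client against the in-memory `LayeredDatabase`; it assumes the consuming crate depends on `tokio` and `anyhow`, the account values are arbitrary, and the `dry_run` result is simply discarded.

```rust
// Hedged sketch: drives rethnet_evm's async Client with the in-memory LayeredDatabase.
// Assumes the consumer depends on `tokio` and `anyhow`; concrete values are arbitrary.
use rethnet_evm::{
    sync::Client, AccountInfo, BlockEnv, CfgEnv, LayeredDatabase, RethnetLayer, TransactTo,
    TxEnv, H160, U256,
};

fn main() -> anyhow::Result<()> {
    // An empty layered state with a single bottom layer.
    let db = LayeredDatabase::<RethnetLayer>::default();
    let client = Client::with_db_mut_debug(CfgEnv::default(), db)?;

    tokio::runtime::Runtime::new()?.block_on(async {
        let caller = H160::repeat_byte(0x01);

        // Seed the caller so the transfer below can be paid for.
        client
            .insert_account(
                caller,
                AccountInfo {
                    balance: U256::from(1_000_000u64),
                    ..Default::default()
                },
            )
            .await?;

        // Execute a value transfer without committing the resulting state changes.
        let (_result, _state_changes) = client
            .dry_run(
                TxEnv {
                    caller,
                    transact_to: TransactTo::Call(H160::zero()),
                    value: U256::from(1u64),
                    ..Default::default()
                },
                BlockEnv::default(),
            )
            .await;

        Ok::<(), anyhow::Error>(())
    })
}
```

Dropping the `Client` at the end of `main` sends a `Terminate` request and joins the background Rethnet task.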
diff --git a/crates/eth_execution_api/Cargo.toml b/crates/eth_execution_api/Cargo.toml new file mode 100644 index 0000000000..2fa1b98453 --- /dev/null +++ b/crates/eth_execution_api/Cargo.toml @@ -0,0 +1,9 @@ +[package] +name = "eth_execution_api" +version = "1.0.0-beta.1" +edition = "2021" + +[dependencies] +derive_builder = { version = "0.11.2", default-features = false } +serde = { version = "1.0.145", default-features = false, features = ["derive"] } +serde_json = { version = "1.0.85", default-features = false, features = ["alloc"] } diff --git a/crates/eth_execution_api/src/lib.rs b/crates/eth_execution_api/src/lib.rs new file mode 100644 index 0000000000..7d12d9af81 --- /dev/null +++ b/crates/eth_execution_api/src/lib.rs @@ -0,0 +1,14 @@ +pub fn add(left: usize, right: usize) -> usize { + left + right +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn it_works() { + let result = add(2, 2); + assert_eq!(result, 4); + } +} diff --git a/crates/rethnet/Cargo.toml b/crates/rethnet/Cargo.toml new file mode 100644 index 0000000000..6f3717d1d5 --- /dev/null +++ b/crates/rethnet/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "rethnet" +version = "0.1.0-dev" +edition = "2021" + +[dependencies] +anyhow = "1.0.65" +clap = { version = "3.2.22", default-features = false, features = ["std", "derive"] } +pretty_env_logger = { version = "0.4.0", default-features = false } + +[dev-dependencies.cargo-husky] +version = "1.5.0" +default-features = false +features = ["precommit-hook", "run-cargo-test", "run-cargo-fmt", "run-cargo-clippy", "run-for-all"] diff --git a/crates/rethnet/src/lib.rs b/crates/rethnet/src/lib.rs new file mode 100644 index 0000000000..d98eb273de --- /dev/null +++ b/crates/rethnet/src/lib.rs @@ -0,0 +1,46 @@ +use std::ffi::OsString; + +use clap::{Parser, Subcommand}; + +#[derive(Parser)] +#[clap(author, version, about, long_about = None)] +struct Args { + #[clap(subcommand)] + command: Command, +} + +#[derive(Subcommand)] +#[allow(clippy::large_enum_variant)] +enum Command { + Start, +} + +#[derive(Copy, Debug, Clone, PartialEq, Eq)] +pub enum ExitStatus { + Success, + Error, +} + +impl From for ExitStatus { + fn from(value: bool) -> Self { + if value { + ExitStatus::Success + } else { + ExitStatus::Error + } + } +} + +pub fn run_with_args(args: I) -> Result +where + I: IntoIterator, + T: Into + Clone, +{ + let args = Args::parse_from(args); + match args.command { + Command::Start => { + println!("Hello, world!"); + Ok(ExitStatus::Success) + } + } +} diff --git a/crates/rethnet/src/main.rs b/crates/rethnet/src/main.rs new file mode 100644 index 0000000000..0529586d51 --- /dev/null +++ b/crates/rethnet/src/main.rs @@ -0,0 +1,13 @@ +use rethnet::{run_with_args, ExitStatus}; + +/// Main entry point for the `rethnet` executable. 
+fn main() -> anyhow::Result<()> { + pretty_env_logger::try_init()?; + + let status = run_with_args(std::env::args_os()).unwrap(); + match status { + ExitStatus::Success => (), + ExitStatus::Error => std::process::exit(1), + } + Ok(()) +} diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml new file mode 100644 index 0000000000..cf60a9df78 --- /dev/null +++ b/crates/rethnet_evm/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "rethnet_evm" +version = "0.1.0-dev" +edition = "2021" + +[dependencies] +anyhow = { version = "1.0.64", default-features = false, features = ["std"] } +bytes = { version = "1.2.1", default-features = false } +hashbrown = { version = "0.12.3", default-features = false, features = ["serde"] } +log = { version = "0.4.17", default-features = false } +primitive-types = { version = "0.11.1", default-features = false, features = ["impl-serde"] } +revm = { git = "https://github.com/bluealloy/revm/", version = "2.1.0", default-features = false, features = ["dev", "k256", "with-serde"] } +sha3 = { version = "0.10.4", default-features = false } +tokio = { version = "1.21.2", default-features = false, features = ["sync"] } diff --git a/crates/rethnet_evm/src/db.rs b/crates/rethnet_evm/src/db.rs new file mode 100644 index 0000000000..2108b1340a --- /dev/null +++ b/crates/rethnet_evm/src/db.rs @@ -0,0 +1 @@ +pub mod layered_db; diff --git a/crates/rethnet_evm/src/db/layered_db.rs b/crates/rethnet_evm/src/db/layered_db.rs new file mode 100644 index 0000000000..fb10a3c110 --- /dev/null +++ b/crates/rethnet_evm/src/db/layered_db.rs @@ -0,0 +1,328 @@ +use anyhow::anyhow; +use bytes::Bytes; +use hashbrown::HashMap; +use primitive_types::{H160, H256, U256}; +use revm::{Account, AccountInfo, Bytecode, Database, DatabaseCommit, KECCAK_EMPTY}; + +use crate::DatabaseDebug; + +/// A database consisting of layers. +pub struct LayeredDatabase { + stack: Vec, +} + +impl LayeredDatabase { + /// Creates a [`LayeredDatabase`] with the provided layer at the bottom. + pub fn with_layer(layer: Layer) -> Self { + Self { stack: vec![layer] } + } + + /// Returns the index of the top layer. + pub fn last_layer_id(&self) -> usize { + self.stack.len() - 1 + } + + /// Returns a mutable reference to the top layer. + pub fn last_layer_mut(&mut self) -> &mut Layer { + // The `LayeredDatabase` always has at least one layer + self.stack.last_mut().unwrap() + } + + /// Adds the provided layer to the top, returning its index and a + /// mutable reference to the layer. + pub fn add_layer(&mut self, layer: Layer) -> (usize, &mut Layer) { + let layer_id = self.stack.len(); + self.stack.push(layer); + (layer_id, self.stack.last_mut().unwrap()) + } + + /// Reverts to the layer with specified `layer_id`, removing all + /// layers above it. + pub fn revert_to_layer(&mut self, layer_id: usize) { + assert!(layer_id < self.stack.len(), "Invalid layer id."); + self.stack.truncate(layer_id + 1); + } + + /// Returns an iterator over the object's layers. + pub fn iter(&self) -> impl Iterator { + self.stack.iter().rev() + } +} + +impl LayeredDatabase { + /// Adds a default layer to the top, returning its index and a + /// mutable reference to the layer. + pub fn add_layer_default(&mut self) -> (usize, &mut Layer) { + self.add_layer(Layer::default()) + } +} + +impl Default for LayeredDatabase { + fn default() -> Self { + Self { + stack: vec![Layer::default()], + } + } +} + +/// A layer with information needed for [`Rethnet`]. 
+#[derive(Debug, Default)] +pub struct RethnetLayer { + /// Address -> AccountInfo + account_infos: HashMap, + /// Address -> Storage + storage: HashMap>, + /// Code hash -> Address + contracts: HashMap, + /// Block number -> Block hash + block_hashes: HashMap, +} + +impl RethnetLayer { + /// Creates a `RethnetLayer` with the provided genesis accounts. + pub fn with_genesis_accounts(genesis_accounts: HashMap) -> Self { + Self { + account_infos: genesis_accounts, + ..Default::default() + } + } + + /// Insert the `AccountInfo` with at the specified `address`. + pub fn insert_account(&mut self, address: H160, mut account_info: AccountInfo) { + if let Some(code) = account_info.code.take() { + if !code.is_empty() { + account_info.code_hash = code.hash(); + self.contracts.insert(code.hash(), code.bytes().clone()); + } + } + + if account_info.code_hash.is_zero() { + account_info.code_hash = KECCAK_EMPTY; + } + + self.account_infos.insert(address, account_info); + } +} + +impl Database for LayeredDatabase { + type Error = anyhow::Error; + + fn basic(&mut self, address: H160) -> anyhow::Result> { + Ok(self + .iter() + .find_map(|layer| layer.account_infos.get(&address).cloned())) + } + + fn code_by_hash(&mut self, code_hash: H256) -> anyhow::Result { + self.iter() + .find_map(|layer| { + layer.contracts.get(&code_hash).map(|bytecode| unsafe { + Bytecode::new_raw_with_hash(bytecode.clone(), code_hash) + }) + }) + .ok_or_else(|| { + anyhow!( + "Layered database does not contain contract with code hash: {}.", + code_hash, + ) + }) + } + + fn storage(&mut self, address: H160, index: U256) -> anyhow::Result { + self.iter() + .find_map(|layer| { + layer + .storage + .get(&address) + .and_then(|storage| storage.get(&index)) + .cloned() + }) + .ok_or_else(|| { + anyhow!( + "Layered database does not contain storage with address: {}; and index: {}.", + address, + index + ) + }) + } + + fn block_hash(&mut self, number: U256) -> anyhow::Result { + self.iter() + .find_map(|layer| layer.block_hashes.get(&number).cloned()) + .ok_or_else(|| { + anyhow!( + "Layered database does not contain block hash with number: {}.", + number + ) + }) + } +} + +impl DatabaseCommit for LayeredDatabase { + fn commit(&mut self, changes: HashMap) { + let last_layer = self.last_layer_mut(); + + changes.into_iter().for_each(|(address, account)| { + if account.is_empty() || account.is_destroyed { + last_layer.account_infos.remove(&address); + } else { + last_layer.insert_account(address, account.info); + + let storage = last_layer + .storage + .entry(address) + .and_modify(|storage| { + if account.storage_cleared { + storage.clear(); + } + }) + .or_default(); + + account.storage.into_iter().for_each(|(index, value)| { + let value = value.present_value(); + if value.is_zero() { + storage.remove(&index); + } else { + storage.insert(index, value); + } + }); + + if storage.is_empty() { + last_layer.storage.remove(&address); + } + } + }); + } +} + +impl DatabaseDebug for LayeredDatabase { + type Error = anyhow::Error; + + fn insert_account( + &mut self, + address: H160, + account_info: AccountInfo, + ) -> Result<(), Self::Error> { + self.last_layer_mut() + .account_infos + .insert(address, account_info); + + Ok(()) + } + + fn insert_block(&mut self, block_number: U256, block_hash: H256) -> Result<(), Self::Error> { + self.last_layer_mut() + .block_hashes + .insert(block_number, block_hash); + + Ok(()) + } + + fn set_account_balance(&mut self, address: H160, balance: U256) -> Result<(), Self::Error> { + if let Some(account_info) = 
self.last_layer_mut().account_infos.get_mut(&address) { + account_info.balance = balance; + } else { + let mut account_info = self + .iter() + .find_map(|layer| layer.account_infos.get(&address).cloned()) + .ok_or_else(|| anyhow!("Unknown account with address: {}", address))?; + + account_info.balance = balance; + self.last_layer_mut().insert_account(address, account_info); + } + + Ok(()) + } + + fn set_account_code(&mut self, address: H160, code: Bytecode) -> Result<(), Self::Error> { + let code_hash = code.hash(); + + let old_code_hash = + if let Some(account_info) = self.last_layer_mut().account_infos.get_mut(&address) { + let old_code_hash = if account_info.code_hash != KECCAK_EMPTY { + Some(code_hash) + } else { + None + }; + + account_info.code_hash = code_hash; + + old_code_hash + } else { + let mut account_info = self + .iter() + .find_map(|layer| layer.account_infos.get(&address).cloned()) + .ok_or_else(|| anyhow!("Unknown account with address: {}", address))?; + + account_info.code_hash = code_hash; + self.last_layer_mut().insert_account(address, account_info); + + None + }; + + if let Some(code_hash) = old_code_hash { + self.last_layer_mut().contracts.remove(&code_hash); + } + + self.last_layer_mut() + .contracts + .insert(code_hash, code.bytes().clone()); + + Ok(()) + } + + fn set_account_nonce(&mut self, address: H160, nonce: u64) -> Result<(), Self::Error> { + if let Some(account_info) = self.last_layer_mut().account_infos.get_mut(&address) { + account_info.nonce = nonce; + } else { + let mut account_info = self + .iter() + .find_map(|layer| layer.account_infos.get(&address).cloned()) + .ok_or_else(|| anyhow!("Unknown account with address: {}", address))?; + + account_info.nonce = nonce; + self.last_layer_mut().insert_account(address, account_info); + } + + Ok(()) + } + + fn set_account_storage_slot( + &mut self, + address: H160, + index: U256, + value: U256, + ) -> Result<(), Self::Error> { + match self.last_layer_mut().storage.entry(address) { + hashbrown::hash_map::Entry::Occupied(mut entry) => { + entry.get_mut().insert(index, value); + } + hashbrown::hash_map::Entry::Vacant(entry) => { + let mut account_storage = HashMap::new(); + account_storage.insert(index, value); + entry.insert(account_storage); + } + } + + Ok(()) + } + + fn storage_root(&mut self) -> Result { + todo!() + } + + fn checkpoint(&mut self) -> Result<(), Self::Error> { + self.add_layer_default(); + Ok(()) + } + + fn revert(&mut self) -> Result<(), Self::Error> { + let last_layer_id = self.last_layer_id(); + if last_layer_id > 0 { + self.revert_to_layer(last_layer_id - 1); + Ok(()) + } else { + Err(anyhow!("No checkpoints to revert.")) + } + } +} diff --git a/crates/rethnet_evm/src/debug.rs b/crates/rethnet_evm/src/debug.rs new file mode 100644 index 0000000000..50db8edafb --- /dev/null +++ b/crates/rethnet_evm/src/debug.rs @@ -0,0 +1,103 @@ +use primitive_types::{H160, H256, U256}; +use revm::{AccountInfo, Bytecode}; + +/// A trait for debug operation on a database. +pub trait DatabaseDebug { + /// The database's error type. + type Error; + + /// Inserts an account with the specified `address`. + fn insert_account( + &mut self, + address: H160, + account_info: AccountInfo, + ) -> Result<(), Self::Error>; + + /// Inserts a block with the specified `block_number` and `block_hash`. + fn insert_block(&mut self, block_number: U256, block_hash: H256) -> Result<(), Self::Error>; + + /// Sets the account balance at the specified address to the provided value. 
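The layered checkpoint/revert mechanism above and the `DatabaseDebug` trait defined here combine as follows; this is a hedged, illustrative sketch (not part of the patch), assuming `anyhow` is available and using arbitrary address and balance values.

```rust
// Hedged sketch: checkpoint/revert semantics of LayeredDatabase via DatabaseDebug.
use rethnet_evm::{DatabaseDebug, LayeredDatabase, RethnetLayer, H160, U256};

fn main() -> anyhow::Result<()> {
    let mut db = LayeredDatabase::<RethnetLayer>::default();
    let address = H160::repeat_byte(0x11);

    // Written into the bottom layer.
    db.insert_account(address, Default::default())?;

    // `checkpoint` pushes a fresh layer; subsequent writes land in that top layer.
    db.checkpoint()?;
    db.set_account_balance(address, U256::from(100u64))?;

    // `revert` truncates back to the previous layer, discarding the balance change.
    db.revert()?;

    Ok(())
}
```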
+ fn set_account_balance(&mut self, address: H160, balance: U256) -> Result<(), Self::Error>; + + /// Sets the account code at the specified address to the provided value. + fn set_account_code(&mut self, address: H160, code: Bytecode) -> Result<(), Self::Error>; + + /// Sets the account nonce at the specified address to the provided value. + fn set_account_nonce(&mut self, address: H160, nonce: u64) -> Result<(), Self::Error>; + + /// Sets the storage slot at the specified address and index to the provided value. + fn set_account_storage_slot( + &mut self, + address: H160, + index: U256, + value: U256, + ) -> Result<(), Self::Error>; + + /// Retrieves the storage root of the database. + fn storage_root(&mut self) -> Result; + + /// Creates a checkpoint that can be reverted to using [`revert`]. + fn checkpoint(&mut self) -> Result<(), Self::Error>; + + /// Reverts to the previous checkpoint, created using [`checkpoint`]. + fn revert(&mut self) -> Result<(), Self::Error>; +} + +/// A trait for objects that support [`DatabaseDebug`]. +pub trait HasDatabaseDebug { + /// The database's error type. + type Error; + + /// Retrieves the owned `DatabaseDebug`. + fn db_debug(&mut self) -> &mut dyn DatabaseDebug; +} + +impl DatabaseDebug for T { + type Error = ::Error; + + fn insert_account( + &mut self, + address: H160, + account_info: AccountInfo, + ) -> Result<(), Self::Error> { + self.db_debug().insert_account(address, account_info) + } + + fn insert_block(&mut self, block_number: U256, block_hash: H256) -> Result<(), Self::Error> { + self.db_debug().insert_block(block_number, block_hash) + } + + fn set_account_balance(&mut self, address: H160, balance: U256) -> Result<(), Self::Error> { + self.db_debug().set_account_balance(address, balance) + } + + fn set_account_code(&mut self, address: H160, code: Bytecode) -> Result<(), Self::Error> { + self.db_debug().set_account_code(address, code) + } + + fn set_account_nonce(&mut self, address: H160, nonce: u64) -> Result<(), Self::Error> { + self.db_debug().set_account_nonce(address, nonce) + } + + fn set_account_storage_slot( + &mut self, + address: H160, + index: U256, + value: U256, + ) -> Result<(), Self::Error> { + self.db_debug() + .set_account_storage_slot(address, index, value) + } + + fn storage_root(&mut self) -> Result { + self.db_debug().storage_root() + } + + fn checkpoint(&mut self) -> Result<(), Self::Error> { + self.db_debug().checkpoint() + } + + fn revert(&mut self) -> Result<(), Self::Error> { + self.db_debug().revert() + } +} diff --git a/crates/rethnet_evm/src/inspector.rs b/crates/rethnet_evm/src/inspector.rs new file mode 100644 index 0000000000..306146bb6e --- /dev/null +++ b/crates/rethnet_evm/src/inspector.rs @@ -0,0 +1,33 @@ +use log::trace; +use revm::{opcode, Database, EVMData, Inspector, Interpreter, Return}; + +pub struct RethnetInspector; + +impl Default for RethnetInspector { + fn default() -> Self { + Self + } +} + +impl Inspector for RethnetInspector +where + D: Database, +{ + fn step( + &mut self, + interp: &mut Interpreter, + _data: &mut EVMData<'_, D>, + _is_static: bool, + ) -> Return { + let opcode = unsafe { *interp.instruction_pointer }; + trace!( + "opcode: {:?} | fee: {} | gasLeft: {} | gasSpent: {}", + opcode::OPCODE_JUMPMAP[usize::from(opcode)], + opcode::spec_opcode_gas(_data.env.cfg.spec_id)[usize::from(opcode)].get_gas(), + interp.gas().remaining(), + interp.gas().spend() + ); + + Return::Continue + } +} diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs new file mode 100644 index 
0000000000..44366e89e6 --- /dev/null +++ b/crates/rethnet_evm/src/lib.rs @@ -0,0 +1,24 @@ +//! The Rethnet EVM +//! +//! The Rethnet EVM exposes APIs for running and interacting with a multi-threaded Ethereum +//! Virtual Machine (or EVM). +#![warn(missing_docs)] + +pub use bytes::Bytes; +pub use db::layered_db::{LayeredDatabase, RethnetLayer}; +pub use debug::{DatabaseDebug, HasDatabaseDebug}; +pub use hashbrown::HashMap; +pub use primitive_types::{H160, H256, U256}; +pub use revm::{ + db::{DatabaseRef, EmptyDB}, + Account, AccountInfo, BlockEnv, Bytecode, CfgEnv, CreateScheme, Database, DatabaseCommit, + ExecutionResult, Log, Return, SpecId, TransactOut, TransactTo, TxEnv, EVM, +}; + +/// State mapping of addresses to accounts. +pub type State = HashMap; + +mod db; +mod debug; +mod inspector; +pub mod sync; diff --git a/crates/rethnet_evm/src/sync.rs b/crates/rethnet_evm/src/sync.rs new file mode 100644 index 0000000000..f04668594e --- /dev/null +++ b/crates/rethnet_evm/src/sync.rs @@ -0,0 +1,113 @@ +//! Synchronisation types for the Rethnet EVM. + +mod client; +pub(self) mod request; + +use anyhow::bail; + +use revm::{CfgEnv, Database, DatabaseCommit, EVM}; +use tokio::sync::mpsc::UnboundedReceiver; + +use crate::DatabaseDebug; + +pub use self::client::Client; +use self::request::Request; + +/// The asynchronous Rethnet runtime. +/// +/// Depending on the traits of the database passed to [`new`], [`Rethnet`] will support +/// running with`Request::Debug` and `Request::DatabaseMut`. +pub struct Rethnet { + evm: EVM, + request_receiver: UnboundedReceiver, +} + +impl Rethnet { + /// Creates a new [`Rethnet`] instance. + pub fn new(request_receiver: UnboundedReceiver, cfg: CfgEnv, db: D) -> Self { + let mut evm = EVM::new(); + evm.env.cfg = cfg; + evm.database(db); + + Self { + evm, + request_receiver, + } + } + + /// Runs [`Rethnet`] immutably. + pub async fn run(mut self) -> anyhow::Result<()> + where + D: Database, + { + while let Some(request) = self.request_receiver.recv().await { + match request { + Request::Debug(_) => { + bail!("Rethnet client does not support `DatabaseDebug`.") + } + Request::Database(request) => request.handle_event(&mut self.evm)?, + Request::DatabaseMut(_) => { + bail!("Rethnet client does not support `DatabaseCommit`.") + } + Request::Terminate => return Ok(()), + } + } + + Ok(()) + } + + /// Runs [`Rethnet`] immutably with debug capability. + pub async fn run_debug(mut self) -> anyhow::Result<()> + where + D: Database + DatabaseDebug, + { + while let Some(request) = self.request_receiver.recv().await { + match request { + Request::Debug(request) => request.handle_event(&mut self.evm)?, + Request::Database(request) => request.handle_event(&mut self.evm)?, + Request::DatabaseMut(_) => { + bail!("Rethnet client does not support `DatabaseCommit`.") + } + Request::Terminate => return Ok(()), + } + } + + Ok(()) + } + + /// Runs [`Rethnet`] mutably. + pub async fn run_mut(mut self) -> anyhow::Result<()> + where + D: Database + DatabaseCommit, + { + while let Some(request) = self.request_receiver.recv().await { + match request { + Request::Debug(_) => { + bail!("Rethnet client does not support `DatabaseDebug`.") + } + Request::Database(request) => request.handle_event(&mut self.evm)?, + Request::DatabaseMut(request) => request.handle_event(&mut self.evm)?, + Request::Terminate => return Ok(()), + } + } + + Ok(()) + } + + /// Runs [`Rethnet`] mutably with debug capability. 
+ pub async fn run_mut_debug(mut self) -> anyhow::Result<()> + where + D: Database + DatabaseCommit + DatabaseDebug, + { + while let Some(request) = self.request_receiver.recv().await { + match request { + Request::Debug(request) => request.handle_event(&mut self.evm)?, + Request::Database(request) => request.handle_event(&mut self.evm)?, + Request::DatabaseMut(request) => request.handle_event(&mut self.evm)?, + Request::Terminate => return Ok(()), + } + } + + Ok(()) + } +} diff --git a/crates/rethnet_evm/src/sync/client.rs b/crates/rethnet_evm/src/sync/client.rs new file mode 100644 index 0000000000..305f47dbe5 --- /dev/null +++ b/crates/rethnet_evm/src/sync/client.rs @@ -0,0 +1,305 @@ +use std::future::Future; + +use bytes::Bytes; +use primitive_types::{H160, H256, U256}; +use revm::{AccountInfo, BlockEnv, CfgEnv, Database, DatabaseCommit, ExecutionResult, TxEnv}; +use tokio::{ + runtime::{Builder, Runtime}, + sync::{ + mpsc::{unbounded_channel, UnboundedSender}, + oneshot, + }, + task::JoinHandle, +}; + +use crate::{DatabaseDebug, State}; + +use super::{ + request::{DatabaseMutRequest, DatabaseRequest, DebugRequest, Request}, + Rethnet, +}; + +/// The asynchronous client that communicates with a [`Rethnet`] object. +pub struct Client { + request_sender: UnboundedSender, + rethnet_handle: Option>>, + runtime: Runtime, +} + +impl Client { + /// Creates a [`Client`] by spawning an asynchronous task to run [`Rethnet`]. + fn new(request_sender: UnboundedSender, future: F) -> anyhow::Result + where + F: Future> + Send + 'static, + { + let runtime = Builder::new_multi_thread().build()?; + let rethnet_handle = Some(runtime.spawn(future)); + + Ok(Self { + request_sender, + rethnet_handle, + runtime, + }) + } + + /// Constructs [`Rethnet`] with the provided database and runs it asynchronously. + pub fn with_db(cfg: CfgEnv, db: D) -> anyhow::Result + where + D: Database + Send + 'static, + { + let (request_sender, request_receiver) = unbounded_channel(); + + Self::new(request_sender, async { + Rethnet::new(request_receiver, cfg, db).run().await + }) + } + + /// Constructs [`Rethnet`] with the provided database and runs it asynchronously. + pub fn with_db_debug(cfg: CfgEnv, db: D) -> anyhow::Result + where + D: Database + DatabaseDebug + Send + 'static, + { + let (request_sender, request_receiver) = unbounded_channel(); + + Self::new(request_sender, async { + Rethnet::new(request_receiver, cfg, db).run_debug().await + }) + } + + /// Constructs [`Rethnet`] with the provided database and runs it asynchronously. + pub fn with_db_mut(cfg: CfgEnv, db: D) -> anyhow::Result + where + D: Database + DatabaseCommit + Send + 'static, + { + let (request_sender, request_receiver) = unbounded_channel(); + + Self::new(request_sender, async { + Rethnet::new(request_receiver, cfg, db).run_mut().await + }) + } + + /// Constructs [`Rethnet`] with the provided database and runs it asynchronously. + pub fn with_db_mut_debug(cfg: CfgEnv, db: D) -> anyhow::Result + where + D: Database + + DatabaseCommit + + DatabaseDebug + + Send + + 'static, + { + let (request_sender, request_receiver) = unbounded_channel(); + + Self::new(request_sender, async { + Rethnet::new(request_receiver, cfg, db) + .run_mut_debug() + .await + }) + } + + /// Runs a transaction with committing the state. 
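Any database whose error type is `anyhow::Error` can be handed to these constructors. Below is a hedged, illustrative sketch (not part of the patch) of a custom read-only backend passed to `with_db`; the type name `SingleAccountDb` is made up for the example.

```rust
// Hedged sketch: a custom read-only database plugged into the async client.
// `SingleAccountDb` is an illustrative name, not part of this patch.
use rethnet_evm::{sync::Client, AccountInfo, Bytecode, CfgEnv, Database, H160, H256, U256};

struct SingleAccountDb {
    address: H160,
    info: AccountInfo,
}

impl Database for SingleAccountDb {
    type Error = anyhow::Error;

    fn basic(&mut self, address: H160) -> Result<Option<AccountInfo>, Self::Error> {
        // Only a single account is known to this backend.
        Ok((address == self.address).then(|| self.info.clone()))
    }

    fn code_by_hash(&mut self, code_hash: H256) -> Result<Bytecode, Self::Error> {
        anyhow::bail!("unknown code hash: {}", code_hash)
    }

    fn storage(&mut self, _address: H160, _index: U256) -> Result<U256, Self::Error> {
        // Empty storage: every slot reads as zero.
        Ok(U256::zero())
    }

    fn block_hash(&mut self, number: U256) -> Result<H256, Self::Error> {
        anyhow::bail!("unknown block number: {}", number)
    }
}

fn spawn_read_only_client() -> anyhow::Result<Client> {
    let db = SingleAccountDb {
        address: H160::repeat_byte(0x01),
        info: AccountInfo::default(),
    };

    // `with_db` only serves read and dry-run requests; commit and debug
    // requests are rejected by the plain `run` loop.
    Client::with_db(CfgEnv::default(), db)
}
```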
+ pub async fn dry_run(&self, transaction: TxEnv, block: BlockEnv) -> (ExecutionResult, State) { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Database(DatabaseRequest::DryRun { + transaction, + block, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Runs a transaction, committing the state in the process. + pub async fn run(&self, transaction: TxEnv) -> ExecutionResult { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::DatabaseMut(DatabaseMutRequest::Run { + transaction, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Guarantees that a transaction will succeed. + pub async fn guarantee_transaction(&self, transaction: TxEnv) -> anyhow::Result<()> { + let total_gas = U256::from(transaction.gas_limit) + * (transaction.gas_price + transaction.gas_priority_fee.unwrap_or_else(U256::zero)) + + transaction.value; + + let caller = transaction.caller; + + let account_info = + if let Some(account_info) = self.get_account_by_address(caller).await.unwrap_or(None) { + account_info + } else { + let account_info = AccountInfo::default(); + self.insert_account(caller, account_info.clone()).await?; + account_info + }; + + if account_info.balance < total_gas { + self.set_account_balance(caller, total_gas).await?; + } + + Ok(()) + } + + /// Creates a state checkpoint that can be reverted to using [`revert`]. + pub async fn checkpoint(&self) -> anyhow::Result<()> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Debug(DebugRequest::Checkpoint { sender })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Reverts to the previous checkpoint, created using [`checkpoint`]. + pub async fn revert(&self) -> anyhow::Result<()> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Debug(DebugRequest::Revert { sender })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Retrieves the account corresponding to the address, if it exists. + pub async fn get_account_by_address( + &self, + address: H160, + ) -> anyhow::Result> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Database(DatabaseRequest::AccountByAddress { + address, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Inserts the specified account into the state. + pub async fn insert_account( + &self, + address: H160, + account_info: AccountInfo, + ) -> anyhow::Result<()> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Debug(DebugRequest::InsertAccount { + address, + account_info, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Inserts the specified block number and hash into the state. + pub async fn insert_block(&self, block_number: U256, block_hash: H256) -> anyhow::Result<()> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Debug(DebugRequest::InsertBlock { + block_number, + block_hash, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Sets the account balance at the specified address to the provided value. 
+ pub async fn set_account_balance(&self, address: H160, balance: U256) -> anyhow::Result<()> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Debug(DebugRequest::SetAccountBalance { + address, + balance, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Sets the account code at the specified address to the provided value. + pub async fn set_account_code(&self, address: H160, code: Bytes) -> anyhow::Result<()> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Debug(DebugRequest::SetAccountCode { + address, + bytes: code, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Sets the account nonce at the specified address to the provided value. + pub async fn set_account_nonce(&self, address: H160, nonce: u64) -> anyhow::Result<()> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Debug(DebugRequest::SetAccountNonce { + address, + nonce, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } + + /// Sets the storage slot at the specified address and index to the provided value. + pub async fn set_account_storage_slot( + &self, + address: H160, + index: U256, + value: U256, + ) -> anyhow::Result<()> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Debug(DebugRequest::SetAccountStorageSlot { + address, + index, + value, + sender, + })) + .expect("Failed to send request"); + + receiver.await.expect("Rethnet unexpectedly crashed") + } +} + +impl Drop for Client { + fn drop(&mut self) { + if let Some(handle) = self.rethnet_handle.take() { + self.request_sender + .send(Request::Terminate) + .expect("Failed to send request"); + + self.runtime + .block_on(handle) + .unwrap() + .expect("Rethnet closed unexpectedly"); + } + } +} diff --git a/crates/rethnet_evm/src/sync/request.rs b/crates/rethnet_evm/src/sync/request.rs new file mode 100644 index 0000000000..839675632c --- /dev/null +++ b/crates/rethnet_evm/src/sync/request.rs @@ -0,0 +1,207 @@ +use anyhow::bail; +use bytes::Bytes; +use primitive_types::{H160, H256, U256}; +use revm::{ + AccountInfo, BlockEnv, Bytecode, Database, DatabaseCommit, ExecutionResult, TxEnv, EVM, +}; +use tokio::sync::oneshot; + +use crate::{inspector::RethnetInspector, DatabaseDebug, State}; + +/// The request type for a [`Client`] to communicate with [`Rethnet`]. 
+#[allow(clippy::large_enum_variant)] +#[derive(Debug)] +pub enum Request { + Debug(DebugRequest), + Database(DatabaseRequest), + DatabaseMut(DatabaseMutRequest), + Terminate, +} + +#[derive(Debug)] +pub enum DebugRequest { + Checkpoint { + sender: oneshot::Sender>, + }, + InsertAccount { + address: H160, + account_info: AccountInfo, + sender: oneshot::Sender>, + }, + InsertBlock { + block_number: U256, + block_hash: H256, + sender: oneshot::Sender>, + }, + Revert { + sender: oneshot::Sender>, + }, + SetAccountBalance { + address: H160, + balance: U256, + sender: oneshot::Sender>, + }, + SetAccountCode { + address: H160, + bytes: Bytes, + sender: oneshot::Sender>, + }, + SetAccountNonce { + address: H160, + nonce: u64, + sender: oneshot::Sender>, + }, + SetAccountStorageSlot { + address: H160, + index: U256, + value: U256, + sender: oneshot::Sender>, + }, +} + +impl DebugRequest { + pub fn handle_event(self, evm: &mut EVM) -> anyhow::Result<()> + where + D: DatabaseDebug, + { + let sent_response = match self { + DebugRequest::Checkpoint { sender } => { + sender.send(evm.db().unwrap().checkpoint()).is_ok() + } + DebugRequest::InsertAccount { + address, + account_info, + sender, + } => sender + .send(evm.db().unwrap().insert_account(address, account_info)) + .is_ok(), + DebugRequest::InsertBlock { + block_number, + block_hash, + sender, + } => sender + .send(evm.db().unwrap().insert_block(block_number, block_hash)) + .is_ok(), + DebugRequest::Revert { sender } => sender.send(evm.db().unwrap().revert()).is_ok(), + DebugRequest::SetAccountBalance { + address, + balance, + sender, + } => sender + .send(evm.db().unwrap().set_account_balance(address, balance)) + .is_ok(), + DebugRequest::SetAccountCode { + address, + bytes, + sender, + } => sender + .send( + evm.db() + .unwrap() + .set_account_code(address, Bytecode::new_raw(bytes)), + ) + .is_ok(), + DebugRequest::SetAccountNonce { + address, + nonce, + sender, + } => sender + .send(evm.db().unwrap().set_account_nonce(address, nonce)) + .is_ok(), + DebugRequest::SetAccountStorageSlot { + address, + index, + value, + sender, + } => sender + .send( + evm.db() + .unwrap() + .set_account_storage_slot(address, index, value), + ) + .is_ok(), + }; + + if !sent_response { + bail!("Failed to send response"); + } + + Ok(()) + } +} + +#[allow(clippy::large_enum_variant)] +#[derive(Debug)] +pub enum DatabaseRequest { + AccountByAddress { + address: H160, + sender: oneshot::Sender>>, + }, + DryRun { + transaction: TxEnv, + block: BlockEnv, + sender: oneshot::Sender<(ExecutionResult, State)>, + }, +} + +impl DatabaseRequest { + pub fn handle_event(self, evm: &mut EVM) -> anyhow::Result<()> + where + D: Database, + { + let sent_response = match self { + DatabaseRequest::AccountByAddress { address, sender } => { + sender.send(evm.db().unwrap().basic(address)).is_ok() + } + DatabaseRequest::DryRun { + transaction, + block, + sender, + } => { + evm.env.tx = transaction; + evm.env.block = block; + + sender + .send(evm.inspect(RethnetInspector::default())) + .is_ok() + } + }; + + if !sent_response { + bail!("Failed to send response"); + } + + Ok(()) + } +} + +#[derive(Debug)] +pub enum DatabaseMutRequest { + Run { + transaction: TxEnv, + sender: oneshot::Sender, + }, +} + +impl DatabaseMutRequest { + pub fn handle_event(self, evm: &mut EVM) -> anyhow::Result<()> + where + D: Database + DatabaseCommit, + { + let sent_response = match self { + DatabaseMutRequest::Run { + transaction, + sender, + } => { + evm.env.tx = transaction; + 
sender.send(evm.transact_commit()).is_ok() + } + }; + + if !sent_response { + bail!("Failed to send response"); + } + + Ok(()) + } +} diff --git a/crates/rethnet_evm_napi/.mocharc.json b/crates/rethnet_evm_napi/.mocharc.json new file mode 100644 index 0000000000..2a9d895c1b --- /dev/null +++ b/crates/rethnet_evm_napi/.mocharc.json @@ -0,0 +1,4 @@ +{ + "require": "ts-node/register/transpile-only", + "timeout": 25000 +} diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml new file mode 100644 index 0000000000..16891bebfa --- /dev/null +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -0,0 +1,22 @@ +[package] +name = "rethnet_evm_napi" +version = "0.1.0-dev" +edition = "2021" + +[lib] +crate-type = ["cdylib"] + +[dependencies] +anyhow = "1.0.64" +crossbeam-channel = { version = "0.5.6", default-features = false } +napi = { version = "2.9.0", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } +napi-derive = "2.9.0" +once_cell = "1.15.0" +pretty_env_logger = "0.4.0" +rethnet_evm = { version = "0.1.0-dev", path = "../rethnet_evm" } +secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc"] } +serde_json = { version = "1.0.85", default-features = false, features = ["alloc"] } +sha3 = { version = "0.10.5", default-features = false } + +[build-dependencies] +napi-build = "2.0.1" diff --git a/crates/rethnet_evm_napi/build.rs b/crates/rethnet_evm_napi/build.rs new file mode 100644 index 0000000000..0f1b01002b --- /dev/null +++ b/crates/rethnet_evm_napi/build.rs @@ -0,0 +1,3 @@ +fn main() { + napi_build::setup(); +} diff --git a/crates/rethnet_evm_napi/package.json b/crates/rethnet_evm_napi/package.json new file mode 100644 index 0000000000..de7da05fe5 --- /dev/null +++ b/crates/rethnet_evm_napi/package.json @@ -0,0 +1,23 @@ +{ + "package": "rethnet-evm", + "name": "rethnet-evm", + "version": "0.1.0-dev", + "main": "rethnet-evm.node", + "types": "index.d.ts", + "license": "MIT", + "napi": { + "name": "rethnet-evm" + }, + "scripts": { + "build": "napi build --release", + "build:debug": "napi build", + "test": "mocha --recursive \"test/**/*.ts\"" + }, + "devDependencies": { + "@napi-rs/cli": "^2.11.4", + "chai": "^4.3.6", + "@nomicfoundation/ethereumjs-util": "^8.0.0", + "mocha": "^10.0.0", + "typescript": "~4.5.2" + } +} diff --git a/crates/rethnet_evm_napi/src/cast.rs b/crates/rethnet_evm_napi/src/cast.rs new file mode 100644 index 0000000000..5749abe148 --- /dev/null +++ b/crates/rethnet_evm_napi/src/cast.rs @@ -0,0 +1,71 @@ +use napi::{ + bindgen_prelude::{BigInt, Buffer}, + Status, +}; +use rethnet_evm::{AccountInfo, Bytecode, Bytes, H256, U256}; + +use crate::Account; + +/// An attempted conversion that consumes `self`, which may or may not be +/// expensive. It is identical to [`TryInto`], but it allows us to implement +/// the trait for external types. +pub trait TryCast: Sized { + /// The type returned in the event of a conversion error. + type Error; + + /// Performs the conversion. 
+ fn try_cast(self) -> Result; +} + +impl TryCast for Account { + type Error = napi::Error; + + fn try_cast(self) -> std::result::Result { + Ok(AccountInfo { + balance: self.balance.try_cast()?, + nonce: self.nonce.get_u64().1, + code_hash: H256::from_slice(&self.code_hash), + code: self + .code + .map(|code| Bytecode::new_raw(Bytes::copy_from_slice(&code))), + }) + } +} + +impl TryCast for Buffer { + type Error = napi::Error; + + fn try_cast(self) -> std::result::Result { + Ok(H256::from_slice(&self)) + } +} + +impl TryCast for Buffer { + type Error = napi::Error; + + fn try_cast(self) -> Result { + let bytes = Bytes::copy_from_slice(&self); + + Ok(Bytecode::new_raw(bytes)) + } +} + +impl TryCast for BigInt { + type Error = napi::Error; + + fn try_cast(mut self) -> std::result::Result { + let num_words = self.words.len(); + match num_words.cmp(&4) { + std::cmp::Ordering::Less => self.words.append(&mut vec![0u64; 4 - num_words]), + std::cmp::Ordering::Equal => (), + std::cmp::Ordering::Greater => { + return Err(napi::Error::new( + Status::InvalidArg, + "BigInt cannot have more than 4 words.".to_owned(), + )); + } + } + + Ok(U256(self.words.try_into().unwrap())) + } +} diff --git a/crates/rethnet_evm_napi/src/db.rs b/crates/rethnet_evm_napi/src/db.rs new file mode 100644 index 0000000000..3613a4b261 --- /dev/null +++ b/crates/rethnet_evm_napi/src/db.rs @@ -0,0 +1,27 @@ +mod debug; +mod immutable; +mod mutable; + +pub use debug::*; +pub use immutable::*; +pub use mutable::*; +use rethnet_evm::{sync::Client, CfgEnv}; + +pub(super) fn client( + cfg: CfgEnv, + db: JsDatabase, + db_commit: Option, + db_debug: Option, +) -> anyhow::Result { + if let Some(db_commit) = db_commit { + if let Some(db_debug) = db_debug { + Client::with_db_mut_debug(cfg, JsDatabaseCommitDebug::new(db, db_commit, db_debug)) + } else { + Client::with_db_mut(cfg, JsDatabaseCommit::new(db, db_commit)) + } + } else if let Some(db_debug) = db_debug { + Client::with_db_debug(cfg, JsDatabaseDebug::new(db, db_debug)) + } else { + Client::with_db(cfg, db) + } +} diff --git a/crates/rethnet_evm_napi/src/db/debug.rs b/crates/rethnet_evm_napi/src/db/debug.rs new file mode 100644 index 0000000000..70470c7f6c --- /dev/null +++ b/crates/rethnet_evm_napi/src/db/debug.rs @@ -0,0 +1,516 @@ +use std::sync::mpsc::{channel, Sender}; + +use anyhow::anyhow; +use napi::{bindgen_prelude::Buffer, JsUnknown, NapiRaw, Status}; +use rethnet_evm::{ + Account, AccountInfo, Bytecode, Database, DatabaseCommit, DatabaseDebug, HasDatabaseDebug, + HashMap, H160, H256, U256, +}; + +use crate::{ + sync::{await_promise, await_void_promise, handle_error}, + threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, + DatabaseDebugCallbacks, +}; + +use super::{JsDatabase, JsDatabaseCommitInner}; + +pub struct CheckpointCall { + pub sender: Sender>, +} + +pub struct RevertCall { + pub sender: Sender>, +} + +pub struct GetStorageRootCall { + pub sender: Sender>, +} + +pub struct InsertAccountCall { + pub address: H160, + pub account_info: AccountInfo, + pub sender: Sender>, +} + +pub struct SetAccountBalanceCall { + pub address: H160, + pub balance: U256, + pub sender: Sender>, +} + +pub struct SetAccountCodeCall { + pub address: H160, + pub code: Bytecode, + pub sender: Sender>, +} + +pub struct SetAccountNonceCall { + pub address: H160, + pub nonce: u64, + pub sender: Sender>, +} + +pub struct SetAccountStorageSlotCall { + pub address: H160, + pub index: U256, + pub value: U256, + pub sender: Sender>, +} + +pub struct 
JsDatabaseDebug { + db: JsDatabase, + db_debug: JsDatabaseDebugInner, +} + +impl JsDatabaseDebug { + /// Creates a new [`JsDatabaseDebug`]. + pub(super) fn new(db: JsDatabase, db_debug: JsDatabaseDebugInner) -> Self { + Self { db, db_debug } + } +} + +impl HasDatabaseDebug for JsDatabaseDebug { + type Error = anyhow::Error; + + fn db_debug(&mut self) -> &mut dyn DatabaseDebug { + &mut self.db_debug + } +} + +impl Database for JsDatabaseDebug { + type Error = anyhow::Error; + + fn basic(&mut self, address: rethnet_evm::H160) -> Result, Self::Error> { + self.db.basic(address) + } + + fn code_by_hash( + &mut self, + code_hash: rethnet_evm::H256, + ) -> Result { + self.db.code_by_hash(code_hash) + } + + fn storage(&mut self, address: rethnet_evm::H160, index: U256) -> Result { + self.db.storage(address, index) + } + + fn block_hash(&mut self, number: U256) -> Result { + self.db.block_hash(number) + } +} + +pub struct JsDatabaseCommitDebug { + db: JsDatabase, + db_commit: JsDatabaseCommitInner, + db_debug: JsDatabaseDebugInner, +} + +impl JsDatabaseCommitDebug { + /// Creates a new [`JsDatabaseCommitDebug`]. + pub(super) fn new( + db: JsDatabase, + db_commit: JsDatabaseCommitInner, + db_debug: JsDatabaseDebugInner, + ) -> Self { + Self { + db, + db_commit, + db_debug, + } + } +} + +impl HasDatabaseDebug for JsDatabaseCommitDebug { + type Error = anyhow::Error; + + fn db_debug(&mut self) -> &mut dyn rethnet_evm::DatabaseDebug { + &mut self.db_debug + } +} + +impl Database for JsDatabaseCommitDebug { + type Error = anyhow::Error; + + fn basic(&mut self, address: H160) -> Result, Self::Error> { + self.db.basic(address) + } + + fn code_by_hash( + &mut self, + code_hash: rethnet_evm::H256, + ) -> Result { + self.db.code_by_hash(code_hash) + } + + fn storage( + &mut self, + address: H160, + index: rethnet_evm::U256, + ) -> Result { + self.db.storage(address, index) + } + + fn block_hash(&mut self, number: rethnet_evm::U256) -> Result { + self.db.block_hash(number) + } +} + +impl DatabaseCommit for JsDatabaseCommitDebug { + fn commit(&mut self, changes: HashMap) { + self.db_commit.commit(changes) + } +} + +pub(crate) struct JsDatabaseDebugInner { + checkpoint_fn: ThreadsafeFunction, + revert_fn: ThreadsafeFunction, + get_storage_root_fn: ThreadsafeFunction, + insert_account_fn: ThreadsafeFunction, + set_account_balance_fn: ThreadsafeFunction, + set_account_code_fn: ThreadsafeFunction, + set_account_nonce_fn: ThreadsafeFunction, + set_account_storage_slot_fn: ThreadsafeFunction, +} + +impl JsDatabaseDebugInner { + /// Creates a new `JsDatabaseDebug`. 
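Each of the `ThreadsafeFunction` fields above bridges a blocking database call into an asynchronous JavaScript callback: the call packages a `std::sync::mpsc` sender into the request, dispatches it to the callback, and blocks on the receiver until the callback resolves. A napi-free, illustrative sketch of that pattern follows (all names are made up for the example):

```rust
// Hedged sketch of the blocking-bridge pattern used by the ThreadsafeFunction calls.
// The dispatcher closure stands in for ThreadsafeFunction::call / the JS callback.
use std::sync::mpsc::{channel, Sender};

struct BalanceRequest {
    sender: Sender<Result<u64, String>>,
}

fn blocking_get_balance(dispatch: impl Fn(BalanceRequest)) -> Result<u64, String> {
    let (sender, receiver) = channel();
    // Hand the one-shot reply channel to the asynchronous side...
    dispatch(BalanceRequest { sender });
    // ...and block the calling thread until the reply arrives.
    receiver.recv().expect("callback dropped the sender")
}

fn main() {
    let balance = blocking_get_balance(|request| {
        // In the real code this is the JavaScript callback resolving a promise.
        request.sender.send(Ok(42)).unwrap();
    });
    assert_eq!(balance, Ok(42));
}
```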
+ pub fn new(env: &napi::Env, callbacks: DatabaseDebugCallbacks) -> napi::Result { + let checkpoint_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.checkpoint_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let promise = ctx.callback.call::(None, &[])?; + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + handle_error(sender, result) + }, + )?; + + let revert_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.revert_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let promise = ctx.callback.call::(None, &[])?; + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + handle_error(sender, result) + }, + )?; + + let get_storage_root_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.get_storage_root_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + + let promise = ctx.callback.call::(None, &[])?; + let result = await_promise::(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + let insert_account_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.insert_account_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let address = ctx + .env + .create_buffer_copy(ctx.value.address.as_bytes())? + .into_raw(); + + let mut account = ctx.env.create_object()?; + + let balance = ctx + .env + .create_bigint_from_words(false, ctx.value.account_info.balance.0.to_vec())?; + account.set_named_property("balance", balance)?; + + let nonce = ctx + .env + .create_bigint_from_u64(ctx.value.account_info.nonce)?; + account.set_named_property("nonce", nonce)?; + + let code_hash = ctx + .env + .create_buffer_copy(ctx.value.account_info.code_hash.as_bytes())? + .into_raw(); + account.set_named_property("codeHash", code_hash)?; + + if let Some(code) = ctx.value.account_info.code { + let code = ctx + .env + .create_buffer_copy(code.bytes().as_ref())? + .into_raw(); + + account.set_named_property("code", code)?; + } else { + account.set_named_property("code", ctx.env.get_null()?)?; + } + + let promise = ctx + .callback + .call(None, &[address.into_unknown(), account.into_unknown()])?; + + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + handle_error(sender, result) + }, + )?; + + let set_account_balance_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.set_account_balance_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let address = ctx + .env + .create_buffer_copy(ctx.value.address.as_bytes())? + .into_raw(); + + let balance = ctx + .env + .create_bigint_from_words(false, ctx.value.balance.0.to_vec())?; + + let promise = ctx + .callback + .call(None, &[address.into_unknown(), balance.into_unknown()?])?; + + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + handle_error(sender, result) + }, + )?; + + let set_account_code_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.set_account_code_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let address = ctx + .env + .create_buffer_copy(ctx.value.address.as_bytes())? + .into_raw(); + + let code = ctx + .env + .create_buffer_copy(ctx.value.code.bytes().as_ref())? 
+ .into_raw(); + + let promise = ctx.callback.call(None, &[address, code])?; + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + handle_error(sender, result) + }, + )?; + + let set_account_nonce_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.set_account_nonce_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let address = ctx + .env + .create_buffer_copy(ctx.value.address.as_bytes())? + .into_raw(); + + let nonce = ctx.env.create_bigint_from_u64(ctx.value.nonce)?; + + let promise = ctx + .callback + .call(None, &[address.into_unknown(), nonce.into_unknown()?])?; + + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + handle_error(sender, result) + }, + )?; + + let set_account_storage_slot_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.set_account_storage_slot_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let address = ctx + .env + .create_buffer_copy(ctx.value.address.as_bytes())? + .into_raw(); + + let index = ctx + .env + .create_bigint_from_words(false, ctx.value.index.0.to_vec())?; + + let value = ctx + .env + .create_bigint_from_words(false, ctx.value.value.0.to_vec())?; + + let promise = ctx.callback.call( + None, + &[ + address.into_unknown(), + index.into_unknown()?, + value.into_unknown()?, + ], + )?; + + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + handle_error(sender, result) + }, + )?; + + Ok(Self { + checkpoint_fn, + revert_fn, + get_storage_root_fn, + insert_account_fn, + set_account_balance_fn, + set_account_code_fn, + set_account_nonce_fn, + set_account_storage_slot_fn, + }) + } +} + +impl DatabaseDebug for JsDatabaseDebugInner { + type Error = anyhow::Error; + + fn insert_account( + &mut self, + address: H160, + account_info: AccountInfo, + ) -> Result<(), Self::Error> { + let (sender, receiver) = channel(); + + let status = self.insert_account_fn.call( + InsertAccountCall { + address, + account_info, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn insert_block(&mut self, _block_number: U256, _block_hash: H256) -> Result<(), Self::Error> { + todo!() + } + + fn set_account_balance(&mut self, address: H160, balance: U256) -> Result<(), Self::Error> { + let (sender, receiver) = channel(); + + let status = self.set_account_balance_fn.call( + SetAccountBalanceCall { + address, + balance, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn set_account_code( + &mut self, + address: H160, + code: rethnet_evm::Bytecode, + ) -> Result<(), Self::Error> { + let (sender, receiver) = channel(); + + let status = self.set_account_code_fn.call( + SetAccountCodeCall { + address, + code, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn set_account_nonce(&mut self, address: H160, nonce: u64) -> Result<(), Self::Error> { + let (sender, receiver) = channel(); + + let status = self.set_account_nonce_fn.call( + SetAccountNonceCall { + address, + nonce, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn set_account_storage_slot( + 
&mut self, + address: H160, + index: U256, + value: U256, + ) -> Result<(), Self::Error> { + let (sender, receiver) = channel(); + + let status = self.set_account_storage_slot_fn.call( + SetAccountStorageSlotCall { + address, + index, + value, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn storage_root(&mut self) -> Result { + let (sender, receiver) = channel(); + + let status = self.get_storage_root_fn.call( + GetStorageRootCall { sender }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn checkpoint(&mut self) -> Result<(), Self::Error> { + let (sender, receiver) = channel(); + + let status = self.checkpoint_fn.call( + CheckpointCall { sender }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn revert(&mut self) -> Result<(), Self::Error> { + let (sender, receiver) = channel(); + + let status = self + .revert_fn + .call(RevertCall { sender }, ThreadsafeFunctionCallMode::Blocking); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } +} diff --git a/crates/rethnet_evm_napi/src/db/immutable.rs b/crates/rethnet_evm_napi/src/db/immutable.rs new file mode 100644 index 0000000000..2a46eb0369 --- /dev/null +++ b/crates/rethnet_evm_napi/src/db/immutable.rs @@ -0,0 +1,190 @@ +use std::sync::mpsc::{channel, Sender}; + +use anyhow::anyhow; +use napi::{ + bindgen_prelude::{BigInt, Buffer}, + NapiRaw, Status, +}; +use rethnet_evm::{AccountInfo, Bytecode, Database, H160, H256, U256}; + +use crate::{ + sync::{await_promise, handle_error}, + threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, + Account, DatabaseCallbacks, +}; + +pub struct GetAccountByAddressCall { + pub address: H160, + pub sender: Sender>, +} + +pub struct GetAccountStorageSlotCall { + pub address: H160, + pub index: U256, + pub sender: Sender>, +} + +pub struct GetBlockHashCall { + pub block_number: U256, + pub sender: Sender>, +} + +pub struct GetCodeByHashCall { + pub code_hash: H256, + pub sender: Sender>, +} + +pub struct JsDatabase { + get_account_by_address_fn: ThreadsafeFunction, + get_account_storage_slot_fn: ThreadsafeFunction, + get_block_hash_fn: ThreadsafeFunction, + get_code_by_hash_fn: ThreadsafeFunction, +} + +impl JsDatabase { + /// Creates a new [`JsDatabase`]. + pub fn new(env: &napi::Env, callbacks: DatabaseCallbacks) -> napi::Result { + let get_account_by_address_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.get_account_by_address_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let address = ctx.env.create_buffer_copy(ctx.value.address.as_bytes())?; + + let promise = ctx.callback.call(None, &[address.into_raw()])?; + let result = + await_promise::(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + let get_account_storage_slot_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.get_account_storage_slot_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let address = ctx + .env + .create_buffer_copy(ctx.value.address.as_bytes())? 
+ .into_raw(); + + let index = ctx + .env + .create_bigint_from_words(false, ctx.value.index.0.to_vec())?; + + let promise = ctx + .callback + .call(None, &[address.into_unknown(), index.into_unknown()?])?; + + let result = await_promise::(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + let get_block_hash_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.get_block_hash_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + + let block_number = ctx + .env + .create_bigint_from_words(false, ctx.value.block_number.0.to_vec())?; + + let promise = ctx.callback.call(None, &[block_number.into_unknown()?])?; + let result = await_promise::(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + let get_code_by_hash_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.get_code_by_hash_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let code_hash = ctx.env.create_buffer_copy(ctx.value.code_hash.as_bytes())?; + + let promise = ctx.callback.call(None, &[code_hash.into_raw()])?; + let result = await_promise::(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + Ok(Self { + get_account_by_address_fn, + get_account_storage_slot_fn, + get_block_hash_fn, + get_code_by_hash_fn, + }) + } +} + +impl Database for JsDatabase { + type Error = anyhow::Error; + + fn basic(&mut self, address: H160) -> anyhow::Result> { + let (sender, receiver) = channel(); + + let status = self.get_account_by_address_fn.call( + GetAccountByAddressCall { address, sender }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_or_else( + |e| Err(anyhow!(e.to_string())), + |account_info| Ok(Some(account_info)), + ) + } + + fn code_by_hash(&mut self, code_hash: H256) -> anyhow::Result { + let (sender, receiver) = channel(); + + let status = self.get_code_by_hash_fn.call( + GetCodeByHashCall { code_hash, sender }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn storage(&mut self, address: H160, index: U256) -> anyhow::Result { + let (sender, receiver) = channel(); + + let status = self.get_account_storage_slot_fn.call( + GetAccountStorageSlotCall { + address, + index, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } + + fn block_hash(&mut self, block_number: U256) -> anyhow::Result { + let (sender, receiver) = channel(); + + let status = self.get_block_hash_fn.call( + GetBlockHashCall { + block_number, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } +} diff --git a/crates/rethnet_evm_napi/src/db/mutable.rs b/crates/rethnet_evm_napi/src/db/mutable.rs new file mode 100644 index 0000000000..deec17b8b6 --- /dev/null +++ b/crates/rethnet_evm_napi/src/db/mutable.rs @@ -0,0 +1,105 @@ +use std::sync::mpsc::{channel, Sender}; + +use napi::{JsUnknown, NapiRaw, Status}; +use rethnet_evm::{Account, Database, DatabaseCommit, HashMap, H160}; + +use crate::{ + sync::{await_void_promise, handle_error}, + threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, + DatabaseCommitCallbacks, +}; + +use super::JsDatabase; + +pub 
trait HasDatabaseCommit { + /// The database's error type. + type Error; + + /// Retrieves the owned `DatabaseCommit`. + fn db_commit(&mut self) -> &mut dyn DatabaseCommit; +} + +pub struct CommitCall { + pub sender: Sender>, +} + +pub struct JsDatabaseCommit { + db: JsDatabase, + db_commit: JsDatabaseCommitInner, +} + +impl JsDatabaseCommit { + /// Creates a new [`JsDatabaseCommit`]. + pub(super) fn new(db: JsDatabase, db_commit: JsDatabaseCommitInner) -> Self { + Self { db, db_commit } + } +} + +impl Database for JsDatabaseCommit { + type Error = anyhow::Error; + + fn basic(&mut self, address: H160) -> Result, Self::Error> { + self.db.basic(address) + } + + fn code_by_hash( + &mut self, + code_hash: rethnet_evm::H256, + ) -> Result { + self.db.code_by_hash(code_hash) + } + + fn storage( + &mut self, + address: H160, + index: rethnet_evm::U256, + ) -> Result { + self.db.storage(address, index) + } + + fn block_hash(&mut self, number: rethnet_evm::U256) -> Result { + self.db.block_hash(number) + } +} + +impl DatabaseCommit for JsDatabaseCommit { + fn commit(&mut self, changes: HashMap) { + self.db_commit.commit(changes) + } +} + +pub(crate) struct JsDatabaseCommitInner { + commit_fn: ThreadsafeFunction, +} + +impl JsDatabaseCommitInner { + /// Creates a new [`JsDatabaseCommit`]. + pub fn new(env: &napi::Env, callbacks: DatabaseCommitCallbacks) -> napi::Result { + let commit_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.commit_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + let promise = ctx.callback.call::(None, &[])?; + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + handle_error(sender, result) + }, + )?; + + Ok(Self { commit_fn }) + } +} + +impl DatabaseCommit for JsDatabaseCommitInner { + fn commit(&mut self, _changes: HashMap) { + let (sender, receiver) = channel(); + + let status = self + .commit_fn + .call(CommitCall { sender }, ThreadsafeFunctionCallMode::Blocking); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().expect("Failed to commit") + } +} diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs new file mode 100644 index 0000000000..036c9b3769 --- /dev/null +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -0,0 +1,659 @@ +mod cast; +mod db; +mod sync; +mod threadsafe_function; + +use std::{fmt::Debug, str::FromStr}; + +use db::{JsDatabaseCommitInner, JsDatabaseDebugInner}; +use napi::{bindgen_prelude::*, Status}; +use napi_derive::napi; +use once_cell::sync::OnceCell; +use rethnet_evm::{ + sync::Client, AccountInfo, BlockEnv, Bytes, CfgEnv, CreateScheme, HashMap, LayeredDatabase, + RethnetLayer, TransactTo, TxEnv, H160, H256, U256, +}; +use secp256k1::{PublicKey, Secp256k1, SecretKey, SignOnly}; +use sha3::{Digest, Keccak256}; + +use crate::{cast::TryCast, db::JsDatabase}; + +struct Logger; + +unsafe impl Sync for Logger {} + +static LOGGER: OnceCell = OnceCell::new(); + +#[napi(object)] +pub struct Account { + /// Account balance + #[napi(readonly)] + pub balance: BigInt, + /// Account nonce + #[napi(readonly)] + pub nonce: BigInt, + /// 256-bit code hash + #[napi(readonly)] + pub code_hash: Buffer, + /// Optionally, byte code + #[napi(readonly)] + pub code: Option, +} + +impl Debug for Account { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Account") + .field("balance", &self.balance) + .field("nonce", &self.nonce) + .field("code_hash", &self.code_hash.as_ref()) + .finish() + } +} + +impl From for Account 
{ + fn from(account_info: AccountInfo) -> Self { + Self { + balance: BigInt { + sign_bit: false, + words: account_info.balance.0.to_vec(), + }, + nonce: BigInt::from(account_info.nonce), + code_hash: Buffer::from(account_info.code_hash.as_bytes()), + code: account_info + .code + .map(|code| Buffer::from(code.bytes().as_ref())), + } + } +} + +fn private_key_to_address( + context: &Secp256k1, + private_key: String, +) -> napi::Result { + private_to_public_key(context, private_key).map(public_key_to_address) +} + +fn private_to_public_key( + context: &Secp256k1, + private_key: String, +) -> napi::Result { + let private_key = private_key.strip_prefix("0x").unwrap_or(&private_key); + + SecretKey::from_str(private_key).map_or_else( + |e| Err(napi::Error::new(Status::InvalidArg, e.to_string())), + |secret_key| Ok(secret_key.public_key(context)), + ) +} + +fn public_key_to_address(public_key: PublicKey) -> H160 { + let hash = Keccak256::digest(&public_key.serialize_uncompressed()[1..]); + // Only take the lower 160 bits of the hash + H160::from_slice(&hash[12..]) +} + +#[napi(object)] +pub struct GenesisAccount { + /// Account private key + pub private_key: String, + /// Account balance + pub balance: BigInt, +} + +#[napi(object)] +pub struct AccessListItem { + pub address: String, + pub storage_keys: Vec, +} + +impl TryFrom for (H160, Vec) { + type Error = napi::Error; + + fn try_from(value: AccessListItem) -> std::result::Result { + let address = H160::from_str(&value.address) + .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string()))?; + + let storage_keys = value + .storage_keys + .into_iter() + .map(|key| { + U256::from_str(&key) + .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string())) + }) + .collect::, _>>()?; + + Ok((address, storage_keys)) + } +} + +#[napi(object)] +pub struct Transaction { + /// 160-bit address for caller + /// Defaults to `0x00.0` address. + pub from: Option, + /// 160-bit address for receiver + /// Creates a contract if no address is provided. + pub to: Option, + /// Maximum gas allowance for the code execution to avoid infinite loops. + /// Defaults to 2^63. + pub gas_limit: Option, + /// Number of wei to pay for each unit of gas during execution. + /// Defaults to 1 wei. + pub gas_price: Option, + /// Maximum tip per gas that's given directly to the forger. + pub gas_priority_fee: Option, + /// (Up to) 256-bit unsigned value. + pub value: Option, + /// Nonce of sender account. + pub nonce: Option, + /// Input byte data + pub input: Option, + /// A list of addresses and storage keys that the transaction plans to access. + pub access_list: Option>, + /// Transaction is only valid on networks with this chain ID. 
+ pub chain_id: Option, +} + +impl TryFrom for TxEnv { + type Error = napi::Error; + + fn try_from(value: Transaction) -> std::result::Result { + let caller = if let Some(from) = value.from.as_ref() { + H160::from_slice(from) + } else { + H160::default() + }; + + let transact_to = if let Some(to) = value.to.as_ref() { + TransactTo::Call(H160::from_slice(to)) + } else { + TransactTo::Create(CreateScheme::Create) + }; + + let data = value + .input + .map_or(Bytes::default(), |input| Bytes::copy_from_slice(&input)); + + let access_list = value.access_list.map_or(Ok(Vec::new()), |access_list| { + access_list + .into_iter() + .map(|item| item.try_into()) + .collect::)>, _>>() + })?; + + Ok(Self { + caller, + gas_limit: value + .gas_limit + .map_or(2u64.pow(63), |limit| limit.get_u64().1), + gas_price: value + .gas_price + .map_or(Ok(U256::from(0)), BigInt::try_cast)?, + gas_priority_fee: value + .gas_priority_fee + .map_or(Ok(None), |fee| BigInt::try_cast(fee).map(Some))?, + transact_to, + value: value.value.map_or(Ok(U256::default()), BigInt::try_cast)?, + data, + chain_id: value.chain_id.map(|chain_id| chain_id.get_u64().1), + nonce: value.nonce.map(|nonce| nonce.get_u64().1), + access_list, + }) + } +} + +#[napi(object)] +pub struct TransactionOutput { + /// Return value from Call or Create transactions + #[napi(readonly)] + pub output: Option, + /// Optionally, a 160-bit address from Create transactions + #[napi(readonly)] + pub address: Option, +} + +impl From for TransactionOutput { + fn from(value: rethnet_evm::TransactOut) -> Self { + let (output, address) = match value { + rethnet_evm::TransactOut::None => (None, None), + rethnet_evm::TransactOut::Call(output) => (Some(Buffer::from(output.as_ref())), None), + rethnet_evm::TransactOut::Create(output, address) => ( + Some(Buffer::from(output.as_ref())), + address.map(|address| Buffer::from(address.as_bytes())), + ), + }; + + Self { output, address } + } +} + +#[napi(object)] +pub struct Block { + pub number: BigInt, + pub coinbase: Option, + pub timestamp: BigInt, + pub difficulty: Option, + pub basefee: Option, + pub gas_limit: Option, +} + +impl TryFrom for BlockEnv { + type Error = napi::Error; + + fn try_from(value: Block) -> std::result::Result { + let default = BlockEnv::default(); + let coinbase = value + .coinbase + .map_or(default.coinbase, |coinbase| H160::from_slice(&coinbase)); + let difficulty = value.difficulty.map_or_else( + || Ok(default.difficulty), + |difficulty| difficulty.try_cast(), + )?; + let basefee = value + .basefee + .map_or_else(|| Ok(default.basefee), |basefee| basefee.try_cast())?; + let gas_limit = value + .gas_limit + .map_or(Ok(default.gas_limit), |gas_limit| gas_limit.try_cast())?; + + Ok(Self { + number: value.number.try_cast()?, + coinbase, + timestamp: value.timestamp.try_cast()?, + difficulty, + basefee, + gas_limit, + }) + } +} + +/// If not set, uses defaults from [`CfgEnv`]. 
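+///
+/// A minimal sketch of building a config from the TypeScript side, assuming the
+/// camelCase field names produced by the generated bindings; the values are
+/// illustrative only:
+///
+/// ```typescript
+/// const cfg: Config = {
+///   chainId: 1n,
+///   specId: SpecId.London,
+///   disableEip3607: true,
+/// };
+/// const rethnet = new Rethnet(cfg);
+/// ```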
+#[napi(object)] +pub struct Config { + pub chain_id: Option, + pub spec_id: Option, + pub limit_contract_code_size: Option, + pub disable_block_gas_limit: Option, + pub disable_eip3607: Option, +} + +impl TryFrom for CfgEnv { + type Error = napi::Error; + + fn try_from(value: Config) -> std::result::Result { + let default = CfgEnv::default(); + let chain_id = value + .chain_id + .map_or(Ok(default.chain_id), |chain_id| chain_id.try_cast())?; + + let spec_id = value + .spec_id + .map_or(default.spec_id, |spec_id| spec_id.into()); + + let limit_contract_code_size = value.limit_contract_code_size.map_or(Ok(None), |size| { + // TODO: the lossless check in get_u64 is broken: https://github.com/napi-rs/napi-rs/pull/1348 + if let (false, size, _lossless) = size.get_u64() { + usize::try_from(size).map_or_else( + |e| Err(napi::Error::new(Status::InvalidArg, e.to_string())), + |size| Ok(Some(size)), + ) + } else { + Err(napi::Error::new( + Status::InvalidArg, + "BigInt cannot be larger than usize::MAX".to_owned(), + )) + } + })?; + + let disable_block_gas_limit = value + .disable_block_gas_limit + .unwrap_or(default.disable_block_gas_limit); + let disable_eip3607 = value.disable_eip3607.unwrap_or(default.disable_eip3607); + + Ok(Self { + chain_id, + spec_id, + limit_contract_code_size, + disable_block_gas_limit, + disable_eip3607, + ..default + }) + } +} + +#[napi] +pub enum SpecId { + Frontier = 0, + FrontierThawing = 1, + Homestead = 2, + DaoFork = 3, + Tangerine = 4, + SpuriousDragon = 5, + Byzantium = 6, + Constantinople = 7, + Petersburg = 8, + Istanbul = 9, + MuirGlacier = 10, + Berlin = 11, + London = 12, + ArrowGlacier = 13, + GrayGlacier = 14, + Merge = 15, + Latest = 16, +} + +impl From for rethnet_evm::SpecId { + fn from(value: SpecId) -> Self { + match value { + SpecId::Frontier => rethnet_evm::SpecId::FRONTIER, + SpecId::FrontierThawing => rethnet_evm::SpecId::FRONTIER_THAWING, + SpecId::Homestead => rethnet_evm::SpecId::HOMESTEAD, + SpecId::DaoFork => rethnet_evm::SpecId::DAO_FORK, + SpecId::Tangerine => rethnet_evm::SpecId::TANGERINE, + SpecId::SpuriousDragon => rethnet_evm::SpecId::SPURIOUS_DRAGON, + SpecId::Byzantium => rethnet_evm::SpecId::BYZANTIUM, + SpecId::Constantinople => rethnet_evm::SpecId::CONSTANTINOPLE, + SpecId::Petersburg => rethnet_evm::SpecId::PETERSBURG, + SpecId::Istanbul => rethnet_evm::SpecId::ISTANBUL, + SpecId::MuirGlacier => rethnet_evm::SpecId::MUIR_GLACIER, + SpecId::Berlin => rethnet_evm::SpecId::BERLIN, + SpecId::London => rethnet_evm::SpecId::LONDON, + SpecId::ArrowGlacier => rethnet_evm::SpecId::ARROW_GLACIER, + SpecId::GrayGlacier => rethnet_evm::SpecId::GRAY_GLACIER, + SpecId::Merge => rethnet_evm::SpecId::MERGE, + SpecId::Latest => rethnet_evm::SpecId::LATEST, + } + } +} + +#[napi(object)] +pub struct ExecutionResult { + pub exit_code: u8, + pub output: TransactionOutput, + pub gas_used: BigInt, + pub gas_refunded: BigInt, + pub logs: Vec, +} + +impl TryFrom for ExecutionResult { + type Error = napi::Error; + + fn try_from(value: rethnet_evm::ExecutionResult) -> std::result::Result { + let logs = value + .logs + .into_iter() + .map(serde_json::to_value) + .collect::>>()?; + + Ok(Self { + exit_code: value.exit_reason as u8, + output: value.out.into(), + gas_used: BigInt::from(value.gas_used), + gas_refunded: BigInt::from(value.gas_refunded), + logs, + }) + } +} + +#[napi(object)] +pub struct TransactionResult { + pub exec_result: ExecutionResult, + pub state: serde_json::Value, +} + +impl TryFrom<(rethnet_evm::ExecutionResult, rethnet_evm::State)> for 
TransactionResult { + type Error = napi::Error; + + fn try_from( + value: (rethnet_evm::ExecutionResult, rethnet_evm::State), + ) -> std::result::Result { + let exec_result = value.0.try_into()?; + let state = serde_json::to_value(value.1)?; + + Ok(Self { exec_result, state }) + } +} + +#[napi(object)] +pub struct DatabaseCallbacks { + #[napi(ts_type = "(address: Buffer) => Promise")] + pub get_account_by_address_fn: JsFunction, + #[napi(ts_type = "(address: Buffer, index: bigint) => Promise")] + pub get_account_storage_slot_fn: JsFunction, + #[napi(ts_type = "(blockNumber: bigint) => Promise")] + pub get_block_hash_fn: JsFunction, + #[napi(ts_type = "(codeHash: Buffer) => Promise")] + pub get_code_by_hash_fn: JsFunction, +} + +#[napi(object)] +pub struct DatabaseCommitCallbacks { + #[napi(ts_type = "() => Promise")] + pub commit_fn: JsFunction, +} + +#[napi(object)] +pub struct DatabaseDebugCallbacks { + #[napi(ts_type = "() => Promise")] + pub checkpoint_fn: JsFunction, + #[napi(ts_type = "() => Promise")] + pub revert_fn: JsFunction, + #[napi(ts_type = "() => Promise")] + pub get_storage_root_fn: JsFunction, + #[napi(ts_type = "(address: Buffer, account: Account) => Promise")] + pub insert_account_fn: JsFunction, + #[napi(ts_type = "(address: Buffer, balance: bigint) => Promise")] + pub set_account_balance_fn: JsFunction, + #[napi(ts_type = "(address: Buffer, code: Buffer) => Promise")] + pub set_account_code_fn: JsFunction, + #[napi(ts_type = "(address: Buffer, nonce: bigint) => Promise")] + pub set_account_nonce_fn: JsFunction, + #[napi(ts_type = "(address: Buffer, index: bigint, value: bigint) => Promise")] + pub set_account_storage_slot_fn: JsFunction, +} + +#[napi] +pub struct Rethnet { + client: Client, +} + +#[napi] +impl Rethnet { + #[allow(clippy::new_without_default)] + #[napi(constructor)] + pub fn new(cfg: Config) -> napi::Result { + let cfg = cfg.try_into()?; + + Ok(Self::with_logger(Client::with_db_mut_debug( + cfg, + LayeredDatabase::default(), + )?)) + } + + #[napi(factory)] + pub fn with_callbacks( + env: Env, + cfg: Config, + db_callbacks: DatabaseCallbacks, + db_mut_callbacks: Option, + db_debug_callbacks: Option, + ) -> napi::Result { + let cfg = cfg.try_into()?; + + let db = JsDatabase::new(&env, db_callbacks)?; + let db_commit = db_mut_callbacks.map_or(Ok(None), |db| { + JsDatabaseCommitInner::new(&env, db).map(Some) + })?; + let db_debug = db_debug_callbacks + .map_or(Ok(None), |db| JsDatabaseDebugInner::new(&env, db).map(Some))?; + + db::client(cfg, db, db_commit, db_debug).map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |client| Ok(Self::with_logger(client)), + ) + } + + fn with_logger(client: Client) -> Self { + let _logger = LOGGER.get_or_init(|| { + pretty_env_logger::init(); + Logger + }); + + Self { client } + } + + #[napi(factory)] + pub fn with_genesis_accounts(cfg: Config, accounts: Vec) -> napi::Result { + let cfg = cfg.try_into()?; + + let context = Secp256k1::signing_only(); + let genesis_accounts = accounts + .into_iter() + .map(|account| { + let address = private_key_to_address(&context, account.private_key)?; + account.balance.try_cast().map(|balance| { + let account_info = AccountInfo { + balance, + ..Default::default() + }; + + (address, account_info) + }) + }) + .collect::>>()?; + + let mut database = + LayeredDatabase::with_layer(RethnetLayer::with_genesis_accounts(genesis_accounts)); + database.add_layer_default(); + + Client::with_db(cfg, database).map_or_else( + |e| 
Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |client| Ok(Self::with_logger(client)), + ) + } + + #[napi] + pub async fn dry_run( + &self, + transaction: Transaction, + block: Block, + ) -> Result { + let transaction = transaction.try_into()?; + let block = block.try_into()?; + + self.client.dry_run(transaction, block).await.try_into() + } + + #[napi] + pub async fn run(&self, transaction: Transaction) -> Result { + let transaction = transaction.try_into()?; + self.client.run(transaction).await.try_into() + } + + #[napi] + pub async fn checkpoint(&self) -> Result<()> { + self.client + .checkpoint() + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn revert(&self) -> Result<()> { + self.client + .revert() + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn get_account_by_address(&self, address: Buffer) -> Result> { + let address = H160::from_slice(&address); + self.client + .get_account_by_address(address) + .await + .map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |account_info| Ok(account_info.map(Account::from)), + ) + } + + #[napi] + pub async fn guarantee_transaction(&self, transaction: Transaction) -> Result<()> { + let transaction = transaction.try_into()?; + + self.client + .guarantee_transaction(transaction) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn insert_account(&self, address: Buffer) -> Result<()> { + let address = H160::from_slice(&address); + self.client + .insert_account(address, AccountInfo::default()) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn insert_block(&self, block_number: BigInt, block_hash: Buffer) -> Result<()> { + let block_number = BigInt::try_cast(block_number)?; + let block_hash = H256::from_slice(&block_hash); + + self.client + .insert_block(block_number, block_hash) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn set_account_balance(&self, address: Buffer, balance: BigInt) -> Result<()> { + let address = H160::from_slice(&address); + let balance = BigInt::try_cast(balance)?; + + self.client + .set_account_balance(address, balance) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn set_account_code(&self, address: Buffer, code: Buffer) -> Result<()> { + let address = H160::from_slice(&address); + let code = Bytes::copy_from_slice(&code); + + self.client + .set_account_code(address, code) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn set_account_nonce(&self, address: Buffer, nonce: BigInt) -> Result<()> { + let address = H160::from_slice(&address); + let nonce = nonce.get_u64().1; + + self.client + .set_account_nonce(address, nonce) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn set_account_storage_slot( + &self, + address: Buffer, + index: BigInt, + value: BigInt, + ) -> Result<()> { + let address = H160::from_slice(&address); + let index = BigInt::try_cast(index)?; + let value = BigInt::try_cast(value)?; + + self.client + .set_account_storage_slot(address, index, value) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } +} diff --git 
a/crates/rethnet_evm_napi/src/sync.rs b/crates/rethnet_evm_napi/src/sync.rs
new file mode 100644
index 0000000000..a45b88bcd3
--- /dev/null
+++ b/crates/rethnet_evm_napi/src/sync.rs
@@ -0,0 +1,84 @@
+use std::{fmt::Debug, sync::mpsc::Sender};
+
+use napi::{bindgen_prelude::FromNapiValue, Env, JsFunction, JsObject, JsUnknown, NapiRaw, Status};
+
+use crate::cast::TryCast;
+
+pub fn await_promise<I, O>(
+    env: Env,
+    result: JsUnknown,
+    tx: Sender<napi::Result<O>>,
+) -> napi::Result<()>
+where
+    I: FromNapiValue + TryCast<O, Error = napi::Error>,
+    O: 'static,
+{
+    // If the result is a promise, wait for it to resolve, and send the result to the channel.
+    // Otherwise, send the result immediately.
+    if result.is_promise()? {
+        let result: JsObject = result.try_into()?;
+        let then: JsFunction = result.get_named_property("then")?;
+        let tx2 = tx.clone();
+        let cb = env.create_function_from_closure("callback", move |ctx| {
+            let result = ctx.get::<I>(0)?;
+            tx.send(Ok(result.try_cast()?)).unwrap();
+            ctx.env.get_undefined()
+        })?;
+        let eb = env.create_function_from_closure("error_callback", move |ctx| {
+            // TODO: need a way to convert a JsUnknown to an Error
+            tx2.send(Err(napi::Error::from_reason("Promise rejected")))
+                .unwrap();
+            ctx.env.get_undefined()
+        })?;
+        then.call(Some(&result), &[cb, eb])?;
+    } else {
+        let result = unsafe { I::from_napi_value(env.raw(), result.raw())? };
+        tx.send(Ok(result.try_cast()?)).unwrap();
+    }
+
+    Ok(())
+}
+
+pub fn await_void_promise(
+    env: Env,
+    result: JsUnknown,
+    tx: Sender<napi::Result<()>>,
+) -> napi::Result<()> {
+    // If the result is a promise, wait for it to resolve, and send the result to the channel.
+    // Otherwise, send the result immediately.
+    if result.is_promise()? {
+        let result: JsObject = result.try_into()?;
+        let then: JsFunction = result.get_named_property("then")?;
+        let tx2 = tx.clone();
+        let cb = env.create_function_from_closure("callback", move |ctx| {
+            tx.send(Ok(())).unwrap();
+            ctx.env.get_undefined()
+        })?;
+        let eb = env.create_function_from_closure("error_callback", move |ctx| {
+            // TODO: need a way to convert a JsUnknown to an Error
+            tx2.send(Err(napi::Error::from_reason("Promise rejected")))
+                .unwrap();
+            ctx.env.get_undefined()
+        })?;
+        then.call(Some(&result), &[cb, eb])?;
+        Ok(())
+    } else {
+        Err(napi::Error::new(
+            Status::ObjectExpected,
+            "Expected promise".to_owned(),
+        ))
+    }
+}
+
+pub fn handle_error<T>(
+    tx: Sender<napi::Result<T>>,
+    res: napi::Result<()>,
+) -> napi::Result<()> {
+    match res {
+        Ok(_) => Ok(()),
+        Err(e) => {
+            tx.send(Err(e)).expect("send error");
+            Ok(())
+        }
+    }
+}
diff --git a/crates/rethnet_evm_napi/src/threadsafe_function.rs b/crates/rethnet_evm_napi/src/threadsafe_function.rs
new file mode 100644
index 0000000000..94f08c6457
--- /dev/null
+++ b/crates/rethnet_evm_napi/src/threadsafe_function.rs
@@ -0,0 +1,302 @@
+// Fork of threadsafe_function from napi-rs that allows calling JS function manually rather than
+// only returning args. This enables us to use the return value of the function.
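+//
+// The pattern used by the `db` and `sync` modules (`src/db/*.rs`, `src/sync.rs`) is:
+// the Rust side packs its arguments plus an `mpsc::Sender` into a call struct and passes
+// it to `ThreadsafeFunction::call`; the closure handed to `create` then runs on the JS
+// thread, invokes the JS callback, and uses `await_promise`/`await_void_promise` (with
+// `handle_error` for failures) to forward the resolved value through that `Sender`, where
+// the blocking Rust caller picks it up with `recv`.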
+
+#![allow(clippy::single_component_path_imports)]
+
+use std::{
+    convert::Into,
+    ffi::CString,
+    marker::PhantomData,
+    os::raw::c_void,
+    ptr,
+    sync::{
+        atomic::{AtomicBool, AtomicUsize, Ordering},
+        Arc,
+    },
+};
+
+use napi::{check_status, sys, Env, JsError, JsFunction, NapiValue, Result, Status};
+
+/// ThreadSafeFunction Context object
+/// the `value` is the value passed to `call` method
+pub struct ThreadSafeCallContext<T> {
+    pub env: Env,
+    pub value: T,
+    pub callback: JsFunction,
+}
+
+#[repr(u8)]
+pub enum ThreadsafeFunctionCallMode {
+    NonBlocking,
+    Blocking,
+}
+
+impl From<ThreadsafeFunctionCallMode> for sys::napi_threadsafe_function_call_mode {
+    fn from(value: ThreadsafeFunctionCallMode) -> Self {
+        match value {
+            ThreadsafeFunctionCallMode::Blocking => sys::ThreadsafeFunctionCallMode::blocking,
+            ThreadsafeFunctionCallMode::NonBlocking => sys::ThreadsafeFunctionCallMode::nonblocking,
+        }
+    }
+}
+
+/// Communicate with the addon's main thread by invoking a JavaScript function from other threads.
+///
+/// ## Example
+/// An example of using `ThreadsafeFunction`:
+///
+/// ```rust
+/// #[macro_use]
+/// extern crate napi_derive;
+///
+/// use std::thread;
+///
+/// use napi::{
+///     threadsafe_function::{
+///         ThreadSafeCallContext, ThreadsafeFunctionCallMode, ThreadsafeFunctionReleaseMode,
+///     },
+///     CallContext, Error, JsFunction, JsNumber, JsUndefined, Result, Status,
+/// };
+///
+/// #[js_function(1)]
+/// pub fn test_threadsafe_function(ctx: CallContext) -> Result<JsUndefined> {
+///     let func = ctx.get::<JsFunction>(0)?;
+///
+///     let tsfn =
+///         ctx
+///             .env
+///             .create_threadsafe_function(&func, 0, |ctx: ThreadSafeCallContext<Vec<u32>>| {
+///                 ctx.value
+///                     .iter()
+///                     .map(|v| ctx.env.create_uint32(*v))
+///                     .collect::<Result<Vec<JsNumber>>>()
+///             })?;
+///
+///     let tsfn_cloned = tsfn.clone();
+///
+///     thread::spawn(move || {
+///         let output: Vec<u32> = vec![0, 1, 2, 3];
+///         // It's okay to call a threadsafe function multiple times.
+///         tsfn.call(Ok(output.clone()), ThreadsafeFunctionCallMode::Blocking);
+///     });
+///
+///     thread::spawn(move || {
+///         let output: Vec<u32> = vec![3, 2, 1, 0];
+///         // It's okay to call a threadsafe function multiple times.
+///         tsfn_cloned.call(Ok(output.clone()), ThreadsafeFunctionCallMode::NonBlocking);
+///     });
+///
+///     ctx.env.get_undefined()
+/// }
+/// ```
+pub struct ThreadsafeFunction<T: 'static> {
+    raw_tsfn: sys::napi_threadsafe_function,
+    aborted: Arc<AtomicBool>,
+    ref_count: Arc<AtomicUsize>,
+    _phantom: PhantomData<T>,
+}
+
+impl<T: 'static> Clone for ThreadsafeFunction<T> {
+    fn clone(&self) -> Self {
+        if !self.aborted.load(Ordering::Acquire) {
+            let acquire_status = unsafe { sys::napi_acquire_threadsafe_function(self.raw_tsfn) };
+            debug_assert!(
+                acquire_status == sys::Status::napi_ok,
+                "Acquire threadsafe function failed in clone"
+            );
+        }
+
+        Self {
+            raw_tsfn: self.raw_tsfn,
+            aborted: Arc::clone(&self.aborted),
+            ref_count: Arc::clone(&self.ref_count),
+            _phantom: PhantomData,
+        }
+    }
+}
+
+unsafe impl<T: 'static> Send for ThreadsafeFunction<T> {}
+unsafe impl<T: 'static> Sync for ThreadsafeFunction<T> {}
+
+impl<T: 'static> ThreadsafeFunction<T> {
+    /// See [napi_create_threadsafe_function](https://nodejs.org/api/n-api.html#n_api_napi_create_threadsafe_function)
+    /// for more information.
+    pub(crate) fn create<R: 'static + Send + FnMut(ThreadSafeCallContext<T>) -> Result<()>>(
+        env: sys::napi_env,
+        func: sys::napi_value,
+        max_queue_size: usize,
+        callback: R,
+    ) -> Result<Self> {
+        let mut async_resource_name = ptr::null_mut();
+        let s = "napi_rs_threadsafe_function";
+        let len = s.len();
+        let s = CString::new(s)?;
+        check_status!(unsafe {
+            sys::napi_create_string_utf8(env, s.as_ptr(), len, &mut async_resource_name)
+        })?;
+
+        let initial_thread_count = 1usize;
+        let mut raw_tsfn = ptr::null_mut();
+        let ptr = Box::into_raw(Box::new(callback)) as *mut c_void;
+        check_status!(unsafe {
+            sys::napi_create_threadsafe_function(
+                env,
+                func,
+                ptr::null_mut(),
+                async_resource_name,
+                max_queue_size,
+                initial_thread_count,
+                ptr,
+                Some(thread_finalize_cb::<T, R>),
+                ptr,
+                Some(call_js_cb::<T, R>),
+                &mut raw_tsfn,
+            )
+        })?;
+
+        let aborted = Arc::new(AtomicBool::new(false));
+        let aborted_ptr = Arc::into_raw(aborted.clone()) as *mut c_void;
+        check_status!(unsafe {
+            sys::napi_add_env_cleanup_hook(env, Some(cleanup_cb), aborted_ptr)
+        })?;
+
+        Ok(ThreadsafeFunction {
+            raw_tsfn,
+            aborted,
+            ref_count: Arc::new(AtomicUsize::new(initial_thread_count)),
+            _phantom: PhantomData,
+        })
+    }
+}
+
+impl<T: 'static> ThreadsafeFunction<T> {
+    /// See [napi_call_threadsafe_function](https://nodejs.org/api/n-api.html#n_api_napi_call_threadsafe_function)
+    /// for more information.
+    pub fn call(&self, value: T, mode: ThreadsafeFunctionCallMode) -> Status {
+        if self.aborted.load(Ordering::Acquire) {
+            return Status::Closing;
+        }
+        unsafe {
+            sys::napi_call_threadsafe_function(
+                self.raw_tsfn,
+                Box::into_raw(Box::new(value)) as *mut _,
+                mode.into(),
+            )
+        }
+        .into()
+    }
+}
+
+impl<T: 'static> Drop for ThreadsafeFunction<T> {
+    fn drop(&mut self) {
+        if !self.aborted.load(Ordering::Acquire) && self.ref_count.load(Ordering::Acquire) > 0usize
+        {
+            let release_status = unsafe {
+                sys::napi_release_threadsafe_function(
+                    self.raw_tsfn,
+                    sys::ThreadsafeFunctionReleaseMode::release,
+                )
+            };
+            assert!(
+                release_status == sys::Status::napi_ok,
+                "Threadsafe Function release failed"
+            );
+        }
+    }
+}
+
+unsafe extern "C" fn cleanup_cb(cleanup_data: *mut c_void) {
+    let aborted = Arc::<AtomicBool>::from_raw(cleanup_data.cast());
+    aborted.store(true, Ordering::SeqCst);
+}
+
+unsafe extern "C" fn thread_finalize_cb<T, R>(
+    _raw_env: sys::napi_env,
+    finalize_data: *mut c_void,
+    _finalize_hint: *mut c_void,
+) where
+    R: 'static + Send + FnMut(ThreadSafeCallContext<T>) -> Result<()>,
+{
+    // cleanup
+    drop(Box::<R>::from_raw(finalize_data.cast()));
+}
+
+unsafe extern "C" fn call_js_cb<T, R>(
+    raw_env: sys::napi_env,
+    js_callback: sys::napi_value,
+    context: *mut c_void,
+    data: *mut c_void,
+) where
+    R: 'static + Send + FnMut(ThreadSafeCallContext<T>) -> Result<()>,
+{
+    // env and/or callback can be null when shutting down
+    if raw_env.is_null() || js_callback.is_null() {
+        return;
+    }
+
+    let ctx: &mut R = &mut *context.cast::<R>();
+    let val: Result<T> = Ok(*Box::<T>::from_raw(data.cast()));
+
+    let mut recv = ptr::null_mut();
+    sys::napi_get_undefined(raw_env, &mut recv);
+
+    let ret = val.and_then(|v| {
+        (ctx)(ThreadSafeCallContext {
+            env: Env::from_raw(raw_env),
+            value: v,
+            callback: JsFunction::from_raw(raw_env, js_callback).unwrap(), // TODO: unwrap
+        })
+    });
+
+    let status = match ret {
+        Ok(()) => sys::Status::napi_ok,
+        Err(e) => sys::napi_fatal_exception(raw_env, JsError::from(e).into_value(raw_env)),
+    };
+    if status == sys::Status::napi_ok {
+        return;
+    }
+    if status == sys::Status::napi_pending_exception {
+        let mut error_result = ptr::null_mut();
+        assert_eq!(
sys::napi_get_and_clear_last_exception(raw_env, &mut error_result), + sys::Status::napi_ok + ); + + // When shutting down, napi_fatal_exception sometimes returns another exception + let stat = sys::napi_fatal_exception(raw_env, error_result); + assert!(stat == sys::Status::napi_ok || stat == sys::Status::napi_pending_exception); + } else { + let error_code: Status = status.into(); + let error_code_string = format!("{:?}", error_code); + let mut error_code_value = ptr::null_mut(); + assert_eq!( + sys::napi_create_string_utf8( + raw_env, + error_code_string.as_ptr() as *const _, + error_code_string.len(), + &mut error_code_value, + ), + sys::Status::napi_ok, + ); + let error_msg = "Call JavaScript callback failed in thread safe function"; + let mut error_msg_value = ptr::null_mut(); + assert_eq!( + sys::napi_create_string_utf8( + raw_env, + error_msg.as_ptr() as *const _, + error_msg.len(), + &mut error_msg_value, + ), + sys::Status::napi_ok, + ); + let mut error_value = ptr::null_mut(); + assert_eq!( + sys::napi_create_error(raw_env, error_code_value, error_msg_value, &mut error_value), + sys::Status::napi_ok, + ); + assert_eq!( + sys::napi_fatal_exception(raw_env, error_value), + sys::Status::napi_ok + ); + } +} diff --git a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts new file mode 100644 index 0000000000..2ec2cf3316 --- /dev/null +++ b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts @@ -0,0 +1,99 @@ +import { expect } from "chai"; +import { Address } from "@nomicfoundation/ethereumjs-util"; + +import { Block, Config, Rethnet, Transaction } from "../.."; + +describe("Rethnet DB", () => { + const caller = Address.fromString( + "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" + ); + const receiver = Address.fromString( + "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" + ); + + let rethnet: Rethnet; + + beforeEach(function () { + const cfg: Config = { + chainId: BigInt(0), + limitContractCodeSize: BigInt(2n) ** BigInt(32n), + disableEip3607: true, + }; + rethnet = new Rethnet(cfg); + }); + + // TODO: insertBlock, setAccountCode, setAccountStorageSlot + it("getAccountByAddress", async () => { + await rethnet.insertAccount(caller.buf); + let account = await rethnet.getAccountByAddress(caller.buf); + + expect(account?.balance).to.equal(0n); + expect(account?.nonce).to.equal(0n); + }); + it("setAccountBalance", async () => { + await rethnet.insertAccount(caller.buf); + await rethnet.setAccountBalance(caller.buf, 100n); + + let account = await rethnet.getAccountByAddress(caller.buf); + + expect(account?.balance).to.equal(100n); + expect(account?.nonce).to.equal(0n); + }); + it("setAccountNonce", async () => { + await rethnet.insertAccount(caller.buf); + await rethnet.setAccountNonce(caller.buf, 5n); + + let account = await rethnet.getAccountByAddress(caller.buf); + + expect(account?.balance).to.equal(0n); + expect(account?.nonce).to.equal(5n); + }); + it("call", async () => { + // Add funds to caller + await rethnet.insertAccount(caller.buf); + await rethnet.setAccountBalance(caller.buf, BigInt("0xffffffff")); + + // send some value + const sendValue: Transaction = { + from: caller.buf, + to: receiver.buf, + gasLimit: BigInt(1000000), + value: 100n, + }; + + const block: Block = { + number: BigInt(1), + timestamp: BigInt(Math.ceil(new Date().getTime() / 1000)), + }; + let sendValueChanges = await rethnet.dryRun(sendValue, block); + + // receiver should have 100 (0x64) wei + expect( + sendValueChanges.state["0x70997970c51812dc3a010c7d01b50e0d17dc79c8"].info 
+ .balance + ).to.equal("0x64"); + + // create a contract + const createContract: Transaction = { + from: caller.buf, + + gasLimit: BigInt(1000000), + + // minimal creation bytecode + input: Buffer.from("3859818153F3", "hex"), + }; + + let createContractChanges = await rethnet.dryRun(createContract, block); + + expect( + createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] + ).to.exist; + // check that the code hash is not the null hash (i.e., the address has code) + expect( + createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] + .info.code_hash + ).to.not.equal( + "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" + ); + }); +}); diff --git a/crates/rethnet_evm_napi/tsconfig.json b/crates/rethnet_evm_napi/tsconfig.json new file mode 100644 index 0000000000..470939b26f --- /dev/null +++ b/crates/rethnet_evm_napi/tsconfig.json @@ -0,0 +1,17 @@ +{ + "extends": "../../config/typescript/tsconfig.json", + "compilerOptions": { + "outDir": "./build-test", + "rootDirs": [ + "./test" + ], + "composite": true + }, + "include": [ + "./*.ts", + "./test/**/*.ts" + ], + "exclude": [ + "./node_modules" + ] +} diff --git a/crates/rethnet_evm_napi/yarn.lock b/crates/rethnet_evm_napi/yarn.lock new file mode 100644 index 0000000000..92ca80921d --- /dev/null +++ b/crates/rethnet_evm_napi/yarn.lock @@ -0,0 +1,2189 @@ +# This file is generated by running "yarn install" inside your project. +# Manual changes might be lost - proceed with caution! + +__metadata: + version: 6 + cacheKey: 8 + +"@gar/promisify@npm:^1.1.3": + version: 1.1.3 + resolution: "@gar/promisify@npm:1.1.3" + checksum: 4059f790e2d07bf3c3ff3e0fec0daa8144fe35c1f6e0111c9921bd32106adaa97a4ab096ad7dab1e28ee6a9060083c4d1a4ada42a7f5f3f7a96b8812e2b757c1 + languageName: node + linkType: hard + +"@napi-rs/cli@npm:^2.11.4": + version: 2.11.4 + resolution: "@napi-rs/cli@npm:2.11.4" + bin: + napi: scripts/index.js + checksum: f9f1036520ba43685c782c4eafdd56176930d317ab31c6841c2f033e09f4cac7c9561ccdfeaefcb30c2ac5e64390fb308a5658eb47c373e8131e138ad38b98c1 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-block@npm:^4.0.0": + version: 4.0.0 + resolution: "@nomicfoundation/ethereumjs-block@npm:4.0.0" + dependencies: + "@nomicfoundation/ethereumjs-common": ^3.0.0 + "@nomicfoundation/ethereumjs-rlp": ^4.0.0 + "@nomicfoundation/ethereumjs-trie": ^5.0.0 + "@nomicfoundation/ethereumjs-tx": ^4.0.0 + "@nomicfoundation/ethereumjs-util": ^8.0.0 + ethereum-cryptography: 0.1.3 + checksum: a57a33dda7724f0a46ef2e0ca0dbb1b427268f4135e8c23eee9ab5730a79369d52122faba7a010d71bca3046f7ce644ed95e4a34d5f2221ecaa5d94886d84b11 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-blockchain@npm:^6.0.0": + version: 6.0.0 + resolution: "@nomicfoundation/ethereumjs-blockchain@npm:6.0.0" + dependencies: + "@nomicfoundation/ethereumjs-block": ^4.0.0 + "@nomicfoundation/ethereumjs-common": ^3.0.0 + "@nomicfoundation/ethereumjs-ethash": ^2.0.0 + "@nomicfoundation/ethereumjs-rlp": ^4.0.0 + "@nomicfoundation/ethereumjs-trie": ^5.0.0 + "@nomicfoundation/ethereumjs-util": ^8.0.0 + abstract-level: ^1.0.3 + debug: ^4.3.3 + ethereum-cryptography: 0.1.3 + level: ^8.0.0 + lru-cache: ^5.1.1 + memory-level: ^1.0.0 + checksum: 5605c1d249924321de98c1728b5b832ee6488b690a42c829db21afa96f5c152c73afdec6aa4758cb9b24ec7ac19ec9f3146b63cf837e1b91d364e4c37b497881 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-common@npm:^3.0.0": + version: 3.0.0 + resolution: 
"@nomicfoundation/ethereumjs-common@npm:3.0.0" + dependencies: + "@nomicfoundation/ethereumjs-util": ^8.0.0 + crc-32: ^1.2.0 + checksum: 6a62908e5ccd8a4f56b841bd6ba9eef21dffafdd505f18b6b886d86ba4287cd12a2c632d521c5fddf2c6fca5a840f580d7601d89820098f6c1f8311db41e496b + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-ethash@npm:^2.0.0": + version: 2.0.0 + resolution: "@nomicfoundation/ethereumjs-ethash@npm:2.0.0" + dependencies: + "@nomicfoundation/ethereumjs-block": ^4.0.0 + "@nomicfoundation/ethereumjs-rlp": ^4.0.0 + "@nomicfoundation/ethereumjs-util": ^8.0.0 + abstract-level: ^1.0.3 + bigint-crypto-utils: ^3.0.23 + ethereum-cryptography: 0.1.3 + checksum: 60133df2d450179f2ab26e8784b1bd79b37411bb047a7dace655499749893750f0f8d6d573f182ebcf4dba35f2da6301b0ad1b80dbe7637bb0d5155ccb189fda + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-rlp@npm:^4.0.0, @nomicfoundation/ethereumjs-rlp@npm:^4.0.0-beta.2": + version: 4.0.0 + resolution: "@nomicfoundation/ethereumjs-rlp@npm:4.0.0" + bin: + rlp: bin/rlp + checksum: b358d239e5a24884f0446d52159c8115b0eb1d6907179dc968df5054dccea7eff72f2d12522c911b6e08bb4b5d3f5f8e1d86a45cb1a24a4831cbb109743d4407 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-statemanager@npm:^1.0.0": + version: 1.0.0 + resolution: "@nomicfoundation/ethereumjs-statemanager@npm:1.0.0" + dependencies: + "@nomicfoundation/ethereumjs-common": ^3.0.0 + "@nomicfoundation/ethereumjs-rlp": ^4.0.0 + "@nomicfoundation/ethereumjs-trie": ^5.0.0 + "@nomicfoundation/ethereumjs-util": ^8.0.0 + debug: ^4.3.3 + ethereum-cryptography: 0.1.3 + functional-red-black-tree: ^1.0.1 + checksum: fad02ea922fbe25328186ea2eb43bdba63def57822f373ce213be26125ee8d3c90cf3b6f626e6876637cdb842e3c2b788fb8891fcf1aca3fd655e1c0d9a7e936 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-trie@npm:^5.0.0": + version: 5.0.0 + resolution: "@nomicfoundation/ethereumjs-trie@npm:5.0.0" + dependencies: + "@nomicfoundation/ethereumjs-rlp": ^4.0.0 + "@nomicfoundation/ethereumjs-util": ^8.0.0 + ethereum-cryptography: 0.1.3 + readable-stream: ^3.6.0 + checksum: 468de7ffe05473f0f05940e74bba01652dd9a4ff155a13e0a5395551e53557afde47d98f496f6323824bccfaeee8de4e22fef9b7f88d3bbd4e97cadc54e2e4f9 + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-tx@npm:^4.0.0": + version: 4.0.0 + resolution: "@nomicfoundation/ethereumjs-tx@npm:4.0.0" + dependencies: + "@nomicfoundation/ethereumjs-common": ^3.0.0 + "@nomicfoundation/ethereumjs-rlp": ^4.0.0 + "@nomicfoundation/ethereumjs-util": ^8.0.0 + ethereum-cryptography: 0.1.3 + checksum: d2c0e3384aaa9f3b58232c531a4efd524be257e7257f23c3beed6ec9cf5fba6345cb632b3a464ae0a2aa99fd9e4a2d3e2d5c501593c5466e6ab629f05255791e + languageName: node + linkType: hard + +"@nomicfoundation/ethereumjs-util@npm:^8.0.0": + version: 8.0.0 + resolution: "@nomicfoundation/ethereumjs-util@npm:8.0.0" + dependencies: + "@nomicfoundation/ethereumjs-rlp": ^4.0.0-beta.2 + ethereum-cryptography: 0.1.3 + checksum: a39be4c8d3dea4fae1e969b47138d718cac31bf248bb517766a42c97ca5850ca3ddf16c66d8e404fa0a0363fd6898ae2e716d75da2ed4113e610d26026e4cefb + languageName: node + linkType: hard + +"@npmcli/fs@npm:^2.1.0": + version: 2.1.2 + resolution: "@npmcli/fs@npm:2.1.2" + dependencies: + "@gar/promisify": ^1.1.3 + semver: ^7.3.5 + checksum: 405074965e72d4c9d728931b64d2d38e6ea12066d4fad651ac253d175e413c06fe4350970c783db0d749181da8fe49c42d3880bd1cbc12cd68e3a7964d820225 + languageName: node + linkType: hard + +"@npmcli/move-file@npm:^2.0.0": + version: 2.0.1 
+ resolution: "@npmcli/move-file@npm:2.0.1" + dependencies: + mkdirp: ^1.0.4 + rimraf: ^3.0.2 + checksum: 52dc02259d98da517fae4cb3a0a3850227bdae4939dda1980b788a7670636ca2b4a01b58df03dd5f65c1e3cb70c50fa8ce5762b582b3f499ec30ee5ce1fd9380 + languageName: node + linkType: hard + +"@tootallnate/once@npm:2": + version: 2.0.0 + resolution: "@tootallnate/once@npm:2.0.0" + checksum: ad87447820dd3f24825d2d947ebc03072b20a42bfc96cbafec16bff8bbda6c1a81fcb0be56d5b21968560c5359a0af4038a68ba150c3e1694fe4c109a063bed8 + languageName: node + linkType: hard + +"@types/node@npm:*": + version: 18.8.3 + resolution: "@types/node@npm:18.8.3" + checksum: 9201adc6dc389644c9f478f950ef8926a93e5827865dcd80d7d12fefacab665c96879c87cd6ec74d5eccdd998c4603d02e1e07a35d71a63fe4c20670a381f6ef + languageName: node + linkType: hard + +"@types/pbkdf2@npm:^3.0.0": + version: 3.1.0 + resolution: "@types/pbkdf2@npm:3.1.0" + dependencies: + "@types/node": "*" + checksum: d15024b1957c21cf3b8887329d9bd8dfde754cf13a09d76ae25f1391cfc62bb8b8d7b760773c5dbaa748172fba8b3e0c3dbe962af6ccbd69b76df12a48dfba40 + languageName: node + linkType: hard + +"@types/secp256k1@npm:^4.0.1": + version: 4.0.3 + resolution: "@types/secp256k1@npm:4.0.3" + dependencies: + "@types/node": "*" + checksum: 1bd10b9afa724084b655dc81b7b315def3d2d0e272014ef16009fa76e17537411c07c0695fdea412bc7b36d2a02687f5fea33522d55b8ef29eda42992f812913 + languageName: node + linkType: hard + +"@ungap/promise-all-settled@npm:1.1.2": + version: 1.1.2 + resolution: "@ungap/promise-all-settled@npm:1.1.2" + checksum: 08d37fdfa23a6fe8139f1305313562ebad973f3fac01bcce2773b2bda5bcb0146dfdcf3cb6a722cf0a5f2ca0bc56a827eac8f1e7b3beddc548f654addf1fc34c + languageName: node + linkType: hard + +"abbrev@npm:1": + version: 1.1.1 + resolution: "abbrev@npm:1.1.1" + checksum: a4a97ec07d7ea112c517036882b2ac22f3109b7b19077dc656316d07d308438aac28e4d9746dc4d84bf6b1e75b4a7b0a5f3cb30592419f128ca9a8cee3bcfa17 + languageName: node + linkType: hard + +"abstract-level@npm:^1.0.0, abstract-level@npm:^1.0.2, abstract-level@npm:^1.0.3": + version: 1.0.3 + resolution: "abstract-level@npm:1.0.3" + dependencies: + buffer: ^6.0.3 + catering: ^2.1.0 + is-buffer: ^2.0.5 + level-supports: ^4.0.0 + level-transcoder: ^1.0.1 + module-error: ^1.0.1 + queue-microtask: ^1.2.3 + checksum: 70d61a3924526ebc257b138992052f9ff571a6cee5a7660836e37a1cc7081273c3acf465dd2f5e1897b38dc743a6fd9dba14a5d8a2a9d39e5787cd3da99f301d + languageName: node + linkType: hard + +"agent-base@npm:6, agent-base@npm:^6.0.2": + version: 6.0.2 + resolution: "agent-base@npm:6.0.2" + dependencies: + debug: 4 + checksum: f52b6872cc96fd5f622071b71ef200e01c7c4c454ee68bc9accca90c98cfb39f2810e3e9aa330435835eedc8c23f4f8a15267f67c6e245d2b33757575bdac49d + languageName: node + linkType: hard + +"agentkeepalive@npm:^4.2.1": + version: 4.2.1 + resolution: "agentkeepalive@npm:4.2.1" + dependencies: + debug: ^4.1.0 + depd: ^1.1.2 + humanize-ms: ^1.2.1 + checksum: 39cb49ed8cf217fd6da058a92828a0a84e0b74c35550f82ee0a10e1ee403c4b78ade7948be2279b188b7a7303f5d396ea2738b134731e464bf28de00a4f72a18 + languageName: node + linkType: hard + +"aggregate-error@npm:^3.0.0": + version: 3.1.0 + resolution: "aggregate-error@npm:3.1.0" + dependencies: + clean-stack: ^2.0.0 + indent-string: ^4.0.0 + checksum: 1101a33f21baa27a2fa8e04b698271e64616b886795fd43c31068c07533c7b3facfcaf4e9e0cab3624bd88f729a592f1c901a1a229c9e490eafce411a8644b79 + languageName: node + linkType: hard + +"ansi-colors@npm:4.1.1": + version: 4.1.1 + resolution: "ansi-colors@npm:4.1.1" + checksum: 
138d04a51076cb085da0a7e2d000c5c0bb09f6e772ed5c65c53cb118d37f6c5f1637506d7155fb5f330f0abcf6f12fa2e489ac3f8cdab9da393bf1bb4f9a32b0 + languageName: node + linkType: hard + +"ansi-regex@npm:^5.0.1": + version: 5.0.1 + resolution: "ansi-regex@npm:5.0.1" + checksum: 2aa4bb54caf2d622f1afdad09441695af2a83aa3fe8b8afa581d205e57ed4261c183c4d3877cee25794443fde5876417d859c108078ab788d6af7e4fe52eb66b + languageName: node + linkType: hard + +"ansi-styles@npm:^4.0.0, ansi-styles@npm:^4.1.0": + version: 4.3.0 + resolution: "ansi-styles@npm:4.3.0" + dependencies: + color-convert: ^2.0.1 + checksum: 513b44c3b2105dd14cc42a19271e80f386466c4be574bccf60b627432f9198571ebf4ab1e4c3ba17347658f4ee1711c163d574248c0c1cdc2d5917a0ad582ec4 + languageName: node + linkType: hard + +"anymatch@npm:~3.1.2": + version: 3.1.2 + resolution: "anymatch@npm:3.1.2" + dependencies: + normalize-path: ^3.0.0 + picomatch: ^2.0.4 + checksum: 985163db2292fac9e5a1e072bf99f1b5baccf196e4de25a0b0b81865ebddeb3b3eb4480734ef0a2ac8c002845396b91aa89121f5b84f93981a4658164a9ec6e9 + languageName: node + linkType: hard + +"aproba@npm:^1.0.3 || ^2.0.0": + version: 2.0.0 + resolution: "aproba@npm:2.0.0" + checksum: 5615cadcfb45289eea63f8afd064ab656006361020e1735112e346593856f87435e02d8dcc7ff0d11928bc7d425f27bc7c2a84f6c0b35ab0ff659c814c138a24 + languageName: node + linkType: hard + +"are-we-there-yet@npm:^3.0.0": + version: 3.0.1 + resolution: "are-we-there-yet@npm:3.0.1" + dependencies: + delegates: ^1.0.0 + readable-stream: ^3.6.0 + checksum: 52590c24860fa7173bedeb69a4c05fb573473e860197f618b9a28432ee4379049336727ae3a1f9c4cb083114601c1140cee578376164d0e651217a9843f9fe83 + languageName: node + linkType: hard + +"argparse@npm:^2.0.1": + version: 2.0.1 + resolution: "argparse@npm:2.0.1" + checksum: 83644b56493e89a254bae05702abf3a1101b4fa4d0ca31df1c9985275a5a5bd47b3c27b7fa0b71098d41114d8ca000e6ed90cad764b306f8a503665e4d517ced + languageName: node + linkType: hard + +"assertion-error@npm:^1.1.0": + version: 1.1.0 + resolution: "assertion-error@npm:1.1.0" + checksum: fd9429d3a3d4fd61782eb3962ae76b6d08aa7383123fca0596020013b3ebd6647891a85b05ce821c47d1471ed1271f00b0545cf6a4326cf2fc91efcc3b0fbecf + languageName: node + linkType: hard + +"balanced-match@npm:^1.0.0": + version: 1.0.2 + resolution: "balanced-match@npm:1.0.2" + checksum: 9706c088a283058a8a99e0bf91b0a2f75497f185980d9ffa8b304de1d9e58ebda7c72c07ebf01dadedaac5b2907b2c6f566f660d62bd336c3468e960403b9d65 + languageName: node + linkType: hard + +"base-x@npm:^3.0.2": + version: 3.0.9 + resolution: "base-x@npm:3.0.9" + dependencies: + safe-buffer: ^5.0.1 + checksum: 957101d6fd09e1903e846fd8f69fd7e5e3e50254383e61ab667c725866bec54e5ece5ba49ce385128ae48f9ec93a26567d1d5ebb91f4d56ef4a9cc0d5a5481e8 + languageName: node + linkType: hard + +"base64-js@npm:^1.3.1": + version: 1.5.1 + resolution: "base64-js@npm:1.5.1" + checksum: 669632eb3745404c2f822a18fc3a0122d2f9a7a13f7fb8b5823ee19d1d2ff9ee5b52c53367176ea4ad093c332fd5ab4bd0ebae5a8e27917a4105a4cfc86b1005 + languageName: node + linkType: hard + +"bigint-crypto-utils@npm:^3.0.23": + version: 3.1.7 + resolution: "bigint-crypto-utils@npm:3.1.7" + dependencies: + bigint-mod-arith: ^3.1.0 + checksum: 10fa35d3e3d37639c8d501f45e0044c9062e7aa60783ae514e4d4ed3235ac24ac180e0dd0c77dad8cb5410ef24de42e1ea12527a997fec4c59f15fa83ea477ba + languageName: node + linkType: hard + +"bigint-mod-arith@npm:^3.1.0": + version: 3.1.2 + resolution: "bigint-mod-arith@npm:3.1.2" + checksum: 
badddd745f6e6c45674b22335d26a9ea83250e749abde20c5f84b24afbc747e259bc36798530953332349ed898f38ec39125b326cae8b8ee2dddfaea7ddf8448 + languageName: node + linkType: hard + +"binary-extensions@npm:^2.0.0": + version: 2.2.0 + resolution: "binary-extensions@npm:2.2.0" + checksum: ccd267956c58d2315f5d3ea6757cf09863c5fc703e50fbeb13a7dc849b812ef76e3cf9ca8f35a0c48498776a7478d7b4a0418e1e2b8cb9cb9731f2922aaad7f8 + languageName: node + linkType: hard + +"blakejs@npm:^1.1.0": + version: 1.2.1 + resolution: "blakejs@npm:1.2.1" + checksum: d699ba116cfa21d0b01d12014a03e484dd76d483133e6dc9eb415aa70a119f08beb3bcefb8c71840106a00b542cba77383f8be60cd1f0d4589cb8afb922eefbe + languageName: node + linkType: hard + +"bn.js@npm:^4.11.9": + version: 4.12.0 + resolution: "bn.js@npm:4.12.0" + checksum: 39afb4f15f4ea537b55eaf1446c896af28ac948fdcf47171961475724d1bb65118cca49fa6e3d67706e4790955ec0e74de584e45c8f1ef89f46c812bee5b5a12 + languageName: node + linkType: hard + +"brace-expansion@npm:^1.1.7": + version: 1.1.11 + resolution: "brace-expansion@npm:1.1.11" + dependencies: + balanced-match: ^1.0.0 + concat-map: 0.0.1 + checksum: faf34a7bb0c3fcf4b59c7808bc5d2a96a40988addf2e7e09dfbb67a2251800e0d14cd2bfc1aa79174f2f5095c54ff27f46fb1289fe2d77dac755b5eb3434cc07 + languageName: node + linkType: hard + +"brace-expansion@npm:^2.0.1": + version: 2.0.1 + resolution: "brace-expansion@npm:2.0.1" + dependencies: + balanced-match: ^1.0.0 + checksum: a61e7cd2e8a8505e9f0036b3b6108ba5e926b4b55089eeb5550cd04a471fe216c96d4fe7e4c7f995c728c554ae20ddfc4244cad10aef255e72b62930afd233d1 + languageName: node + linkType: hard + +"braces@npm:~3.0.2": + version: 3.0.2 + resolution: "braces@npm:3.0.2" + dependencies: + fill-range: ^7.0.1 + checksum: e2a8e769a863f3d4ee887b5fe21f63193a891c68b612ddb4b68d82d1b5f3ff9073af066c343e9867a393fe4c2555dcb33e89b937195feb9c1613d259edfcd459 + languageName: node + linkType: hard + +"brorand@npm:^1.1.0": + version: 1.1.0 + resolution: "brorand@npm:1.1.0" + checksum: 8a05c9f3c4b46572dec6ef71012b1946db6cae8c7bb60ccd4b7dd5a84655db49fe043ecc6272e7ef1f69dc53d6730b9e2a3a03a8310509a3d797a618cbee52be + languageName: node + linkType: hard + +"browser-level@npm:^1.0.1": + version: 1.0.1 + resolution: "browser-level@npm:1.0.1" + dependencies: + abstract-level: ^1.0.2 + catering: ^2.1.1 + module-error: ^1.0.2 + run-parallel-limit: ^1.1.0 + checksum: 67fbc77ce832940bfa25073eccff279f512ad56f545deb996a5b23b02316f5e76f4a79d381acc27eda983f5c9a2566aaf9c97e4fdd0748288c4407307537a29b + languageName: node + linkType: hard + +"browser-stdout@npm:1.3.1": + version: 1.3.1 + resolution: "browser-stdout@npm:1.3.1" + checksum: b717b19b25952dd6af483e368f9bcd6b14b87740c3d226c2977a65e84666ffd67000bddea7d911f111a9b6ddc822b234de42d52ab6507bce4119a4cc003ef7b3 + languageName: node + linkType: hard + +"browserify-aes@npm:^1.2.0": + version: 1.2.0 + resolution: "browserify-aes@npm:1.2.0" + dependencies: + buffer-xor: ^1.0.3 + cipher-base: ^1.0.0 + create-hash: ^1.1.0 + evp_bytestokey: ^1.0.3 + inherits: ^2.0.1 + safe-buffer: ^5.0.1 + checksum: 4a17c3eb55a2aa61c934c286f34921933086bf6d67f02d4adb09fcc6f2fc93977b47d9d884c25619144fccd47b3b3a399e1ad8b3ff5a346be47270114bcf7104 + languageName: node + linkType: hard + +"bs58@npm:^4.0.0": + version: 4.0.1 + resolution: "bs58@npm:4.0.1" + dependencies: + base-x: ^3.0.2 + checksum: b3c5365bb9e0c561e1a82f1a2d809a1a692059fae016be233a6127ad2f50a6b986467c3a50669ce4c18929dcccb297c5909314dd347a25a68c21b68eb3e95ac2 + languageName: node + linkType: hard + +"bs58check@npm:^2.1.2": + version: 2.1.2 + resolution: 
"bs58check@npm:2.1.2" + dependencies: + bs58: ^4.0.0 + create-hash: ^1.1.0 + safe-buffer: ^5.1.2 + checksum: 43bdf08a5dd04581b78f040bc4169480e17008da482ffe2a6507327bbc4fc5c28de0501f7faf22901cfe57fbca79cbb202ca529003fedb4cb8dccd265b38e54d + languageName: node + linkType: hard + +"buffer-xor@npm:^1.0.3": + version: 1.0.3 + resolution: "buffer-xor@npm:1.0.3" + checksum: 10c520df29d62fa6e785e2800e586a20fc4f6dfad84bcdbd12e1e8a83856de1cb75c7ebd7abe6d036bbfab738a6cf18a3ae9c8e5a2e2eb3167ca7399ce65373a + languageName: node + linkType: hard + +"buffer@npm:^6.0.3": + version: 6.0.3 + resolution: "buffer@npm:6.0.3" + dependencies: + base64-js: ^1.3.1 + ieee754: ^1.2.1 + checksum: 5ad23293d9a731e4318e420025800b42bf0d264004c0286c8cc010af7a270c7a0f6522e84f54b9ad65cbd6db20b8badbfd8d2ebf4f80fa03dab093b89e68c3f9 + languageName: node + linkType: hard + +"cacache@npm:^16.1.0": + version: 16.1.3 + resolution: "cacache@npm:16.1.3" + dependencies: + "@npmcli/fs": ^2.1.0 + "@npmcli/move-file": ^2.0.0 + chownr: ^2.0.0 + fs-minipass: ^2.1.0 + glob: ^8.0.1 + infer-owner: ^1.0.4 + lru-cache: ^7.7.1 + minipass: ^3.1.6 + minipass-collect: ^1.0.2 + minipass-flush: ^1.0.5 + minipass-pipeline: ^1.2.4 + mkdirp: ^1.0.4 + p-map: ^4.0.0 + promise-inflight: ^1.0.1 + rimraf: ^3.0.2 + ssri: ^9.0.0 + tar: ^6.1.11 + unique-filename: ^2.0.0 + checksum: d91409e6e57d7d9a3a25e5dcc589c84e75b178ae8ea7de05cbf6b783f77a5fae938f6e8fda6f5257ed70000be27a681e1e44829251bfffe4c10216002f8f14e6 + languageName: node + linkType: hard + +"camelcase@npm:^6.0.0": + version: 6.3.0 + resolution: "camelcase@npm:6.3.0" + checksum: 8c96818a9076434998511251dcb2761a94817ea17dbdc37f47ac080bd088fc62c7369429a19e2178b993497132c8cbcf5cc1f44ba963e76782ba469c0474938d + languageName: node + linkType: hard + +"catering@npm:^2.1.0, catering@npm:^2.1.1": + version: 2.1.1 + resolution: "catering@npm:2.1.1" + checksum: 205daefa69c935b0c19f3d8f2e0a520dd69aebe9bda55902958003f7c9cff8f967dfb90071b421bd6eb618576f657a89d2bc0986872c9bc04bbd66655e9d4bd6 + languageName: node + linkType: hard + +"chai@npm:^4.3.6": + version: 4.3.6 + resolution: "chai@npm:4.3.6" + dependencies: + assertion-error: ^1.1.0 + check-error: ^1.0.2 + deep-eql: ^3.0.1 + get-func-name: ^2.0.0 + loupe: ^2.3.1 + pathval: ^1.1.1 + type-detect: ^4.0.5 + checksum: acff93fd537f96d4a4d62dd83810285dffcfccb5089e1bf2a1205b28ec82d93dff551368722893cf85004282df10ee68802737c33c90c5493957ed449ed7ce71 + languageName: node + linkType: hard + +"chalk@npm:^4.1.0": + version: 4.1.2 + resolution: "chalk@npm:4.1.2" + dependencies: + ansi-styles: ^4.1.0 + supports-color: ^7.1.0 + checksum: fe75c9d5c76a7a98d45495b91b2172fa3b7a09e0cc9370e5c8feb1c567b85c4288e2b3fded7cfdd7359ac28d6b3844feb8b82b8686842e93d23c827c417e83fc + languageName: node + linkType: hard + +"check-error@npm:^1.0.2": + version: 1.0.2 + resolution: "check-error@npm:1.0.2" + checksum: d9d106504404b8addd1ee3f63f8c0eaa7cd962a1a28eb9c519b1c4a1dc7098be38007fc0060f045ee00f075fbb7a2a4f42abcf61d68323677e11ab98dc16042e + languageName: node + linkType: hard + +"chokidar@npm:3.5.3": + version: 3.5.3 + resolution: "chokidar@npm:3.5.3" + dependencies: + anymatch: ~3.1.2 + braces: ~3.0.2 + fsevents: ~2.3.2 + glob-parent: ~5.1.2 + is-binary-path: ~2.1.0 + is-glob: ~4.0.1 + normalize-path: ~3.0.0 + readdirp: ~3.6.0 + dependenciesMeta: + fsevents: + optional: true + checksum: b49fcde40176ba007ff361b198a2d35df60d9bb2a5aab228279eb810feae9294a6b4649ab15981304447afe1e6ffbf4788ad5db77235dc770ab777c6e771980c + languageName: node + linkType: hard + +"chownr@npm:^2.0.0": + version: 2.0.0 + 
resolution: "chownr@npm:2.0.0" + checksum: c57cf9dd0791e2f18a5ee9c1a299ae6e801ff58fee96dc8bfd0dcb4738a6ce58dd252a3605b1c93c6418fe4f9d5093b28ffbf4d66648cb2a9c67eaef9679be2f + languageName: node + linkType: hard + +"cipher-base@npm:^1.0.0, cipher-base@npm:^1.0.1, cipher-base@npm:^1.0.3": + version: 1.0.4 + resolution: "cipher-base@npm:1.0.4" + dependencies: + inherits: ^2.0.1 + safe-buffer: ^5.0.1 + checksum: 47d3568dbc17431a339bad1fe7dff83ac0891be8206911ace3d3b818fc695f376df809bea406e759cdea07fff4b454fa25f1013e648851bec790c1d75763032e + languageName: node + linkType: hard + +"classic-level@npm:^1.2.0": + version: 1.2.0 + resolution: "classic-level@npm:1.2.0" + dependencies: + abstract-level: ^1.0.2 + catering: ^2.1.0 + module-error: ^1.0.1 + napi-macros: ~2.0.0 + node-gyp: latest + node-gyp-build: ^4.3.0 + checksum: 88ddd12f2192c2775107d5e462998ac01095cb0222ca01dc2be77d8dcbbf9883c4c0a0248529cceee40a2f1232c68027b1aca731da9f767ad8e9483cbd61dd37 + languageName: node + linkType: hard + +"clean-stack@npm:^2.0.0": + version: 2.2.0 + resolution: "clean-stack@npm:2.2.0" + checksum: 2ac8cd2b2f5ec986a3c743935ec85b07bc174d5421a5efc8017e1f146a1cf5f781ae962618f416352103b32c9cd7e203276e8c28241bbe946160cab16149fb68 + languageName: node + linkType: hard + +"cliui@npm:^7.0.2": + version: 7.0.4 + resolution: "cliui@npm:7.0.4" + dependencies: + string-width: ^4.2.0 + strip-ansi: ^6.0.0 + wrap-ansi: ^7.0.0 + checksum: ce2e8f578a4813806788ac399b9e866297740eecd4ad1823c27fd344d78b22c5f8597d548adbcc46f0573e43e21e751f39446c5a5e804a12aace402b7a315d7f + languageName: node + linkType: hard + +"color-convert@npm:^2.0.1": + version: 2.0.1 + resolution: "color-convert@npm:2.0.1" + dependencies: + color-name: ~1.1.4 + checksum: 79e6bdb9fd479a205c71d89574fccfb22bd9053bd98c6c4d870d65c132e5e904e6034978e55b43d69fcaa7433af2016ee203ce76eeba9cfa554b373e7f7db336 + languageName: node + linkType: hard + +"color-name@npm:~1.1.4": + version: 1.1.4 + resolution: "color-name@npm:1.1.4" + checksum: b0445859521eb4021cd0fb0cc1a75cecf67fceecae89b63f62b201cca8d345baf8b952c966862a9d9a2632987d4f6581f0ec8d957dfacece86f0a7919316f610 + languageName: node + linkType: hard + +"color-support@npm:^1.1.3": + version: 1.1.3 + resolution: "color-support@npm:1.1.3" + bin: + color-support: bin.js + checksum: 9b7356817670b9a13a26ca5af1c21615463b500783b739b7634a0c2047c16cef4b2865d7576875c31c3cddf9dd621fa19285e628f20198b233a5cfdda6d0793b + languageName: node + linkType: hard + +"concat-map@npm:0.0.1": + version: 0.0.1 + resolution: "concat-map@npm:0.0.1" + checksum: 902a9f5d8967a3e2faf138d5cb784b9979bad2e6db5357c5b21c568df4ebe62bcb15108af1b2253744844eb964fc023fbd9afbbbb6ddd0bcc204c6fb5b7bf3af + languageName: node + linkType: hard + +"console-control-strings@npm:^1.1.0": + version: 1.1.0 + resolution: "console-control-strings@npm:1.1.0" + checksum: 8755d76787f94e6cf79ce4666f0c5519906d7f5b02d4b884cf41e11dcd759ed69c57da0670afd9236d229a46e0f9cf519db0cd829c6dca820bb5a5c3def584ed + languageName: node + linkType: hard + +"crc-32@npm:^1.2.0": + version: 1.2.2 + resolution: "crc-32@npm:1.2.2" + bin: + crc32: bin/crc32.njs + checksum: ad2d0ad0cbd465b75dcaeeff0600f8195b686816ab5f3ba4c6e052a07f728c3e70df2e3ca9fd3d4484dc4ba70586e161ca5a2334ec8bf5a41bf022a6103ff243 + languageName: node + linkType: hard + +"create-hash@npm:^1.1.0, create-hash@npm:^1.1.2, create-hash@npm:^1.2.0": + version: 1.2.0 + resolution: "create-hash@npm:1.2.0" + dependencies: + cipher-base: ^1.0.1 + inherits: ^2.0.1 + md5.js: ^1.3.4 + ripemd160: ^2.0.1 + sha.js: ^2.4.0 + checksum: 
02a6ae3bb9cd4afee3fabd846c1d8426a0e6b495560a977ba46120c473cb283be6aa1cace76b5f927cf4e499c6146fb798253e48e83d522feba807d6b722eaa9 + languageName: node + linkType: hard + +"create-hmac@npm:^1.1.4, create-hmac@npm:^1.1.7": + version: 1.1.7 + resolution: "create-hmac@npm:1.1.7" + dependencies: + cipher-base: ^1.0.3 + create-hash: ^1.1.0 + inherits: ^2.0.1 + ripemd160: ^2.0.0 + safe-buffer: ^5.0.1 + sha.js: ^2.4.8 + checksum: ba12bb2257b585a0396108c72830e85f882ab659c3320c83584b1037f8ab72415095167ced80dc4ce8e446a8ecc4b2acf36d87befe0707d73b26cf9dc77440ed + languageName: node + linkType: hard + +"debug@npm:4, debug@npm:4.3.4, debug@npm:^4.1.0, debug@npm:^4.3.3": + version: 4.3.4 + resolution: "debug@npm:4.3.4" + dependencies: + ms: 2.1.2 + peerDependenciesMeta: + supports-color: + optional: true + checksum: 3dbad3f94ea64f34431a9cbf0bafb61853eda57bff2880036153438f50fb5a84f27683ba0d8e5426bf41a8c6ff03879488120cf5b3a761e77953169c0600a708 + languageName: node + linkType: hard + +"decamelize@npm:^4.0.0": + version: 4.0.0 + resolution: "decamelize@npm:4.0.0" + checksum: b7d09b82652c39eead4d6678bb578e3bebd848add894b76d0f6b395bc45b2d692fb88d977e7cfb93c4ed6c119b05a1347cef261174916c2e75c0a8ca57da1809 + languageName: node + linkType: hard + +"deep-eql@npm:^3.0.1": + version: 3.0.1 + resolution: "deep-eql@npm:3.0.1" + dependencies: + type-detect: ^4.0.0 + checksum: 4f4c9fb79eb994fb6e81d4aa8b063adc40c00f831588aa65e20857d5d52f15fb23034a6576ecf886f7ff6222d5ae42e71e9b7d57113e0715b1df7ea1e812b125 + languageName: node + linkType: hard + +"delegates@npm:^1.0.0": + version: 1.0.0 + resolution: "delegates@npm:1.0.0" + checksum: a51744d9b53c164ba9c0492471a1a2ffa0b6727451bdc89e31627fdf4adda9d51277cfcbfb20f0a6f08ccb3c436f341df3e92631a3440226d93a8971724771fd + languageName: node + linkType: hard + +"depd@npm:^1.1.2": + version: 1.1.2 + resolution: "depd@npm:1.1.2" + checksum: 6b406620d269619852885ce15965272b829df6f409724415e0002c8632ab6a8c0a08ec1f0bd2add05dc7bd7507606f7e2cc034fa24224ab829580040b835ecd9 + languageName: node + linkType: hard + +"diff@npm:5.0.0": + version: 5.0.0 + resolution: "diff@npm:5.0.0" + checksum: f19fe29284b633afdb2725c2a8bb7d25761ea54d321d8e67987ac851c5294be4afeab532bd84531e02583a3fe7f4014aa314a3eda84f5590e7a9e6b371ef3b46 + languageName: node + linkType: hard + +"elliptic@npm:^6.5.4": + version: 6.5.4 + resolution: "elliptic@npm:6.5.4" + dependencies: + bn.js: ^4.11.9 + brorand: ^1.1.0 + hash.js: ^1.0.0 + hmac-drbg: ^1.0.1 + inherits: ^2.0.4 + minimalistic-assert: ^1.0.1 + minimalistic-crypto-utils: ^1.0.1 + checksum: d56d21fd04e97869f7ffcc92e18903b9f67f2d4637a23c860492fbbff5a3155fd9ca0184ce0c865dd6eb2487d234ce9551335c021c376cd2d3b7cb749c7d10f4 + languageName: node + linkType: hard + +"emoji-regex@npm:^8.0.0": + version: 8.0.0 + resolution: "emoji-regex@npm:8.0.0" + checksum: d4c5c39d5a9868b5fa152f00cada8a936868fd3367f33f71be515ecee4c803132d11b31a6222b2571b1e5f7e13890156a94880345594d0ce7e3c9895f560f192 + languageName: node + linkType: hard + +"encoding@npm:^0.1.13": + version: 0.1.13 + resolution: "encoding@npm:0.1.13" + dependencies: + iconv-lite: ^0.6.2 + checksum: bb98632f8ffa823996e508ce6a58ffcf5856330fde839ae42c9e1f436cc3b5cc651d4aeae72222916545428e54fd0f6aa8862fd8d25bdbcc4589f1e3f3715e7f + languageName: node + linkType: hard + +"env-paths@npm:^2.2.0": + version: 2.2.1 + resolution: "env-paths@npm:2.2.1" + checksum: 65b5df55a8bab92229ab2b40dad3b387fad24613263d103a97f91c9fe43ceb21965cd3392b1ccb5d77088021e525c4e0481adb309625d0cb94ade1d1fb8dc17e + languageName: node + linkType: hard + 
+"err-code@npm:^2.0.2": + version: 2.0.3 + resolution: "err-code@npm:2.0.3" + checksum: 8b7b1be20d2de12d2255c0bc2ca638b7af5171142693299416e6a9339bd7d88fc8d7707d913d78e0993176005405a236b066b45666b27b797252c771156ace54 + languageName: node + linkType: hard + +"escalade@npm:^3.1.1": + version: 3.1.1 + resolution: "escalade@npm:3.1.1" + checksum: a3e2a99f07acb74b3ad4989c48ca0c3140f69f923e56d0cba0526240ee470b91010f9d39001f2a4a313841d237ede70a729e92125191ba5d21e74b106800b133 + languageName: node + linkType: hard + +"escape-string-regexp@npm:4.0.0": + version: 4.0.0 + resolution: "escape-string-regexp@npm:4.0.0" + checksum: 98b48897d93060f2322108bf29db0feba7dd774be96cd069458d1453347b25ce8682ecc39859d4bca2203cc0ab19c237bcc71755eff49a0f8d90beadeeba5cc5 + languageName: node + linkType: hard + +"ethereum-cryptography@npm:0.1.3": + version: 0.1.3 + resolution: "ethereum-cryptography@npm:0.1.3" + dependencies: + "@types/pbkdf2": ^3.0.0 + "@types/secp256k1": ^4.0.1 + blakejs: ^1.1.0 + browserify-aes: ^1.2.0 + bs58check: ^2.1.2 + create-hash: ^1.2.0 + create-hmac: ^1.1.7 + hash.js: ^1.1.7 + keccak: ^3.0.0 + pbkdf2: ^3.0.17 + randombytes: ^2.1.0 + safe-buffer: ^5.1.2 + scrypt-js: ^3.0.0 + secp256k1: ^4.0.1 + setimmediate: ^1.0.5 + checksum: 54bae7a4a96bd81398cdc35c91cfcc74339f71a95ed1b5b694663782e69e8e3afd21357de3b8bac9ff4877fd6f043601e200a7ad9133d94be6fd7d898ee0a449 + languageName: node + linkType: hard + +"evp_bytestokey@npm:^1.0.3": + version: 1.0.3 + resolution: "evp_bytestokey@npm:1.0.3" + dependencies: + md5.js: ^1.3.4 + node-gyp: latest + safe-buffer: ^5.1.1 + checksum: ad4e1577f1a6b721c7800dcc7c733fe01f6c310732bb5bf2240245c2a5b45a38518b91d8be2c610611623160b9d1c0e91f1ce96d639f8b53e8894625cf20fa45 + languageName: node + linkType: hard + +"fill-range@npm:^7.0.1": + version: 7.0.1 + resolution: "fill-range@npm:7.0.1" + dependencies: + to-regex-range: ^5.0.1 + checksum: cc283f4e65b504259e64fd969bcf4def4eb08d85565e906b7d36516e87819db52029a76b6363d0f02d0d532f0033c9603b9e2d943d56ee3b0d4f7ad3328ff917 + languageName: node + linkType: hard + +"find-up@npm:5.0.0": + version: 5.0.0 + resolution: "find-up@npm:5.0.0" + dependencies: + locate-path: ^6.0.0 + path-exists: ^4.0.0 + checksum: 07955e357348f34660bde7920783204ff5a26ac2cafcaa28bace494027158a97b9f56faaf2d89a6106211a8174db650dd9f503f9c0d526b1202d5554a00b9095 + languageName: node + linkType: hard + +"flat@npm:^5.0.2": + version: 5.0.2 + resolution: "flat@npm:5.0.2" + bin: + flat: cli.js + checksum: 12a1536ac746db74881316a181499a78ef953632ddd28050b7a3a43c62ef5462e3357c8c29d76072bb635f147f7a9a1f0c02efef6b4be28f8db62ceb3d5c7f5d + languageName: node + linkType: hard + +"fs-minipass@npm:^2.0.0, fs-minipass@npm:^2.1.0": + version: 2.1.0 + resolution: "fs-minipass@npm:2.1.0" + dependencies: + minipass: ^3.0.0 + checksum: 1b8d128dae2ac6cc94230cc5ead341ba3e0efaef82dab46a33d171c044caaa6ca001364178d42069b2809c35a1c3c35079a32107c770e9ffab3901b59af8c8b1 + languageName: node + linkType: hard + +"fs.realpath@npm:^1.0.0": + version: 1.0.0 + resolution: "fs.realpath@npm:1.0.0" + checksum: 99ddea01a7e75aa276c250a04eedeffe5662bce66c65c07164ad6264f9de18fb21be9433ead460e54cff20e31721c811f4fb5d70591799df5f85dce6d6746fd0 + languageName: node + linkType: hard + +"fsevents@npm:~2.3.2": + version: 2.3.2 + resolution: "fsevents@npm:2.3.2" + dependencies: + node-gyp: latest + checksum: 97ade64e75091afee5265e6956cb72ba34db7819b4c3e94c431d4be2b19b8bb7a2d4116da417950c3425f17c8fe693d25e20212cac583ac1521ad066b77ae31f + conditions: os=darwin + languageName: node + linkType: hard + 
+"fsevents@patch:fsevents@~2.3.2#~builtin": + version: 2.3.2 + resolution: "fsevents@patch:fsevents@npm%3A2.3.2#~builtin::version=2.3.2&hash=18f3a7" + dependencies: + node-gyp: latest + conditions: os=darwin + languageName: node + linkType: hard + +"functional-red-black-tree@npm:^1.0.1": + version: 1.0.1 + resolution: "functional-red-black-tree@npm:1.0.1" + checksum: ca6c170f37640e2d94297da8bb4bf27a1d12bea3e00e6a3e007fd7aa32e37e000f5772acf941b4e4f3cf1c95c3752033d0c509af157ad8f526e7f00723b9eb9f + languageName: node + linkType: hard + +"gauge@npm:^4.0.3": + version: 4.0.4 + resolution: "gauge@npm:4.0.4" + dependencies: + aproba: ^1.0.3 || ^2.0.0 + color-support: ^1.1.3 + console-control-strings: ^1.1.0 + has-unicode: ^2.0.1 + signal-exit: ^3.0.7 + string-width: ^4.2.3 + strip-ansi: ^6.0.1 + wide-align: ^1.1.5 + checksum: 788b6bfe52f1dd8e263cda800c26ac0ca2ff6de0b6eee2fe0d9e3abf15e149b651bd27bf5226be10e6e3edb5c4e5d5985a5a1a98137e7a892f75eff76467ad2d + languageName: node + linkType: hard + +"get-caller-file@npm:^2.0.5": + version: 2.0.5 + resolution: "get-caller-file@npm:2.0.5" + checksum: b9769a836d2a98c3ee734a88ba712e62703f1df31b94b784762c433c27a386dd6029ff55c2a920c392e33657d80191edbf18c61487e198844844516f843496b9 + languageName: node + linkType: hard + +"get-func-name@npm:^2.0.0": + version: 2.0.0 + resolution: "get-func-name@npm:2.0.0" + checksum: 8d82e69f3e7fab9e27c547945dfe5cc0c57fc0adf08ce135dddb01081d75684a03e7a0487466f478872b341d52ac763ae49e660d01ab83741f74932085f693c3 + languageName: node + linkType: hard + +"glob-parent@npm:~5.1.2": + version: 5.1.2 + resolution: "glob-parent@npm:5.1.2" + dependencies: + is-glob: ^4.0.1 + checksum: f4f2bfe2425296e8a47e36864e4f42be38a996db40420fe434565e4480e3322f18eb37589617a98640c5dc8fdec1a387007ee18dbb1f3f5553409c34d17f425e + languageName: node + linkType: hard + +"glob@npm:7.2.0": + version: 7.2.0 + resolution: "glob@npm:7.2.0" + dependencies: + fs.realpath: ^1.0.0 + inflight: ^1.0.4 + inherits: 2 + minimatch: ^3.0.4 + once: ^1.3.0 + path-is-absolute: ^1.0.0 + checksum: 78a8ea942331f08ed2e055cb5b9e40fe6f46f579d7fd3d694f3412fe5db23223d29b7fee1575440202e9a7ff9a72ab106a39fee39934c7bedafe5e5f8ae20134 + languageName: node + linkType: hard + +"glob@npm:^7.1.3, glob@npm:^7.1.4": + version: 7.2.3 + resolution: "glob@npm:7.2.3" + dependencies: + fs.realpath: ^1.0.0 + inflight: ^1.0.4 + inherits: 2 + minimatch: ^3.1.1 + once: ^1.3.0 + path-is-absolute: ^1.0.0 + checksum: 29452e97b38fa704dabb1d1045350fb2467cf0277e155aa9ff7077e90ad81d1ea9d53d3ee63bd37c05b09a065e90f16aec4a65f5b8de401d1dac40bc5605d133 + languageName: node + linkType: hard + +"glob@npm:^8.0.1": + version: 8.0.3 + resolution: "glob@npm:8.0.3" + dependencies: + fs.realpath: ^1.0.0 + inflight: ^1.0.4 + inherits: 2 + minimatch: ^5.0.1 + once: ^1.3.0 + checksum: 50bcdea19d8e79d8de5f460b1939ffc2b3299eac28deb502093fdca22a78efebc03e66bf54f0abc3d3d07d8134d19a32850288b7440d77e072aa55f9d33b18c5 + languageName: node + linkType: hard + +"graceful-fs@npm:^4.2.6": + version: 4.2.10 + resolution: "graceful-fs@npm:4.2.10" + checksum: 3f109d70ae123951905d85032ebeae3c2a5a7a997430df00ea30df0e3a6c60cf6689b109654d6fdacd28810a053348c4d14642da1d075049e6be1ba5216218da + languageName: node + linkType: hard + +"has-flag@npm:^4.0.0": + version: 4.0.0 + resolution: "has-flag@npm:4.0.0" + checksum: 261a1357037ead75e338156b1f9452c016a37dcd3283a972a30d9e4a87441ba372c8b81f818cd0fbcd9c0354b4ae7e18b9e1afa1971164aef6d18c2b6095a8ad + languageName: node + linkType: hard + +"has-unicode@npm:^2.0.1": + version: 2.0.1 + resolution: 
"has-unicode@npm:2.0.1" + checksum: 1eab07a7436512db0be40a710b29b5dc21fa04880b7f63c9980b706683127e3c1b57cb80ea96d47991bdae2dfe479604f6a1ba410106ee1046a41d1bd0814400 + languageName: node + linkType: hard + +"hash-base@npm:^3.0.0": + version: 3.1.0 + resolution: "hash-base@npm:3.1.0" + dependencies: + inherits: ^2.0.4 + readable-stream: ^3.6.0 + safe-buffer: ^5.2.0 + checksum: 26b7e97ac3de13cb23fc3145e7e3450b0530274a9562144fc2bf5c1e2983afd0e09ed7cc3b20974ba66039fad316db463da80eb452e7373e780cbee9a0d2f2dc + languageName: node + linkType: hard + +"hash.js@npm:^1.0.0, hash.js@npm:^1.0.3, hash.js@npm:^1.1.7": + version: 1.1.7 + resolution: "hash.js@npm:1.1.7" + dependencies: + inherits: ^2.0.3 + minimalistic-assert: ^1.0.1 + checksum: e350096e659c62422b85fa508e4b3669017311aa4c49b74f19f8e1bc7f3a54a584fdfd45326d4964d6011f2b2d882e38bea775a96046f2a61b7779a979629d8f + languageName: node + linkType: hard + +"he@npm:1.2.0": + version: 1.2.0 + resolution: "he@npm:1.2.0" + bin: + he: bin/he + checksum: 3d4d6babccccd79c5c5a3f929a68af33360d6445587d628087f39a965079d84f18ce9c3d3f917ee1e3978916fc833bb8b29377c3b403f919426f91bc6965e7a7 + languageName: node + linkType: hard + +"hmac-drbg@npm:^1.0.1": + version: 1.0.1 + resolution: "hmac-drbg@npm:1.0.1" + dependencies: + hash.js: ^1.0.3 + minimalistic-assert: ^1.0.0 + minimalistic-crypto-utils: ^1.0.1 + checksum: bd30b6a68d7f22d63f10e1888aee497d7c2c5c0bb469e66bbdac99f143904d1dfe95f8131f95b3e86c86dd239963c9d972fcbe147e7cffa00e55d18585c43fe0 + languageName: node + linkType: hard + +"http-cache-semantics@npm:^4.1.0": + version: 4.1.0 + resolution: "http-cache-semantics@npm:4.1.0" + checksum: 974de94a81c5474be07f269f9fd8383e92ebb5a448208223bfb39e172a9dbc26feff250192ecc23b9593b3f92098e010406b0f24bd4d588d631f80214648ed42 + languageName: node + linkType: hard + +"http-proxy-agent@npm:^5.0.0": + version: 5.0.0 + resolution: "http-proxy-agent@npm:5.0.0" + dependencies: + "@tootallnate/once": 2 + agent-base: 6 + debug: 4 + checksum: e2ee1ff1656a131953839b2a19cd1f3a52d97c25ba87bd2559af6ae87114abf60971e498021f9b73f9fd78aea8876d1fb0d4656aac8a03c6caa9fc175f22b786 + languageName: node + linkType: hard + +"https-proxy-agent@npm:^5.0.0": + version: 5.0.1 + resolution: "https-proxy-agent@npm:5.0.1" + dependencies: + agent-base: 6 + debug: 4 + checksum: 571fccdf38184f05943e12d37d6ce38197becdd69e58d03f43637f7fa1269cf303a7d228aa27e5b27bbd3af8f09fd938e1c91dcfefff2df7ba77c20ed8dfc765 + languageName: node + linkType: hard + +"humanize-ms@npm:^1.2.1": + version: 1.2.1 + resolution: "humanize-ms@npm:1.2.1" + dependencies: + ms: ^2.0.0 + checksum: 9c7a74a2827f9294c009266c82031030eae811ca87b0da3dceb8d6071b9bde22c9f3daef0469c3c533cc67a97d8a167cd9fc0389350e5f415f61a79b171ded16 + languageName: node + linkType: hard + +"iconv-lite@npm:^0.6.2": + version: 0.6.3 + resolution: "iconv-lite@npm:0.6.3" + dependencies: + safer-buffer: ">= 2.1.2 < 3.0.0" + checksum: 3f60d47a5c8fc3313317edfd29a00a692cc87a19cac0159e2ce711d0ebc9019064108323b5e493625e25594f11c6236647d8e256fbe7a58f4a3b33b89e6d30bf + languageName: node + linkType: hard + +"ieee754@npm:^1.2.1": + version: 1.2.1 + resolution: "ieee754@npm:1.2.1" + checksum: 5144c0c9815e54ada181d80a0b810221a253562422e7c6c3a60b1901154184f49326ec239d618c416c1c5945a2e197107aee8d986a3dd836b53dffefd99b5e7e + languageName: node + linkType: hard + +"imurmurhash@npm:^0.1.4": + version: 0.1.4 + resolution: "imurmurhash@npm:0.1.4" + checksum: 7cae75c8cd9a50f57dadd77482359f659eaebac0319dd9368bcd1714f55e65badd6929ca58569da2b6494ef13fdd5598cd700b1eba23f8b79c5f19d195a3ecf7 + 
languageName: node + linkType: hard + +"indent-string@npm:^4.0.0": + version: 4.0.0 + resolution: "indent-string@npm:4.0.0" + checksum: 824cfb9929d031dabf059bebfe08cf3137365e112019086ed3dcff6a0a7b698cb80cf67ccccde0e25b9e2d7527aa6cc1fed1ac490c752162496caba3e6699612 + languageName: node + linkType: hard + +"infer-owner@npm:^1.0.4": + version: 1.0.4 + resolution: "infer-owner@npm:1.0.4" + checksum: 181e732764e4a0611576466b4b87dac338972b839920b2a8cde43642e4ed6bd54dc1fb0b40874728f2a2df9a1b097b8ff83b56d5f8f8e3927f837fdcb47d8a89 + languageName: node + linkType: hard + +"inflight@npm:^1.0.4": + version: 1.0.6 + resolution: "inflight@npm:1.0.6" + dependencies: + once: ^1.3.0 + wrappy: 1 + checksum: f4f76aa072ce19fae87ce1ef7d221e709afb59d445e05d47fba710e85470923a75de35bfae47da6de1b18afc3ce83d70facf44cfb0aff89f0a3f45c0a0244dfd + languageName: node + linkType: hard + +"inherits@npm:2, inherits@npm:^2.0.1, inherits@npm:^2.0.3, inherits@npm:^2.0.4": + version: 2.0.4 + resolution: "inherits@npm:2.0.4" + checksum: 4a48a733847879d6cf6691860a6b1e3f0f4754176e4d71494c41f3475553768b10f84b5ce1d40fbd0e34e6bfbb864ee35858ad4dd2cf31e02fc4a154b724d7f1 + languageName: node + linkType: hard + +"ip@npm:^2.0.0": + version: 2.0.0 + resolution: "ip@npm:2.0.0" + checksum: cfcfac6b873b701996d71ec82a7dd27ba92450afdb421e356f44044ed688df04567344c36cbacea7d01b1c39a4c732dc012570ebe9bebfb06f27314bca625349 + languageName: node + linkType: hard + +"is-binary-path@npm:~2.1.0": + version: 2.1.0 + resolution: "is-binary-path@npm:2.1.0" + dependencies: + binary-extensions: ^2.0.0 + checksum: 84192eb88cff70d320426f35ecd63c3d6d495da9d805b19bc65b518984b7c0760280e57dbf119b7e9be6b161784a5a673ab2c6abe83abb5198a432232ad5b35c + languageName: node + linkType: hard + +"is-buffer@npm:^2.0.5": + version: 2.0.5 + resolution: "is-buffer@npm:2.0.5" + checksum: 764c9ad8b523a9f5a32af29bdf772b08eb48c04d2ad0a7240916ac2688c983bf5f8504bf25b35e66240edeb9d9085461f9b5dae1f3d2861c6b06a65fe983de42 + languageName: node + linkType: hard + +"is-extglob@npm:^2.1.1": + version: 2.1.1 + resolution: "is-extglob@npm:2.1.1" + checksum: df033653d06d0eb567461e58a7a8c9f940bd8c22274b94bf7671ab36df5719791aae15eef6d83bbb5e23283967f2f984b8914559d4449efda578c775c4be6f85 + languageName: node + linkType: hard + +"is-fullwidth-code-point@npm:^3.0.0": + version: 3.0.0 + resolution: "is-fullwidth-code-point@npm:3.0.0" + checksum: 44a30c29457c7fb8f00297bce733f0a64cd22eca270f83e58c105e0d015e45c019491a4ab2faef91ab51d4738c670daff901c799f6a700e27f7314029e99e348 + languageName: node + linkType: hard + +"is-glob@npm:^4.0.1, is-glob@npm:~4.0.1": + version: 4.0.3 + resolution: "is-glob@npm:4.0.3" + dependencies: + is-extglob: ^2.1.1 + checksum: d381c1319fcb69d341cc6e6c7cd588e17cd94722d9a32dbd60660b993c4fb7d0f19438674e68dfec686d09b7c73139c9166b47597f846af387450224a8101ab4 + languageName: node + linkType: hard + +"is-lambda@npm:^1.0.1": + version: 1.0.1 + resolution: "is-lambda@npm:1.0.1" + checksum: 93a32f01940220532e5948538699ad610d5924ac86093fcee83022252b363eb0cc99ba53ab084a04e4fb62bf7b5731f55496257a4c38adf87af9c4d352c71c35 + languageName: node + linkType: hard + +"is-number@npm:^7.0.0": + version: 7.0.0 + resolution: "is-number@npm:7.0.0" + checksum: 456ac6f8e0f3111ed34668a624e45315201dff921e5ac181f8ec24923b99e9f32ca1a194912dc79d539c97d33dba17dc635202ff0b2cf98326f608323276d27a + languageName: node + linkType: hard + +"is-plain-obj@npm:^2.1.0": + version: 2.1.0 + resolution: "is-plain-obj@npm:2.1.0" + checksum: 
cec9100678b0a9fe0248a81743041ed990c2d4c99f893d935545cfbc42876cbe86d207f3b895700c690ad2fa520e568c44afc1605044b535a7820c1d40e38daa + languageName: node + linkType: hard + +"is-unicode-supported@npm:^0.1.0": + version: 0.1.0 + resolution: "is-unicode-supported@npm:0.1.0" + checksum: a2aab86ee7712f5c2f999180daaba5f361bdad1efadc9610ff5b8ab5495b86e4f627839d085c6530363c6d6d4ecbde340fb8e54bdb83da4ba8e0865ed5513c52 + languageName: node + linkType: hard + +"isexe@npm:^2.0.0": + version: 2.0.0 + resolution: "isexe@npm:2.0.0" + checksum: 26bf6c5480dda5161c820c5b5c751ae1e766c587b1f951ea3fcfc973bafb7831ae5b54a31a69bd670220e42e99ec154475025a468eae58ea262f813fdc8d1c62 + languageName: node + linkType: hard + +"js-yaml@npm:4.1.0": + version: 4.1.0 + resolution: "js-yaml@npm:4.1.0" + dependencies: + argparse: ^2.0.1 + bin: + js-yaml: bin/js-yaml.js + checksum: c7830dfd456c3ef2c6e355cc5a92e6700ceafa1d14bba54497b34a99f0376cecbb3e9ac14d3e5849b426d5a5140709a66237a8c991c675431271c4ce5504151a + languageName: node + linkType: hard + +"keccak@npm:^3.0.0": + version: 3.0.2 + resolution: "keccak@npm:3.0.2" + dependencies: + node-addon-api: ^2.0.0 + node-gyp: latest + node-gyp-build: ^4.2.0 + readable-stream: ^3.6.0 + checksum: 39a7d6128b8ee4cb7dcd186fc7e20c6087cc39f573a0f81b147c323f688f1f7c2b34f62c4ae189fe9b81c6730b2d1228d8a399cdc1f3d8a4c8f030cdc4f20272 + languageName: node + linkType: hard + +"level-supports@npm:^4.0.0": + version: 4.0.1 + resolution: "level-supports@npm:4.0.1" + checksum: d4552b42bb8cdeada07b0f6356c7a90fefe76279147331f291aceae26e3e56d5f927b09ce921647c0230bfe03ddfbdcef332be921e5c2194421ae2bfa3cf6368 + languageName: node + linkType: hard + +"level-transcoder@npm:^1.0.1": + version: 1.0.1 + resolution: "level-transcoder@npm:1.0.1" + dependencies: + buffer: ^6.0.3 + module-error: ^1.0.1 + checksum: 304f08d802faf3491a533b6d87ad8be3cabfd27f2713bbe9d4c633bf50fcb9460eab5a6776bf015e101ead7ba1c1853e05e7f341112f17a9d0cb37ee5a421a25 + languageName: node + linkType: hard + +"level@npm:^8.0.0": + version: 8.0.0 + resolution: "level@npm:8.0.0" + dependencies: + browser-level: ^1.0.1 + classic-level: ^1.2.0 + checksum: 13eb25bd71bfdca6cd714d1233adf9da97de9a8a4bf9f28d62a390b5c96d0250abaf983eb90eb8c4e89c7a985bb330750683d106f12670e5ea8fba1d7e608a1f + languageName: node + linkType: hard + +"locate-path@npm:^6.0.0": + version: 6.0.0 + resolution: "locate-path@npm:6.0.0" + dependencies: + p-locate: ^5.0.0 + checksum: 72eb661788a0368c099a184c59d2fee760b3831c9c1c33955e8a19ae4a21b4116e53fa736dc086cdeb9fce9f7cc508f2f92d2d3aae516f133e16a2bb59a39f5a + languageName: node + linkType: hard + +"log-symbols@npm:4.1.0": + version: 4.1.0 + resolution: "log-symbols@npm:4.1.0" + dependencies: + chalk: ^4.1.0 + is-unicode-supported: ^0.1.0 + checksum: fce1497b3135a0198803f9f07464165e9eb83ed02ceb2273930a6f8a508951178d8cf4f0378e9d28300a2ed2bc49050995d2bd5f53ab716bb15ac84d58c6ef74 + languageName: node + linkType: hard + +"loupe@npm:^2.3.1": + version: 2.3.4 + resolution: "loupe@npm:2.3.4" + dependencies: + get-func-name: ^2.0.0 + checksum: 5af91db61aa18530f1749a64735ee194ac263e65e9f4d1562bf3036c591f1baa948289c193e0e34c7b5e2c1b75d3c1dc4fce87f5edb3cee10b0c0df46bc9ffb3 + languageName: node + linkType: hard + +"lru-cache@npm:^5.1.1": + version: 5.1.1 + resolution: "lru-cache@npm:5.1.1" + dependencies: + yallist: ^3.0.2 + checksum: c154ae1cbb0c2206d1501a0e94df349653c92c8cbb25236d7e85190bcaf4567a03ac6eb43166fabfa36fd35623694da7233e88d9601fbf411a9a481d85dbd2cb + languageName: node + linkType: hard + +"lru-cache@npm:^6.0.0": + version: 6.0.0 + 
resolution: "lru-cache@npm:6.0.0" + dependencies: + yallist: ^4.0.0 + checksum: f97f499f898f23e4585742138a22f22526254fdba6d75d41a1c2526b3b6cc5747ef59c5612ba7375f42aca4f8461950e925ba08c991ead0651b4918b7c978297 + languageName: node + linkType: hard + +"lru-cache@npm:^7.7.1": + version: 7.14.0 + resolution: "lru-cache@npm:7.14.0" + checksum: efdd329f2c1bb790b71d497c6c59272e6bc2d7dd060ba55fc136becd3dd31fc8346edb446275504d94cb60d3c8385dbf5267b79b23789e409b2bdf302d13f0d7 + languageName: node + linkType: hard + +"make-fetch-happen@npm:^10.0.3": + version: 10.2.1 + resolution: "make-fetch-happen@npm:10.2.1" + dependencies: + agentkeepalive: ^4.2.1 + cacache: ^16.1.0 + http-cache-semantics: ^4.1.0 + http-proxy-agent: ^5.0.0 + https-proxy-agent: ^5.0.0 + is-lambda: ^1.0.1 + lru-cache: ^7.7.1 + minipass: ^3.1.6 + minipass-collect: ^1.0.2 + minipass-fetch: ^2.0.3 + minipass-flush: ^1.0.5 + minipass-pipeline: ^1.2.4 + negotiator: ^0.6.3 + promise-retry: ^2.0.1 + socks-proxy-agent: ^7.0.0 + ssri: ^9.0.0 + checksum: 2332eb9a8ec96f1ffeeea56ccefabcb4193693597b132cd110734d50f2928842e22b84cfa1508e921b8385cdfd06dda9ad68645fed62b50fff629a580f5fb72c + languageName: node + linkType: hard + +"md5.js@npm:^1.3.4": + version: 1.3.5 + resolution: "md5.js@npm:1.3.5" + dependencies: + hash-base: ^3.0.0 + inherits: ^2.0.1 + safe-buffer: ^5.1.2 + checksum: 098494d885684bcc4f92294b18ba61b7bd353c23147fbc4688c75b45cb8590f5a95fd4584d742415dcc52487f7a1ef6ea611cfa1543b0dc4492fe026357f3f0c + languageName: node + linkType: hard + +"memory-level@npm:^1.0.0": + version: 1.0.0 + resolution: "memory-level@npm:1.0.0" + dependencies: + abstract-level: ^1.0.0 + functional-red-black-tree: ^1.0.1 + module-error: ^1.0.1 + checksum: 80b1b7aedaf936e754adbcd7b9303018c3684fb32f9992fd967c448f145d177f16c724fbba9ed3c3590a9475fd563151eae664d69b83d2ad48714852e9fc5c72 + languageName: node + linkType: hard + +"minimalistic-assert@npm:^1.0.0, minimalistic-assert@npm:^1.0.1": + version: 1.0.1 + resolution: "minimalistic-assert@npm:1.0.1" + checksum: cc7974a9268fbf130fb055aff76700d7e2d8be5f761fb5c60318d0ed010d839ab3661a533ad29a5d37653133385204c503bfac995aaa4236f4e847461ea32ba7 + languageName: node + linkType: hard + +"minimalistic-crypto-utils@npm:^1.0.1": + version: 1.0.1 + resolution: "minimalistic-crypto-utils@npm:1.0.1" + checksum: 6e8a0422b30039406efd4c440829ea8f988845db02a3299f372fceba56ffa94994a9c0f2fd70c17f9969eedfbd72f34b5070ead9656a34d3f71c0bd72583a0ed + languageName: node + linkType: hard + +"minimatch@npm:5.0.1": + version: 5.0.1 + resolution: "minimatch@npm:5.0.1" + dependencies: + brace-expansion: ^2.0.1 + checksum: b34b98463da4754bc526b244d680c69d4d6089451ebe512edaf6dd9eeed0279399cfa3edb19233513b8f830bf4bfcad911dddcdf125e75074100d52f724774f0 + languageName: node + linkType: hard + +"minimatch@npm:^3.0.4, minimatch@npm:^3.1.1": + version: 3.1.2 + resolution: "minimatch@npm:3.1.2" + dependencies: + brace-expansion: ^1.1.7 + checksum: c154e566406683e7bcb746e000b84d74465b3a832c45d59912b9b55cd50dee66e5c4b1e5566dba26154040e51672f9aa450a9aef0c97cfc7336b78b7afb9540a + languageName: node + linkType: hard + +"minimatch@npm:^5.0.1": + version: 5.1.0 + resolution: "minimatch@npm:5.1.0" + dependencies: + brace-expansion: ^2.0.1 + checksum: 15ce53d31a06361e8b7a629501b5c75491bc2b59712d53e802b1987121d91b433d73fcc5be92974fde66b2b51d8fb28d75a9ae900d249feb792bb1ba2a4f0a90 + languageName: node + linkType: hard + +"minipass-collect@npm:^1.0.2": + version: 1.0.2 + resolution: "minipass-collect@npm:1.0.2" + dependencies: + minipass: ^3.0.0 + checksum: 
14df761028f3e47293aee72888f2657695ec66bd7d09cae7ad558da30415fdc4752bbfee66287dcc6fd5e6a2fa3466d6c484dc1cbd986525d9393b9523d97f10 + languageName: node + linkType: hard + +"minipass-fetch@npm:^2.0.3": + version: 2.1.2 + resolution: "minipass-fetch@npm:2.1.2" + dependencies: + encoding: ^0.1.13 + minipass: ^3.1.6 + minipass-sized: ^1.0.3 + minizlib: ^2.1.2 + dependenciesMeta: + encoding: + optional: true + checksum: 3f216be79164e915fc91210cea1850e488793c740534985da017a4cbc7a5ff50506956d0f73bb0cb60e4fe91be08b6b61ef35101706d3ef5da2c8709b5f08f91 + languageName: node + linkType: hard + +"minipass-flush@npm:^1.0.5": + version: 1.0.5 + resolution: "minipass-flush@npm:1.0.5" + dependencies: + minipass: ^3.0.0 + checksum: 56269a0b22bad756a08a94b1ffc36b7c9c5de0735a4dd1ab2b06c066d795cfd1f0ac44a0fcae13eece5589b908ecddc867f04c745c7009be0b566421ea0944cf + languageName: node + linkType: hard + +"minipass-pipeline@npm:^1.2.4": + version: 1.2.4 + resolution: "minipass-pipeline@npm:1.2.4" + dependencies: + minipass: ^3.0.0 + checksum: b14240dac0d29823c3d5911c286069e36d0b81173d7bdf07a7e4a91ecdef92cdff4baaf31ea3746f1c61e0957f652e641223970870e2353593f382112257971b + languageName: node + linkType: hard + +"minipass-sized@npm:^1.0.3": + version: 1.0.3 + resolution: "minipass-sized@npm:1.0.3" + dependencies: + minipass: ^3.0.0 + checksum: 79076749fcacf21b5d16dd596d32c3b6bf4d6e62abb43868fac21674078505c8b15eaca4e47ed844985a4514854f917d78f588fcd029693709417d8f98b2bd60 + languageName: node + linkType: hard + +"minipass@npm:^3.0.0, minipass@npm:^3.1.1, minipass@npm:^3.1.6": + version: 3.3.5 + resolution: "minipass@npm:3.3.5" + dependencies: + yallist: ^4.0.0 + checksum: f89f02bcaa0e0e4bb4c44ec796008e69fbca62db0aba6ead1bc57d25bdaefdf42102130f4f9ecb7d9c6b6cd35ff7b0c7b97d001d3435da8e629fb68af3aea57e + languageName: node + linkType: hard + +"minizlib@npm:^2.1.1, minizlib@npm:^2.1.2": + version: 2.1.2 + resolution: "minizlib@npm:2.1.2" + dependencies: + minipass: ^3.0.0 + yallist: ^4.0.0 + checksum: f1fdeac0b07cf8f30fcf12f4b586795b97be856edea22b5e9072707be51fc95d41487faec3f265b42973a304fe3a64acd91a44a3826a963e37b37bafde0212c3 + languageName: node + linkType: hard + +"mkdirp@npm:^1.0.3, mkdirp@npm:^1.0.4": + version: 1.0.4 + resolution: "mkdirp@npm:1.0.4" + bin: + mkdirp: bin/cmd.js + checksum: a96865108c6c3b1b8e1d5e9f11843de1e077e57737602de1b82030815f311be11f96f09cce59bd5b903d0b29834733e5313f9301e3ed6d6f6fba2eae0df4298f + languageName: node + linkType: hard + +"mocha@npm:^10.0.0": + version: 10.0.0 + resolution: "mocha@npm:10.0.0" + dependencies: + "@ungap/promise-all-settled": 1.1.2 + ansi-colors: 4.1.1 + browser-stdout: 1.3.1 + chokidar: 3.5.3 + debug: 4.3.4 + diff: 5.0.0 + escape-string-regexp: 4.0.0 + find-up: 5.0.0 + glob: 7.2.0 + he: 1.2.0 + js-yaml: 4.1.0 + log-symbols: 4.1.0 + minimatch: 5.0.1 + ms: 2.1.3 + nanoid: 3.3.3 + serialize-javascript: 6.0.0 + strip-json-comments: 3.1.1 + supports-color: 8.1.1 + workerpool: 6.2.1 + yargs: 16.2.0 + yargs-parser: 20.2.4 + yargs-unparser: 2.0.0 + bin: + _mocha: bin/_mocha + mocha: bin/mocha.js + checksum: ba49ddcf8015a467e744b06c396aab361b1281302e38e7c1269af25ba51ff9ab681a9c36e9046bb7491e751cd7d5ce85e276a00ce7e204f96b2c418e4595edfe + languageName: node + linkType: hard + +"module-error@npm:^1.0.1, module-error@npm:^1.0.2": + version: 1.0.2 + resolution: "module-error@npm:1.0.2" + checksum: 5d653e35bd55b3e95f8aee2cdac108082ea892e71b8f651be92cde43e4ee86abee4fa8bd7fc3fe5e68b63926d42f63c54cd17b87a560c31f18739295575a3962 + languageName: node + linkType: hard + +"ms@npm:2.1.2": + 
version: 2.1.2 + resolution: "ms@npm:2.1.2" + checksum: 673cdb2c3133eb050c745908d8ce632ed2c02d85640e2edb3ace856a2266a813b30c613569bf3354fdf4ea7d1a1494add3bfa95e2713baa27d0c2c71fc44f58f + languageName: node + linkType: hard + +"ms@npm:2.1.3, ms@npm:^2.0.0": + version: 2.1.3 + resolution: "ms@npm:2.1.3" + checksum: aa92de608021b242401676e35cfa5aa42dd70cbdc082b916da7fb925c542173e36bce97ea3e804923fe92c0ad991434e4a38327e15a1b5b5f945d66df615ae6d + languageName: node + linkType: hard + +"nanoid@npm:3.3.3": + version: 3.3.3 + resolution: "nanoid@npm:3.3.3" + bin: + nanoid: bin/nanoid.cjs + checksum: ada019402a07464a694553c61d2dca8a4353645a7d92f2830f0d487fedff403678a0bee5323a46522752b2eab95a0bc3da98b6cccaa7c0c55cd9975130e6d6f0 + languageName: node + linkType: hard + +"napi-macros@npm:~2.0.0": + version: 2.0.0 + resolution: "napi-macros@npm:2.0.0" + checksum: 30384819386977c1f82034757014163fa60ab3c5a538094f778d38788bebb52534966279956f796a92ea771c7f8ae072b975df65de910d051ffbdc927f62320c + languageName: node + linkType: hard + +"negotiator@npm:^0.6.3": + version: 0.6.3 + resolution: "negotiator@npm:0.6.3" + checksum: b8ffeb1e262eff7968fc90a2b6767b04cfd9842582a9d0ece0af7049537266e7b2506dfb1d107a32f06dd849ab2aea834d5830f7f4d0e5cb7d36e1ae55d021d9 + languageName: node + linkType: hard + +"node-addon-api@npm:^2.0.0": + version: 2.0.2 + resolution: "node-addon-api@npm:2.0.2" + dependencies: + node-gyp: latest + checksum: 31fb22d674648204f8dd94167eb5aac896c841b84a9210d614bf5d97c74ef059cc6326389cf0c54d2086e35312938401d4cc82e5fcd679202503eb8ac84814f8 + languageName: node + linkType: hard + +"node-gyp-build@npm:^4.2.0, node-gyp-build@npm:^4.3.0": + version: 4.5.0 + resolution: "node-gyp-build@npm:4.5.0" + bin: + node-gyp-build: bin.js + node-gyp-build-optional: optional.js + node-gyp-build-test: build-test.js + checksum: d888bae0fb88335f69af1b57a2294a931c5042f36e413d8d364c992c9ebfa0b96ffe773179a5a2c8f04b73856e8634e09cce108dbb9804396d3cc8c5455ff2db + languageName: node + linkType: hard + +"node-gyp@npm:latest": + version: 9.1.0 + resolution: "node-gyp@npm:9.1.0" + dependencies: + env-paths: ^2.2.0 + glob: ^7.1.4 + graceful-fs: ^4.2.6 + make-fetch-happen: ^10.0.3 + nopt: ^5.0.0 + npmlog: ^6.0.0 + rimraf: ^3.0.2 + semver: ^7.3.5 + tar: ^6.1.2 + which: ^2.0.2 + bin: + node-gyp: bin/node-gyp.js + checksum: 1437fa4a879b5b9010604128e8da8609b57c66034262087539ee04a8b764b8436af2be01bab66f8fc729a3adba2dcc21b10a32b9f552696c3fa8cd657d134fc4 + languageName: node + linkType: hard + +"nopt@npm:^5.0.0": + version: 5.0.0 + resolution: "nopt@npm:5.0.0" + dependencies: + abbrev: 1 + bin: + nopt: bin/nopt.js + checksum: d35fdec187269503843924e0114c0c6533fb54bbf1620d0f28b4b60ba01712d6687f62565c55cc20a504eff0fbe5c63e22340c3fad549ad40469ffb611b04f2f + languageName: node + linkType: hard + +"normalize-path@npm:^3.0.0, normalize-path@npm:~3.0.0": + version: 3.0.0 + resolution: "normalize-path@npm:3.0.0" + checksum: 88eeb4da891e10b1318c4b2476b6e2ecbeb5ff97d946815ffea7794c31a89017c70d7f34b3c2ebf23ef4e9fc9fb99f7dffe36da22011b5b5c6ffa34f4873ec20 + languageName: node + linkType: hard + +"npmlog@npm:^6.0.0": + version: 6.0.2 + resolution: "npmlog@npm:6.0.2" + dependencies: + are-we-there-yet: ^3.0.0 + console-control-strings: ^1.1.0 + gauge: ^4.0.3 + set-blocking: ^2.0.0 + checksum: ae238cd264a1c3f22091cdd9e2b106f684297d3c184f1146984ecbe18aaa86343953f26b9520dedd1b1372bc0316905b736c1932d778dbeb1fcf5a1001390e2a + languageName: node + linkType: hard + +"once@npm:^1.3.0": + version: 1.4.0 + resolution: "once@npm:1.4.0" + dependencies: + wrappy: 1 
+ checksum: cd0a88501333edd640d95f0d2700fbde6bff20b3d4d9bdc521bdd31af0656b5706570d6c6afe532045a20bb8dc0849f8332d6f2a416e0ba6d3d3b98806c7db68 + languageName: node + linkType: hard + +"p-limit@npm:^3.0.2": + version: 3.1.0 + resolution: "p-limit@npm:3.1.0" + dependencies: + yocto-queue: ^0.1.0 + checksum: 7c3690c4dbf62ef625671e20b7bdf1cbc9534e83352a2780f165b0d3ceba21907e77ad63401708145ca4e25bfc51636588d89a8c0aeb715e6c37d1c066430360 + languageName: node + linkType: hard + +"p-locate@npm:^5.0.0": + version: 5.0.0 + resolution: "p-locate@npm:5.0.0" + dependencies: + p-limit: ^3.0.2 + checksum: 1623088f36cf1cbca58e9b61c4e62bf0c60a07af5ae1ca99a720837356b5b6c5ba3eb1b2127e47a06865fee59dd0453cad7cc844cda9d5a62ac1a5a51b7c86d3 + languageName: node + linkType: hard + +"p-map@npm:^4.0.0": + version: 4.0.0 + resolution: "p-map@npm:4.0.0" + dependencies: + aggregate-error: ^3.0.0 + checksum: cb0ab21ec0f32ddffd31dfc250e3afa61e103ef43d957cc45497afe37513634589316de4eb88abdfd969fe6410c22c0b93ab24328833b8eb1ccc087fc0442a1c + languageName: node + linkType: hard + +"path-exists@npm:^4.0.0": + version: 4.0.0 + resolution: "path-exists@npm:4.0.0" + checksum: 505807199dfb7c50737b057dd8d351b82c033029ab94cb10a657609e00c1bc53b951cfdbccab8de04c5584d5eff31128ce6afd3db79281874a5ef2adbba55ed1 + languageName: node + linkType: hard + +"path-is-absolute@npm:^1.0.0": + version: 1.0.1 + resolution: "path-is-absolute@npm:1.0.1" + checksum: 060840f92cf8effa293bcc1bea81281bd7d363731d214cbe5c227df207c34cd727430f70c6037b5159c8a870b9157cba65e775446b0ab06fd5ecc7e54615a3b8 + languageName: node + linkType: hard + +"pathval@npm:^1.1.1": + version: 1.1.1 + resolution: "pathval@npm:1.1.1" + checksum: 090e3147716647fb7fb5b4b8c8e5b55e5d0a6086d085b6cd23f3d3c01fcf0ff56fd3cc22f2f4a033bd2e46ed55d61ed8379e123b42afe7d531a2a5fc8bb556d6 + languageName: node + linkType: hard + +"pbkdf2@npm:^3.0.17": + version: 3.1.2 + resolution: "pbkdf2@npm:3.1.2" + dependencies: + create-hash: ^1.1.2 + create-hmac: ^1.1.4 + ripemd160: ^2.0.1 + safe-buffer: ^5.0.1 + sha.js: ^2.4.8 + checksum: 2c950a100b1da72123449208e231afc188d980177d021d7121e96a2de7f2abbc96ead2b87d03d8fe5c318face097f203270d7e27908af9f471c165a4e8e69c92 + languageName: node + linkType: hard + +"picomatch@npm:^2.0.4, picomatch@npm:^2.2.1": + version: 2.3.1 + resolution: "picomatch@npm:2.3.1" + checksum: 050c865ce81119c4822c45d3c84f1ced46f93a0126febae20737bd05ca20589c564d6e9226977df859ed5e03dc73f02584a2b0faad36e896936238238b0446cf + languageName: node + linkType: hard + +"promise-inflight@npm:^1.0.1": + version: 1.0.1 + resolution: "promise-inflight@npm:1.0.1" + checksum: 22749483091d2c594261517f4f80e05226d4d5ecc1fc917e1886929da56e22b5718b7f2a75f3807e7a7d471bc3be2907fe92e6e8f373ddf5c64bae35b5af3981 + languageName: node + linkType: hard + +"promise-retry@npm:^2.0.1": + version: 2.0.1 + resolution: "promise-retry@npm:2.0.1" + dependencies: + err-code: ^2.0.2 + retry: ^0.12.0 + checksum: f96a3f6d90b92b568a26f71e966cbbc0f63ab85ea6ff6c81284dc869b41510e6cdef99b6b65f9030f0db422bf7c96652a3fff9f2e8fb4a0f069d8f4430359429 + languageName: node + linkType: hard + +"queue-microtask@npm:^1.2.2, queue-microtask@npm:^1.2.3": + version: 1.2.3 + resolution: "queue-microtask@npm:1.2.3" + checksum: b676f8c040cdc5b12723ad2f91414d267605b26419d5c821ff03befa817ddd10e238d22b25d604920340fd73efd8ba795465a0377c4adf45a4a41e4234e42dc4 + languageName: node + linkType: hard + +"randombytes@npm:^2.1.0": + version: 2.1.0 + resolution: "randombytes@npm:2.1.0" + dependencies: + safe-buffer: ^5.1.0 + checksum: 
d779499376bd4cbb435ef3ab9a957006c8682f343f14089ed5f27764e4645114196e75b7f6abf1cbd84fd247c0cb0651698444df8c9bf30e62120fbbc52269d6 + languageName: node + linkType: hard + +"readable-stream@npm:^3.6.0": + version: 3.6.0 + resolution: "readable-stream@npm:3.6.0" + dependencies: + inherits: ^2.0.3 + string_decoder: ^1.1.1 + util-deprecate: ^1.0.1 + checksum: d4ea81502d3799439bb955a3a5d1d808592cf3133350ed352aeaa499647858b27b1c4013984900238b0873ec8d0d8defce72469fb7a83e61d53f5ad61cb80dc8 + languageName: node + linkType: hard + +"readdirp@npm:~3.6.0": + version: 3.6.0 + resolution: "readdirp@npm:3.6.0" + dependencies: + picomatch: ^2.2.1 + checksum: 1ced032e6e45670b6d7352d71d21ce7edf7b9b928494dcaba6f11fba63180d9da6cd7061ebc34175ffda6ff529f481818c962952004d273178acd70f7059b320 + languageName: node + linkType: hard + +"require-directory@npm:^2.1.1": + version: 2.1.1 + resolution: "require-directory@npm:2.1.1" + checksum: fb47e70bf0001fdeabdc0429d431863e9475e7e43ea5f94ad86503d918423c1543361cc5166d713eaa7029dd7a3d34775af04764bebff99ef413111a5af18c80 + languageName: node + linkType: hard + +"rethnet-evm@workspace:.": + version: 0.0.0-use.local + resolution: "rethnet-evm@workspace:." + dependencies: + "@napi-rs/cli": ^2.11.4 + "@nomicfoundation/ethereumjs-blockchain": ^6.0.0 + "@nomicfoundation/ethereumjs-statemanager": ^1.0.0 + "@nomicfoundation/ethereumjs-util": ^8.0.0 + chai: ^4.3.6 + mocha: ^10.0.0 + typescript: ~4.5.2 + languageName: unknown + linkType: soft + +"retry@npm:^0.12.0": + version: 0.12.0 + resolution: "retry@npm:0.12.0" + checksum: 623bd7d2e5119467ba66202d733ec3c2e2e26568074923bc0585b6b99db14f357e79bdedb63cab56cec47491c4a0da7e6021a7465ca6dc4f481d3898fdd3158c + languageName: node + linkType: hard + +"rimraf@npm:^3.0.2": + version: 3.0.2 + resolution: "rimraf@npm:3.0.2" + dependencies: + glob: ^7.1.3 + bin: + rimraf: bin.js + checksum: 87f4164e396f0171b0a3386cc1877a817f572148ee13a7e113b238e48e8a9f2f31d009a92ec38a591ff1567d9662c6b67fd8818a2dbbaed74bc26a87a2a4a9a0 + languageName: node + linkType: hard + +"ripemd160@npm:^2.0.0, ripemd160@npm:^2.0.1": + version: 2.0.2 + resolution: "ripemd160@npm:2.0.2" + dependencies: + hash-base: ^3.0.0 + inherits: ^2.0.1 + checksum: 006accc40578ee2beae382757c4ce2908a826b27e2b079efdcd2959ee544ddf210b7b5d7d5e80467807604244e7388427330f5c6d4cd61e6edaddc5773ccc393 + languageName: node + linkType: hard + +"run-parallel-limit@npm:^1.1.0": + version: 1.1.0 + resolution: "run-parallel-limit@npm:1.1.0" + dependencies: + queue-microtask: ^1.2.2 + checksum: 672c3b87e7f939c684b9965222b361421db0930223ed1e43ebf0e7e48ccc1a022ea4de080bef4d5468434e2577c33b7681e3f03b7593fdc49ad250a55381123c + languageName: node + linkType: hard + +"safe-buffer@npm:^5.0.1, safe-buffer@npm:^5.1.0, safe-buffer@npm:^5.1.1, safe-buffer@npm:^5.1.2, safe-buffer@npm:^5.2.0, safe-buffer@npm:~5.2.0": + version: 5.2.1 + resolution: "safe-buffer@npm:5.2.1" + checksum: b99c4b41fdd67a6aaf280fcd05e9ffb0813654894223afb78a31f14a19ad220bba8aba1cb14eddce1fcfb037155fe6de4e861784eb434f7d11ed58d1e70dd491 + languageName: node + linkType: hard + +"safer-buffer@npm:>= 2.1.2 < 3.0.0": + version: 2.1.2 + resolution: "safer-buffer@npm:2.1.2" + checksum: cab8f25ae6f1434abee8d80023d7e72b598cf1327164ddab31003c51215526801e40b66c5e65d658a0af1e9d6478cadcb4c745f4bd6751f97d8644786c0978b0 + languageName: node + linkType: hard + +"scrypt-js@npm:^3.0.0": + version: 3.0.1 + resolution: "scrypt-js@npm:3.0.1" + checksum: 
b7c7d1a68d6ca946f2fbb0778e0c4ec63c65501b54023b2af7d7e9f48fdb6c6580d6f7675cd53bda5944c5ebc057560d5a6365079752546865defb3b79dea454 + languageName: node + linkType: hard + +"secp256k1@npm:^4.0.1": + version: 4.0.3 + resolution: "secp256k1@npm:4.0.3" + dependencies: + elliptic: ^6.5.4 + node-addon-api: ^2.0.0 + node-gyp: latest + node-gyp-build: ^4.2.0 + checksum: 21e219adc0024fbd75021001358780a3cc6ac21273c3fcaef46943af73969729709b03f1df7c012a0baab0830fb9a06ccc6b42f8d50050c665cb98078eab477b + languageName: node + linkType: hard + +"semver@npm:^7.3.5": + version: 7.3.7 + resolution: "semver@npm:7.3.7" + dependencies: + lru-cache: ^6.0.0 + bin: + semver: bin/semver.js + checksum: 2fa3e877568cd6ce769c75c211beaed1f9fce80b28338cadd9d0b6c40f2e2862bafd62c19a6cff42f3d54292b7c623277bcab8816a2b5521cf15210d43e75232 + languageName: node + linkType: hard + +"serialize-javascript@npm:6.0.0": + version: 6.0.0 + resolution: "serialize-javascript@npm:6.0.0" + dependencies: + randombytes: ^2.1.0 + checksum: 56f90b562a1bdc92e55afb3e657c6397c01a902c588c0fe3d4c490efdcc97dcd2a3074ba12df9e94630f33a5ce5b76a74784a7041294628a6f4306e0ec84bf93 + languageName: node + linkType: hard + +"set-blocking@npm:^2.0.0": + version: 2.0.0 + resolution: "set-blocking@npm:2.0.0" + checksum: 6e65a05f7cf7ebdf8b7c75b101e18c0b7e3dff4940d480efed8aad3a36a4005140b660fa1d804cb8bce911cac290441dc728084a30504d3516ac2ff7ad607b02 + languageName: node + linkType: hard + +"setimmediate@npm:^1.0.5": + version: 1.0.5 + resolution: "setimmediate@npm:1.0.5" + checksum: c9a6f2c5b51a2dabdc0247db9c46460152ffc62ee139f3157440bd48e7c59425093f42719ac1d7931f054f153e2d26cf37dfeb8da17a794a58198a2705e527fd + languageName: node + linkType: hard + +"sha.js@npm:^2.4.0, sha.js@npm:^2.4.8": + version: 2.4.11 + resolution: "sha.js@npm:2.4.11" + dependencies: + inherits: ^2.0.1 + safe-buffer: ^5.0.1 + bin: + sha.js: ./bin.js + checksum: ebd3f59d4b799000699097dadb831c8e3da3eb579144fd7eb7a19484cbcbb7aca3c68ba2bb362242eb09e33217de3b4ea56e4678184c334323eca24a58e3ad07 + languageName: node + linkType: hard + +"signal-exit@npm:^3.0.7": + version: 3.0.7 + resolution: "signal-exit@npm:3.0.7" + checksum: a2f098f247adc367dffc27845853e9959b9e88b01cb301658cfe4194352d8d2bb32e18467c786a7fe15f1d44b233ea35633d076d5e737870b7139949d1ab6318 + languageName: node + linkType: hard + +"smart-buffer@npm:^4.2.0": + version: 4.2.0 + resolution: "smart-buffer@npm:4.2.0" + checksum: b5167a7142c1da704c0e3af85c402002b597081dd9575031a90b4f229ca5678e9a36e8a374f1814c8156a725d17008ae3bde63b92f9cfd132526379e580bec8b + languageName: node + linkType: hard + +"socks-proxy-agent@npm:^7.0.0": + version: 7.0.0 + resolution: "socks-proxy-agent@npm:7.0.0" + dependencies: + agent-base: ^6.0.2 + debug: ^4.3.3 + socks: ^2.6.2 + checksum: 720554370154cbc979e2e9ce6a6ec6ced205d02757d8f5d93fe95adae454fc187a5cbfc6b022afab850a5ce9b4c7d73e0f98e381879cf45f66317a4895953846 + languageName: node + linkType: hard + +"socks@npm:^2.6.2": + version: 2.7.0 + resolution: "socks@npm:2.7.0" + dependencies: + ip: ^2.0.0 + smart-buffer: ^4.2.0 + checksum: 0b5d94e2b3c11e7937b40fc5dac1e80d8b92a330e68c51f1d271ce6980c70adca42a3f8cd47c4a5769956bada074823b53374f2dc5f2ea5c2121b222dec6eadf + languageName: node + linkType: hard + +"ssri@npm:^9.0.0": + version: 9.0.1 + resolution: "ssri@npm:9.0.1" + dependencies: + minipass: ^3.1.1 + checksum: fb58f5e46b6923ae67b87ad5ef1c5ab6d427a17db0bead84570c2df3cd50b4ceb880ebdba2d60726588272890bae842a744e1ecce5bd2a2a582fccd5068309eb + languageName: node + linkType: hard + +"string-width@npm:^1.0.2 || 2 || 3 || 
4, string-width@npm:^4.1.0, string-width@npm:^4.2.0, string-width@npm:^4.2.3": + version: 4.2.3 + resolution: "string-width@npm:4.2.3" + dependencies: + emoji-regex: ^8.0.0 + is-fullwidth-code-point: ^3.0.0 + strip-ansi: ^6.0.1 + checksum: e52c10dc3fbfcd6c3a15f159f54a90024241d0f149cf8aed2982a2d801d2e64df0bf1dc351cf8e95c3319323f9f220c16e740b06faecd53e2462df1d2b5443fb + languageName: node + linkType: hard + +"string_decoder@npm:^1.1.1": + version: 1.3.0 + resolution: "string_decoder@npm:1.3.0" + dependencies: + safe-buffer: ~5.2.0 + checksum: 8417646695a66e73aefc4420eb3b84cc9ffd89572861fe004e6aeb13c7bc00e2f616247505d2dbbef24247c372f70268f594af7126f43548565c68c117bdeb56 + languageName: node + linkType: hard + +"strip-ansi@npm:^6.0.0, strip-ansi@npm:^6.0.1": + version: 6.0.1 + resolution: "strip-ansi@npm:6.0.1" + dependencies: + ansi-regex: ^5.0.1 + checksum: f3cd25890aef3ba6e1a74e20896c21a46f482e93df4a06567cebf2b57edabb15133f1f94e57434e0a958d61186087b1008e89c94875d019910a213181a14fc8c + languageName: node + linkType: hard + +"strip-json-comments@npm:3.1.1": + version: 3.1.1 + resolution: "strip-json-comments@npm:3.1.1" + checksum: 492f73e27268f9b1c122733f28ecb0e7e8d8a531a6662efbd08e22cccb3f9475e90a1b82cab06a392f6afae6d2de636f977e231296400d0ec5304ba70f166443 + languageName: node + linkType: hard + +"supports-color@npm:8.1.1": + version: 8.1.1 + resolution: "supports-color@npm:8.1.1" + dependencies: + has-flag: ^4.0.0 + checksum: c052193a7e43c6cdc741eb7f378df605636e01ad434badf7324f17fb60c69a880d8d8fcdcb562cf94c2350e57b937d7425ab5b8326c67c2adc48f7c87c1db406 + languageName: node + linkType: hard + +"supports-color@npm:^7.1.0": + version: 7.2.0 + resolution: "supports-color@npm:7.2.0" + dependencies: + has-flag: ^4.0.0 + checksum: 3dda818de06ebbe5b9653e07842d9479f3555ebc77e9a0280caf5a14fb877ffee9ed57007c3b78f5a6324b8dbeec648d9e97a24e2ed9fdb81ddc69ea07100f4a + languageName: node + linkType: hard + +"tar@npm:^6.1.11, tar@npm:^6.1.2": + version: 6.1.11 + resolution: "tar@npm:6.1.11" + dependencies: + chownr: ^2.0.0 + fs-minipass: ^2.0.0 + minipass: ^3.0.0 + minizlib: ^2.1.1 + mkdirp: ^1.0.3 + yallist: ^4.0.0 + checksum: a04c07bb9e2d8f46776517d4618f2406fb977a74d914ad98b264fc3db0fe8224da5bec11e5f8902c5b9bcb8ace22d95fbe3c7b36b8593b7dfc8391a25898f32f + languageName: node + linkType: hard + +"to-regex-range@npm:^5.0.1": + version: 5.0.1 + resolution: "to-regex-range@npm:5.0.1" + dependencies: + is-number: ^7.0.0 + checksum: f76fa01b3d5be85db6a2a143e24df9f60dd047d151062d0ba3df62953f2f697b16fe5dad9b0ac6191c7efc7b1d9dcaa4b768174b7b29da89d4428e64bc0a20ed + languageName: node + linkType: hard + +"type-detect@npm:^4.0.0, type-detect@npm:^4.0.5": + version: 4.0.8 + resolution: "type-detect@npm:4.0.8" + checksum: 62b5628bff67c0eb0b66afa371bd73e230399a8d2ad30d852716efcc4656a7516904570cd8631a49a3ce57c10225adf5d0cbdcb47f6b0255fe6557c453925a15 + languageName: node + linkType: hard + +"typescript@npm:~4.5.2": + version: 4.5.5 + resolution: "typescript@npm:4.5.5" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 506f4c919dc8aeaafa92068c997f1d213b9df4d9756d0fae1a1e7ab66b585ab3498050e236113a1c9e57ee08c21ec6814ca7a7f61378c058d79af50a4b1f5a5e + languageName: node + linkType: hard + +"typescript@patch:typescript@~4.5.2#~builtin": + version: 4.5.5 + resolution: "typescript@patch:typescript@npm%3A4.5.5#~builtin::version=4.5.5&hash=a1c5e5" + bin: + tsc: bin/tsc + tsserver: bin/tsserver + checksum: 
858c61fa63f7274ca4aaaffeced854d550bf416cff6e558c4884041b3311fb662f476f167cf5c9f8680c607239797e26a2ee0bcc6467fbc05bfcb218e1c6c671 + languageName: node + linkType: hard + +"unique-filename@npm:^2.0.0": + version: 2.0.1 + resolution: "unique-filename@npm:2.0.1" + dependencies: + unique-slug: ^3.0.0 + checksum: 807acf3381aff319086b64dc7125a9a37c09c44af7620bd4f7f3247fcd5565660ac12d8b80534dcbfd067e6fe88a67e621386dd796a8af828d1337a8420a255f + languageName: node + linkType: hard + +"unique-slug@npm:^3.0.0": + version: 3.0.0 + resolution: "unique-slug@npm:3.0.0" + dependencies: + imurmurhash: ^0.1.4 + checksum: 49f8d915ba7f0101801b922062ee46b7953256c93ceca74303bd8e6413ae10aa7e8216556b54dc5382895e8221d04f1efaf75f945c2e4a515b4139f77aa6640c + languageName: node + linkType: hard + +"util-deprecate@npm:^1.0.1": + version: 1.0.2 + resolution: "util-deprecate@npm:1.0.2" + checksum: 474acf1146cb2701fe3b074892217553dfcf9a031280919ba1b8d651a068c9b15d863b7303cb15bd00a862b498e6cf4ad7b4a08fb134edd5a6f7641681cb54a2 + languageName: node + linkType: hard + +"which@npm:^2.0.2": + version: 2.0.2 + resolution: "which@npm:2.0.2" + dependencies: + isexe: ^2.0.0 + bin: + node-which: ./bin/node-which + checksum: 1a5c563d3c1b52d5f893c8b61afe11abc3bab4afac492e8da5bde69d550de701cf9806235f20a47b5c8fa8a1d6a9135841de2596535e998027a54589000e66d1 + languageName: node + linkType: hard + +"wide-align@npm:^1.1.5": + version: 1.1.5 + resolution: "wide-align@npm:1.1.5" + dependencies: + string-width: ^1.0.2 || 2 || 3 || 4 + checksum: d5fc37cd561f9daee3c80e03b92ed3e84d80dde3365a8767263d03dacfc8fa06b065ffe1df00d8c2a09f731482fcacae745abfbb478d4af36d0a891fad4834d3 + languageName: node + linkType: hard + +"workerpool@npm:6.2.1": + version: 6.2.1 + resolution: "workerpool@npm:6.2.1" + checksum: c2c6eebbc5225f10f758d599a5c016fa04798bcc44e4c1dffb34050cd361d7be2e97891aa44419e7afe647b1f767b1dc0b85a5e046c409d890163f655028b09d + languageName: node + linkType: hard + +"wrap-ansi@npm:^7.0.0": + version: 7.0.0 + resolution: "wrap-ansi@npm:7.0.0" + dependencies: + ansi-styles: ^4.0.0 + string-width: ^4.1.0 + strip-ansi: ^6.0.0 + checksum: a790b846fd4505de962ba728a21aaeda189b8ee1c7568ca5e817d85930e06ef8d1689d49dbf0e881e8ef84436af3a88bc49115c2e2788d841ff1b8b5b51a608b + languageName: node + linkType: hard + +"wrappy@npm:1": + version: 1.0.2 + resolution: "wrappy@npm:1.0.2" + checksum: 159da4805f7e84a3d003d8841557196034155008f817172d4e986bd591f74aa82aa7db55929a54222309e01079a65a92a9e6414da5a6aa4b01ee44a511ac3ee5 + languageName: node + linkType: hard + +"y18n@npm:^5.0.5": + version: 5.0.8 + resolution: "y18n@npm:5.0.8" + checksum: 54f0fb95621ee60898a38c572c515659e51cc9d9f787fb109cef6fde4befbe1c4602dc999d30110feee37456ad0f1660fa2edcfde6a9a740f86a290999550d30 + languageName: node + linkType: hard + +"yallist@npm:^3.0.2": + version: 3.1.1 + resolution: "yallist@npm:3.1.1" + checksum: 48f7bb00dc19fc635a13a39fe547f527b10c9290e7b3e836b9a8f1ca04d4d342e85714416b3c2ab74949c9c66f9cebb0473e6bc353b79035356103b47641285d + languageName: node + linkType: hard + +"yallist@npm:^4.0.0": + version: 4.0.0 + resolution: "yallist@npm:4.0.0" + checksum: 343617202af32df2a15a3be36a5a8c0c8545208f3d3dfbc6bb7c3e3b7e8c6f8e7485432e4f3b88da3031a6e20afa7c711eded32ddfb122896ac5d914e75848d5 + languageName: node + linkType: hard + +"yargs-parser@npm:20.2.4": + version: 20.2.4 + resolution: "yargs-parser@npm:20.2.4" + checksum: d251998a374b2743a20271c2fd752b9fbef24eb881d53a3b99a7caa5e8227fcafd9abf1f345ac5de46435821be25ec12189a11030c12ee6481fef6863ed8b924 + languageName: node + linkType: 
hard + +"yargs-parser@npm:^20.2.2": + version: 20.2.9 + resolution: "yargs-parser@npm:20.2.9" + checksum: 8bb69015f2b0ff9e17b2c8e6bfe224ab463dd00ca211eece72a4cd8a906224d2703fb8a326d36fdd0e68701e201b2a60ed7cf81ce0fd9b3799f9fe7745977ae3 + languageName: node + linkType: hard + +"yargs-unparser@npm:2.0.0": + version: 2.0.0 + resolution: "yargs-unparser@npm:2.0.0" + dependencies: + camelcase: ^6.0.0 + decamelize: ^4.0.0 + flat: ^5.0.2 + is-plain-obj: ^2.1.0 + checksum: 68f9a542c6927c3768c2f16c28f71b19008710abd6b8f8efbac6dcce26bbb68ab6503bed1d5994bdbc2df9a5c87c161110c1dfe04c6a3fe5c6ad1b0e15d9a8a3 + languageName: node + linkType: hard + +"yargs@npm:16.2.0": + version: 16.2.0 + resolution: "yargs@npm:16.2.0" + dependencies: + cliui: ^7.0.2 + escalade: ^3.1.1 + get-caller-file: ^2.0.5 + require-directory: ^2.1.1 + string-width: ^4.2.0 + y18n: ^5.0.5 + yargs-parser: ^20.2.2 + checksum: b14afbb51e3251a204d81937c86a7e9d4bdbf9a2bcee38226c900d00f522969ab675703bee2a6f99f8e20103f608382936034e64d921b74df82b63c07c5e8f59 + languageName: node + linkType: hard + +"yocto-queue@npm:^0.1.0": + version: 0.1.0 + resolution: "yocto-queue@npm:0.1.0" + checksum: f77b3d8d00310def622123df93d4ee654fc6a0096182af8bd60679ddcdfb3474c56c6c7190817c84a2785648cdee9d721c0154eb45698c62176c322fb46fc700 + languageName: node + linkType: hard diff --git a/crates/tools/Cargo.toml b/crates/tools/Cargo.toml new file mode 100644 index 0000000000..b29bb37019 --- /dev/null +++ b/crates/tools/Cargo.toml @@ -0,0 +1,13 @@ +[package] +name = "tools" +version = "0.1.0" +edition = "2021" + +[dependencies] +anyhow = { version = "1.0.63" } +cfg-if = "1.0.0" +clap = { version = "3.2.20", features = ["derive"] } +difference = { version = "2.0.0", default-features = false } +reqwest = { version = "0.11.12", features = ["blocking"] } +tempfile = "3.3.0" +toml = { version = "0.5.9", default-features = false } diff --git a/crates/tools/src/execution_api.rs b/crates/tools/src/execution_api.rs new file mode 100644 index 0000000000..4c0cc662fa --- /dev/null +++ b/crates/tools/src/execution_api.rs @@ -0,0 +1,96 @@ +use std::{ + fs::File, + io::Write, + path::Path, + process::{Command, Stdio}, +}; + +use anyhow::{anyhow, bail}; +use cfg_if::cfg_if; + +use crate::update::{project_root, Mode}; + +const EXECUTION_API_DIR: &str = "crates/eth_execution_api"; +const EXECUTION_API_RAW_REPO: &str = "https://raw.githubusercontent.com/ethereum/execution-apis"; + +fn get_version(crate_path: &Path) -> anyhow::Result<String> { + let crate_manifest_path = crate_path.join("Cargo.toml"); + + let contents = std::fs::read_to_string(Path::new(&crate_manifest_path))?; + let crate_manifest: toml::Value = toml::from_str(&contents)?; + + let package = crate_manifest + .get("package") + .ok_or_else(|| anyhow!("Cargo.toml does not contain `package` section."))?; + + let version = package + .get("version") + .ok_or_else(|| anyhow!("Cargo.toml does not contain `version` under `package` section."))?; + + version + .as_str() + .map(ToOwned::to_owned) + .ok_or_else(|| anyhow!("Expected `version` to be a string value.")) +} + +fn get_openrpc_json(version: &str) -> anyhow::Result<String> { + let url = format!("{}/v{}/refs-openrpc.json", EXECUTION_API_RAW_REPO, version); + + reqwest::blocking::get(url) + .map_err(|e| { + anyhow!( + "Failed to retrieve `openrpc.json` for version: {} due to error: `{}`.", + version, + e.to_string() + ) + })?
+ .text() + .map_err(|e| { + anyhow!( + "Failed to convert retrieved `openrpc.json` to UTF-8, due to: {}.", + e + ) + }) +} + +pub fn generate(_mode: Mode) -> anyhow::Result<()> { + let crate_path = project_root().join(EXECUTION_API_DIR); + let version = get_version(&crate_path)?; + let openrpc_json = get_openrpc_json(&version)?; + + cfg_if! { + if #[cfg(windows)] { + let program = "npx.cmd"; + } else { + let program = "npx"; + } + }; + + let tempdir = tempfile::tempdir()?; + let tempfile_path = tempdir.path().join("openrpc.json"); + + { + let mut tempfile = File::create(&tempfile_path)?; + tempfile.write_all(openrpc_json.as_bytes())?; + } + + let src_path = crate_path.join("src"); + let mut command = Command::new(program) + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .arg("open-rpc-typings") + .arg("-d") + .arg(tempfile_path.to_str().unwrap()) + .arg("--output-rs") + .arg(src_path.to_str().unwrap()) + .arg("--name-rs") + .arg("lib") + .spawn()?; + + let status = command.wait()?; + if status.success() { + Ok(()) + } else { + bail!("Failed to generate execution api, due to: {}", status) + } +} diff --git a/crates/tools/src/main.rs b/crates/tools/src/main.rs new file mode 100644 index 0000000000..b9c9f37597 --- /dev/null +++ b/crates/tools/src/main.rs @@ -0,0 +1,26 @@ +use clap::{Parser, Subcommand}; + +mod execution_api; +mod update; + +use update::Mode; + +#[derive(Parser)] +#[clap(name = "tasks", version, author)] +struct Args { + #[clap(subcommand)] + command: Command, +} + +#[derive(Subcommand)] +enum Command { + /// Generate Ethereum execution API + GenExecutionApi, +} + +fn main() -> anyhow::Result<()> { + let args = Args::parse(); + match args.command { + Command::GenExecutionApi => execution_api::generate(Mode::Overwrite), + } +} diff --git a/crates/tools/src/update.rs b/crates/tools/src/update.rs new file mode 100644 index 0000000000..50c6ab928a --- /dev/null +++ b/crates/tools/src/update.rs @@ -0,0 +1,54 @@ +use std::{ + fs, + io::Write, + path::{Path, PathBuf}, + process::{Command, Stdio}, +}; + +use anyhow::bail; + +#[derive(Clone, Copy, PartialEq, Eq)] +pub enum Mode { + Overwrite, + #[allow(unused)] + Verify, +} + +#[allow(unused)] +pub fn update(path: &Path, contents: &str, mode: Mode) -> anyhow::Result<()> { + let old_contents = fs::read_to_string(path)?; + let old_contents = old_contents.replace("\r\n", "\n"); + let contents = contents.replace("\r\n", "\n"); + if old_contents == contents { + return Ok(()); + } + + if mode == Mode::Verify { + let changes = difference::Changeset::new(&old_contents, &contents, "\n"); + bail!("`{}` is not up-to-date:\n{}", path.display(), changes,); + } + eprintln!("updating {}", path.display()); + fs::write(path, contents)?; + Ok(()) +} + +#[allow(unused)] +pub fn reformat(text: impl std::fmt::Display) -> anyhow::Result { + let mut rustfmt = Command::new("rustfmt") + .stdin(Stdio::piped()) + .stdout(Stdio::piped()) + .spawn()?; + write!(rustfmt.stdin.take().unwrap(), "{}", text)?; + let output = rustfmt.wait_with_output()?; + let stdout = String::from_utf8(output.stdout)?; + let preamble = "Generated file, do not edit by hand, see `crates/tools`"; + Ok(format!("//! 
{}\n\n{}", preamble, stdout)) +} + +pub fn project_root() -> PathBuf { + Path::new(&env!("CARGO_MANIFEST_DIR")) + .ancestors() + .nth(2) + .unwrap() + .to_path_buf() +} diff --git a/package.json b/package.json index 068a596030..a230f6db62 100644 --- a/package.json +++ b/package.json @@ -5,16 +5,19 @@ "license": "SEE LICENSE IN EACH PACKAGE'S LICENSE FILE", "private": true, "workspaces": [ - "packages/*" + "packages/*", + "crates/rethnet_evm_napi" ], "devDependencies": { "@changesets/cli": "^2.16.0", + "@open-rpc/typings": "^1.12.1", "prettier": "2.4.1", "shelljs": "^0.8.5", "typescript": "~4.5.2", "wsrun": "^5.2.2" }, "scripts": { + "prebuild": "cd crates/rethnet_evm_napi && yarn build", "build": "tsc --build packages/hardhat-core packages/hardhat-docker packages/hardhat-ethers packages/hardhat-etherscan packages/hardhat-ganache packages/hardhat-solhint packages/hardhat-solpp packages/hardhat-truffle4 packages/hardhat-truffle5 packages/hardhat-vyper packages/hardhat-waffle packages/hardhat-web3 packages/hardhat-web3-legacy packages/hardhat-chai-matchers packages/hardhat-network-helpers packages/hardhat-toolbox", "watch": "tsc --build --watch packages/hardhat-core/src packages/hardhat-docker packages/hardhat-ethers packages/hardhat-etherscan packages/hardhat-ganache packages/hardhat-solhint packages/hardhat-solpp packages/hardhat-truffle4 packages/hardhat-truffle5 packages/hardhat-vyper packages/hardhat-waffle packages/hardhat-web3 packages/hardhat-web3-legacy packages/hardhat-chai-matchers packages/hardhat-network-helpers packages/hardhat-toolbox", "clean": "wsrun --exclude-missing clean", @@ -22,9 +25,5 @@ "lint": "wsrun --exclude-missing --stages lint && yarn prettier --check", "lint:fix": "wsrun --exclude-missing --stages lint:fix && yarn prettier --write", "prettier": "prettier *.md \"{docs,.github}/**/*.{md,yml}\" \"scripts/**/*.js\"" - }, - "dependencies": {}, - "resolutions": { - "**/antlr4": "4.7.1" } } diff --git a/packages/e2e/package.json b/packages/e2e/package.json index c4e31f27ee..b8e79b9a08 100644 --- a/packages/e2e/package.json +++ b/packages/e2e/package.json @@ -15,6 +15,7 @@ "test:npm": "node run-tests.js npm", "test:yarn": "node run-tests.js yarn", "test": "npm run test:npm && npm run test:yarn", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "clean": "rimraf dist" }, diff --git a/packages/hardhat-core/package.json b/packages/hardhat-core/package.json index 234af5a991..0144ef72d6 100644 --- a/packages/hardhat-core/package.json +++ b/packages/hardhat-core/package.json @@ -19,9 +19,7 @@ "task-runner", "solidity" ], - "bin": { - "hardhat": "internal/cli/cli.js" - }, + "bin": "internal/cli/cli.js", "engines": { "node": "^14.0.0 || ^16.0.0 || ^18.0.0" }, @@ -30,10 +28,11 @@ "lint:fix": "yarn prettier --write && yarn eslint --fix", "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", - "test": "mocha --recursive \"test/**/*.ts\"", + "test": "mocha --recursive \"test/**/*.ts\" --exit", "test:except-tracing": "mocha --recursive \"test/**/*.ts\" --invert --grep \"Stack traces\"", "test:tracing": "mocha --recursive \"test/internal/hardhat-network/{helpers,stack-traces}/**/*.ts\"", "test:forking": "mocha --recursive \"test/internal/hardhat-network/{helpers,jsonrpc,provider}/**/*.ts\"", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf builtin-tasks internal types utils *.d.ts *.map *.js build-test 
tsconfig.tsbuildinfo test/internal/hardhat-network/provider/.hardhat_node_test_cache" @@ -140,6 +139,7 @@ "qs": "^6.7.0", "raw-body": "^2.4.1", "resolve": "1.17.0", + "rethnet-evm": "^0.1.0-dev", "semver": "^6.3.0", "solc": "0.7.3", "source-map-support": "^0.5.13", diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/fork/rpcToTxData.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/fork/rpcToTxData.ts index 09782ac180..57ce82a883 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/fork/rpcToTxData.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/fork/rpcToTxData.ts @@ -27,9 +27,10 @@ export function rpcToTxData( chainId: rpcTransaction.chainId ?? undefined, maxFeePerGas: rpcTransaction.maxFeePerGas, maxPriorityFeePerGas: rpcTransaction.maxPriorityFeePerGas, - accessList: rpcTransaction.accessList?.map((item) => [ - item.address, - item.storageKeys ?? [], - ]), + accessList: + rpcTransaction.accessList?.map((item) => [ + item.address, + item.storageKeys ?? [], + ]) ?? undefined, }; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/modules/eth.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/modules/eth.ts index 311b20f405..9e52201690 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/modules/eth.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/modules/eth.ts @@ -472,13 +472,12 @@ export class EthModule { blockTag: OptionalRpcNewBlockTag ): Promise { const blockNumberOrPending = await this._resolveNewBlockTag(blockTag); - - return numberToRpcQuantity( - await this._node.getAccountBalance( - new Address(address), - blockNumberOrPending - ) + const accountBalance = await this._node.getAccountBalance( + new Address(address), + blockNumberOrPending ); + + return numberToRpcQuantity(accountBalance); } // eth_getBlockByHash diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index df73e5fc6e..ef9a1639d4 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -16,6 +16,7 @@ import { privateToAddress, setLengthLeft, toBuffer, + bufferToBigInt, } from "@nomicfoundation/ethereumjs-util"; import { Bloom, @@ -79,6 +80,12 @@ import { VMTracer } from "../stack-traces/vm-tracer"; import "./ethereumjs-workarounds"; import { rpcQuantityToBigInt } from "../../core/jsonrpc/types/base-types"; import { JsonRpcClient } from "../jsonrpc/client"; +import { assertEthereumJsAndRethnetResults } from "./utils/assertions"; +import { + createRethnetFromHardhatDB, + ethereumjsTransactionToRethnet, + HardhatDB, +} from "./utils/convertToRethnet"; import { bloomFilter, Filter, filterLogs, LATEST_BLOCK, Type } from "./filter"; import { ForkBlockchain } from "./fork/ForkBlockchain"; import { ForkStateManager } from "./fork/ForkStateManager"; @@ -283,6 +290,7 @@ export class HardhatNode extends EventEmitter { hardfork, hardforkActivations, mixHashGenerator, + allowUnlimitedContractSize ?? 
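      // The constructor below now also builds a parallel Rethnet EVM on top of a
      // HardhatDB adapter; `allowUnlimitedContractSize` is forwarded so that
      // rethnet's contract code size limit can be lifted (2n ** 64n - 1n).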
false, tracingConfig, forkNetworkId, forkBlockNum, @@ -350,6 +358,9 @@ Hardhat Network's forking functionality only works with blocks from at least spu // blockNumber => state root private _irregularStatesByBlockNumber: Map = new Map(); + private _hardhatDB; + public _rethnet; + private constructor( private readonly _vm: VM, private readonly _stateManager: StateManager, @@ -366,6 +377,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu public readonly hardfork: HardforkName, private readonly _hardforkActivations: HardforkHistoryConfig, private _mixHashGenerator: RandomBufferGenerator, + allowUnlimitedContractSize: boolean, tracingConfig?: TracingConfig, private _forkNetworkId?: number, private _forkBlockNumber?: bigint, @@ -391,6 +403,22 @@ Hardhat Network's forking functionality only works with blocks from at least spu this._vmTraceDecoder = new VmTraceDecoder(contractsIdentifier); this._solidityTracer = new SolidityTracer(); + this._hardhatDB = new HardhatDB(this._stateManager, this._blockchain); + + const limitContractCodeSize = allowUnlimitedContractSize + ? 2n ** 64n - 1n + : undefined; + + this._rethnet = createRethnetFromHardhatDB( + { + chainId: BigInt(this._configChainId), + limitContractCodeSize, + disableBlockGasLimit: true, + disableEip3607: true, + }, + this._hardhatDB + ); + if (tracingConfig === undefined || tracingConfig.buildInfos === undefined) { return; } @@ -1749,9 +1777,10 @@ Hardhat Network's forking functionality only works with blocks from at least spu ): Promise { const parentBlock = await this.getLatestBlock(); + const coinbase = this.getCoinbaseAddress(); const headerData: HeaderData = { gasLimit: this.getBlockGasLimit(), - coinbase: this.getCoinbaseAddress(), + coinbase, nonce: this.isPostMergeHardfork() ? "0x0000000000000000" : "0x0000000000000042", @@ -1797,7 +1826,24 @@ Hardhat Network's forking functionality only works with blocks from at least spu ) { transactionQueue.removeLastSenderTransactions(); } else { + const rethnetTx = ethereumjsTransactionToRethnet(tx); + const difficulty = this._getBlockEnvDifficulty( + BigIntUtils.fromBigIntLike(headerData.difficulty), + headerData.mixHash !== undefined + ? 
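            // Rough sketch of the dual-execution check applied to every mined
            // transaction (the block fields passed to dryRun are assembled from
            // headerData right here; `blockEnv` is only a stand-in name):
            //
            //   const rethnetTx = ethereumjsTransactionToRethnet(tx);
            //   const rethnetResult = await this._rethnet.dryRun(rethnetTx, blockEnv);
            //   const txResult = await blockBuilder.addTransaction(tx);
            //   assertEthereumJsAndRethnetResults(rethnetResult.execResult, txResult);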
bufferToBigInt(toBuffer(headerData.mixHash)) + : undefined + ); + const rethnetResult = await this._rethnet.dryRun(rethnetTx, { + number: BigInt(parentBlock.header.number) + 1n, + coinbase: coinbase.buf, + timestamp: blockTimestamp, + basefee: headerData.baseFeePerGas, + gasLimit: blockGasLimit, + difficulty, + }); + const txResult = await blockBuilder.addTransaction(tx); + assertEthereumJsAndRethnetResults(rethnetResult.execResult, txResult); traces.push(await this._gatherTraces(txResult.execResult)); results.push(txResult); @@ -2392,13 +2438,37 @@ Hardhat Network's forking functionality only works with blocks from at least spu } ); - return await this._vm.runTx({ + const rethnetTx = ethereumjsTransactionToRethnet(tx); + await this._rethnet.guaranteeTransaction(rethnetTx); + const difficulty = this._getBlockEnvDifficulty( + blockContext.header.difficulty, + bufferToBigInt(blockContext.header.mixHash) + ); + const rethnetResult = await this._rethnet.dryRun(rethnetTx, { + number: blockContext.header.number, + coinbase: blockContext.header.coinbase.buf, + timestamp: blockContext.header.timestamp, + basefee: blockContext.header.baseFeePerGas, + gasLimit: blockContext.header.gasLimit, + difficulty, + }); + + await this._stateManager.setStateRoot(initialStateRoot); + + const ethereumjsResult = await this._vm.runTx({ block: blockContext, tx, skipNonce: true, skipBalance: true, skipBlockGasLimitValidation: true, }); + + assertEthereumJsAndRethnetResults( + rethnetResult.execResult, + ethereumjsResult + ); + + return ethereumjsResult; } finally { if (originalCommon !== undefined) { (this._vm as any)._common = originalCommon; @@ -2629,4 +2699,15 @@ Hardhat Network's forking functionality only works with blocks from at least spu ); } } + + private _getBlockEnvDifficulty( + difficulty: bigint | undefined, + mixHash: bigint | undefined + ): bigint | undefined { + if (this.isPostMergeHardfork()) { + return mixHash; + } + + return difficulty; + } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts index 7dfad757c0..9703473241 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts @@ -1,3 +1,8 @@ +import assert, { AssertionError } from "assert"; +import { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; +import { ExecutionResult } from "rethnet-evm"; +import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; + import { InternalError } from "../../../core/providers/errors"; export function assertHardhatNetworkInvariant( @@ -11,3 +16,107 @@ export function assertHardhatNetworkInvariant( ); } } + +export function assertEthereumJsAndRethnetResults( + rethnetResult: ExecutionResult, + ethereumjsResult: RunTxResult +): asserts rethnetResult { + assertEthereumJsAndRethnetExitCodes( + rethnetResult.exitCode, + ethereumjsResult.execResult.exceptionError?.error + ); + assertEqual( + rethnetResult.gasRefunded, + ethereumjsResult.gasRefund, + "Gas refunded" + ); + + assertEqual( + rethnetResult.gasUsed, + ethereumjsResult.totalGasSpent, + "Gas used" + ); + + const rethnetCreatedAddress = rethnetResult.output.address?.toString("hex"); + const ethereumjsCreatedAddress = ethereumjsResult.createdAddress + ?.toString() + .slice(2); // remove the 0x prefix + + assertEqual( + rethnetCreatedAddress, + ethereumjsCreatedAddress, + "Created address" + ); + + if 
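  // Comparison policy, as implemented in this file: exit codes are matched via
  // the ERROR -> rethnet exit-code table below, gas used/refunded and the
  // created address are compared directly, and the return value is only
  // compared for non-creation transactions; log comparison is still a TODO.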
(ethereumjsResult.createdAddress === undefined) { + assertEqual( + rethnetResult.output.output?.toString("hex"), + ethereumjsResult.execResult.returnValue.toString("hex"), + "Return value" + ); + } + // TODO: Compare logs? +} + +function assertEthereumJsAndRethnetExitCodes( + rethnetExitCode: number, + ethereumjsExitCode: ERROR | undefined +) { + // assert(ethereumjsExitCode === undefined && !( + // rethnetExitCode === 0x00 || + // rethnetExitCode === 0x02 || + // rethnetExitCode === 0x03), "Expected a successful exit code"); + + const mapping = new Map([ + [ERROR.OUT_OF_GAS, [0x50]], + [ERROR.CODESTORE_OUT_OF_GAS, undefined], + [ERROR.CODESIZE_EXCEEDS_MAXIMUM, undefined], + [ERROR.STACK_UNDERFLOW, [0x57]], + [ERROR.STACK_OVERFLOW, [0x58]], + [ERROR.INVALID_JUMP, [0x54]], + [ERROR.INVALID_OPCODE, [0x51, 0x53]], + [ERROR.OUT_OF_RANGE, [0x59]], // ? + [ERROR.REVERT, [0x20]], + [ERROR.STATIC_STATE_CHANGE, [0x52]], // ? + [ERROR.INTERNAL_ERROR, undefined], + [ERROR.CREATE_COLLISION, [0x60]], + [ERROR.STOP, [0x01]], + [ERROR.REFUND_EXHAUSTED, undefined], + [ERROR.VALUE_OVERFLOW, undefined], + [ERROR.INSUFFICIENT_BALANCE, undefined], + [ERROR.INVALID_BEGINSUB, undefined], + [ERROR.INVALID_RETURNSUB, undefined], + [ERROR.INVALID_JUMPSUB, undefined], + [ERROR.INVALID_BYTECODE_RESULT, [0x53]], // ? + [ERROR.INVALID_EOF_FORMAT, undefined], + [ERROR.INITCODE_SIZE_VIOLATION, [0x64]], // ? + [ERROR.AUTHCALL_UNSET, undefined], + [ERROR.AUTHCALL_NONZERO_VALUEEXT, undefined], + [ERROR.AUTH_INVALID_S, undefined], + [ERROR.BLS_12_381_INVALID_INPUT_LENGTH, undefined], + [ERROR.BLS_12_381_POINT_NOT_ON_CURVE, undefined], + [ERROR.BLS_12_381_INPUT_EMPTY, undefined], + [ERROR.BLS_12_381_FP_NOT_IN_FIELD, undefined], + ]); + + if (ethereumjsExitCode !== undefined) { + const expected = mapping.get(ethereumjsExitCode); + if (expected !== undefined) { + assert( + expected.includes(rethnetExitCode), + `Expected rethnet's exit code ${rethnetExitCode} to be included in ${expected.join( + ", " + )}` + ); + } + } +} + +function assertEqual(rethnetValue: any, ethereumJsValue: any, field: string) { + if (rethnetValue !== ethereumJsValue) { + // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error + throw new AssertionError({ + message: `Expected '${field}' to match, but rethnet returned ${rethnetValue} and ethereumjs returned ${ethereumJsValue}`, + }); + } +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts new file mode 100644 index 0000000000..1a711f7303 --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -0,0 +1,184 @@ +import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; +import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; +import { + AccessListEIP2930Transaction, + FeeMarketEIP1559Transaction, + TypedTransaction, +} from "@nomicfoundation/ethereumjs-tx"; +import { + Account, + Address, + bigIntToBuffer, + bufferToBigInt, + setLengthLeft, +} from "@nomicfoundation/ethereumjs-util"; +import { + Account as RethnetAccount, + Config, + Rethnet, + Transaction, +} from "rethnet-evm"; +import { HardhatError } from "../../../core/errors"; +import { ERRORS } from "../../../core/errors-list"; + +export class HardhatDB { + private _stateManager: StateManager; + private _blockchain: BlockchainInterface | undefined; + + constructor(stateManager: StateManager, blockchain?: 
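  // HardhatDB adapts the existing ethereumjs StateManager (and, optionally, the
  // blockchain for block hash lookups) to the callback-style database interface
  // that rethnet-evm consumes, so both EVMs read and mutate the same state.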
BlockchainInterface) { + this._stateManager = stateManager; + this._blockchain = blockchain; + } + + public async commit() { + return this._stateManager.commit(); + } + + public async checkpoint() { + return this._stateManager.checkpoint(); + } + + public async revert() { + return this._stateManager.revert(); + } + + public async getAccountByAddress(address: Buffer) { + return this._stateManager.getAccount(new Address(address)); + } + + public async getAccountStorageSlot(address: Buffer, index: bigint) { + const key = setLengthLeft(bigIntToBuffer(index), 32); + let data = await this._stateManager.getContractStorage( + new Address(address), + key + ); + + const EXPECTED_DATA_SIZE = 32; + if (data.length < EXPECTED_DATA_SIZE) { + data = Buffer.concat( + [Buffer.alloc(EXPECTED_DATA_SIZE - data.length, 0), data], + EXPECTED_DATA_SIZE + ); + } + + return bufferToBigInt(data); + } + + public async getBlockHash(blockNumber: bigint) { + const block = await this._blockchain?.getBlock(blockNumber); + if (block === undefined || block === null) { + throw new HardhatError(ERRORS.GENERAL.UNSUPPORTED_OPERATION, { + error: "Block not found", + }); + } + + return block.header.hash(); + } + + public async getCodeByHash(codeHash: Buffer) { + const db = (this._stateManager as any)._trie._db; + return db.get(Buffer.concat([Buffer.from("c"), codeHash])); + } + + public async getStorageRoot() { + return this._stateManager.getStateRoot(); + } + + public async insertAccount( + address: Buffer, + account: RethnetAccount + ): Promise { + return this._stateManager.putAccount( + new Address(address), + new Account(account.nonce, account.balance, undefined, account.codeHash) + ); + } + + public async setAccountBalance(address: Buffer, balance: bigint) { + return this._stateManager.modifyAccountFields(new Address(address), { + balance, + }); + } + + public async setAccountCode(address: Buffer, code: Buffer) { + return this._stateManager.putContractCode(new Address(address), code); + } + + public async setAccountNonce(address: Buffer, nonce: bigint) { + return this._stateManager.modifyAccountFields(new Address(address), { + nonce, + }); + } + + public async setAccountStorageSlot( + address: Buffer, + index: bigint, + value: bigint + ) { + return this._stateManager.putContractStorage( + new Address(address), + setLengthLeft(bigIntToBuffer(index), 32), + setLengthLeft(bigIntToBuffer(value), 32) + ); + } +} + +export function ethereumjsTransactionToRethnet( + tx: TypedTransaction +): Transaction { + const chainId = (_tx: TypedTransaction) => { + if (_tx instanceof AccessListEIP2930Transaction) { + return (_tx as AccessListEIP2930Transaction).chainId; + } else if (_tx instanceof FeeMarketEIP1559Transaction) { + return (_tx as FeeMarketEIP1559Transaction).chainId; + } else { + return undefined; + } + }; + + const rethnetTx: Transaction = { + from: tx.getSenderAddress().toBuffer(), + to: tx.to?.buf, + gasLimit: tx.gasLimit, + gasPrice: + (tx as FeeMarketEIP1559Transaction)?.maxFeePerGas ?? 
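      // EIP-1559 transactions provide maxFeePerGas; legacy and access-list
      // transactions fall back to their own gasPrice, read through `as any`
      // because the TypedTransaction union does not expose that field.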
(tx as any).gasPrice, + gasPriorityFee: (tx as FeeMarketEIP1559Transaction)?.maxPriorityFeePerGas, + value: tx.value, + nonce: tx.nonce, + input: tx.data, + accessList: (tx as AccessListEIP2930Transaction)?.AccessListJSON, + chainId: chainId(tx), + }; + + return rethnetTx; +} + +export function createRethnetFromHardhatDB( + cfg: Config, + hardhatDB: HardhatDB +): Rethnet { + return Rethnet.withCallbacks( + cfg, + { + getAccountByAddressFn: + HardhatDB.prototype.getAccountByAddress.bind(hardhatDB), + getAccountStorageSlotFn: + HardhatDB.prototype.getAccountStorageSlot.bind(hardhatDB), + getBlockHashFn: HardhatDB.prototype.getBlockHash.bind(hardhatDB), + getCodeByHashFn: HardhatDB.prototype.getCodeByHash.bind(hardhatDB), + }, + null, + { + checkpointFn: HardhatDB.prototype.checkpoint.bind(hardhatDB), + revertFn: HardhatDB.prototype.revert.bind(hardhatDB), + getStorageRootFn: HardhatDB.prototype.getStorageRoot.bind(hardhatDB), + insertAccountFn: HardhatDB.prototype.insertAccount.bind(hardhatDB), + setAccountBalanceFn: + HardhatDB.prototype.setAccountBalance.bind(hardhatDB), + setAccountCodeFn: HardhatDB.prototype.setAccountCode.bind(hardhatDB), + setAccountNonceFn: HardhatDB.prototype.setAccountNonce.bind(hardhatDB), + setAccountStorageSlotFn: + HardhatDB.prototype.setAccountStorageSlot.bind(hardhatDB), + } + ); +} diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/HardhatDb.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/HardhatDb.ts new file mode 100644 index 0000000000..7a748fc539 --- /dev/null +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/HardhatDb.ts @@ -0,0 +1,127 @@ +import { assert } from "chai"; +import { DefaultStateManager } from "@nomicfoundation/ethereumjs-statemanager"; +import { Address } from "@nomicfoundation/ethereumjs-util"; + +import { Block, Config, Rethnet, Transaction } from "rethnet-evm"; +import { HardhatDB } from "../../../../../src/internal/hardhat-network/provider/utils/convertToRethnet"; + +describe("Hardhat DB", () => { + const caller = Address.fromString( + "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" + ); + const receiver = Address.fromString( + "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" + ); + + let db: HardhatDB; + let rethnet: Rethnet; + + beforeEach(function () { + db = new HardhatDB(new DefaultStateManager()); + + const cfg: Config = { + chainId: BigInt(0), + limitContractCodeSize: 2n ** 64n - 1n, + disableEip3607: true, + }; + rethnet = Rethnet.withCallbacks( + cfg, + { + getAccountByAddressFn: HardhatDB.prototype.getAccountByAddress.bind(db), + getAccountStorageSlotFn: + HardhatDB.prototype.getAccountStorageSlot.bind(db), + getBlockHashFn: HardhatDB.prototype.getBlockHash.bind(db), + getCodeByHashFn: HardhatDB.prototype.getCodeByHash.bind(db), + }, + { + commitFn: HardhatDB.prototype.commit.bind(db), + }, + { + checkpointFn: HardhatDB.prototype.checkpoint.bind(db), + revertFn: HardhatDB.prototype.revert.bind(db), + getStorageRootFn: HardhatDB.prototype.getStorageRoot.bind(db), + insertAccountFn: HardhatDB.prototype.insertAccount.bind(db), + setAccountBalanceFn: HardhatDB.prototype.setAccountBalance.bind(db), + setAccountCodeFn: HardhatDB.prototype.setAccountCode.bind(db), + setAccountNonceFn: HardhatDB.prototype.setAccountNonce.bind(db), + setAccountStorageSlotFn: + HardhatDB.prototype.setAccountStorageSlot.bind(db), + } + ); + }); + + // TODO: insertBlock, setAccountCode, setAccountStorageSlot + it("getAccountByAddress", async () => { + await 
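    // The setup above wires Rethnet.withCallbacks directly (including commitFn);
    // node.ts uses the createRethnetFromHardhatDB helper instead, which passes
    // null for the commit group. A minimal sketch, assuming a Config value like
    // the `cfg` above:
    //
    //   const db = new HardhatDB(new DefaultStateManager());
    //   const rethnet = createRethnetFromHardhatDB(cfg, db);
    //   const result = await rethnet.dryRun(tx, block);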
rethnet.insertAccount(caller.buf); + const account = await rethnet.getAccountByAddress(caller.buf); + + assert.equal(account?.balance, 0n); + assert.equal(account?.nonce, 0n); + }); + it("setAccountBalance", async () => { + await rethnet.insertAccount(caller.buf); + await rethnet.setAccountBalance(caller.buf, 100n); + + const account = await rethnet.getAccountByAddress(caller.buf); + + assert.equal(account?.balance, 100n); + assert.equal(account?.nonce, 0n); + }); + it("setAccountNonce", async () => { + await rethnet.insertAccount(caller.buf); + await rethnet.setAccountNonce(caller.buf, 5n); + + const account = await rethnet.getAccountByAddress(caller.buf); + + assert.equal(account?.balance, 0n); + assert.equal(account?.nonce, 5n); + }); + it("call", async () => { + // Add funds to caller + await rethnet.insertAccount(caller.buf); + await rethnet.setAccountBalance(caller.buf, BigInt("0xffffffff")); + + // send some value + const sendValue: Transaction = { + from: caller.buf, + to: receiver.buf, + gasLimit: BigInt(1000000), + value: 100n, + }; + + const block: Block = { + number: BigInt(1), + timestamp: BigInt(Math.ceil(new Date().getTime() / 1000)), + }; + const sendValueChanges = await rethnet.dryRun(sendValue, block); + + // receiver should have 100 (0x64) wei + assert.equal( + sendValueChanges.state["0x70997970c51812dc3a010c7d01b50e0d17dc79c8"].info + .balance, + "0x64" + ); + + // create a contract + const createContract: Transaction = { + from: caller.buf, + + gasLimit: BigInt(1000000), + + // minimal creation bytecode + input: Buffer.from("3859818153F3", "hex"), + }; + + const createContractChanges = await rethnet.dryRun(createContract, block); + + assert.exists( + createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] + ); + // check that the code hash is not the null hash (i.e., the address has code) + assert.notEqual( + createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] + .info.code_hash, + "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" + ); + }); +}); diff --git a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts index 07d382f8be..9d246e504e 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts @@ -7,7 +7,10 @@ import { } from "@nomicfoundation/ethereumjs-util"; import { VM } from "@nomicfoundation/ethereumjs-vm"; import abi from "ethereumjs-abi"; +import { Rethnet } from "rethnet-evm"; +import { assertEthereumJsAndRethnetResults } from "../../../../src/internal/hardhat-network/provider/utils/assertions"; +import { ethereumjsTransactionToRethnet } from "../../../../src/internal/hardhat-network/provider/utils/convertToRethnet"; import { MessageTrace } from "../../../../src/internal/hardhat-network/stack-traces/message-trace"; import { VMTracer } from "../../../../src/internal/hardhat-network/stack-traces/vm-tracer"; @@ -59,6 +62,7 @@ export function encodeCall( export async function traceTransaction( vm: VM, + rethnet: Rethnet, txData: TxData ): Promise { const tx = new Transaction({ @@ -78,7 +82,17 @@ export async function traceTransaction( vmTracer.enableTracing(); try { - await vm.runTx({ tx: signedTx }); + const rethnetTx = ethereumjsTransactionToRethnet(signedTx); + + const rethnetResult = await rethnet.dryRun(rethnetTx, { + number: 0n, + coinbase: 
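    // The stack-trace test harness now receives the Rethnet instance as well:
    // each transaction is first replayed with rethnet.dryRun (using the fixed
    // block environment assembled here) and then executed with vm.runTx, and
    // the two results are compared with assertEthereumJsAndRethnetResults.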
Buffer.from("0000000000000000000000000000000000000000", "hex"), + timestamp: BigInt(Math.floor(Date.now() / 1000)), + gasLimit: 4000000n, + }); + + const txResult = await vm.runTx({ tx: signedTx }); + assertEthereumJsAndRethnetResults(rethnetResult.execResult, txResult); const messageTrace = vmTracer.getLastTopLevelMessageTrace(); if (messageTrace === undefined) { diff --git a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts index 8ebd68aaeb..db7fd9b60a 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts @@ -4,6 +4,7 @@ import { assert } from "chai"; import fs from "fs"; import fsExtra from "fs-extra"; import path from "path"; +import { Rethnet } from "rethnet-evm"; import semver from "semver"; import { ReturnData } from "../../../../src/internal/hardhat-network/provider/return-data"; @@ -36,6 +37,10 @@ import { } from "../../../../src/types"; import { setCWD } from "../helpers/cwd"; +import { + createRethnetFromHardhatDB, + HardhatDB, +} from "../../../../src/internal/hardhat-network/provider/utils/convertToRethnet"; import { SUPPORTED_SOLIDITY_VERSION_RANGE } from "../../../../src/internal/hardhat-network/stack-traces/constants"; import { compileFiles, @@ -433,6 +438,15 @@ async function runTest( const logger = new ConsoleLogger(); const vm = await instantiateVm(); + const hardhatDB = new HardhatDB(vm.stateManager, vm.blockchain); + + const rethnet = createRethnetFromHardhatDB( + { + chainId: vm._common.chainId(), + limitContractCodeSize: 2n ** 64n - 1n, + }, + hardhatDB + ); const txIndexToContract: Map = new Map(); @@ -444,6 +458,7 @@ async function runTest( txIndex, tx, vm, + rethnet, compilerOutput, txIndexToContract ); @@ -467,6 +482,7 @@ async function runTest( txIndex, tx, vm, + rethnet, compilerOutput, contract! 
); @@ -574,6 +590,7 @@ async function runDeploymentTransactionTest( txIndex: number, tx: DeploymentTransaction, vm: VM, + rethnet: Rethnet, compilerOutput: CompilerOutput, txIndexToContract: Map ): Promise { @@ -605,7 +622,7 @@ async function runDeploymentTransactionTest( const data = Buffer.concat([deploymentBytecode, params]); - const trace = await traceTransaction(vm, { + const trace = await traceTransaction(vm, rethnet, { value: tx.value, data, gasLimit: tx.gas, @@ -618,6 +635,7 @@ async function runCallTransactionTest( txIndex: number, tx: CallTransaction, vm: VM, + rethnet: Rethnet, compilerOutput: CompilerOutput, contract: DeployedContract ): Promise { @@ -638,7 +656,7 @@ async function runCallTransactionTest( data = Buffer.from([]); } - const trace = await traceTransaction(vm, { + const trace = await traceTransaction(vm, rethnet, { to: contract.address, value: tx.value, data, diff --git a/packages/hardhat-docker/package.json b/packages/hardhat-docker/package.json index 20bd9e3e55..9a73bb3684 100644 --- a/packages/hardhat-docker/package.json +++ b/packages/hardhat-docker/package.json @@ -48,7 +48,6 @@ "ts-node": "^8.1.0", "typescript": "~4.5.2" }, - "peerDependencies": {}, "dependencies": { "dockerode": "^2.5.8", "fs-extra": "^7.0.1", diff --git a/packages/hardhat-ethers/package.json b/packages/hardhat-ethers/package.json index 7a2d25da11..831cf328db 100644 --- a/packages/hardhat-ethers/package.json +++ b/packages/hardhat-ethers/package.json @@ -21,6 +21,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist internal types *.{d.ts,js}{,.map} build-test tsconfig.tsbuildinfo" diff --git a/packages/hardhat-etherscan/package.json b/packages/hardhat-etherscan/package.json index 471c59240a..86003eadd7 100644 --- a/packages/hardhat-etherscan/package.json +++ b/packages/hardhat-etherscan/package.json @@ -25,6 +25,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" @@ -42,8 +43,8 @@ "chalk": "^2.4.2", "debug": "^4.1.1", "fs-extra": "^7.0.1", - "semver": "^6.3.0", "lodash": "^4.17.11", + "semver": "^6.3.0", "table": "^6.8.0", "undici": "^5.4.0" }, diff --git a/packages/hardhat-ganache/package.json b/packages/hardhat-ganache/package.json index 190c56bae5..6d540f10ac 100644 --- a/packages/hardhat-ganache/package.json +++ b/packages/hardhat-ganache/package.json @@ -22,6 +22,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/packages/hardhat-shorthand/package.json b/packages/hardhat-shorthand/package.json index d28469fd3e..eae825cf8d 100644 --- a/packages/hardhat-shorthand/package.json +++ b/packages/hardhat-shorthand/package.json @@ -7,8 +7,8 @@ "author": "Nomic Labs LLC", "license": "MIT", "bin": { - "hh": "dist/src/index.js", - "hardhat-completion": "dist/src/completion.js" + "hardhat-completion": "dist/src/completion.js", + "hh": "dist/src/index.js" }, 
"keywords": [ "ethereum", @@ -22,6 +22,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/packages/hardhat-solhint/package.json b/packages/hardhat-solhint/package.json index 51fcfbd93e..4db0416ede 100644 --- a/packages/hardhat-solhint/package.json +++ b/packages/hardhat-solhint/package.json @@ -23,6 +23,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/packages/hardhat-solpp/package.json b/packages/hardhat-solpp/package.json index 599253e011..f2bf13393d 100644 --- a/packages/hardhat-solpp/package.json +++ b/packages/hardhat-solpp/package.json @@ -23,6 +23,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/packages/hardhat-toolbox/package.json b/packages/hardhat-toolbox/package.json index 02b3ea7962..8647af3096 100644 --- a/packages/hardhat-toolbox/package.json +++ b/packages/hardhat-toolbox/package.json @@ -23,6 +23,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" @@ -33,12 +34,11 @@ "LICENSE", "README.md" ], - "dependencies": {}, "devDependencies": { "@ethersproject/abi": "^5.4.7", "@ethersproject/providers": "^5.4.7", - "@nomicfoundation/hardhat-network-helpers": "^1.0.0", "@nomicfoundation/hardhat-chai-matchers": "^1.0.0", + "@nomicfoundation/hardhat-network-helpers": "^1.0.0", "@nomiclabs/hardhat-ethers": "^2.0.0", "@nomiclabs/hardhat-etherscan": "^3.0.0", "@typechain/ethers-v5": "^10.1.0", @@ -68,15 +68,15 @@ "peerDependencies": { "@ethersproject/abi": "^5.4.7", "@ethersproject/providers": "^5.4.7", - "@nomicfoundation/hardhat-network-helpers": "^1.0.0", "@nomicfoundation/hardhat-chai-matchers": "^1.0.0", + "@nomicfoundation/hardhat-network-helpers": "^1.0.0", "@nomiclabs/hardhat-ethers": "^2.0.0", "@nomiclabs/hardhat-etherscan": "^3.0.0", + "@typechain/ethers-v5": "^10.1.0", + "@typechain/hardhat": "^6.1.2", "@types/chai": "^4.2.0", "@types/mocha": "^9.1.0", "@types/node": ">=12.0.0", - "@typechain/ethers-v5": "^10.1.0", - "@typechain/hardhat": "^6.1.2", "chai": "^4.2.0", "ethers": "^5.4.7", "hardhat": "^2.11.0", diff --git a/packages/hardhat-truffle4/package.json b/packages/hardhat-truffle4/package.json index 8471d71e73..a0183015c5 100644 --- a/packages/hardhat-truffle4/package.json +++ b/packages/hardhat-truffle4/package.json @@ -22,6 +22,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git 
a/packages/hardhat-truffle5/package.json b/packages/hardhat-truffle5/package.json index 5bf7d23528..bffae3f2f3 100644 --- a/packages/hardhat-truffle5/package.json +++ b/packages/hardhat-truffle5/package.json @@ -22,6 +22,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/packages/hardhat-vyper/package.json b/packages/hardhat-vyper/package.json index 671d42c0a2..4e14888034 100644 --- a/packages/hardhat-vyper/package.json +++ b/packages/hardhat-vyper/package.json @@ -21,6 +21,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\"", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/packages/hardhat-waffle/package.json b/packages/hardhat-waffle/package.json index 4462d540c0..d1370c87c4 100644 --- a/packages/hardhat-waffle/package.json +++ b/packages/hardhat-waffle/package.json @@ -21,6 +21,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/packages/hardhat-web3-legacy/package.json b/packages/hardhat-web3-legacy/package.json index d2c2116eb6..865daba491 100644 --- a/packages/hardhat-web3-legacy/package.json +++ b/packages/hardhat-web3-legacy/package.json @@ -22,6 +22,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/packages/hardhat-web3/package.json b/packages/hardhat-web3/package.json index f3bffd8a32..a4546c6277 100644 --- a/packages/hardhat-web3/package.json +++ b/packages/hardhat-web3/package.json @@ -22,6 +22,7 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit && node web3-lazy-object-tests/when-accessing-web3-class.js && node web3-lazy-object-tests/when-accessing-web3-object.js && node web3-lazy-object-tests/when-requiring-web3-module.js", + "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf dist" diff --git a/rust-toolchain b/rust-toolchain new file mode 100644 index 0000000000..58e4eb6b29 --- /dev/null +++ b/rust-toolchain @@ -0,0 +1 @@ +1.63 diff --git a/yarn.lock b/yarn.lock index 69de1afc93..57b45024d6 100644 --- a/yarn.lock +++ b/yarn.lock @@ -10,90 +10,90 @@ "@babel/highlight" "^7.10.4" "@babel/code-frame@^7.0.0": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.16.7.tgz#44416b6bd7624b998f5b1af5d470856c40138789" - integrity sha512-iAXqUn8IIeBTNd72xsFlgaXHkMBMt6y4HJp1tIaK465CWLT/fG1aqB7ykr95gHHmlBdGbFeWWfyB4NJJ0nmeIg== + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.18.6.tgz#3b25d38c89600baa2dcc219edfa88a74eb2c427a" + integrity 
sha512-TDCmlK5eOvH+eH7cdAFlNXeVJqWIQ7gW9tY1GJIpUtFb6CmjVyq2VM3u71bOyR8CRihcCgMUYoDNyLXao3+70Q== dependencies: - "@babel/highlight" "^7.16.7" + "@babel/highlight" "^7.18.6" -"@babel/helper-validator-identifier@^7.16.7": - version "7.16.7" - resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.16.7.tgz#e8c602438c4a8195751243da9031d1607d247cad" - integrity sha512-hsEnFemeiW4D08A5gUAZxLBTXpZ39P+a+DGDsHw1yxqyQ/jzFEnxf5uTEGp+3bzAbNOxU1paTgYS4ECU/IgfDw== +"@babel/helper-validator-identifier@^7.18.6": + version "7.19.1" + resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.19.1.tgz#7eea834cf32901ffdc1a7ee555e2f9c27e249ca2" + integrity sha512-awrNfaMtnHUr653GgGEs++LlAvW6w+DcPrOliSMXWCKo597CwL5Acf/wWdNkf/tfEQE3mjkeD1YOVZOUV/od1w== -"@babel/highlight@^7.10.4", "@babel/highlight@^7.16.7": - version "7.17.12" - resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.17.12.tgz#257de56ee5afbd20451ac0a75686b6b404257351" - integrity sha512-7yykMVF3hfZY2jsHZEEgLc+3x4o1O+fYyULu11GynEUQNwB6lua+IIQn1FiJxNucd5UlyJryrwsOh8PL9Sn8Qg== +"@babel/highlight@^7.10.4", "@babel/highlight@^7.18.6": + version "7.18.6" + resolved "https://registry.yarnpkg.com/@babel/highlight/-/highlight-7.18.6.tgz#81158601e93e2563795adcbfbdf5d64be3f2ecdf" + integrity sha512-u7stbOuYjaPezCuLj29hNW1v64M2Md2qupEKP1fHc7WdOA3DgLh37suiSrZYY7haUB7iBeQZ9P1uiRF359do3g== dependencies: - "@babel/helper-validator-identifier" "^7.16.7" + "@babel/helper-validator-identifier" "^7.18.6" chalk "^2.0.0" js-tokens "^4.0.0" "@babel/runtime@^7.10.4", "@babel/runtime@^7.4.4", "@babel/runtime@^7.5.5": - version "7.18.3" - resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.18.3.tgz#c7b654b57f6f63cf7f8b418ac9ca04408c4579f4" - integrity sha512-38Y8f7YUhce/K7RMwTp7m0uCumpv9hZkitCbBClqQIow1qSbCvGkcegKOXpEWCQLfWmevgRiWokZ1GkpfhbZug== + version "7.19.4" + resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.19.4.tgz#a42f814502ee467d55b38dd1c256f53a7b885c78" + integrity sha512-EXpLCrk55f+cYqmHsSR+yD/0gAIMxxA9QK9lnQWzhMCvt+YmoBN7Zx94s++Kv0+unHk39vxNO8t+CMA2WSS3wA== dependencies: regenerator-runtime "^0.13.4" -"@changesets/apply-release-plan@^6.0.0": - version "6.0.0" - resolved "https://registry.yarnpkg.com/@changesets/apply-release-plan/-/apply-release-plan-6.0.0.tgz#6c663ff99d919bba3902343d76c35cbbbb046520" - integrity sha512-gp6nIdVdfYdwKww2+f8whckKmvfE4JEm4jJgBhTmooi0uzHWhnxvk6JIzQi89qEAMINN0SeVNnXiAtbFY0Mj3w== +"@changesets/apply-release-plan@^6.1.1": + version "6.1.1" + resolved "https://registry.yarnpkg.com/@changesets/apply-release-plan/-/apply-release-plan-6.1.1.tgz#14ec261c11c9b90d110a83b8b96412ddb7303ddf" + integrity sha512-LaQiP/Wf0zMVR0HNrLQAjz3rsNsr0d/RlnP6Ef4oi8VafOwnY1EoWdK4kssuUJGgNgDyHpomS50dm8CU3D7k7g== dependencies: "@babel/runtime" "^7.10.4" - "@changesets/config" "^2.0.0" + "@changesets/config" "^2.2.0" "@changesets/get-version-range-type" "^0.3.2" - "@changesets/git" "^1.3.2" - "@changesets/types" "^5.0.0" + "@changesets/git" "^1.5.0" + "@changesets/types" "^5.2.0" "@manypkg/get-packages" "^1.1.3" detect-indent "^6.0.0" fs-extra "^7.0.1" lodash.startcase "^4.4.0" outdent "^0.5.0" - prettier "^1.19.1" + prettier "^2.7.1" resolve-from "^5.0.0" semver "^5.4.1" -"@changesets/assemble-release-plan@^5.1.3": - version "5.1.3" - resolved "https://registry.yarnpkg.com/@changesets/assemble-release-plan/-/assemble-release-plan-5.1.3.tgz#b415c5db64e5a30c53aed8c1adc5ab4c4aaad283" - integrity 
sha512-I+TTkUoqvxBEuDLoJfJYKDXIJ+nyiTbVJ8KGhpXEsLq4N/ms/AStSbouJwF2d/p3cB+RCPr5+gXh31GSN4kA7w== +"@changesets/assemble-release-plan@^5.2.2": + version "5.2.2" + resolved "https://registry.yarnpkg.com/@changesets/assemble-release-plan/-/assemble-release-plan-5.2.2.tgz#9824f14a7a6e411c7153f1ccc2a42bbe35688129" + integrity sha512-B1qxErQd85AeZgZFZw2bDKyOfdXHhG+X5S+W3Da2yCem8l/pRy4G/S7iOpEcMwg6lH8q2ZhgbZZwZ817D+aLuQ== dependencies: "@babel/runtime" "^7.10.4" "@changesets/errors" "^0.1.4" - "@changesets/get-dependents-graph" "^1.3.2" - "@changesets/types" "^5.0.0" + "@changesets/get-dependents-graph" "^1.3.4" + "@changesets/types" "^5.2.0" "@manypkg/get-packages" "^1.1.3" semver "^5.4.1" -"@changesets/changelog-git@^0.1.11": - version "0.1.11" - resolved "https://registry.yarnpkg.com/@changesets/changelog-git/-/changelog-git-0.1.11.tgz#80eb45d3562aba2164f25ccc31ac97b9dcd1ded3" - integrity sha512-sWJvAm+raRPeES9usNpZRkooeEB93lOpUN0Lmjz5vhVAb7XGIZrHEJ93155bpE1S0c4oJ5Di9ZWgzIwqhWP/Wg== +"@changesets/changelog-git@^0.1.13": + version "0.1.13" + resolved "https://registry.yarnpkg.com/@changesets/changelog-git/-/changelog-git-0.1.13.tgz#182e130add456255d8ee2b4c8eaf88048944aaaf" + integrity sha512-zvJ50Q+EUALzeawAxax6nF2WIcSsC5PwbuLeWkckS8ulWnuPYx8Fn/Sjd3rF46OzeKA8t30loYYV6TIzp4DIdg== dependencies: - "@changesets/types" "^5.0.0" + "@changesets/types" "^5.2.0" "@changesets/cli@^2.16.0": - version "2.23.0" - resolved "https://registry.yarnpkg.com/@changesets/cli/-/cli-2.23.0.tgz#e325b2d1b0484188671f684773b8cd5d42d068f1" - integrity sha512-Gi3tMi0Vr6eNd8GX6q73tbOm9XOzGfuLEm4PYVeWG2neg5DlRGNOjYwrFULJ/An3N9MHtHn4r5h1Qvnju9Ijug== + version "2.25.0" + resolved "https://registry.yarnpkg.com/@changesets/cli/-/cli-2.25.0.tgz#c338fefe69daa8348a504e7f54382eb1a4bb7a10" + integrity sha512-Svu5KD2enurVHGEEzCRlaojrHjVYgF9srmMP9VQSy9c1TspX6C9lDPpulsSNIjYY9BuU/oiWpjBgR7RI9eQiAA== dependencies: "@babel/runtime" "^7.10.4" - "@changesets/apply-release-plan" "^6.0.0" - "@changesets/assemble-release-plan" "^5.1.3" - "@changesets/changelog-git" "^0.1.11" - "@changesets/config" "^2.0.0" + "@changesets/apply-release-plan" "^6.1.1" + "@changesets/assemble-release-plan" "^5.2.2" + "@changesets/changelog-git" "^0.1.13" + "@changesets/config" "^2.2.0" "@changesets/errors" "^0.1.4" - "@changesets/get-dependents-graph" "^1.3.2" - "@changesets/get-release-plan" "^3.0.9" - "@changesets/git" "^1.3.2" + "@changesets/get-dependents-graph" "^1.3.4" + "@changesets/get-release-plan" "^3.0.15" + "@changesets/git" "^1.5.0" "@changesets/logger" "^0.0.5" - "@changesets/pre" "^1.0.11" - "@changesets/read" "^0.5.5" - "@changesets/types" "^5.0.0" - "@changesets/write" "^0.1.8" + "@changesets/pre" "^1.0.13" + "@changesets/read" "^0.5.8" + "@changesets/types" "^5.2.0" + "@changesets/write" "^0.2.1" "@manypkg/get-packages" "^1.1.3" "@types/is-ci" "^3.0.0" "@types/semver" "^6.0.0" @@ -114,15 +114,15 @@ term-size "^2.1.0" tty-table "^4.1.5" -"@changesets/config@^2.0.0": - version "2.0.0" - resolved "https://registry.yarnpkg.com/@changesets/config/-/config-2.0.0.tgz#1770fdfeba2155cf07154c37e96b55cbd27969f0" - integrity sha512-r5bIFY6CN3K6SQ+HZbjyE3HXrBIopONR47mmX7zUbORlybQXtympq9rVAOzc0Oflbap8QeIexc+hikfZoREXDg== +"@changesets/config@^2.2.0": + version "2.2.0" + resolved "https://registry.yarnpkg.com/@changesets/config/-/config-2.2.0.tgz#382f6cd801fa56273942659114c8060378dfe066" + integrity sha512-GGaokp3nm5FEDk/Fv2PCRcQCOxGKKPRZ7prcMqxEr7VSsG75MnChQE8plaW1k6V8L2bJE+jZWiRm19LbnproOw== dependencies: "@changesets/errors" "^0.1.4" - 
"@changesets/get-dependents-graph" "^1.3.2" + "@changesets/get-dependents-graph" "^1.3.4" "@changesets/logger" "^0.0.5" - "@changesets/types" "^5.0.0" + "@changesets/types" "^5.2.0" "@manypkg/get-packages" "^1.1.3" fs-extra "^7.0.1" micromatch "^4.0.2" @@ -134,28 +134,28 @@ dependencies: extendable-error "^0.1.5" -"@changesets/get-dependents-graph@^1.3.2": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@changesets/get-dependents-graph/-/get-dependents-graph-1.3.2.tgz#f3ec7ce75f4afb6e3e4b6a87fde065f552c85998" - integrity sha512-tsqA6qZRB86SQuApSoDvI8yEWdyIlo/WLI4NUEdhhxLMJ0dapdeT6rUZRgSZzK1X2nv5YwR0MxQBbDAiDibKrg== +"@changesets/get-dependents-graph@^1.3.4": + version "1.3.4" + resolved "https://registry.yarnpkg.com/@changesets/get-dependents-graph/-/get-dependents-graph-1.3.4.tgz#d8bf537f45a7ff773da99143675f49e250996838" + integrity sha512-+C4AOrrFY146ydrgKOo5vTZfj7vetNu1tWshOID+UjPUU9afYGDXI8yLnAeib1ffeBXV3TuGVcyphKpJ3cKe+A== dependencies: - "@changesets/types" "^5.0.0" + "@changesets/types" "^5.2.0" "@manypkg/get-packages" "^1.1.3" chalk "^2.1.0" fs-extra "^7.0.1" semver "^5.4.1" -"@changesets/get-release-plan@^3.0.9": - version "3.0.9" - resolved "https://registry.yarnpkg.com/@changesets/get-release-plan/-/get-release-plan-3.0.9.tgz#d445660f3679cb65e05e02adfbca037a25b45943" - integrity sha512-5C1r4DcOjVxcCvPmXpymeyT6mdSTLCNiB2L+5uf19BRkDKndJdIQorH5Fe2XBR2nHUcZQFT+2TXDzCepat969w== +"@changesets/get-release-plan@^3.0.15": + version "3.0.15" + resolved "https://registry.yarnpkg.com/@changesets/get-release-plan/-/get-release-plan-3.0.15.tgz#55577b235b785125a462d5d2a2dffe4dbf94e590" + integrity sha512-W1tFwxE178/en+zSj/Nqbc3mvz88mcdqUMJhRzN1jDYqN3QI4ifVaRF9mcWUU+KI0gyYEtYR65tour690PqTcA== dependencies: "@babel/runtime" "^7.10.4" - "@changesets/assemble-release-plan" "^5.1.3" - "@changesets/config" "^2.0.0" - "@changesets/pre" "^1.0.11" - "@changesets/read" "^0.5.5" - "@changesets/types" "^5.0.0" + "@changesets/assemble-release-plan" "^5.2.2" + "@changesets/config" "^2.2.0" + "@changesets/pre" "^1.0.13" + "@changesets/read" "^0.5.8" + "@changesets/types" "^5.2.0" "@manypkg/get-packages" "^1.1.3" "@changesets/get-version-range-type@^0.3.2": @@ -163,14 +163,14 @@ resolved "https://registry.yarnpkg.com/@changesets/get-version-range-type/-/get-version-range-type-0.3.2.tgz#8131a99035edd11aa7a44c341cbb05e668618c67" integrity sha512-SVqwYs5pULYjYT4op21F2pVbcrca4qA/bAA3FmFXKMN7Y+HcO8sbZUTx3TAy2VXulP2FACd1aC7f2nTuqSPbqg== -"@changesets/git@^1.3.2": - version "1.3.2" - resolved "https://registry.yarnpkg.com/@changesets/git/-/git-1.3.2.tgz#336051d9a6d965806b1bc473559a9a2cc70773a6" - integrity sha512-p5UL+urAg0Nnpt70DLiBe2iSsMcDubTo9fTOD/61krmcJ466MGh71OHwdAwu1xG5+NKzeysdy1joRTg8CXcEXA== +"@changesets/git@^1.5.0": + version "1.5.0" + resolved "https://registry.yarnpkg.com/@changesets/git/-/git-1.5.0.tgz#71bbcf11f3b346d56eeaf3d3201e6dc3e270ea5a" + integrity sha512-Xo8AT2G7rQJSwV87c8PwMm6BAc98BnufRMsML7m7Iw8Or18WFvFmxqG5aOL5PBvhgq9KrKvaeIBNIymracSuHg== dependencies: "@babel/runtime" "^7.10.4" "@changesets/errors" "^0.1.4" - "@changesets/types" "^5.0.0" + "@changesets/types" "^5.2.0" "@manypkg/get-packages" "^1.1.3" is-subdir "^1.1.1" spawndamnit "^2.0.0" @@ -182,35 +182,35 @@ dependencies: chalk "^2.1.0" -"@changesets/parse@^0.3.13": - version "0.3.13" - resolved "https://registry.yarnpkg.com/@changesets/parse/-/parse-0.3.13.tgz#82788c1fc18da4750b07357a7a06142d0d975aa1" - integrity sha512-wh9Ifa0dungY6d2nMz6XxF6FZ/1I7j+mEgPAqrIyKS64nifTh1Ua82qKKMMK05CL7i4wiB2NYc3SfnnCX3RVeA== 
+"@changesets/parse@^0.3.15": + version "0.3.15" + resolved "https://registry.yarnpkg.com/@changesets/parse/-/parse-0.3.15.tgz#1bc74f8c43b0861d71f4fccf78950411004ba308" + integrity sha512-3eDVqVuBtp63i+BxEWHPFj2P1s3syk0PTrk2d94W9JD30iG+OER0Y6n65TeLlY8T2yB9Fvj6Ev5Gg0+cKe/ZUA== dependencies: - "@changesets/types" "^5.0.0" + "@changesets/types" "^5.2.0" js-yaml "^3.13.1" -"@changesets/pre@^1.0.11": - version "1.0.11" - resolved "https://registry.yarnpkg.com/@changesets/pre/-/pre-1.0.11.tgz#46a56790fdceabd03407559bbf91340c8e83fb6a" - integrity sha512-CXZnt4SV9waaC9cPLm7818+SxvLKIDHUxaiTXnJYDp1c56xIexx1BNfC1yMuOdzO2a3rAIcZua5Odxr3dwSKfg== +"@changesets/pre@^1.0.13": + version "1.0.13" + resolved "https://registry.yarnpkg.com/@changesets/pre/-/pre-1.0.13.tgz#49c3ae8bb444a1ce3e0fe4cb21f238318b6763e9" + integrity sha512-jrZc766+kGZHDukjKhpBXhBJjVQMied4Fu076y9guY1D3H622NOw8AQaLV3oQsDtKBTrT2AUFjt9Z2Y9Qx+GfA== dependencies: "@babel/runtime" "^7.10.4" "@changesets/errors" "^0.1.4" - "@changesets/types" "^5.0.0" + "@changesets/types" "^5.2.0" "@manypkg/get-packages" "^1.1.3" fs-extra "^7.0.1" -"@changesets/read@^0.5.5": - version "0.5.5" - resolved "https://registry.yarnpkg.com/@changesets/read/-/read-0.5.5.tgz#9ed90ef3e9f1ba3436ba5580201854a3f4163058" - integrity sha512-bzonrPWc29Tsjvgh+8CqJ0apQOwWim0zheeD4ZK44ApSa/GudnZJTODtA3yNOOuQzeZmL0NUebVoHIurtIkA7w== +"@changesets/read@^0.5.8": + version "0.5.8" + resolved "https://registry.yarnpkg.com/@changesets/read/-/read-0.5.8.tgz#84e24fd12e6759cef090088261c08b1dfe0f350e" + integrity sha512-eYaNfxemgX7f7ELC58e7yqQICW5FB7V+bd1lKt7g57mxUrTveYME+JPaBPpYx02nP53XI6CQp6YxnR9NfmFPKw== dependencies: "@babel/runtime" "^7.10.4" - "@changesets/git" "^1.3.2" + "@changesets/git" "^1.5.0" "@changesets/logger" "^0.0.5" - "@changesets/parse" "^0.3.13" - "@changesets/types" "^5.0.0" + "@changesets/parse" "^0.3.15" + "@changesets/types" "^5.2.0" chalk "^2.1.0" fs-extra "^7.0.1" p-filter "^2.1.0" @@ -220,21 +220,21 @@ resolved "https://registry.yarnpkg.com/@changesets/types/-/types-4.1.0.tgz#fb8f7ca2324fd54954824e864f9a61a82cb78fe0" integrity sha512-LDQvVDv5Kb50ny2s25Fhm3d9QSZimsoUGBsUioj6MC3qbMUCuC8GPIvk/M6IvXx3lYhAs0lwWUQLb+VIEUCECw== -"@changesets/types@^5.0.0": - version "5.0.0" - resolved "https://registry.yarnpkg.com/@changesets/types/-/types-5.0.0.tgz#d5eb52d074bc0358ce47d54bca54370b907812a0" - integrity sha512-IT1kBLSbAgTS4WtpU6P5ko054hq12vk4tgeIFRVE7Vnm4a/wgbNvBalgiKP0MjEXbCkZbItiGQHkCGxYWR55sA== +"@changesets/types@^5.2.0": + version "5.2.0" + resolved "https://registry.yarnpkg.com/@changesets/types/-/types-5.2.0.tgz#c4927f5bf9668f778c12b4226cfd07a1f5b79c9b" + integrity sha512-km/66KOqJC+eicZXsm2oq8A8bVTSpkZJ60iPV/Nl5Z5c7p9kk8xxh6XGRTlnludHldxOOfudhnDN2qPxtHmXzA== -"@changesets/write@^0.1.8": - version "0.1.8" - resolved "https://registry.yarnpkg.com/@changesets/write/-/write-0.1.8.tgz#feed408f644c496bc52afc4dd1353670b4152ecb" - integrity sha512-oIHeFVMuP6jf0TPnKPpaFpvvAf3JBc+s2pmVChbeEgQTBTALoF51Z9kqxQfG4XONZPHZnqkmy564c7qohhhhTQ== +"@changesets/write@^0.2.1": + version "0.2.1" + resolved "https://registry.yarnpkg.com/@changesets/write/-/write-0.2.1.tgz#c00d95851e2ca70385434a360a90ead9a1d07c74" + integrity sha512-KUd49nt2fnYdGixIqTi1yVE1nAoZYUMdtB3jBfp77IMqjZ65hrmZE5HdccDlTeClZN0420ffpnfET3zzeY8pdw== dependencies: "@babel/runtime" "^7.10.4" - "@changesets/types" "^5.0.0" + "@changesets/types" "^5.2.0" fs-extra "^7.0.1" human-id "^1.0.2" - prettier "^1.19.1" + prettier "^2.7.1" "@ensdomains/address-encoder@^0.1.7": version "0.1.9" @@ -347,15 +347,15 @@ 
patch-package "^6.2.2" postinstall-postinstall "^2.1.0" -"@ethereumjs/common@^2.3.0", "@ethereumjs/common@^2.4.0", "@ethereumjs/common@^2.5.0", "@ethereumjs/common@^2.6.4": - version "2.6.4" - resolved "https://registry.yarnpkg.com/@ethereumjs/common/-/common-2.6.4.tgz#1b3cdd3aa4ee3b0ca366756fc35e4a03022a01cc" - integrity sha512-RDJh/R/EAr+B7ZRg5LfJ0BIpf/1LydFgYdvZEuTraojCbVypO2sQ+QnpP5u2wJf9DASyooKqu8O4FJEWUV6NXw== +"@ethereumjs/common@^2.5.0", "@ethereumjs/common@^2.6.4": + version "2.6.5" + resolved "https://registry.yarnpkg.com/@ethereumjs/common/-/common-2.6.5.tgz#0a75a22a046272579d91919cb12d84f2756e8d30" + integrity sha512-lRyVQOeCDaIVtgfbowla32pzeDv2Obr8oR8Put5RdUBNRGr1VGPGQNGP6elWIpgK3YdpzqTOh4GyUGOureVeeA== dependencies: crc-32 "^1.2.0" - ethereumjs-util "^7.1.4" + ethereumjs-util "^7.1.5" -"@ethereumjs/tx@^3.2.1", "@ethereumjs/tx@^3.3.2": +"@ethereumjs/tx@^3.3.2": version "3.5.2" resolved "https://registry.yarnpkg.com/@ethereumjs/tx/-/tx-3.5.2.tgz#197b9b6299582ad84f9527ca961466fce2296c1c" integrity sha512-gQDNJWKrSDGu2w7w0PzVXVBNMzb7wwdDOmOqczmhNjqFxFuIbhVJDwiGEnxFNC2/b8ifcZzY7MLcluizohRzNw== @@ -378,37 +378,7 @@ "@ethersproject/properties" ">=5.0.0-beta.131" "@ethersproject/strings" ">=5.0.0-beta.130" -"@ethersproject/abi@5.0.7": - version "5.0.7" - resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.0.7.tgz#79e52452bd3ca2956d0e1c964207a58ad1a0ee7b" - integrity sha512-Cqktk+hSIckwP/W8O47Eef60VwmoSC/L3lY0+dIBhQPCNn9E4V7rwmm2aFrNRRDJfFlGuZ1khkQUOc3oBX+niw== - dependencies: - "@ethersproject/address" "^5.0.4" - "@ethersproject/bignumber" "^5.0.7" - "@ethersproject/bytes" "^5.0.4" - "@ethersproject/constants" "^5.0.4" - "@ethersproject/hash" "^5.0.4" - "@ethersproject/keccak256" "^5.0.3" - "@ethersproject/logger" "^5.0.5" - "@ethersproject/properties" "^5.0.3" - "@ethersproject/strings" "^5.0.4" - -"@ethersproject/abi@5.6.3", "@ethersproject/abi@^5.0.0-beta.146", "@ethersproject/abi@^5.1.2", "@ethersproject/abi@^5.4.7", "@ethersproject/abi@^5.5.0", "@ethersproject/abi@^5.6.3": - version "5.6.3" - resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.6.3.tgz#2d643544abadf6e6b63150508af43475985c23db" - integrity sha512-CxKTdoZY4zDJLWXG6HzNH6znWK0M79WzzxHegDoecE3+K32pzfHOzuXg2/oGSTecZynFgpkjYXNPOqXVJlqClw== - dependencies: - "@ethersproject/address" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/constants" "^5.6.1" - "@ethersproject/hash" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/strings" "^5.6.1" - -"@ethersproject/abi@^5.0.9": +"@ethersproject/abi@5.7.0", "@ethersproject/abi@^5.0.0-beta.146", "@ethersproject/abi@^5.0.9", "@ethersproject/abi@^5.1.2", "@ethersproject/abi@^5.4.7", "@ethersproject/abi@^5.5.0", "@ethersproject/abi@^5.6.3", "@ethersproject/abi@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/abi/-/abi-5.7.0.tgz#b3f3e045bbbeed1af3947335c247ad625a44e449" integrity sha512-351ktp42TiRcYB3H1OP8yajPeAQstMW/yCFokj/AthP9bLHzQFPlOrxOcwYEDkUAICmOHljvN4K39OMTMUa9RA== @@ -423,20 +393,7 @@ "@ethersproject/properties" "^5.7.0" "@ethersproject/strings" "^5.7.0" -"@ethersproject/abstract-provider@5.6.1", "@ethersproject/abstract-provider@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.6.1.tgz#02ddce150785caf0c77fe036a0ebfcee61878c59" - integrity 
sha512-BxlIgogYJtp1FS8Muvj8YfdClk3unZH0vRMVX791Z9INBNT/kuACZ9GzaY1Y4yFq+YSy6/w4gzj3HCRKrK9hsQ== - dependencies: - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/networks" "^5.6.3" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/transactions" "^5.6.2" - "@ethersproject/web" "^5.6.1" - -"@ethersproject/abstract-provider@^5.7.0": +"@ethersproject/abstract-provider@5.7.0", "@ethersproject/abstract-provider@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/abstract-provider/-/abstract-provider-5.7.0.tgz#b0a8550f88b6bf9d51f90e4795d48294630cb9ef" integrity sha512-R41c9UkchKCpAqStMYUpdunjo3pkEvZC3FAwZn5S5MGbXoMQOHIdHItezTETxAO5bevtMApSyEhn9+CHcDsWBw== @@ -449,18 +406,7 @@ "@ethersproject/transactions" "^5.7.0" "@ethersproject/web" "^5.7.0" -"@ethersproject/abstract-signer@5.6.2", "@ethersproject/abstract-signer@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.6.2.tgz#491f07fc2cbd5da258f46ec539664713950b0b33" - integrity sha512-n1r6lttFBG0t2vNiI3HoWaS/KdOt8xyDjzlP2cuevlWLG6EX0OwcKLyG/Kp/cuwNxdy/ous+R/DEMdTUwWQIjQ== - dependencies: - "@ethersproject/abstract-provider" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - -"@ethersproject/abstract-signer@^5.7.0": +"@ethersproject/abstract-signer@5.7.0", "@ethersproject/abstract-signer@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/abstract-signer/-/abstract-signer-5.7.0.tgz#13f4f32117868452191a4649723cb086d2b596b2" integrity sha512-a16V8bq1/Cz+TGCkE2OPMTOUDLS3grCpdjoJCYNnVBbdYEMSgKrU0+B90s8b6H+ByYTBZN7a3g76jdIJi7UfKQ== @@ -471,18 +417,7 @@ "@ethersproject/logger" "^5.7.0" "@ethersproject/properties" "^5.7.0" -"@ethersproject/address@5.6.1", "@ethersproject/address@>=5.0.0-beta.128", "@ethersproject/address@^5.0.2", "@ethersproject/address@^5.0.4", "@ethersproject/address@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.6.1.tgz#ab57818d9aefee919c5721d28cd31fd95eff413d" - integrity sha512-uOgF0kS5MJv9ZvCz7x6T2EXJSzotiybApn4XlOgoTX0xdtyVIJ7pF+6cGPxiEq/dpBiTfMiw7Yc81JcwhSYA0Q== - dependencies: - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/rlp" "^5.6.1" - -"@ethersproject/address@^5.7.0": +"@ethersproject/address@5.7.0", "@ethersproject/address@>=5.0.0-beta.128", "@ethersproject/address@^5.0.2", "@ethersproject/address@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/address/-/address-5.7.0.tgz#19b56c4d74a3b0a46bfdbb6cfcc0a153fc697f37" integrity sha512-9wYhYt7aghVGo758POM5nqcOMaE168Q6aRLJZwUmiqSrAungkG74gSSeKEIR7ukixesdRZGPgVqme6vmxs1fkA== @@ -493,38 +428,22 @@ "@ethersproject/logger" "^5.7.0" "@ethersproject/rlp" "^5.7.0" -"@ethersproject/base64@5.6.1", "@ethersproject/base64@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.6.1.tgz#2c40d8a0310c9d1606c2c37ae3092634b41d87cb" - integrity sha512-qB76rjop6a0RIYYMiB4Eh/8n+Hxu2NIZm8S/Q7kNo5pmZfXhHGHmS4MinUainiBC54SCyRnwzL+KZjj8zbsSsw== - dependencies: - "@ethersproject/bytes" "^5.6.1" - -"@ethersproject/base64@^5.7.0": +"@ethersproject/base64@5.7.0", "@ethersproject/base64@^5.7.0": version "5.7.0" resolved 
"https://registry.yarnpkg.com/@ethersproject/base64/-/base64-5.7.0.tgz#ac4ee92aa36c1628173e221d0d01f53692059e1c" integrity sha512-Dr8tcHt2mEbsZr/mwTPIQAf3Ai0Bks/7gTw9dSqk1mQvhW3XvRlmDJr/4n+wg1JmCl16NZue17CDh8xb/vZ0sQ== dependencies: "@ethersproject/bytes" "^5.7.0" -"@ethersproject/basex@5.6.1", "@ethersproject/basex@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.6.1.tgz#badbb2f1d4a6f52ce41c9064f01eab19cc4c5305" - integrity sha512-a52MkVz4vuBXR06nvflPMotld1FJWSj2QT0985v7P/emPZO00PucFAkbcmq2vpVU7Ts7umKiSI6SppiLykVWsA== - dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/properties" "^5.6.0" - -"@ethersproject/bignumber@5.6.2", "@ethersproject/bignumber@>=5.0.0-beta.130", "@ethersproject/bignumber@^5.0.7", "@ethersproject/bignumber@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.6.2.tgz#72a0717d6163fab44c47bcc82e0c550ac0315d66" - integrity sha512-v7+EEUbhGqT3XJ9LMPsKvXYHFc8eHxTowFCG/HgJErmq4XHJ2WR7aeyICg3uTOAQ7Icn0GFHAohXEhxQHq4Ubw== +"@ethersproject/basex@5.7.0", "@ethersproject/basex@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/basex/-/basex-5.7.0.tgz#97034dc7e8938a8ca943ab20f8a5e492ece4020b" + integrity sha512-ywlh43GwZLv2Voc2gQVTKBoVQ1mti3d8HK5aMxsfu/nRDnMmNqaSJ3r3n85HBByT8OpoY96SXM1FogC533T4zw== dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - bn.js "^5.2.1" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/properties" "^5.7.0" -"@ethersproject/bignumber@^5.7.0": +"@ethersproject/bignumber@5.7.0", "@ethersproject/bignumber@>=5.0.0-beta.130", "@ethersproject/bignumber@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/bignumber/-/bignumber-5.7.0.tgz#e2f03837f268ba655ffba03a57853e18a18dc9c2" integrity sha512-n1CAdIHRWjSucQO3MC1zPSVgV/6dy/fjL9pMrPP9peL+QxEg9wOsVqwD4+818B6LUEtaXzVHQiuivzRoxPxUGw== @@ -533,65 +452,37 @@ "@ethersproject/logger" "^5.7.0" bn.js "^5.2.1" -"@ethersproject/bytes@5.6.1", "@ethersproject/bytes@>=5.0.0-beta.129", "@ethersproject/bytes@^5.0.4", "@ethersproject/bytes@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.6.1.tgz#24f916e411f82a8a60412344bf4a813b917eefe7" - integrity sha512-NwQt7cKn5+ZE4uDn+X5RAXLp46E1chXoaMmrxAyA0rblpxz8t58lVkrHXoRIn0lz1joQElQ8410GqhTqMOwc6g== - dependencies: - "@ethersproject/logger" "^5.6.0" - -"@ethersproject/bytes@^5.7.0": +"@ethersproject/bytes@5.7.0", "@ethersproject/bytes@>=5.0.0-beta.129", "@ethersproject/bytes@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/bytes/-/bytes-5.7.0.tgz#a00f6ea8d7e7534d6d87f47188af1148d71f155d" integrity sha512-nsbxwgFXWh9NyYWo+U8atvmMsSdKJprTcICAkvbBffT75qDocbuggBU0SJiVK2MuTrp0q+xvLkTnGMPK1+uA9A== dependencies: "@ethersproject/logger" "^5.7.0" -"@ethersproject/constants@5.6.1", "@ethersproject/constants@>=5.0.0-beta.128", "@ethersproject/constants@^5.0.4", "@ethersproject/constants@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.6.1.tgz#e2e974cac160dd101cf79fdf879d7d18e8cb1370" - integrity sha512-QSq9WVnZbxXYFftrjSjZDUshp6/eKp6qrtdBtUCm0QxCV5z1fG/w3kdlcsjMCQuQHUnAclKoK7XpXMezhRDOLg== - dependencies: - "@ethersproject/bignumber" "^5.6.2" - -"@ethersproject/constants@^5.7.0": +"@ethersproject/constants@5.7.0", "@ethersproject/constants@>=5.0.0-beta.128", "@ethersproject/constants@^5.7.0": version "5.7.0" resolved 
"https://registry.yarnpkg.com/@ethersproject/constants/-/constants-5.7.0.tgz#df80a9705a7e08984161f09014ea012d1c75295e" integrity sha512-DHI+y5dBNvkpYUMiRQyxRBYBefZkJfo70VUkUAsRjcPs47muV9evftfZ0PJVCXYbAiCgght0DtcF9srFQmIgWA== dependencies: "@ethersproject/bignumber" "^5.7.0" -"@ethersproject/contracts@5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.6.2.tgz#20b52e69ebc1b74274ff8e3d4e508de971c287bc" - integrity sha512-hguUA57BIKi6WY0kHvZp6PwPlWF87MCeB4B7Z7AbUpTxfFXFdn/3b0GmjZPagIHS+3yhcBJDnuEfU4Xz+Ks/8g== +"@ethersproject/contracts@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/contracts/-/contracts-5.7.0.tgz#c305e775abd07e48aa590e1a877ed5c316f8bd1e" + integrity sha512-5GJbzEU3X+d33CdfPhcyS+z8MzsTrBGk/sc+G+59+tPa9yFkl6HQ9D6L0QMgNTA9q8dT0XKxxkyp883XsQvbbg== dependencies: - "@ethersproject/abi" "^5.6.3" - "@ethersproject/abstract-provider" "^5.6.1" - "@ethersproject/abstract-signer" "^5.6.2" - "@ethersproject/address" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/constants" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/transactions" "^5.6.2" + "@ethersproject/abi" "^5.7.0" + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" -"@ethersproject/hash@5.6.1", "@ethersproject/hash@>=5.0.0-beta.128", "@ethersproject/hash@^5.0.4", "@ethersproject/hash@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.6.1.tgz#224572ea4de257f05b4abf8ae58b03a67e99b0f4" - integrity sha512-L1xAHurbaxG8VVul4ankNX5HgQ8PNCTrnVXEiFnE9xoRnaUcgfD12tZINtDinSllxPLCtGwguQxJ5E6keE84pA== - dependencies: - "@ethersproject/abstract-signer" "^5.6.2" - "@ethersproject/address" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/strings" "^5.6.1" - -"@ethersproject/hash@^5.7.0": +"@ethersproject/hash@5.7.0", "@ethersproject/hash@>=5.0.0-beta.128", "@ethersproject/hash@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/hash/-/hash-5.7.0.tgz#eb7aca84a588508369562e16e514b539ba5240a7" integrity sha512-qX5WrQfnah1EFnO5zJv1v46a8HW0+E5xuBBDTwMFZLuVTx0tbU2kkx15NqdjxecrLGatQN9FGQKpb1FKdHCt+g== @@ -606,52 +497,44 @@ "@ethersproject/properties" "^5.7.0" "@ethersproject/strings" "^5.7.0" -"@ethersproject/hdnode@5.6.2", "@ethersproject/hdnode@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.6.2.tgz#26f3c83a3e8f1b7985c15d1db50dc2903418b2d2" - integrity sha512-tERxW8Ccf9CxW2db3WsN01Qao3wFeRsfYY9TCuhmG0xNpl2IO8wgXU3HtWIZ49gUWPggRy4Yg5axU0ACaEKf1Q== - dependencies: - "@ethersproject/abstract-signer" "^5.6.2" - "@ethersproject/basex" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/pbkdf2" "^5.6.1" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/sha2" "^5.6.1" - "@ethersproject/signing-key" "^5.6.2" - "@ethersproject/strings" "^5.6.1" - "@ethersproject/transactions" "^5.6.2" - 
"@ethersproject/wordlists" "^5.6.1" - -"@ethersproject/json-wallets@5.6.1", "@ethersproject/json-wallets@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.6.1.tgz#3f06ba555c9c0d7da46756a12ac53483fe18dd91" - integrity sha512-KfyJ6Zwz3kGeX25nLihPwZYlDqamO6pfGKNnVMWWfEVVp42lTfCZVXXy5Ie8IZTN0HKwAngpIPi7gk4IJzgmqQ== - dependencies: - "@ethersproject/abstract-signer" "^5.6.2" - "@ethersproject/address" "^5.6.1" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/hdnode" "^5.6.2" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/pbkdf2" "^5.6.1" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/random" "^5.6.1" - "@ethersproject/strings" "^5.6.1" - "@ethersproject/transactions" "^5.6.2" - aes-js "3.0.0" - scrypt-js "3.0.1" +"@ethersproject/hdnode@5.7.0", "@ethersproject/hdnode@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/hdnode/-/hdnode-5.7.0.tgz#e627ddc6b466bc77aebf1a6b9e47405ca5aef9cf" + integrity sha512-OmyYo9EENBPPf4ERhR7oj6uAtUAhYGqOnIS+jE5pTXvdKBS99ikzq1E7Iv0ZQZ5V36Lqx1qZLeak0Ra16qpeOg== + dependencies: + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/basex" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/pbkdf2" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/wordlists" "^5.7.0" -"@ethersproject/keccak256@5.6.1", "@ethersproject/keccak256@>=5.0.0-beta.127", "@ethersproject/keccak256@^5.0.3", "@ethersproject/keccak256@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.6.1.tgz#b867167c9b50ba1b1a92bccdd4f2d6bd168a91cc" - integrity sha512-bB7DQHCTRDooZZdL3lk9wpL0+XuG3XLGHLh3cePnybsO3V0rdCAOQGpn/0R3aODmnTOOkCATJiD2hnL+5bwthA== +"@ethersproject/json-wallets@5.7.0", "@ethersproject/json-wallets@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/json-wallets/-/json-wallets-5.7.0.tgz#5e3355287b548c32b368d91014919ebebddd5360" + integrity sha512-8oee5Xgu6+RKgJTkvEMl2wDgSPSAQ9MB/3JYjFV9jlKvcYHUXZC+cQp0njgmxdHkYWn8s6/IqIZYm0YWCjO/0g== dependencies: - "@ethersproject/bytes" "^5.6.1" - js-sha3 "0.8.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hdnode" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/pbkdf2" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/random" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + aes-js "3.0.0" + scrypt-js "3.0.1" -"@ethersproject/keccak256@^5.7.0": +"@ethersproject/keccak256@5.7.0", "@ethersproject/keccak256@>=5.0.0-beta.127", "@ethersproject/keccak256@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/keccak256/-/keccak256-5.7.0.tgz#3186350c6e1cd6aba7940384ec7d6d9db01f335a" integrity sha512-2UcPboeL/iW+pSg6vZ6ydF8tCnv3Iu/8tUmLLzWWGzxWKFFqOBQFLo6uLUv6BDrLgCDfN28RJ/wtByx+jZ4KBg== @@ -659,95 +542,68 @@ "@ethersproject/bytes" "^5.7.0" js-sha3 "0.8.0" -"@ethersproject/logger@5.6.0", "@ethersproject/logger@>=5.0.0-beta.129", "@ethersproject/logger@^5.0.5", "@ethersproject/logger@^5.6.0": - version "5.6.0" - resolved 
"https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.6.0.tgz#d7db1bfcc22fd2e4ab574cba0bb6ad779a9a3e7a" - integrity sha512-BiBWllUROH9w+P21RzoxJKzqoqpkyM1pRnEKG69bulE9TSQD8SAIvTQqIMZmmCO8pUNkgLP1wndX1gKghSpBmg== - -"@ethersproject/logger@^5.7.0": +"@ethersproject/logger@5.7.0", "@ethersproject/logger@>=5.0.0-beta.129", "@ethersproject/logger@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/logger/-/logger-5.7.0.tgz#6ce9ae168e74fecf287be17062b590852c311892" integrity sha512-0odtFdXu/XHtjQXJYA3u9G0G8btm0ND5Cu8M7i5vhEcE8/HmF4Lbdqanwyv4uQTr2tx6b7fQRmgLrsnpQlmnig== -"@ethersproject/networks@5.6.3", "@ethersproject/networks@^5.6.3": - version "5.6.3" - resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.6.3.tgz#3ee3ab08f315b433b50c99702eb32e0cf31f899f" - integrity sha512-QZxRH7cA5Ut9TbXwZFiCyuPchdWi87ZtVNHWZd0R6YFgYtes2jQ3+bsslJ0WdyDe0i6QumqtoYqvY3rrQFRZOQ== - dependencies: - "@ethersproject/logger" "^5.6.0" - -"@ethersproject/networks@^5.7.0": - version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.0.tgz#df72a392f1a63a57f87210515695a31a245845ad" - integrity sha512-MG6oHSQHd4ebvJrleEQQ4HhVu8Ichr0RDYEfHzsVAVjHNM+w36x9wp9r+hf1JstMXtseXDtkiVoARAG6M959AA== +"@ethersproject/networks@5.7.1", "@ethersproject/networks@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/networks/-/networks-5.7.1.tgz#118e1a981d757d45ccea6bb58d9fd3d9db14ead6" + integrity sha512-n/MufjFYv3yFcUyfhnXotyDlNdFb7onmkSy8aQERi2PjNcnWQ66xXxa3XlS8nCcA8aJKJjIIMNJTC7tu80GwpQ== dependencies: "@ethersproject/logger" "^5.7.0" -"@ethersproject/pbkdf2@5.6.1", "@ethersproject/pbkdf2@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.6.1.tgz#f462fe320b22c0d6b1d72a9920a3963b09eb82d1" - integrity sha512-k4gRQ+D93zDRPNUfmduNKq065uadC2YjMP/CqwwX5qG6R05f47boq6pLZtV/RnC4NZAYOPH1Cyo54q0c9sshRQ== - dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/sha2" "^5.6.1" - -"@ethersproject/properties@5.6.0", "@ethersproject/properties@>=5.0.0-beta.131", "@ethersproject/properties@^5.0.3", "@ethersproject/properties@^5.6.0": - version "5.6.0" - resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.6.0.tgz#38904651713bc6bdd5bdd1b0a4287ecda920fa04" - integrity sha512-szoOkHskajKePTJSZ46uHUWWkbv7TzP2ypdEK6jGMqJaEt2sb0jCgfBo0gH0m2HBpRixMuJ6TBRaQCF7a9DoCg== +"@ethersproject/pbkdf2@5.7.0", "@ethersproject/pbkdf2@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/pbkdf2/-/pbkdf2-5.7.0.tgz#d2267d0a1f6e123f3771007338c47cccd83d3102" + integrity sha512-oR/dBRZR6GTyaofd86DehG72hY6NpAjhabkhxgr3X2FpJtJuodEl2auADWBZfhDHgVCbu3/H/Ocq2uC6dpNjjw== dependencies: - "@ethersproject/logger" "^5.6.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" -"@ethersproject/properties@^5.7.0": +"@ethersproject/properties@5.7.0", "@ethersproject/properties@>=5.0.0-beta.131", "@ethersproject/properties@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/properties/-/properties-5.7.0.tgz#a6e12cb0439b878aaf470f1902a176033067ed30" integrity sha512-J87jy8suntrAkIZtecpxEPxY//szqr1mlBaYlQ0r4RCaiD2hjheqF9s1LVE8vVuJCXisjIP+JgtK/Do54ej4Sw== dependencies: "@ethersproject/logger" "^5.7.0" -"@ethersproject/providers@5.6.8", "@ethersproject/providers@^5.4.7": - version "5.6.8" - resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.6.8.tgz#22e6c57be215ba5545d3a46cf759d265bb4e879d" - 
integrity sha512-Wf+CseT/iOJjrGtAOf3ck9zS7AgPmr2fZ3N97r4+YXN3mBePTG2/bJ8DApl9mVwYL+RpYbNxMEkEp4mPGdwG/w== - dependencies: - "@ethersproject/abstract-provider" "^5.6.1" - "@ethersproject/abstract-signer" "^5.6.2" - "@ethersproject/address" "^5.6.1" - "@ethersproject/base64" "^5.6.1" - "@ethersproject/basex" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/constants" "^5.6.1" - "@ethersproject/hash" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/networks" "^5.6.3" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/random" "^5.6.1" - "@ethersproject/rlp" "^5.6.1" - "@ethersproject/sha2" "^5.6.1" - "@ethersproject/strings" "^5.6.1" - "@ethersproject/transactions" "^5.6.2" - "@ethersproject/web" "^5.6.1" +"@ethersproject/providers@5.7.2", "@ethersproject/providers@^5.4.7": + version "5.7.2" + resolved "https://registry.yarnpkg.com/@ethersproject/providers/-/providers-5.7.2.tgz#f8b1a4f275d7ce58cf0a2eec222269a08beb18cb" + integrity sha512-g34EWZ1WWAVgr4aptGlVBF8mhl3VWjv+8hoAnzStu8Ah22VHBsuGzP17eb6xDVRzw895G4W7vvx60lFFur/1Rg== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/base64" "^5.7.0" + "@ethersproject/basex" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/networks" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/random" "^5.7.0" + "@ethersproject/rlp" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/strings" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/web" "^5.7.0" bech32 "1.1.4" ws "7.4.6" -"@ethersproject/random@5.6.1", "@ethersproject/random@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.6.1.tgz#66915943981bcd3e11bbd43733f5c3ba5a790255" - integrity sha512-/wtPNHwbmng+5yi3fkipA8YBT59DdkGRoC2vWk09Dci/q5DlgnMkhIycjHlavrvrjJBkFjO/ueLyT+aUDfc4lA== - dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - -"@ethersproject/rlp@5.6.1", "@ethersproject/rlp@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.6.1.tgz#df8311e6f9f24dcb03d59a2bac457a28a4fe2bd8" - integrity sha512-uYjmcZx+DKlFUk7a5/W9aQVaoEC7+1MOBgNtvNg13+RnuUwT4F0zTovC0tmay5SmRslb29V1B7Y5KCri46WhuQ== +"@ethersproject/random@5.7.0", "@ethersproject/random@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/random/-/random-5.7.0.tgz#af19dcbc2484aae078bb03656ec05df66253280c" + integrity sha512-19WjScqRA8IIeWclFme75VMXSBvi4e6InrUNuaR4s5pTF2qNhcGdCUwdxUVGtDDqC00sDLCO93jPQoDUH4HVmQ== dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" -"@ethersproject/rlp@^5.7.0": +"@ethersproject/rlp@5.7.0", "@ethersproject/rlp@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/rlp/-/rlp-5.7.0.tgz#de39e4d5918b9d74d46de93af80b7685a9c21304" integrity sha512-rBxzX2vK8mVF7b0Tol44t5Tb8gomOHkj5guL+HhzQ1yBh/ydjGnpw6at+X6Iw0Kp3OzzzkcKp8N9r0W4kYSs9w== @@ -755,28 +611,16 @@ "@ethersproject/bytes" "^5.7.0" "@ethersproject/logger" "^5.7.0" -"@ethersproject/sha2@5.6.1", "@ethersproject/sha2@^5.6.1": - version "5.6.1" - resolved 
"https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.6.1.tgz#211f14d3f5da5301c8972a8827770b6fd3e51656" - integrity sha512-5K2GyqcW7G4Yo3uenHegbXRPDgARpWUiXc6RiF7b6i/HXUoWlb7uCARh7BAHg7/qT/Q5ydofNwiZcim9qpjB6g== - dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - hash.js "1.1.7" - -"@ethersproject/signing-key@5.6.2", "@ethersproject/signing-key@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.6.2.tgz#8a51b111e4d62e5a62aee1da1e088d12de0614a3" - integrity sha512-jVbu0RuP7EFpw82vHcL+GP35+KaNruVAZM90GxgQnGqB6crhBqW/ozBfFvdeImtmb4qPko0uxXjn8l9jpn0cwQ== +"@ethersproject/sha2@5.7.0", "@ethersproject/sha2@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/sha2/-/sha2-5.7.0.tgz#9a5f7a7824ef784f7f7680984e593a800480c9fb" + integrity sha512-gKlH42riwb3KYp0reLsFTokByAKoJdgFCwI+CCiX/k+Jm2mbNs6oOaCjYQSlI1+XBVejwH2KrmCbMAT/GnRDQw== dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - bn.js "^5.2.1" - elliptic "6.5.4" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/logger" "^5.7.0" hash.js "1.1.7" -"@ethersproject/signing-key@^5.7.0": +"@ethersproject/signing-key@5.7.0", "@ethersproject/signing-key@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/signing-key/-/signing-key-5.7.0.tgz#06b2df39411b00bc57c7c09b01d1e41cf1b16ab3" integrity sha512-MZdy2nL3wO0u7gkB4nA/pEf8lu1TlFswPNmy8AiYkfKTdO6eXBJyUdmHO/ehm/htHw9K/qF8ujnTyUAD+Ry54Q== @@ -788,28 +632,19 @@ elliptic "6.5.4" hash.js "1.1.7" -"@ethersproject/solidity@5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.6.1.tgz#5845e71182c66d32e6ec5eefd041fca091a473e2" - integrity sha512-KWqVLkUUoLBfL1iwdzUVlkNqAUIFMpbbeH0rgCfKmJp0vFtY4AsaN91gHKo9ZZLkC4UOm3cI3BmMV4N53BOq4g== - dependencies: - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/sha2" "^5.6.1" - "@ethersproject/strings" "^5.6.1" - -"@ethersproject/strings@5.6.1", "@ethersproject/strings@>=5.0.0-beta.130", "@ethersproject/strings@^5.0.4", "@ethersproject/strings@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.6.1.tgz#dbc1b7f901db822b5cafd4ebf01ca93c373f8952" - integrity sha512-2X1Lgk6Jyfg26MUnsHiT456U9ijxKUybz8IM1Vih+NJxYtXhmvKBcHOmvGqpFSVJ0nQ4ZCoIViR8XlRw1v/+Cw== +"@ethersproject/solidity@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/solidity/-/solidity-5.7.0.tgz#5e9c911d8a2acce2a5ebb48a5e2e0af20b631cb8" + integrity sha512-HmabMd2Dt/raavyaGukF4XxizWKhKQ24DoLtdNbBmNKUOPqwjsKQSdV9GQtj9CBEea9DlzETlVER1gYeXXBGaA== dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/constants" "^5.6.1" - "@ethersproject/logger" "^5.6.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/sha2" "^5.7.0" + "@ethersproject/strings" "^5.7.0" -"@ethersproject/strings@^5.7.0": +"@ethersproject/strings@5.7.0", "@ethersproject/strings@>=5.0.0-beta.130", "@ethersproject/strings@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/strings/-/strings-5.7.0.tgz#54c9d2a7c57ae8f1205c88a9d3a56471e14d5ed2" integrity sha512-/9nu+lj0YswRNSH0NXYqrh8775XNyEdUQAuf3f+SmOrnVewcJ5SBNAjF7lpgehKi4abvNNXyf+HX86czCdJ8Mg== @@ 
-818,22 +653,7 @@ "@ethersproject/constants" "^5.7.0" "@ethersproject/logger" "^5.7.0" -"@ethersproject/transactions@5.6.2", "@ethersproject/transactions@^5.0.0-beta.135", "@ethersproject/transactions@^5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.6.2.tgz#793a774c01ced9fe7073985bb95a4b4e57a6370b" - integrity sha512-BuV63IRPHmJvthNkkt9G70Ullx6AcM+SDc+a8Aw/8Yew6YwT51TcBKEp1P4oOQ/bP25I18JJr7rcFRgFtU9B2Q== - dependencies: - "@ethersproject/address" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/constants" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/rlp" "^5.6.1" - "@ethersproject/signing-key" "^5.6.2" - -"@ethersproject/transactions@^5.7.0": +"@ethersproject/transactions@5.7.0", "@ethersproject/transactions@^5.0.0-beta.135", "@ethersproject/transactions@^5.6.2", "@ethersproject/transactions@^5.7.0": version "5.7.0" resolved "https://registry.yarnpkg.com/@ethersproject/transactions/-/transactions-5.7.0.tgz#91318fc24063e057885a6af13fdb703e1f993d3b" integrity sha512-kmcNicCp1lp8qanMTC3RIikGgoJ80ztTyvtsFvCYpSCfkjhD0jZ2LOrnbcuxuToLIUYYf+4XwD1rP+B/erDIhQ== @@ -848,51 +668,40 @@ "@ethersproject/rlp" "^5.7.0" "@ethersproject/signing-key" "^5.7.0" -"@ethersproject/units@5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.6.1.tgz#ecc590d16d37c8f9ef4e89e2005bda7ddc6a4e6f" - integrity sha512-rEfSEvMQ7obcx3KWD5EWWx77gqv54K6BKiZzKxkQJqtpriVsICrktIQmKl8ReNToPeIYPnFHpXvKpi068YFZXw== - dependencies: - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/constants" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - -"@ethersproject/wallet@5.6.2": - version "5.6.2" - resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.6.2.tgz#cd61429d1e934681e413f4bc847a5f2f87e3a03c" - integrity sha512-lrgh0FDQPuOnHcF80Q3gHYsSUODp6aJLAdDmDV0xKCN/T7D99ta1jGVhulg3PY8wiXEngD0DfM0I2XKXlrqJfg== - dependencies: - "@ethersproject/abstract-provider" "^5.6.1" - "@ethersproject/abstract-signer" "^5.6.2" - "@ethersproject/address" "^5.6.1" - "@ethersproject/bignumber" "^5.6.2" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/hash" "^5.6.1" - "@ethersproject/hdnode" "^5.6.2" - "@ethersproject/json-wallets" "^5.6.1" - "@ethersproject/keccak256" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/random" "^5.6.1" - "@ethersproject/signing-key" "^5.6.2" - "@ethersproject/transactions" "^5.6.2" - "@ethersproject/wordlists" "^5.6.1" - -"@ethersproject/web@5.6.1", "@ethersproject/web@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.6.1.tgz#6e2bd3ebadd033e6fe57d072db2b69ad2c9bdf5d" - integrity sha512-/vSyzaQlNXkO1WV+RneYKqCJwualcUdx/Z3gseVovZP0wIlOFcCE1hkRhKBH8ImKbGQbMl9EAAyJFrJu7V0aqA== +"@ethersproject/units@5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/units/-/units-5.7.0.tgz#637b563d7e14f42deeee39245275d477aae1d8b1" + integrity sha512-pD3xLMy3SJu9kG5xDGI7+xhTEmGXlEqXU4OfNapmfnxLVY4EMSSRp7j1k7eezutBPH7RBN/7QPnwR7hzNlEFeg== dependencies: - "@ethersproject/base64" "^5.6.1" - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/strings" "^5.6.1" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/constants" "^5.7.0" + "@ethersproject/logger" "^5.7.0" 
-"@ethersproject/web@^5.7.0": +"@ethersproject/wallet@5.7.0": version "5.7.0" - resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.7.0.tgz#40850c05260edad8b54827923bbad23d96aac0bc" - integrity sha512-ApHcbbj+muRASVDSCl/tgxaH2LBkRMEYfLOLVa0COipx0+nlu0QKet7U2lEg0vdkh8XRSLf2nd1f1Uk9SrVSGA== + resolved "https://registry.yarnpkg.com/@ethersproject/wallet/-/wallet-5.7.0.tgz#4e5d0790d96fe21d61d38fb40324e6c7ef350b2d" + integrity sha512-MhmXlJXEJFBFVKrDLB4ZdDzxcBxQ3rLyCkhNqVu3CDYvR97E+8r01UgrI+TI99Le+aYm/in/0vp86guJuM7FCA== + dependencies: + "@ethersproject/abstract-provider" "^5.7.0" + "@ethersproject/abstract-signer" "^5.7.0" + "@ethersproject/address" "^5.7.0" + "@ethersproject/bignumber" "^5.7.0" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/hdnode" "^5.7.0" + "@ethersproject/json-wallets" "^5.7.0" + "@ethersproject/keccak256" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/random" "^5.7.0" + "@ethersproject/signing-key" "^5.7.0" + "@ethersproject/transactions" "^5.7.0" + "@ethersproject/wordlists" "^5.7.0" + +"@ethersproject/web@5.7.1", "@ethersproject/web@^5.7.0": + version "5.7.1" + resolved "https://registry.yarnpkg.com/@ethersproject/web/-/web-5.7.1.tgz#de1f285b373149bee5928f4eb7bcb87ee5fbb4ae" + integrity sha512-Gueu8lSvyjBWL4cYsWsjh6MtMwM0+H4HvqFPZfB6dV8ctbP9zFAO73VG1cMWae0FLPCtz0peKPpZY8/ugJJX2w== dependencies: "@ethersproject/base64" "^5.7.0" "@ethersproject/bytes" "^5.7.0" @@ -900,16 +709,16 @@ "@ethersproject/properties" "^5.7.0" "@ethersproject/strings" "^5.7.0" -"@ethersproject/wordlists@5.6.1", "@ethersproject/wordlists@^5.6.1": - version "5.6.1" - resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.6.1.tgz#1e78e2740a8a21e9e99947e47979d72e130aeda1" - integrity sha512-wiPRgBpNbNwCQFoCr8bcWO8o5I810cqO6mkdtKfLKFlLxeCWcnzDi4Alu8iyNzlhYuS9npCwivMbRWF19dyblw== +"@ethersproject/wordlists@5.7.0", "@ethersproject/wordlists@^5.7.0": + version "5.7.0" + resolved "https://registry.yarnpkg.com/@ethersproject/wordlists/-/wordlists-5.7.0.tgz#8fb2c07185d68c3e09eb3bfd6e779ba2774627f5" + integrity sha512-S2TFNJNfHWVHNE6cNDjbVlZ6MgE17MIxMbMg2zv3wn+3XSJGosL1m9ZVv3GXCf/2ymSsQ+hRI5IzoMJTG6aoVA== dependencies: - "@ethersproject/bytes" "^5.6.1" - "@ethersproject/hash" "^5.6.1" - "@ethersproject/logger" "^5.6.0" - "@ethersproject/properties" "^5.6.0" - "@ethersproject/strings" "^5.6.1" + "@ethersproject/bytes" "^5.7.0" + "@ethersproject/hash" "^5.7.0" + "@ethersproject/logger" "^5.7.0" + "@ethersproject/properties" "^5.7.0" + "@ethersproject/strings" "^5.7.0" "@fvictorio/tabtab@^0.0.3": version "0.0.3" @@ -945,6 +754,65 @@ "@types/istanbul-reports" "^1.1.1" "@types/yargs" "^13.0.0" +"@json-schema-spec/json-pointer@^0.1.2": + version "0.1.2" + resolved "https://registry.yarnpkg.com/@json-schema-spec/json-pointer/-/json-pointer-0.1.2.tgz#5153d5d01b94134015ee829deca5a970cf0406fd" + integrity sha512-BYY7IavBjwsWWSmVcMz2A9mKiDD9RvacnsItgmy1xV8cmgbtxFfKmKMtkVpD7pYtkx4mIW4800yZBXueVFIWPw== + +"@json-schema-tools/dereferencer@1.5.1": + version "1.5.1" + resolved "https://registry.yarnpkg.com/@json-schema-tools/dereferencer/-/dereferencer-1.5.1.tgz#cea5c6dfc92ef7e249969b0f8ccf76997387a708" + integrity sha512-CUpdGpxNTq1ebMkrgVxS03FHfwkGiw63c+GNzqFAqwqsxR0OsR79aqK8h2ybxTIEhdwiaknSnlUgtUIy7FJ+3A== + dependencies: + "@json-schema-tools/reference-resolver" "^1.2.1" + "@json-schema-tools/traverse" "^1.7.5" + fast-safe-stringify "^2.0.7" + +"@json-schema-tools/meta-schema@^1.6.10": + 
version "1.7.0" + resolved "https://registry.yarnpkg.com/@json-schema-tools/meta-schema/-/meta-schema-1.7.0.tgz#ce3e3a490a6499c44251d6203a40ed96bde8ac4b" + integrity sha512-3pDzVUssW3hVnf8gvSu1sKaVIpLyvmpbxgGfkUoaBiErFKRS2CZOufHD0pUFoa5e6Cd5oa72s402nJbnDz76CA== + +"@json-schema-tools/reference-resolver@^1.2.1": + version "1.2.5" + resolved "https://registry.yarnpkg.com/@json-schema-tools/reference-resolver/-/reference-resolver-1.2.5.tgz#47179269010eeb1f6fc289fa355da7111b633baf" + integrity sha512-xNQgX/ABnwvbIeexL5Czv08lXjHAL80HEUogza7E19eIL/EXD8HM4FvxG1JuTGyi5fA+sSP64C9pabELizcBBw== + dependencies: + "@json-schema-spec/json-pointer" "^0.1.2" + isomorphic-fetch "^3.0.0" + +"@json-schema-tools/referencer@^1.0.4": + version "1.0.5" + resolved "https://registry.yarnpkg.com/@json-schema-tools/referencer/-/referencer-1.0.5.tgz#cde95124db71df9c330c3880a51a63cfc3e676e9" + integrity sha512-Z3X7jAm2qn5plDvrw5SH0SK/YZ/4elRuUTNYOl/ydSVAeWQ+9SJUAefTYehUlFFJDbZqUwmJd8K8RQ1FEya1fA== + dependencies: + "@json-schema-tools/traverse" "^1.7.8" + +"@json-schema-tools/titleizer@1.0.6", "@json-schema-tools/titleizer@^1.0.5": + version "1.0.6" + resolved "https://registry.yarnpkg.com/@json-schema-tools/titleizer/-/titleizer-1.0.6.tgz#638c77f3ec1ca03279f82432c0a64ee7c6408964" + integrity sha512-JRW9GXFjeOZF/SUhORYiTfd4QLRQpwm/v96HnJnGH+0s7U5LVi1dyA6+uyrsG/P1h3XR6P67NyBEsYQqgeMGMQ== + dependencies: + "@json-schema-tools/traverse" "^1.7.8" + +"@json-schema-tools/transpiler@^1.10.2": + version "1.10.2" + resolved "https://registry.yarnpkg.com/@json-schema-tools/transpiler/-/transpiler-1.10.2.tgz#6e45b6850ae45c20b4c2da00e9381c78841e2548" + integrity sha512-8juIY5wol4VIJlJ2pm3Ex4BrzlWrqblqNh/3ExVapWTBQ3jbCqqsVkVtzduuh6skcPkodXeYt3R+1Pyq7K032w== + dependencies: + "@json-schema-tools/referencer" "^1.0.4" + "@json-schema-tools/titleizer" "^1.0.5" + "@json-schema-tools/traverse" "^1.8.0" + lodash.camelcase "^4.3.0" + lodash.deburr "^4.1.0" + lodash.snakecase "^4.1.1" + lodash.trim "^4.5.1" + +"@json-schema-tools/traverse@^1.7.5", "@json-schema-tools/traverse@^1.7.8", "@json-schema-tools/traverse@^1.8.0": + version "1.10.1" + resolved "https://registry.yarnpkg.com/@json-schema-tools/traverse/-/traverse-1.10.1.tgz#1129579c37fef9bb997082c62840f5989a667c0d" + integrity sha512-vYY5EIxCPzEXEWL/vTjdHy4g92tv1ApUQCjPJsj9gEoXLNNVwJlwwgRZisuvgFBZ3zeLzQygrbehERSpYdmFZA== + "@manypkg/find-root@^1.1.0": version "1.1.0" resolved "https://registry.yarnpkg.com/@manypkg/find-root/-/find-root-1.1.0.tgz#a62d8ed1cd7e7d4c11d9d52a8397460b5d4ad29f" @@ -978,15 +846,25 @@ tweetnacl "^1.0.3" tweetnacl-util "^0.15.1" -"@noble/hashes@1.1.1", "@noble/hashes@~1.1.1": - version "1.1.1" - resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.1.1.tgz#c056d9b7166c1e7387a7453c2aff199bf7d88e5f" - integrity sha512-Lkp9+NijmV7eSVZqiUvt3UCuuHeJpUVmRrvh430gyJjJiuJMqkeHf6/A9lQ/smmbWV/0spDeJscscPzyB4waZg== +"@napi-rs/cli@^2.11.4": + version "2.12.0" + resolved "https://registry.yarnpkg.com/@napi-rs/cli/-/cli-2.12.0.tgz#c4071e1dce2e0a3c9562fb3c437e94816564ce13" + integrity sha512-DWx9jDpun9JqDBypiXKjcYMm7gEnh83bry7b6UkItpmVE3w3tNrj91fOEPKDbFQZ7EULfFt+aQBbqtUHq5oNzQ== -"@noble/secp256k1@1.6.0", "@noble/secp256k1@~1.6.0": - version "1.6.0" - resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.6.0.tgz#602afbbfcfb7e169210469b697365ef740d7e930" - integrity sha512-DWSsg8zMHOYMYBqIQi96BQuthZrp98LCeMNcUOaffCIVYQ5yxDbNikLF+H7jEnmNNmXbtVic46iCuVWzar+MgA== +"@noble/hashes@1.1.2": + version "1.1.2" + resolved 
"https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.1.2.tgz#e9e035b9b166ca0af657a7848eb2718f0f22f183" + integrity sha512-KYRCASVTv6aeUi1tsF8/vpyR7zpfs3FUzy2Jqm+MU+LmUKhQ0y2FpfwqkCcxSg2ua4GALJd8k2R76WxwZGbQpA== + +"@noble/hashes@~1.1.1": + version "1.1.3" + resolved "https://registry.yarnpkg.com/@noble/hashes/-/hashes-1.1.3.tgz#360afc77610e0a61f3417e497dcf36862e4f8111" + integrity sha512-CE0FCR57H2acVI5UOzIGSSIYxZ6v/HOhDR0Ro9VLyhnzLwx0o8W1mmgaqlEUx4049qJDlIBRztv5k+MM8vbO3A== + +"@noble/secp256k1@1.6.3", "@noble/secp256k1@~1.6.0": + version "1.6.3" + resolved "https://registry.yarnpkg.com/@noble/secp256k1/-/secp256k1-1.6.3.tgz#7eed12d9f4404b416999d0c87686836c4c5c9b94" + integrity sha512-T04e4iTurVy7I8Sw4+c5OSN9/RkPlo1uKxAomtxQNLq8j1uPAqnsqG1bqvY3Jv7c13gyr6dui0zmh/I3+f/JaQ== "@nodelib/fs.scandir@2.1.5": version "2.1.5" @@ -1073,16 +951,11 @@ mcl-wasm "^0.7.1" rustbn.js "~0.2.0" -"@nomicfoundation/ethereumjs-rlp@^4.0.0": +"@nomicfoundation/ethereumjs-rlp@^4.0.0", "@nomicfoundation/ethereumjs-rlp@^4.0.0-beta.2": version "4.0.0" resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-rlp/-/ethereumjs-rlp-4.0.0.tgz#d9a9c5f0f10310c8849b6525101de455a53e771d" integrity sha512-GaSOGk5QbUk4eBP5qFbpXoZoZUj/NrW7MRa0tKY4Ew4c2HAS0GXArEMAamtFrkazp0BO4K5p2ZCG3b2FmbShmw== -"@nomicfoundation/ethereumjs-rlp@^4.0.0-beta.2": - version "4.0.0-rc.1" - resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-rlp/-/ethereumjs-rlp-4.0.0-rc.1.tgz#eb6b56c9e5d6cbdd4f4ce0c337c879a74ab60187" - integrity sha512-ncRj4Ip4IPi+gwgexj3UCyWgfnOJ6ARwEQmiI/H3owS9vOnSCGV1yatX7v1n+wTdvazR0VQLdjt94nkeyDeesQ== - "@nomicfoundation/ethereumjs-statemanager@^1.0.0": version "1.0.0" resolved "https://registry.yarnpkg.com/@nomicfoundation/ethereumjs-statemanager/-/ethereumjs-statemanager-1.0.0.tgz#14a9d4e1c828230368f7ab520c144c34d8721e4b" @@ -1228,6 +1101,38 @@ ethers "^4.0.0-beta.1" source-map-support "^0.5.19" +"@open-rpc/meta-schema@1.14.2": + version "1.14.2" + resolved "https://registry.yarnpkg.com/@open-rpc/meta-schema/-/meta-schema-1.14.2.tgz#1af0b1ea77e50b5076ae31267e97efcb9ff352f8" + integrity sha512-vD4Nbkrb7wYFRcSQf+j228LwOy1C6/KKpy5NADlpMElGrAWPRxhTa2yTi6xG+x88OHzg2+cydQ0GAD6o40KUcg== + +"@open-rpc/schema-utils-js@1.15.0": + version "1.15.0" + resolved "https://registry.yarnpkg.com/@open-rpc/schema-utils-js/-/schema-utils-js-1.15.0.tgz#59bbc92ab25c9b6b08f96d37995ee61e4c062fdc" + integrity sha512-YHTt3n3RZo1lRy8oknn2G1y0PWlo7HWtnwLOKfvVxjauKMOmlvBbpPHQZibpzIhgt+yPe4mht1ldhKOwq2tCUw== + dependencies: + "@json-schema-tools/dereferencer" "1.5.1" + "@json-schema-tools/meta-schema" "^1.6.10" + "@json-schema-tools/reference-resolver" "^1.2.1" + "@open-rpc/meta-schema" "1.14.2" + ajv "^6.10.0" + detect-node "^2.0.4" + fast-safe-stringify "^2.0.7" + fs-extra "^9.0.0" + is-url "^1.2.4" + isomorphic-fetch "^3.0.0" + +"@open-rpc/typings@^1.12.1": + version "1.12.1" + resolved "https://registry.yarnpkg.com/@open-rpc/typings/-/typings-1.12.1.tgz#60caa88090fc88af71fbc4e14b99c78a3c422340" + integrity sha512-QaVUuN9w2ZkReps2bZ5ZvLldH+TPSBN/B/J1s9Otqhw0ECjkIawKNZFiH1FP2pAiBoDr7WvhMWm1GQk9I2/kFQ== + dependencies: + "@json-schema-tools/titleizer" "1.0.6" + "@json-schema-tools/transpiler" "^1.10.2" + "@open-rpc/schema-utils-js" "1.15.0" + commander "^6.0.0" + fs-extra "^10.0.0" + "@resolver-engine/core@^0.3.3": version "0.3.3" resolved "https://registry.yarnpkg.com/@resolver-engine/core/-/core-0.3.3.tgz#590f77d85d45bc7ecc4e06c654f41345db6ca967" @@ -1360,6 +1265,11 @@ resolved 
"https://registry.yarnpkg.com/@sindresorhus/is/-/is-0.14.0.tgz#9fb3a3cf3132328151f353de4632e01e52102bea" integrity sha512-9NET910DNaIPngYnLLPeg+Ogzqsi9uM4mSboU5y6p8S5DzMTVEsJZrawi+BoDNUVBa2DhJqQYUFvMDfgU062LQ== +"@sindresorhus/is@^4.0.0", "@sindresorhus/is@^4.6.0": + version "4.6.0" + resolved "https://registry.yarnpkg.com/@sindresorhus/is/-/is-4.6.0.tgz#3c7c9c46e678feefe7a2e5bb609d3dbd665ffb3f" + integrity sha512-t09vSN3MdfsyCHoFcTRCH/iUtG7OJ0CsjzB8cjAmKc/va/kIgeDI/TxsigdncE/4be734m0cvIYwNaV4i2XqAw== + "@sinonjs/commons@^1.6.0", "@sinonjs/commons@^1.7.0", "@sinonjs/commons@^1.8.1": version "1.8.3" resolved "https://registry.yarnpkg.com/@sinonjs/commons/-/commons-1.8.3.tgz#3802ddd21a50a949b6721ddd72da36e67e7f1b2d" @@ -1384,18 +1294,11 @@ type-detect "^4.0.8" "@sinonjs/text-encoding@^0.7.1": - version "0.7.1" - resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.1.tgz#8da5c6530915653f3a1f38fd5f101d8c3f8079c5" - integrity sha512-+iTbntw2IZPb/anVDbypzfQa+ay64MW0Zo8aJ8gZPWMMK6/OubMVb6lUPMagqjOPnmtauXnFCACVl3O7ogjeqQ== + version "0.7.2" + resolved "https://registry.yarnpkg.com/@sinonjs/text-encoding/-/text-encoding-0.7.2.tgz#5981a8db18b56ba38ef0efb7d995b12aa7b51918" + integrity sha512-sXXKG+uL9IrKqViTtao2Ws6dy0znu9sOaP1di/jKGW1M6VssO8vlpXCQcpZ+jisQ1tTFAC5Jo/EOzFbggBagFQ== -"@solidity-parser/parser@^0.14.0": - version "0.14.2" - resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.2.tgz#2d8f2bddb217621df882ceeae7d7b42ae8664db3" - integrity sha512-10cr0s+MtRtqjEw0WFJrm2rwULN30xx7btd/v9cmqME2617/2M5MbHDkFIGIGTa7lwNw4bN9mVGfhlLzrYw8pA== - dependencies: - antlr4ts "^0.5.0-alpha.4" - -"@solidity-parser/parser@^0.14.1": +"@solidity-parser/parser@^0.14.0", "@solidity-parser/parser@^0.14.1": version "0.14.3" resolved "https://registry.yarnpkg.com/@solidity-parser/parser/-/parser-0.14.3.tgz#0d627427b35a40d8521aaa933cc3df7d07bfa36f" integrity sha512-29g2SZ29HtsqA58pLCtopI1P/cPy5/UAzlcAXO6T/CNJimG6yA8kx4NaseMyJULiC+TEs02Y9/yeHzClqoA0hw== @@ -1409,77 +1312,91 @@ dependencies: defer-to-connect "^1.0.1" -"@truffle/abi-utils@^0.2.13": - version "0.2.13" - resolved "https://registry.yarnpkg.com/@truffle/abi-utils/-/abi-utils-0.2.13.tgz#63b7f5e5b61a86e563b2ea0c93a39b094086d205" - integrity sha512-WzjyNvx+naXmG/XKF+xLI+tJZLUlPGkd29rY4xBCiY9m/xWk0ZUL6gvVvnRr3leLJkBweJUSBiGUW770V8hHOg== +"@szmarczak/http-timer@^4.0.5": + version "4.0.6" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-4.0.6.tgz#b4a914bb62e7c272d4e5989fe4440f812ab1d807" + integrity sha512-4BAffykYOgO+5nzBWYwE3W90sBgLJoUPRWWcL8wlyiM8IB8ipJz3UMJ9KXQd1RKQXpKp8Tutn80HZtWsu2u76w== + dependencies: + defer-to-connect "^2.0.0" + +"@szmarczak/http-timer@^5.0.1": + version "5.0.1" + resolved "https://registry.yarnpkg.com/@szmarczak/http-timer/-/http-timer-5.0.1.tgz#c7c1bf1141cdd4751b0399c8fc7b8b664cd5be3a" + integrity sha512-+PmQX0PiAYPMeVYe237LJAYvOMYW1j2rH5YROyS3b4CTVJum34HfRvKvAzozHAQG0TnHNdUfY9nCeUyRAs//cw== + dependencies: + defer-to-connect "^2.0.1" + +"@truffle/abi-utils@^0.3.3": + version "0.3.3" + resolved "https://registry.yarnpkg.com/@truffle/abi-utils/-/abi-utils-0.3.3.tgz#4484489b160ad35aa0716427c73de10ebed40146" + integrity sha512-/KpaaPi0daffnwLhFulMtwdQ7YfZZtj24J34kZJi7OQw9x0dSNSskGFTU5SEv9d3fOfHNYqCevsqtv1zek0+zg== dependencies: change-case "3.0.2" - faker "5.5.3" - fast-check "^2.12.1" + fast-check "3.1.1" + web3-utils "1.7.4" "@truffle/blockchain-utils@^0.1.3": - version "0.1.3" - resolved 
"https://registry.yarnpkg.com/@truffle/blockchain-utils/-/blockchain-utils-0.1.3.tgz#846b64314fc105d1d4af0996f0294111bf17911c" - integrity sha512-K21Wf10u6VmS12/f9OrLN98f1RCqzrmuM2zlsly4b7BF/Xdh55Iq/jNSOnsNUJa+6Iaqqz6zeidquCYu9nTFng== + version "0.1.4" + resolved "https://registry.yarnpkg.com/@truffle/blockchain-utils/-/blockchain-utils-0.1.4.tgz#1365b88c3d2922a066d947e93748f09b0fac2e93" + integrity sha512-HegAo5A8UX9vE8dtceBRgCY207gOb9wj54c8mNOOWHcFpkyJz7kZYGo44As6Imh10/0hD2j7vHQ56Jf+uszJ3A== -"@truffle/codec@^0.13.1": - version "0.13.1" - resolved "https://registry.yarnpkg.com/@truffle/codec/-/codec-0.13.1.tgz#acc66371a920e360b09d346a8c86d3349e73c502" - integrity sha512-ZqpfofLEwzcdRYgInHOOoNYLqCEJ+nkYl4NTJkrszMAu9MLnHQjZqrMtfem/H8HDU3OOIgbpFlzipMdrnecjJw== +"@truffle/codec@^0.14.7": + version "0.14.7" + resolved "https://registry.yarnpkg.com/@truffle/codec/-/codec-0.14.7.tgz#2f7d2916f428be5da7a82559356c8570898e5cd9" + integrity sha512-qEbnghn/q3+hKoaRIlXfcdty0jhA1CcvWxjCgwOxEBVOGEBazjentyCzTGkrxyL9I4q1VDqk5Bmxl8umotzRTg== dependencies: - "@truffle/abi-utils" "^0.2.13" - "@truffle/compile-common" "^0.7.31" + "@truffle/abi-utils" "^0.3.3" + "@truffle/compile-common" "^0.9.0" big.js "^6.0.3" bn.js "^5.1.3" - cbor "^5.1.0" + cbor "^5.2.0" debug "^4.3.1" lodash "^4.17.21" - semver "^7.3.4" + semver "7.3.7" utf8 "^3.0.0" - web3-utils "1.5.3" + web3-utils "1.7.4" -"@truffle/compile-common@^0.7.31": - version "0.7.31" - resolved "https://registry.yarnpkg.com/@truffle/compile-common/-/compile-common-0.7.31.tgz#ab0b6219d5a02c4364b10ccd97615fc515402367" - integrity sha512-BGhWPd6NoI4VZfYBg+RgrCyLaxxq40vDOp6Ouofa1NQdN6LSPwlqWf0JWvPIKFNRp+TA9aWRHGmZntYyE94OZg== +"@truffle/compile-common@^0.9.0": + version "0.9.0" + resolved "https://registry.yarnpkg.com/@truffle/compile-common/-/compile-common-0.9.0.tgz#92ed5f91820b56e15604818464fa3803c6da13e7" + integrity sha512-kpTTU/7ZlQedH6cemCgrqXL4sUjsWAPj7X4LaqQ+KSna3egNJZ6wrKt2kpSYPpCpLihq2IpcBwWar3dTPZ7a5Q== dependencies: - "@truffle/error" "^0.1.0" + "@truffle/error" "^0.1.1" colors "1.4.0" "@truffle/contract-schema@^3.4.7": - version "3.4.7" - resolved "https://registry.yarnpkg.com/@truffle/contract-schema/-/contract-schema-3.4.7.tgz#8706e3a9f763891b1cfad5cde771e7f6175fa301" - integrity sha512-vbOHMq/a8rVPh+cFMBDDGPqqiKrXXOc+f1kB4znfh3ewOX8rJxZhGJvdMm3WNMJHR5RstqDV7ZIZ7ePwtSXH8Q== + version "3.4.10" + resolved "https://registry.yarnpkg.com/@truffle/contract-schema/-/contract-schema-3.4.10.tgz#c11a814c13ad55a5e454fb35ddfa291ae0d24ace" + integrity sha512-BhRNRoRvlj2th6E5RNS0BnS0ZxQe01JJz8I7MjkGqdeXSvrn6qDCAnbmvhNgUv0l5h8w5+gBOQhAJhILf1shdQ== dependencies: ajv "^6.10.0" debug "^4.3.1" "@truffle/debug-utils@^6.0.22": - version "6.0.26" - resolved "https://registry.yarnpkg.com/@truffle/debug-utils/-/debug-utils-6.0.26.tgz#12ca10f399143f7c50c0dc605843c14f75cc4f7f" - integrity sha512-+wxeXLRl23rzpOf76PkUTUqbsEzS8zAgLnZKFMEyS/vkVY5CpNVIhddCQcqQcDaIn9BRcmbuB5xMYR6hs8wrSw== + version "6.0.37" + resolved "https://registry.yarnpkg.com/@truffle/debug-utils/-/debug-utils-6.0.37.tgz#840f64fdf5cd4dcecf81cdd70ea2dc0b1f4ac598" + integrity sha512-KGJB91Yn2WEwU6/dxCa7a6nt+f6zZzvZ7P+vYnTFvCtUafvBsxpV/3XwxjmWrk53aZEFh6uLB2XSss25vAzV2g== dependencies: - "@truffle/codec" "^0.13.1" + "@truffle/codec" "^0.14.7" "@trufflesuite/chromafi" "^3.0.0" bn.js "^5.1.3" chalk "^2.4.2" debug "^4.3.1" highlightjs-solidity "^2.0.5" -"@truffle/error@^0.1.0": - version "0.1.0" - resolved "https://registry.yarnpkg.com/@truffle/error/-/error-0.1.0.tgz#5e9fed79e6cda624c926d314b280a576f8b22a36" - 
integrity sha512-RbUfp5VreNhsa2Q4YbBjz18rOQI909pG32bghl1hulO7IpvcqTS+C3Ge5cNbiWQ1WGzy1wIeKLW0tmQtHFB7qg== +"@truffle/error@^0.1.0", "@truffle/error@^0.1.1": + version "0.1.1" + resolved "https://registry.yarnpkg.com/@truffle/error/-/error-0.1.1.tgz#e52026ac8ca7180d83443dca73c03e07ace2a301" + integrity sha512-sE7c9IHIGdbK4YayH4BC8i8qMjoAOeg6nUXUDZZp8wlU21/EMpaG+CLx+KqcIPyR+GSWIW3Dm0PXkr2nlggFDA== "@truffle/interface-adapter@^0.5.16": - version "0.5.17" - resolved "https://registry.yarnpkg.com/@truffle/interface-adapter/-/interface-adapter-0.5.17.tgz#34e7e28930f6aa4c722f1b2ede127d379f891e70" - integrity sha512-2MJ+YLAL4y2QqlWc90NKizBLpavcETTzV8EpYkYJgAM326xKrAt+N3wx3f3tgRPSsbdtiEVKf1JRXHmDYQ+xIg== + version "0.5.23" + resolved "https://registry.yarnpkg.com/@truffle/interface-adapter/-/interface-adapter-0.5.23.tgz#a4103270e3c73746089b9e5807aaa1359f4725bb" + integrity sha512-nU8kChKgcUP+tELId1PMgHnmd2KcBdBer59TxfVqAZXRmt6blm2tpBbGYtKzTIdZlf6kMqVbZXdB6u1CJDqfxg== dependencies: bn.js "^5.1.3" ethers "^4.0.32" - web3 "1.5.3" + web3 "1.7.4" "@trufflesuite/chromafi@^3.0.0": version "3.0.0" @@ -1511,12 +1428,11 @@ ethers "^5.0.2" "@typechain/hardhat@^6.1.2": - version "6.1.2" - resolved "https://registry.yarnpkg.com/@typechain/hardhat/-/hardhat-6.1.2.tgz#d3beccc6937d93f9b437616b741f839a8b953693" - integrity sha512-k4Ea3pVITKB2DH8p1a5U38cyy7KZPD04Spo4q5b4wO+n2mT+uAz5dxckPtbczn/Kk5wiFq+ZkuOtw5ZKFhL/+w== + version "6.1.3" + resolved "https://registry.yarnpkg.com/@typechain/hardhat/-/hardhat-6.1.3.tgz#e6121502eb64903ab562d89e094a5506e7b86822" + integrity sha512-e1H9MVl286ma0HuD9CBL248+pbdA7lWF6+I7FYwzykIrjilKhvLUv0Q7LtcyZztzgbP2g4Tyg1UPE+xy+qR7cA== dependencies: fs-extra "^9.1.0" - lodash "^4.17.15" "@types/async-eventemitter@^0.2.1": version "0.2.1" @@ -1531,9 +1447,9 @@ bignumber.js "*" "@types/bn.js@*", "@types/bn.js@^5.1.0": - version "5.1.0" - resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-5.1.0.tgz#32c5d271503a12653c62cf4d2b45e6eab8cebc68" - integrity sha512-QSSVYj7pYFN49kW77o2s9xTCwZ8F2xLbjLLSEVh8D2F4JUhZtPAGOFLTD+ffqksBx/u4cE/KImFjyhqCjn/LIA== + version "5.1.1" + resolved "https://registry.yarnpkg.com/@types/bn.js/-/bn.js-5.1.1.tgz#b51e1b55920a4ca26e9285ff79936bbdec910682" + integrity sha512-qNrYbZqMx0uJAfKnKclPh+dTwK33KfLHYqtyODwd5HnXOjnkhc4qgn3BrK6RWyGZm5+sIFE7Q7Vz6QQtJB7w7g== dependencies: "@types/node" "*" @@ -1544,6 +1460,16 @@ dependencies: "@types/node" "*" +"@types/cacheable-request@^6.0.1", "@types/cacheable-request@^6.0.2": + version "6.0.2" + resolved "https://registry.yarnpkg.com/@types/cacheable-request/-/cacheable-request-6.0.2.tgz#c324da0197de0a98a2312156536ae262429ff6b9" + integrity sha512-B3xVo+dlKM6nnKTcmm5ZtY/OL8bOAOd2Olee9M1zft65ox50OzjEHW91sDiU9j6cvW8Ejg1/Qkf4xd2kugApUA== + dependencies: + "@types/http-cache-semantics" "*" + "@types/keyv" "*" + "@types/node" "*" + "@types/responselike" "*" + "@types/cbor@^5.0.1": version "5.0.1" resolved "https://registry.yarnpkg.com/@types/cbor/-/cbor-5.0.1.tgz#e147bbe09ada4db7000ec6c23eafb5f67f5422a5" @@ -1559,9 +1485,9 @@ "@types/chai" "*" "@types/chai@*", "@types/chai@^4.2.0": - version "4.3.1" - resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.1.tgz#e2c6e73e0bdeb2521d00756d099218e9f5d90a04" - integrity sha512-/zPMqDkzSZ8t3VtxOa4KPq7uzzW978M9Tvh+j7GHKuo6k6GTLxPJ4J5gE5cjfJ26pnXst0N5Hax8Sr0T2Mi9zQ== + version "4.3.3" + resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.3.3.tgz#3c90752792660c4b562ad73b3fbd68bf3bc7ae07" + integrity 
sha512-hC7OMnszpxhZPduX+m+nrx+uFoLkWOMiR4oa/AZF3MuSETYTZmFfJAHqZEM8MVlvfG7BEUcgvtwoCTxBp6hm3g== "@types/ci-info@^2.0.0": version "2.0.0" @@ -1608,7 +1534,15 @@ dependencies: "@types/node" "*" -"@types/glob@*", "@types/glob@^7.1.1": +"@types/glob@*": + version "8.0.0" + resolved "https://registry.yarnpkg.com/@types/glob/-/glob-8.0.0.tgz#321607e9cbaec54f687a0792b2d1d370739455d2" + integrity sha512-l6NQsDDyQUVeoTynNpC9uRvCUint/gSUXQA2euwmTuWGvPY5LSDUu6tkCtJB2SvGQlJQzLaKqcGZP4//7EDveA== + dependencies: + "@types/minimatch" "*" + "@types/node" "*" + +"@types/glob@^7.1.1": version "7.2.0" resolved "https://registry.yarnpkg.com/@types/glob/-/glob-7.2.0.tgz#bc1b5bf3aa92f25bd5dd39f35c57361bdce5b2eb" integrity sha512-ZUxbzKl0IfJILTS6t7ip5fQQM/J3TJYubDm3nMbgubNNYS62eXeUpoLUC8/7fJNiFYHTrGPQn7hspDUzIHX3UA== @@ -1616,6 +1550,11 @@ "@types/minimatch" "*" "@types/node" "*" +"@types/http-cache-semantics@*": + version "4.0.1" + resolved "https://registry.yarnpkg.com/@types/http-cache-semantics/-/http-cache-semantics-4.0.1.tgz#0ea7b61496902b95890dc4c3a116b60cb8dae812" + integrity sha512-SZs7ekbP8CN0txVG2xVRH6EgKmEm31BOxA07vkFaETzZz1xh+cbt8BcI0slpymvwhx5dlFnQG2rTlPVQn+iRPQ== + "@types/is-ci@^3.0.0": version "3.0.0" resolved "https://registry.yarnpkg.com/@types/is-ci/-/is-ci-3.0.0.tgz#7e8910af6857601315592436f030aaa3ed9783c3" @@ -1660,10 +1599,17 @@ dependencies: "@types/node" "*" +"@types/keyv@*": + version "4.2.0" + resolved "https://registry.yarnpkg.com/@types/keyv/-/keyv-4.2.0.tgz#65b97868ab757906f2dbb653590d7167ad023fa0" + integrity sha512-xoBtGl5R9jeKUhc8ZqeYaRDx04qqJ10yhhXYGmJ4Jr8qKpvMsDQQrNUvF/wUJ4klOtmJeJM+p2Xo3zp9uaC3tw== + dependencies: + keyv "*" + "@types/lodash@^4.14.123": - version "4.14.182" - resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.182.tgz#05301a4d5e62963227eaafe0ce04dd77c54ea5c2" - integrity sha512-/THyiqyQAP9AfARo4pF+aCGcyiQ94tX/Is2I7HofNRqoYLgN1PBoOWu2/zTA5zMxzP5EFutMtWtGAFRKUe961Q== + version "4.14.186" + resolved "https://registry.yarnpkg.com/@types/lodash/-/lodash-4.14.186.tgz#862e5514dd7bd66ada6c70ee5fce844b06c8ee97" + integrity sha512-eHcVlLXP0c2FlMPm56ITode2AgLMSa6aJ05JTTbYbI+7EMkCEE5qk2E41d5g2lCVTqRe0GnnRFurmlCsDODrPw== "@types/lru-cache@^5.1.0": version "5.1.1" @@ -1671,9 +1617,9 @@ integrity sha512-ssE3Vlrys7sdIzs5LOxCzTVMsU7i9oa/IaW92wF32JFb3CVczqOkru2xspuKczHEbG3nvmPY7IFqVmGGHdNbYw== "@types/minimatch@*": - version "3.0.5" - resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-3.0.5.tgz#1001cc5e6a3704b83c236027e77f2f58ea010f40" - integrity sha512-Klz949h02Gz2uZCMGwDUSDS1YBlTdDDgbWHi+81l29tQALUtvz4rAYi5uoVhE5Lagoq6DeqAUlbrHvW/mXDgdQ== + version "5.1.2" + resolved "https://registry.yarnpkg.com/@types/minimatch/-/minimatch-5.1.2.tgz#07508b45797cb81ec3f273011b054cd0755eddca" + integrity sha512-K0VQKziLUWkVKiRVrx4a40iPaxTUefQmjtkQofBkYRcoaaL/8rhwDWww9qWbrgicNOgnpIsMxyNIUM4+n6dUIA== "@types/minimist@^1.2.0": version "1.2.2" @@ -1706,9 +1652,9 @@ form-data "^3.0.0" "@types/node@*": - version "17.0.44" - resolved "https://registry.yarnpkg.com/@types/node/-/node-17.0.44.tgz#16dd0bb5338f016d8ca10631789f0d0612fe5d5b" - integrity sha512-gWYiOlu6Y4oyLYBvsJAPlwHbC8H4tX+tLsHy6Ee976wedwwZKrG2hFl3Y/HiH6bIyLTbDWQexQF/ohwKkOpUCg== + version "18.11.4" + resolved "https://registry.yarnpkg.com/@types/node/-/node-18.11.4.tgz#7017a52e18dfaad32f55eebd539993014441949c" + integrity sha512-BxcJpBu8D3kv/GZkx/gSMz6VnTJREBj/4lbzYOQueUOELkt8WrO6zAcSPmp9uRPEW/d+lUO8QK0W2xnS1hEU0A== "@types/node@^10.0.3": version "10.17.60" @@ -1721,9 +1667,9 @@ integrity 
sha512-J8xLz7q2OFulZ2cyGTLE1TbbZcjpno7FaN6zdJNrgAdrJ+DZzh/uFR6YrTb4C+nXakvud8Q4+rbhoIWlYQbUFQ== "@types/node@^14.0.0": - version "14.18.22" - resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.22.tgz#fd2a15dca290fc9ad565b672fde746191cd0c6e6" - integrity sha512-qzaYbXVzin6EPjghf/hTdIbnVW1ErMx8rPzwRNJhlbyJhu2SyqlvjGOY/tbUt6VFyzg56lROcOeSQRInpt63Yw== + version "14.18.32" + resolved "https://registry.yarnpkg.com/@types/node/-/node-14.18.32.tgz#8074f7106731f1a12ba993fe8bad86ee73905014" + integrity sha512-Y6S38pFr04yb13qqHf8uk1nHE3lXgQ30WZbv1mLliV9pt0NjvqdWttLcrOYLnXbOafknVYRHZGoMSpR9UwfYow== "@types/node@^8.0.0": version "8.10.66" @@ -1743,9 +1689,9 @@ "@types/node" "*" "@types/prettier@^2.1.1": - version "2.6.3" - resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.6.3.tgz#68ada76827b0010d0db071f739314fa429943d0a" - integrity sha512-ymZk3LEC/fsut+/Q5qejp6R9O1rMxz3XaRHDV6kX8MrGAhOSPqVARbDi+EZvInBpw+BnCX3TD240byVkOfQsHg== + version "2.7.1" + resolved "https://registry.yarnpkg.com/@types/prettier/-/prettier-2.7.1.tgz#dfd20e2dc35f027cdd6c1908e80a5ddc7499670e" + integrity sha512-ri0UmynRRvZiiUJdiz38MmIblKK+oH30MztdBVR95dv/Ubw6neWSb8u1XpRb72L4qsZOhz+L+z9JD40SJmfWow== "@types/qs@^6.2.31", "@types/qs@^6.5.3": version "6.9.7" @@ -1764,6 +1710,13 @@ resolved "https://registry.yarnpkg.com/@types/resolve/-/resolve-1.20.2.tgz#97d26e00cd4a0423b4af620abecf3e6f442b7975" integrity sha512-60BCwRFOZCQhDncwQdxxeOEEkbc5dIMccYLwbxsS4TUNeVECQ/pBJ0j09mrHOl/JJvpRPGwO9SvE4nR2Nb/a4Q== +"@types/responselike@*", "@types/responselike@^1.0.0": + version "1.0.0" + resolved "https://registry.yarnpkg.com/@types/responselike/-/responselike-1.0.0.tgz#251f4fe7d154d2bad125abe1b429b23afd262e29" + integrity sha512-85Y2BjiufFzaMIlvJDvTTB8Fxl2xfLo4HgmHzVBz08w4wDePCTjYw66PdrolO0kzli3yam/YCgRufyo1DdQVTA== + dependencies: + "@types/node" "*" + "@types/secp256k1@^4.0.1": version "4.0.3" resolved "https://registry.yarnpkg.com/@types/secp256k1/-/secp256k1-4.0.3.tgz#1b8e55d8e00f08ee7220b4d59a6abe89c37a901c" @@ -1776,6 +1729,11 @@ resolved "https://registry.yarnpkg.com/@types/semver/-/semver-6.2.3.tgz#5798ecf1bec94eaa64db39ee52808ec0693315aa" integrity sha512-KQf+QAMWKMrtBMsB8/24w53tEsxllMj6TuA80TT/5igJalLI/zm0L3oXRbIAl4Ohfc85gyHX/jhMwsVkmhLU4A== +"@types/semver@^7.3.12": + version "7.3.12" + resolved "https://registry.yarnpkg.com/@types/semver/-/semver-7.3.12.tgz#920447fdd78d76b19de0438b7f60df3c4a80bf1c" + integrity sha512-WwA1MW0++RfXmCr12xeYOOC5baSC9mSb0ZqCquFzKhcoF4TvHu5MKOuXsncgZcpVFhB1pXd5hZmM0ryAoCp12A== + "@types/shelljs@^0.8.6": version "0.8.11" resolved "https://registry.yarnpkg.com/@types/shelljs/-/shelljs-0.8.11.tgz#17a5696c825974e96828e96e89585d685646fcb8" @@ -1793,9 +1751,9 @@ "@types/sinon" "*" "@types/sinon@*": - version "10.0.11" - resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.11.tgz#8245827b05d3fc57a6601bd35aee1f7ad330fc42" - integrity sha512-dmZsHlBsKUtBpHriNjlK0ndlvEh8dcb9uV9Afsbt89QIyydpC7NcR+nWlAhASfy3GHnxTl4FX/aKE7XZUt/B4g== + version "10.0.13" + resolved "https://registry.yarnpkg.com/@types/sinon/-/sinon-10.0.13.tgz#60a7a87a70d9372d0b7b38cc03e825f46981fb83" + integrity sha512-UVjDqJblVNQYvVNUsj0PuYYw0ELRmgt1Nt5Vk0pT5f16ROGfcKJY8o1HVuMOJOpD727RrGB9EGvoaTQE5tgxZQ== dependencies: "@types/sinonjs__fake-timers" "*" @@ -1862,15 +1820,14 @@ tsutils "^3.21.0" "@typescript-eslint/eslint-plugin@^5.30.7": - version "5.33.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.33.0.tgz#059798888720ec52ffa96c5f868e31a8f70fa3ec" - integrity 
sha512-jHvZNSW2WZ31OPJ3enhLrEKvAZNyAFWZ6rx9tUwaessTc4sx9KmgMNhVcqVAl1ETnT5rU5fpXTLmY9YvC1DCNg== + version "5.41.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-5.41.0.tgz#f8eeb1c6bb2549f795f3ba71aec3b38d1ab6b1e1" + integrity sha512-DXUS22Y57/LAFSg3x7Vi6RNAuLpTXwxB9S2nIA7msBb/Zt8p7XqMwdpdc1IU7CkOQUPgAqR5fWvxuKCbneKGmA== dependencies: - "@typescript-eslint/scope-manager" "5.33.0" - "@typescript-eslint/type-utils" "5.33.0" - "@typescript-eslint/utils" "5.33.0" + "@typescript-eslint/scope-manager" "5.41.0" + "@typescript-eslint/type-utils" "5.41.0" + "@typescript-eslint/utils" "5.41.0" debug "^4.3.4" - functional-red-black-tree "^1.0.1" ignore "^5.2.0" regexpp "^3.2.0" semver "^7.3.7" @@ -1906,20 +1863,21 @@ "@typescript-eslint/types" "4.29.2" "@typescript-eslint/visitor-keys" "4.29.2" -"@typescript-eslint/scope-manager@5.33.0": - version "5.33.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.33.0.tgz#509d7fa540a2c58f66bdcfcf278a3fa79002e18d" - integrity sha512-/Jta8yMNpXYpRDl8EwF/M8It2A9sFJTubDo0ATZefGXmOqlaBffEw0ZbkbQ7TNDK6q55NPHFshGBPAZvZkE8Pw== +"@typescript-eslint/scope-manager@5.41.0": + version "5.41.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/scope-manager/-/scope-manager-5.41.0.tgz#28e3a41d626288d0628be14cf9de8d49fc30fadf" + integrity sha512-xOxPJCnuktUkY2xoEZBKXO5DBCugFzjrVndKdUnyQr3+9aDWZReKq9MhaoVnbL+maVwWJu/N0SEtrtEUNb62QQ== dependencies: - "@typescript-eslint/types" "5.33.0" - "@typescript-eslint/visitor-keys" "5.33.0" + "@typescript-eslint/types" "5.41.0" + "@typescript-eslint/visitor-keys" "5.41.0" -"@typescript-eslint/type-utils@5.33.0": - version "5.33.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.33.0.tgz#92ad1fba973c078d23767ce2d8d5a601baaa9338" - integrity sha512-2zB8uEn7hEH2pBeyk3NpzX1p3lF9dKrEbnXq1F7YkpZ6hlyqb2yZujqgRGqXgRBTHWIUG3NGx/WeZk224UKlIA== +"@typescript-eslint/type-utils@5.41.0": + version "5.41.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/type-utils/-/type-utils-5.41.0.tgz#2371601171e9f26a4e6da918a7913f7266890cdf" + integrity sha512-L30HNvIG6A1Q0R58e4hu4h+fZqaO909UcnnPbwKiN6Rc3BUEx6ez2wgN7aC0cBfcAjZfwkzE+E2PQQ9nEuoqfA== dependencies: - "@typescript-eslint/utils" "5.33.0" + "@typescript-eslint/typescript-estree" "5.41.0" + "@typescript-eslint/utils" "5.41.0" debug "^4.3.4" tsutils "^3.21.0" @@ -1928,10 +1886,10 @@ resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-4.29.2.tgz#fc0489c6b89773f99109fb0aa0aaddff21f52fcd" integrity sha512-K6ApnEXId+WTGxqnda8z4LhNMa/pZmbTFkDxEBLQAbhLZL50DjeY0VIDCml/0Y3FlcbqXZrABqrcKxq+n0LwzQ== -"@typescript-eslint/types@5.33.0": - version "5.33.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.33.0.tgz#d41c584831805554b063791338b0220b613a275b" - integrity sha512-nIMt96JngB4MYFYXpZ/3ZNU4GWPNdBbcB5w2rDOCpXOVUkhtNlG2mmm8uXhubhidRZdwMaMBap7Uk8SZMU/ppw== +"@typescript-eslint/types@5.41.0": + version "5.41.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/types/-/types-5.41.0.tgz#6800abebc4e6abaf24cdf220fb4ce28f4ab09a85" + integrity sha512-5BejraMXMC+2UjefDvrH0Fo/eLwZRV6859SXRg+FgbhA0R0l6lDqDGAQYhKbXhPN2ofk2kY5sgGyLNL907UXpA== "@typescript-eslint/typescript-estree@4.29.2": version "4.29.2" @@ -1946,30 +1904,32 @@ semver "^7.3.5" tsutils "^3.21.0" -"@typescript-eslint/typescript-estree@5.33.0": - version "5.33.0" - resolved 
"https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.33.0.tgz#02d9c9ade6f4897c09e3508c27de53ad6bfa54cf" - integrity sha512-tqq3MRLlggkJKJUrzM6wltk8NckKyyorCSGMq4eVkyL5sDYzJJcMgZATqmF8fLdsWrW7OjjIZ1m9v81vKcaqwQ== +"@typescript-eslint/typescript-estree@5.41.0": + version "5.41.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-5.41.0.tgz#bf5c6b3138adbdc73ba4871d060ae12c59366c61" + integrity sha512-SlzFYRwFSvswzDSQ/zPkIWcHv8O5y42YUskko9c4ki+fV6HATsTODUPbRbcGDFYP86gaJL5xohUEytvyNNcXWg== dependencies: - "@typescript-eslint/types" "5.33.0" - "@typescript-eslint/visitor-keys" "5.33.0" + "@typescript-eslint/types" "5.41.0" + "@typescript-eslint/visitor-keys" "5.41.0" debug "^4.3.4" globby "^11.1.0" is-glob "^4.0.3" semver "^7.3.7" tsutils "^3.21.0" -"@typescript-eslint/utils@5.33.0": - version "5.33.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.33.0.tgz#46797461ce3146e21c095d79518cc0f8ec574038" - integrity sha512-JxOAnXt9oZjXLIiXb5ZIcZXiwVHCkqZgof0O8KPgz7C7y0HS42gi75PdPlqh1Tf109M0fyUw45Ao6JLo7S5AHw== +"@typescript-eslint/utils@5.41.0": + version "5.41.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/utils/-/utils-5.41.0.tgz#f41ae5883994a249d00b2ce69f4188f3a23fa0f9" + integrity sha512-QlvfwaN9jaMga9EBazQ+5DDx/4sAdqDkcs05AsQHMaopluVCUyu1bTRUVKzXbgjDlrRAQrYVoi/sXJ9fmG+KLQ== dependencies: "@types/json-schema" "^7.0.9" - "@typescript-eslint/scope-manager" "5.33.0" - "@typescript-eslint/types" "5.33.0" - "@typescript-eslint/typescript-estree" "5.33.0" + "@types/semver" "^7.3.12" + "@typescript-eslint/scope-manager" "5.41.0" + "@typescript-eslint/types" "5.41.0" + "@typescript-eslint/typescript-estree" "5.41.0" eslint-scope "^5.1.1" eslint-utils "^3.0.0" + semver "^7.3.7" "@typescript-eslint/visitor-keys@4.29.2": version "4.29.2" @@ -1979,12 +1939,12 @@ "@typescript-eslint/types" "4.29.2" eslint-visitor-keys "^2.0.0" -"@typescript-eslint/visitor-keys@5.33.0": - version "5.33.0" - resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.33.0.tgz#fbcbb074e460c11046e067bc3384b5d66b555484" - integrity sha512-/XsqCzD4t+Y9p5wd9HZiptuGKBlaZO5showwqODii5C0nZawxWLF+Q6k5wYHBrQv96h6GYKyqqMHCSTqta8Kiw== +"@typescript-eslint/visitor-keys@5.41.0": + version "5.41.0" + resolved "https://registry.yarnpkg.com/@typescript-eslint/visitor-keys/-/visitor-keys-5.41.0.tgz#d3510712bc07d5540160ed3c0f8f213b73e3bcd9" + integrity sha512-vilqeHj267v8uzzakbm13HkPMl7cbYpKVjgFWZPIOHIJHZtinvypUhJ5xBXfWYg4eFKqztbMMpOgFpT9Gfx4fw== dependencies: - "@typescript-eslint/types" "5.33.0" + "@typescript-eslint/types" "5.41.0" eslint-visitor-keys "^3.3.0" "@ungap/promise-all-settled@1.1.2": @@ -2022,6 +1982,11 @@ abort-controller@^3.0.0: dependencies: event-target-shim "^5.0.0" +abortcontroller-polyfill@^1.7.3: + version "1.7.5" + resolved "https://registry.yarnpkg.com/abortcontroller-polyfill/-/abortcontroller-polyfill-1.7.5.tgz#6738495f4e901fbb57b6c0611d0c75f76c485bed" + integrity sha512-JMJ5soJWP18htbbxJjG7bG6yuI6pRhgJ0scHHTfkUjf6wjP912xZWvM+A4sJK3gqd9E8fcPbDnOefbA9Th/FIQ== + abstract-level@^1.0.0, abstract-level@^1.0.2, abstract-level@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/abstract-level/-/abstract-level-1.0.3.tgz#78a67d3d84da55ee15201486ab44c09560070741" @@ -2087,9 +2052,9 @@ acorn@^7.4.0: integrity sha512-nQyp0o1/mNdbTO1PO6kHkwSrmgZ0MT/jCCpNiwbUjGoRN4dlBhqJtoQuCnEOKzgTVwg0ZWiCoQy6SxMebQVh8A== address@^1.0.1: - version "1.2.0" - resolved 
"https://registry.yarnpkg.com/address/-/address-1.2.0.tgz#d352a62c92fee90f89a693eccd2a8b2139ab02d9" - integrity sha512-tNEZYz5G/zYunxFm7sfhAxkXEuLj3K6BKwv6ZURlsF6yiUQ65z0Q2wZW9L5cPUl9ocofGvXOdFYbFHp0+6MOig== + version "1.2.1" + resolved "https://registry.yarnpkg.com/address/-/address-1.2.1.tgz#25bb61095b7522d65b357baa11bc05492d4c8acd" + integrity sha512-B+6bi5D34+fDYENiH5qOlA0cV2rAGKuWZ9LeyUUehbXy8e0VS9e498yO0Jeeh+iM+6KbfudHTFjXw2MmJD4QRA== adm-zip@^0.4.16: version "0.4.16" @@ -2227,11 +2192,16 @@ ansi-styles@~1.0.0: resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-1.0.0.tgz#cb102df1c56f5123eab8b67cd7b98027a0279178" integrity sha512-3iF4FIKdxaVYT3JqQuY3Wat/T2t7TRbbQ94Fu50ZUCbLy4TFbTzr90NOHQodQkNqmeEGCw8WbeP78WNi6SKYUA== -antlr4@4.7.1, antlr4@~4.8.0: +antlr4@4.7.1: version "4.7.1" resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.7.1.tgz#69984014f096e9e775f53dd9744bf994d8959773" integrity sha512-haHyTW7Y9joE5MVs37P2lNYfU2RWBLfcRDD8OWldcdZm5TiCE91B5Xl1oWSwiDUSd4rlExpt2pu1fksYQjRBYQ== +antlr4@~4.8.0: + version "4.8.0" + resolved "https://registry.yarnpkg.com/antlr4/-/antlr4-4.8.0.tgz#f938ec171be7fc2855cd3a533e87647185b32b6a" + integrity sha512-en/MxQ4OkPgGJQ3wD/muzj1uDnFSzdFIhc2+c6bHZokWkuBb6RRvFjpWhPxWLbgQvaEzldJZ0GSQpfSAaE3hqg== + antlr4ts@^0.5.0-alpha.4: version "0.5.0-alpha.4" resolved "https://registry.yarnpkg.com/antlr4ts/-/antlr4ts-0.5.0-alpha.4.tgz#71702865a87478ed0b40c0709f422cf14d51652a" @@ -3074,26 +3044,26 @@ big-integer@1.6.36: integrity sha512-t70bfa7HYEA1D9idDbmuv7YbsbVkQ+Hp+8KFSul4aE5e/i1bjCNIRYJZlA8Q8p0r9T8cF/RVvwUgRA//FydEyg== big.js@^6.0.3: - version "6.2.0" - resolved "https://registry.yarnpkg.com/big.js/-/big.js-6.2.0.tgz#39c60822aecb0f34a1d79a90fe9908a0ddf45e1d" - integrity sha512-paIKvJiAaOYdLt6MfnvxkDo64lTOV257XYJyX3oJnJQocIclUn+48k6ZerH/c5FxWE6DGJu1TKDYis7tqHg9kg== + version "6.2.1" + resolved "https://registry.yarnpkg.com/big.js/-/big.js-6.2.1.tgz#7205ce763efb17c2e41f26f121c420c6a7c2744f" + integrity sha512-bCtHMwL9LeDIozFn+oNhhFoq+yQ3BNdnsLSASUxLciOb1vgvpHsIO1dsENiGMgbb4SkP5TrzWzRiLddn8ahVOQ== bigint-crypto-utils@^3.0.23: - version "3.0.24" - resolved "https://registry.yarnpkg.com/bigint-crypto-utils/-/bigint-crypto-utils-3.0.24.tgz#ae8b4f3d6236a7a34df13d411b7c3bbe6779c5d8" - integrity sha512-TZZ04h0BCfE5kAR5VTiUp+8eWHcclXFCI+4EFL5Y8z0++XJqqUnEjgwzBbVIpMHbSBu3Gjv7VT6msIXJzzSfTg== + version "3.1.7" + resolved "https://registry.yarnpkg.com/bigint-crypto-utils/-/bigint-crypto-utils-3.1.7.tgz#c4c1b537c7c1ab7aadfaecf3edfd45416bf2c651" + integrity sha512-zpCQpIE2Oy5WIQpjC9iYZf8Uh9QqoS51ZCooAcNvzv1AQ3VWdT52D0ksr1+/faeK8HVIej1bxXcP75YcqH3KPA== dependencies: - bigint-mod-arith "^3.0.1" + bigint-mod-arith "^3.1.0" -bigint-mod-arith@^3.0.1: - version "3.0.2" - resolved "https://registry.yarnpkg.com/bigint-mod-arith/-/bigint-mod-arith-3.0.2.tgz#a25a723af3ee3a79d452c370a55e3adc0e3f0cc3" - integrity sha512-tlhD4h/D1sv4pJfZzBesKOlfXRCQTeMMUrGbpc2PAawMAjb/S/OPAQfi667w6COt/UHOfvOW47sCSMaSEj4zIg== +bigint-mod-arith@^3.1.0: + version "3.1.2" + resolved "https://registry.yarnpkg.com/bigint-mod-arith/-/bigint-mod-arith-3.1.2.tgz#658e416bc593a463d97b59766226d0a3021a76b1" + integrity sha512-nx8J8bBeiRR+NlsROFH9jHswW5HO8mgfOSqW0AmjicMMvaONDa8AO+5ViKDUUNytBPWiwfvZP4/Bj4Y3lUfvgQ== bignumber.js@*, bignumber.js@^9.0.0, bignumber.js@^9.0.1, bignumber.js@^9.0.2: - version "9.0.2" - resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.0.2.tgz#71c6c6bed38de64e24a65ebe16cfcf23ae693673" - integrity 
sha512-GAcQvbpsM0pUb0zw1EI0KhQEZ+lRwR5fYaAp3vPOYuP7aDvGy6cVN6XHLauvF8SOga2y0dcLcjt3iQDTSEliyw== + version "9.1.0" + resolved "https://registry.yarnpkg.com/bignumber.js/-/bignumber.js-9.1.0.tgz#8d340146107fe3a6cb8d40699643c302e8773b62" + integrity sha512-4LwHK4nfDOraBCtst+wOWIHbu1vhvAPJK8g8nROd4iuc3PSEjWif/qwbkh8jwCJz6yDBvtU4KPynETgrfh7y3A== bignumber.js@^7.2.1: version "7.2.1" @@ -3161,10 +3131,10 @@ bn.js@^5.0.0, bn.js@^5.1.0, bn.js@^5.1.1, bn.js@^5.1.2, bn.js@^5.1.3, bn.js@^5.2 resolved "https://registry.yarnpkg.com/bn.js/-/bn.js-5.2.1.tgz#0bc527a6a0d18d0aa8d5b0538ce4a77dccfa7b70" integrity sha512-eXRvHzWyYPBuB4NBy0cmYQjGitUrtqwbvlzP3G6VFnNRbsZQIxQ10PbKKHt8gZ/HW/D/747aDl+QkDqg3KQLMQ== -body-parser@1.20.0, body-parser@^1.16.0: - version "1.20.0" - resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.0.tgz#3de69bd89011c11573d7bfee6a64f11b6bd27cc5" - integrity sha512-DfJ+q6EPcGKZD1QWUjSpqp+Q7bDQTsQIF4zfUAtZ6qk+H/3/QRhg9CEp39ss+/T2vw0+HaidC0ecJj/DRLIaKg== +body-parser@1.20.1, body-parser@^1.16.0: + version "1.20.1" + resolved "https://registry.yarnpkg.com/body-parser/-/body-parser-1.20.1.tgz#b1812a8912c195cd371a3ee5e66faa2338a5c668" + integrity sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw== dependencies: bytes "3.1.2" content-type "~1.0.4" @@ -3174,7 +3144,7 @@ body-parser@1.20.0, body-parser@^1.16.0: http-errors "2.0.0" iconv-lite "0.4.24" on-finished "2.4.1" - qs "6.10.3" + qs "6.11.0" raw-body "2.5.1" type-is "~1.6.18" unpipe "1.0.0" @@ -3384,12 +3354,19 @@ buffer@^5.0.5, buffer@^5.2.1, buffer@^5.5.0, buffer@^5.6.0: ieee754 "^1.1.13" bufferutil@^4.0.1: - version "4.0.6" - resolved "https://registry.yarnpkg.com/bufferutil/-/bufferutil-4.0.6.tgz#ebd6c67c7922a0e902f053e5d8be5ec850e48433" - integrity sha512-jduaYOYtnio4aIAyc6UbvPCVcgq7nYpVnucyxr6eCYg/Woad9Hf/oxxBRDnGGjPfjUm6j5O/uBWhIu4iLebFaw== + version "4.0.7" + resolved "https://registry.yarnpkg.com/bufferutil/-/bufferutil-4.0.7.tgz#60c0d19ba2c992dd8273d3f73772ffc894c153ad" + integrity sha512-kukuqc39WOHtdxtw4UScxF/WVnMFVSQVKhtx3AjZJzhd0RGZZldcrfSEbVsWWe6KNH253574cq5F+wpv0G9pJw== dependencies: node-gyp-build "^4.3.0" +busboy@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== + dependencies: + streamsearch "^1.1.0" + bytes@3.1.2: version "3.1.2" resolved "https://registry.yarnpkg.com/bytes/-/bytes-3.1.2.tgz#8b0beeb98605adf1b128fa4386403c009e0221a5" @@ -3425,6 +3402,16 @@ cache-base@^1.0.1: union-value "^1.0.0" unset-value "^1.0.0" +cacheable-lookup@^5.0.3: + version "5.0.4" + resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-5.0.4.tgz#5a6b865b2c44357be3d5ebc2a467b032719a7005" + integrity sha512-2/kNscPhpcxrOigMZzbiWF7dz8ilhb/nIHU3EyZiXWXpeq/au8qJ8VhdftMkty3n7Gj6HIGalQG8oiBNB3AJgA== + +cacheable-lookup@^6.0.4: + version "6.1.0" + resolved "https://registry.yarnpkg.com/cacheable-lookup/-/cacheable-lookup-6.1.0.tgz#0330a543471c61faa4e9035db583aad753b36385" + integrity sha512-KJ/Dmo1lDDhmW2XDPMo+9oiy/CeqosPguPCrgcVzKyZrL6pM1gU2GmPY/xo6OQPTUaA/c0kwHuywB4E6nmT9ww== + cacheable-request@^6.0.0: version "6.1.0" resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-6.1.0.tgz#20ffb8bd162ba4be11e9567d823db651052ca912" @@ -3438,6 +3425,19 @@ cacheable-request@^6.0.0: normalize-url "^4.1.0" responselike "^1.0.2" +cacheable-request@^7.0.2: + version 
"7.0.2" + resolved "https://registry.yarnpkg.com/cacheable-request/-/cacheable-request-7.0.2.tgz#ea0d0b889364a25854757301ca12b2da77f91d27" + integrity sha512-pouW8/FmiPQbuGpkXQ9BAPv/Mo5xDGANgSNXzTzJ8DrKGuXOssM4wIQRjfanNRh3Yu5cfYPvcorqbhg2KIJtew== + dependencies: + clone-response "^1.0.2" + get-stream "^5.1.0" + http-cache-semantics "^4.0.0" + keyv "^4.0.0" + lowercase-keys "^2.0.0" + normalize-url "^6.0.1" + responselike "^2.0.0" + cachedown@1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/cachedown/-/cachedown-1.0.0.tgz#d43f036e4510696b31246d7db31ebf0f7ac32d15" @@ -3521,9 +3521,9 @@ camelcase@^6.0.0: integrity sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA== caniuse-lite@^1.0.30000844: - version "1.0.30001354" - resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001354.tgz#95c5efdb64148bb4870771749b9a619304755ce5" - integrity sha512-mImKeCkyGDAHNywYFA4bqnLAzTUvVkqPvhY4DV47X+Gl2c5Z8c3KNETnXp14GQt11LvxE8AwjzGxJ+rsikiOzg== + version "1.0.30001423" + resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001423.tgz#57176d460aa8cd85ee1a72016b961eb9aca55d91" + integrity sha512-09iwWGOlifvE1XuHokFMP7eR38a0JnajoyL3/i87c8ZjRWRrdKo1fqjNfugfBD0UDBIOz0U+jtNhJ0EPm1VleQ== caseless@^0.12.0, caseless@~0.12.0: version "0.12.0" @@ -3535,7 +3535,7 @@ catering@^2.1.0, catering@^2.1.1: resolved "https://registry.yarnpkg.com/catering/-/catering-2.1.1.tgz#66acba06ed5ee28d5286133982a927de9a04b510" integrity sha512-K7Qy8O9p76sL3/3m7/zLKbRkyOlSZAgzEaLhyj2mXS8PsCud2Eo4hAb8aLtZqHh0QGqLcb9dlJSu6lHRVENm1w== -cbor@^5.0.2, cbor@^5.1.0: +cbor@^5.0.2, cbor@^5.2.0: version "5.2.0" resolved "https://registry.yarnpkg.com/cbor/-/cbor-5.2.0.tgz#4cca67783ccd6de7b50ab4ed62636712f287a67c" integrity sha512-5IMhi9e1QU76ppa5/ajP1BmMWZ2FHkhAhjeVKQ/EFCgYSEaeVaoGtL7cxJskf9oCCk+XjzaIdc3IuU/dbA/o2A== @@ -3550,7 +3550,7 @@ chai-as-promised@^7.1.1: dependencies: check-error "^1.0.2" -chai@^4.2.0: +chai@^4.2.0, chai@^4.3.6: version "4.3.6" resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.6.tgz#ffe4ba2d9fa9d6680cc0b370adae709ec9011e9c" integrity sha512-bbcp3YfHCUzMOvKqsztczerVgBKSsEijCySNlHHbX3VG1nskvqjz5Rfso1gGwD6w6oOV3eI60pKuMOV5MV7p3Q== @@ -3659,9 +3659,9 @@ cheerio-select@^2.1.0: domutils "^3.0.1" cheerio@^1.0.0-rc.2: - version "1.0.0-rc.11" - resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0-rc.11.tgz#1be84be1a126958366bcc57a11648cd9b30a60c2" - integrity sha512-bQwNaDIBKID5ts/DsdhxrjqFXYfLw4ste+wMKqWA8DyKcS4qwsPP4Bk8ZNaTJjvpiX/qW3BT4sU7d6Bh5i+dag== + version "1.0.0-rc.12" + resolved "https://registry.yarnpkg.com/cheerio/-/cheerio-1.0.0-rc.12.tgz#788bf7466506b1c6bf5fae51d24a2c4d62e47683" + integrity sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q== dependencies: cheerio-select "^2.1.0" dom-serializer "^2.0.0" @@ -3670,7 +3670,6 @@ cheerio@^1.0.0-rc.2: htmlparser2 "^8.0.1" parse5 "^7.0.0" parse5-htmlparser2-tree-adapter "^7.0.0" - tslib "^2.4.0" chokidar@3.3.0: version "3.3.0" @@ -3713,9 +3712,9 @@ ci-info@^2.0.0: integrity sha512-5tK7EtrZ0N+OLFMthtqOj4fI2Jeb88C4CAZPu25LDVUgXJ0A3Js4PMGqrn0JU1W0Mh1/Z8wZzYPxqUrXeBboCQ== ci-info@^3.1.0, ci-info@^3.2.0: - version "3.3.2" - resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.3.2.tgz#6d2967ffa407466481c6c90b6e16b3098f080128" - integrity sha512-xmDt/QIAdeZ9+nfdPsaBCpMvHNLFiLdjj59qjqn+6iPe6YmHGQ35sBnQ8uslRBXFmXkiZQOJRjvQeoGppoTjjg== + version "3.5.0" + resolved 
"https://registry.yarnpkg.com/ci-info/-/ci-info-3.5.0.tgz#bfac2a29263de4c829d806b1ab478e35091e171f" + integrity sha512-yH4RezKOGlOhxkmhbeNuC4eYZKAUsEaGtBuBzDDP1eFUKiccDWzBABxBfOx31IDwDIXMTxWuwAxUGModvkbuVw== cids@^0.7.1: version "0.7.5" @@ -3825,10 +3824,19 @@ cliui@^7.0.2: strip-ansi "^6.0.0" wrap-ansi "^7.0.0" +cliui@^8.0.1: + version "8.0.1" + resolved "https://registry.yarnpkg.com/cliui/-/cliui-8.0.1.tgz#0c04b075db02cbfe60dc8e6cf2f5486b1a3608aa" + integrity sha512-BSeNnyus75C4//NQ9gQt1/csTXyo/8Sb+afLAkzAptFuMsod9HFokGNudZpi/oQV73hnVK+sR+5PVRMd+Dr7YQ== + dependencies: + string-width "^4.2.0" + strip-ansi "^6.0.1" + wrap-ansi "^7.0.0" + clone-response@^1.0.2: - version "1.0.2" - resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.2.tgz#d1dc973920314df67fbeb94223b4ee350239e96b" - integrity sha512-yjLXh88P599UOyPTFX0POsd7WxnbsVsGohcwzHOLspIhhpalPw1BcqED8NblyZLKcGrL8dTgMlcaZxV2jAD41Q== + version "1.0.3" + resolved "https://registry.yarnpkg.com/clone-response/-/clone-response-1.0.3.tgz#af2032aa47816399cf5f0a1d0db902f517abb8c3" + integrity sha512-ROoL94jJH2dUVML2Y/5PEDNaSHgeOdSDicUyS7izcF63G6sTc/FTjLub4b8Il9S8S0beOfYt0TaA5qvFK+w0wA== dependencies: mimic-response "^1.0.0" @@ -3945,6 +3953,11 @@ commander@^2.12.2, commander@^2.19.0: resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33" integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ== +commander@^6.0.0: + version "6.2.1" + resolved "https://registry.yarnpkg.com/commander/-/commander-6.2.1.tgz#0792eb682dfbc325999bb2b84fddddba110ac73c" + integrity sha512-U7VdrJFnJgo4xjrHpTzu0yrHPGImdsmD95ZlgYSEajAn2JKzDhDTPG9kBTefmObL2w/ngeZnilk+OV9CG3d7UA== + component-emitter@^1.2.1: version "1.3.0" resolved "https://registry.yarnpkg.com/component-emitter/-/component-emitter-1.3.0.tgz#16e4070fba8ae29b679f2215853ee181ab2eabc0" @@ -3995,11 +4008,9 @@ content-type@~1.0.4: integrity sha512-hIP3EEPs8tB9AT1L+NUqtwOAps4mk2Zob89MWXMHjHWg9milF/j4osnnQLXBCBFBk/tvIG/tUc9mOUJiPBhPXA== convert-source-map@^1.5.1: - version "1.8.0" - resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.8.0.tgz#f3373c32d21b4d780dd8004514684fb791ca4369" - integrity sha512-+OQdjP49zViI/6i7nIJpA8rAl4sV/JdPfU9nZs3VqOwGIgizICvuN2ru6fMd+4llL0tar18UYJXfZ/TWtmhUjA== - dependencies: - safe-buffer "~5.1.1" + version "1.9.0" + resolved "https://registry.yarnpkg.com/convert-source-map/-/convert-source-map-1.9.0.tgz#7faae62353fb4213366d0ca98358d22e8368b05f" + integrity sha512-ASFBup0Mz1uyiIjANan1jzLQami9z1PoYSZCiiYW2FczPbenXc45FZdBZLzOT+r6+iciuEModtmCti+hjaAk0A== cookie-signature@1.0.6: version "1.0.6" @@ -4027,9 +4038,9 @@ copy-descriptor@^0.1.0: integrity sha512-XgZ0pFcakEUlbwQEVNg3+QAis1FyTL3Qel9FYy8pSkQqoG3PNoT0bOCQtOXcOkur21r2Eq2kI+IE+gsmAEVlYw== core-js-pure@^3.0.1: - version "3.23.1" - resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.23.1.tgz#0b27e4c3ad46178b84e790dbbb81987218ab82ad" - integrity sha512-3qNgf6TqI3U1uhuSYRzJZGfFd4T+YlbyVPl+jgRiKjdZopvG4keZQwWZDAWpu1UH9nCgTpUzIV3GFawC7cJsqg== + version "3.26.0" + resolved "https://registry.yarnpkg.com/core-js-pure/-/core-js-pure-3.26.0.tgz#7ad8a5dd7d910756f3124374b50026e23265ca9a" + integrity sha512-LiN6fylpVBVwT8twhhluD9TzXmZQQsr2I2eIKtWNbZI1XMfBT7CV18itaN6RA7EtQd/SDdRx/wzvAShX2HvhQA== core-js@^2.4.0, core-js@^2.5.0: version "2.6.12" @@ -4108,6 +4119,13 @@ cross-fetch@^2.1.0, cross-fetch@^2.1.1: node-fetch "^2.6.7" whatwg-fetch "^2.0.4" +cross-fetch@^3.1.4: + version 
"3.1.5" + resolved "https://registry.yarnpkg.com/cross-fetch/-/cross-fetch-3.1.5.tgz#e1389f44d9e7ba767907f7af8454787952ab534f" + integrity sha512-lvb1SBsI0Z7GDwmuid+mU3kWVBwTVUbe7S0H52yaaAdQOXq2YktTCZdlAcNKFzE6QtRz0snpw9bNiPeOIkkQvw== + dependencies: + node-fetch "2.6.7" + cross-spawn@^5.1.0: version "5.1.0" resolved "https://registry.yarnpkg.com/cross-spawn/-/cross-spawn-5.1.0.tgz#e8bd0efee58fcff6f8f94510a0a554bbfa235449" @@ -4243,7 +4261,7 @@ death@^1.1.0: resolved "https://registry.yarnpkg.com/death/-/death-1.1.0.tgz#01aa9c401edd92750514470b8266390c66c67318" integrity sha512-vsV6S4KVHvTGxbEcij7hkWRv0It+sGGWVOM67dQde/o5Xjnr+KmLjxWJii2uEObIrt1CcM9w0Yaovx+iOlIL+w== -debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0, debug@^2.6.8, debug@^2.6.9: +debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.8, debug@^2.6.9: version "2.6.9" resolved "https://registry.yarnpkg.com/debug/-/debug-2.6.9.tgz#5d128515df134ff327e90a4c93f4e077a536341f" integrity sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA== @@ -4306,13 +4324,20 @@ decode-uri-component@^0.2.0: resolved "https://registry.yarnpkg.com/decode-uri-component/-/decode-uri-component-0.2.0.tgz#eb3913333458775cb84cd1a1fae062106bb87545" integrity sha512-hjf+xovcEn31w/EUYdTXQh/8smFL/dzYjohQGEIgjyNavaJfBY2p5F527Bo1VPATxv0VYTUC2bOcXvqFwk78Og== -decompress-response@^3.2.0, decompress-response@^3.3.0: +decompress-response@^3.3.0: version "3.3.0" resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-3.3.0.tgz#80a4dd323748384bfa248083622aedec982adff3" integrity sha512-BzRPQuY1ip+qDonAOz42gRm/pg9F768C+npV/4JOsxRC2sq+Rlk+Q4ZCAsOhnIaMrgarILY+RMUIvMmmX1qAEA== dependencies: mimic-response "^1.0.0" +decompress-response@^6.0.0: + version "6.0.0" + resolved "https://registry.yarnpkg.com/decompress-response/-/decompress-response-6.0.0.tgz#ca387612ddb7e104bd16d85aab00d5ecf09c66fc" + integrity sha512-aW35yZM6Bb/4oJlZncMH2LCoZtJXTRxES17vE3hoRiowU2kWHaJKFkSBDnDR+cm9J+9QhXmREyIfv0pji9ejCQ== + dependencies: + mimic-response "^3.1.0" + deep-eql@^3.0.1: version "3.0.1" resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df" @@ -4321,9 +4346,9 @@ deep-eql@^3.0.1: type-detect "^4.0.0" deep-eql@^4.0.1: - version "4.0.1" - resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-4.0.1.tgz#2b65bc89491d193780c452edee2144a91bb0a445" - integrity sha512-D/Oxqobjr+kxaHsgiQBZq9b6iAWdEj5W/JdJm8deNduAPc9CwXQ3BJJCuEqlrPXcy45iOMkGPZ0T81Dnz7UDCA== + version "4.1.1" + resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-4.1.1.tgz#b1154ea8c95012d9f23f37f4eecfd2ee8e5b9323" + integrity sha512-rc6HkZswtl+KMi/IODZ8k7C/P37clC2Rf1HYI11GqdbgvggIyHjsU5MdjlTlaP6eu24c0sR3mcW2SqsVZ1sXUw== dependencies: type-detect "^4.0.0" @@ -4350,9 +4375,9 @@ deep-is@^0.1.3, deep-is@~0.1.3: integrity sha512-oIPzksmTg4/MriiaYGO+okXDT7ztn/w3Eptv/+gSIdMdKsJo0u4CfYNFJPy+4SKMuCqGw2wxnA+URMg3t8a/bQ== defaults@^1.0.3: - version "1.0.3" - resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.3.tgz#c656051e9817d9ff08ed881477f3fe4019f3ef7d" - integrity sha512-s82itHOnYrN0Ib8r+z7laQz3sdE+4FP3d9Q7VLO7U+KRT+CR0GsWuyHxzdAY82I7cXv0G/twrqomTJLOssO5HA== + version "1.0.4" + resolved "https://registry.yarnpkg.com/defaults/-/defaults-1.0.4.tgz#b0b02062c1e2aa62ff5d9528f0f98baa90978d7a" + integrity sha512-eFuaLoy/Rxalv2kr+lqMlUnrDWV+3j4pljOIJgLIhI058IQfWJ7vXhyEIHu+HtC738klGALYxOKDO0bQP3tg8A== dependencies: clone "^1.0.2" @@ -4361,6 +4386,11 @@ defer-to-connect@^1.0.1: resolved 
"https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-1.1.3.tgz#331ae050c08dcf789f8c83a7b81f0ed94f4ac591" integrity sha512-0ISdNousHvZT2EiFlZeZAHBUvSxmKswVCEf8hW7KWgG4a8MVEu/3Vb6uWYozkjylyCxe0JBIiRB1jV45S70WVQ== +defer-to-connect@^2.0.0, defer-to-connect@^2.0.1: + version "2.0.1" + resolved "https://registry.yarnpkg.com/defer-to-connect/-/defer-to-connect-2.0.1.tgz#8016bdb4143e4632b77a3449c6236277de520587" + integrity sha512-4tvttepXG1VaYGrRibk5EwJd1t4udunSOVMdLSAL6mId1ix438oPwPZMALY41FCijukO1L0twNcGsdzS7dHgDg== + deferred-leveldown@~1.2.1: version "1.2.2" resolved "https://registry.yarnpkg.com/deferred-leveldown/-/deferred-leveldown-1.2.2.tgz#3acd2e0b75d1669924bc0a4b642851131173e1eb" @@ -4407,9 +4437,9 @@ define-property@^2.0.2: isobject "^3.0.1" defined@~1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693" - integrity sha512-Y2caI5+ZwS5c3RiNDJ6u53VhQHv+hHKwhkI1iHvceKUHw9Df6EK2zRLfjejRgMuCuxK7PfSWIMwWecceVvThjQ== + version "1.0.1" + resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.1.tgz#c0b9db27bfaffd95d6f61399419b893df0f91ebf" + integrity sha512-hsBd2qSVCRE+5PmNdHt1uzyrFu5d3RwmFDKzyNZMFq/EwDNJF7Ee5+D5oEKF0hU6LhtoUF1macFvOe4AskQC1Q== delayed-stream@~1.0.0: version "1.0.0" @@ -4451,13 +4481,18 @@ detect-indent@^6.0.0: resolved "https://registry.yarnpkg.com/detect-indent/-/detect-indent-6.1.0.tgz#592485ebbbf6b3b1ab2be175c8393d04ca0d57e6" integrity sha512-reYkTUJAZb9gUuZ2RvVCNhVHdg62RHnJ7WJl8ftMi4diZ6NWlciOzQN88pUhSELEwflJht4oQDv0F0BMlwaYtA== +detect-node@^2.0.4: + version "2.1.0" + resolved "https://registry.yarnpkg.com/detect-node/-/detect-node-2.1.0.tgz#c9c70775a49c3d03bc2c06d9a73be550f978f8b1" + integrity sha512-T0NIuQpnTvFDATNuHN5roPwSBG83rFsuO+MXXH9/3N1eFbn4wcPjttvjMLEPWJ0RGUYgQE7cGgS3tNxbqCGM7g== + detect-port@^1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/detect-port/-/detect-port-1.3.0.tgz#d9c40e9accadd4df5cac6a782aefd014d573d1f1" - integrity sha512-E+B1gzkl2gqxt1IhUzwjrxBKRqx1UzC3WLONHinn8S3T6lwV/agVCyitiFOsGJ/eYuEUBvD71MZHy3Pv1G9doQ== + version "1.5.1" + resolved "https://registry.yarnpkg.com/detect-port/-/detect-port-1.5.1.tgz#451ca9b6eaf20451acb0799b8ab40dff7718727b" + integrity sha512-aBzdj76lueB6uUst5iAs7+0H/oOjqI5D16XUWxlWMIMROhcM0rfsNVk93zTngq1dDNpoXRr++Sus7ETAExppAQ== dependencies: address "^1.0.1" - debug "^2.6.0" + debug "4" diff@3.5.0: version "3.5.0" @@ -4580,9 +4615,9 @@ dotignore@~0.1.2: minimatch "^3.0.4" duplexer3@^0.1.4: - version "0.1.4" - resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.4.tgz#ee01dd1cac0ed3cbc7fdbea37dc0a8f1ce002ce2" - integrity sha512-CEj8FwwNA4cVH2uFCoHUrmojhYh1vmCdOaneKJXwkeY1i9jnlslVo9dx+hQ5Hl9GnH/Bwy/IjxAyOePyPKYnzA== + version "0.1.5" + resolved "https://registry.yarnpkg.com/duplexer3/-/duplexer3-0.1.5.tgz#0b5e4d7bad5de8901ea4440624c8e1d20099217e" + integrity sha512-1A8za6ws41LQgv9HrE/66jyC5yuSjQ3L/KOpFtoBilsAK2iA2wuS5rTt1OCzIvtS2V7nVmedsUU+DGRcjBmOYA== ecc-jsbn@~0.1.1: version "0.1.2" @@ -4598,9 +4633,9 @@ ee-first@1.1.1: integrity sha512-WMwm9LhRUo+WUaRN+vRuETqG89IgZphVSNkdFgeb6sS/E4OrDIN7t48CAewSHXc6C8lefD8KKfr5vY61brQlow== electron-to-chromium@^1.3.47: - version "1.4.156" - resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.156.tgz#fc398e1bfbe586135351ebfaf198473a82923af5" - integrity sha512-/Wj5NC7E0wHaMCdqxWz9B0lv7CcycDTiHyXCtbbu3pXM9TV2AOp8BtMqkVuqvJNdEvltBG6LxT2Q+BxY4LUCIA== + version "1.4.284" + resolved 
"https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.4.284.tgz#61046d1e4cab3a25238f6bf7413795270f125592" + integrity sha512-M8WEXFuKXMYMVr45fo8mq0wUrrJHheiKZf6BArTKk9ZBYCKJEOU5H8cdWgDT+qCVZf7Na4lVUaZsA+h6uA9+PA== elliptic@6.5.4, elliptic@^6.4.0, elliptic@^6.5.2, elliptic@^6.5.3, elliptic@^6.5.4: version "6.5.4" @@ -4662,10 +4697,10 @@ enquirer@^2.3.0, enquirer@^2.3.4, enquirer@^2.3.5: dependencies: ansi-colors "^4.1.1" -entities@^4.2.0, entities@^4.3.0: - version "4.3.0" - resolved "https://registry.yarnpkg.com/entities/-/entities-4.3.0.tgz#62915f08d67353bb4eb67e3d62641a4059aec656" - integrity sha512-/iP1rZrSEJ0DTlPiX+jbzlA3eVkY/e8L8SozroF395fIqE3TYF/Nz7YOMAawta+vLmyJ/hkGNNPcSbMADCCXbg== +entities@^4.2.0, entities@^4.3.0, entities@^4.4.0: + version "4.4.0" + resolved "https://registry.yarnpkg.com/entities/-/entities-4.4.0.tgz#97bdaba170339446495e653cfd2db78962900174" + integrity sha512-oYp7156SP8LkeGD0GF85ad1X9Ai79WtRsZ2gxJqtBuzH+98YUV6jkHEKlZkMbcrjJjIVJNIDP/3WL9wQkoPbWA== env-paths@^2.2.0: version "2.2.1" @@ -4687,30 +4722,31 @@ error-ex@^1.2.0, error-ex@^1.3.1: is-arrayish "^0.2.1" es-abstract@^1.19.0, es-abstract@^1.19.1, es-abstract@^1.19.2, es-abstract@^1.19.5, es-abstract@^1.20.0, es-abstract@^1.20.1: - version "1.20.1" - resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.1.tgz#027292cd6ef44bd12b1913b828116f54787d1814" - integrity sha512-WEm2oBhfoI2sImeM4OF2zE2V3BYdSF+KnSi9Sidz51fQHd7+JuF8Xgcj9/0o+OWeIeIS/MiuNnlruQrJf16GQA== + version "1.20.4" + resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.20.4.tgz#1d103f9f8d78d4cf0713edcd6d0ed1a46eed5861" + integrity sha512-0UtvRN79eMe2L+UNEF1BwRe364sj/DXhQ/k5FmivgoSdpM90b8Jc0mDzKMGo7QS0BVbOP/bTwBKNnDc9rNzaPA== dependencies: call-bind "^1.0.2" es-to-primitive "^1.2.1" function-bind "^1.1.1" function.prototype.name "^1.1.5" - get-intrinsic "^1.1.1" + get-intrinsic "^1.1.3" get-symbol-description "^1.0.0" has "^1.0.3" has-property-descriptors "^1.0.0" has-symbols "^1.0.3" internal-slot "^1.0.3" - is-callable "^1.2.4" + is-callable "^1.2.7" is-negative-zero "^2.0.2" is-regex "^1.1.4" is-shared-array-buffer "^1.0.2" is-string "^1.0.7" is-weakref "^1.0.2" - object-inspect "^1.12.0" + object-inspect "^1.12.2" object-keys "^1.1.1" - object.assign "^4.1.2" + object.assign "^4.1.4" regexp.prototype.flags "^1.4.3" + safe-regex-test "^1.0.0" string.prototype.trimend "^1.0.5" string.prototype.trimstart "^1.0.5" unbox-primitive "^1.0.2" @@ -4737,9 +4773,9 @@ es-to-primitive@^1.2.1: is-symbol "^1.0.2" es5-ext@^0.10.35, es5-ext@^0.10.50: - version "0.10.61" - resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.61.tgz#311de37949ef86b6b0dcea894d1ffedb909d3269" - integrity sha512-yFhIqQAzu2Ca2I4SE2Au3rxVfmohU9Y7wqGR+s7+H7krk26NXhIRAZDgqd6xqjCEFUomDEA3/Bo/7fKmIkW1kA== + version "0.10.62" + resolved "https://registry.yarnpkg.com/es5-ext/-/es5-ext-0.10.62.tgz#5e6adc19a6da524bf3d1e02bbc8960e5eb49a9a5" + integrity sha512-BHLqn0klhEpnOKSrzn/Xsz2UIW8j+cGmo9JLzr8BiUapV8hPL9+FliFqjwr9ngW7jWdnxv6eO+/LqyhJVqgrjA== dependencies: es6-iterator "^2.0.3" es6-symbol "^3.1.3" @@ -4754,6 +4790,11 @@ es6-iterator@^2.0.3: es5-ext "^0.10.35" es6-symbol "^3.1.1" +es6-promise@^4.2.8: + version "4.2.8" + resolved "https://registry.yarnpkg.com/es6-promise/-/es6-promise-4.2.8.tgz#4eb21594c972bc40553d276e510539143db53e0a" + integrity sha512-HJDGx5daxeIvxdBxvG2cb9g4tEvwIk3i8+nhX0yGrYmZUzbkdg8QbDevheDB8gd0//uPj4c1EQua8Q+MViT0/w== + es6-symbol@^3.1.1, es6-symbol@^3.1.3: version "3.1.3" resolved 
"https://registry.yarnpkg.com/es6-symbol/-/es6-symbol-3.1.3.tgz#bad5d3c1bcdac28269f4cb331e431c78ac705d18" @@ -4808,12 +4849,11 @@ eslint-import-resolver-node@^0.3.6: resolve "^1.20.0" eslint-module-utils@^2.6.2: - version "2.7.3" - resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.3.tgz#ad7e3a10552fdd0642e1e55292781bd6e34876ee" - integrity sha512-088JEC7O3lDZM9xGe0RerkOMd0EjFl+Yvd1jPWIkMT5u3H9+HC34mWWPnqPrN13gieT9pBOO+Qt07Nb/6TresQ== + version "2.7.4" + resolved "https://registry.yarnpkg.com/eslint-module-utils/-/eslint-module-utils-2.7.4.tgz#4f3e41116aaf13a20792261e61d3a2e7e0583974" + integrity sha512-j4GT+rqzCoRKHwURX7pddtIPGySnX9Si/cgMI5ztrcqOPtk5dDEeZ34CQVPphnqkJytlc97Vuk05Um2mJ3gEQA== dependencies: debug "^3.2.7" - find-up "^2.1.0" eslint-plugin-import@2.24.1: version "2.24.1" @@ -5076,7 +5116,7 @@ eth-ens-namehash@2.0.8, eth-ens-namehash@^2.0.0, eth-ens-namehash@^2.0.8: idna-uts46-hx "^2.3.1" js-sha3 "^0.5.7" -eth-gas-reporter@^0.2.24: +eth-gas-reporter@^0.2.25: version "0.2.25" resolved "https://registry.yarnpkg.com/eth-gas-reporter/-/eth-gas-reporter-0.2.25.tgz#546dfa946c1acee93cb1a94c2a1162292d6ff566" integrity sha512-1fRgyE4xUB8SoqLgN3eDfpDfwEfRxh2Sz1b7wzFbyQA+9TekMmvSjjoRu9SKcSVyK+vLkLIsVbJDsTWjw195OQ== @@ -5240,12 +5280,12 @@ ethereum-cryptography@0.1.3, ethereum-cryptography@^0.1.3: setimmediate "^1.0.5" ethereum-cryptography@^1.0.3: - version "1.1.0" - resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-1.1.0.tgz#7048d184ff365a5255ced5cc9eb7682a273c4db7" - integrity sha512-wyNVTBR4wIR2yoXdMv4Qt44mTVBpPgSW/DQCTmNO6nQluwpyrAIvmL4mxPbziFuc6VWJQa3rwUxn0nUFU03nyQ== + version "1.1.2" + resolved "https://registry.yarnpkg.com/ethereum-cryptography/-/ethereum-cryptography-1.1.2.tgz#74f2ac0f0f5fe79f012c889b3b8446a9a6264e6d" + integrity sha512-XDSJlg4BD+hq9N2FjvotwUET9Tfxpxc3kWGE2AqUG5vcbeunnbImVk3cj6e/xT3phdW21mE8R5IugU4fspQDcQ== dependencies: - "@noble/hashes" "1.1.1" - "@noble/secp256k1" "1.6.0" + "@noble/hashes" "1.1.2" + "@noble/secp256k1" "1.6.3" "@scure/bip32" "1.1.0" "@scure/bip39" "1.1.0" @@ -5494,40 +5534,40 @@ ethers@^4.0.0-beta.1, ethers@^4.0.32, ethers@^4.0.40: xmlhttprequest "1.8.0" ethers@^5.0.0, ethers@^5.0.1, ethers@^5.0.13, ethers@^5.0.2, ethers@^5.0.8, ethers@^5.4.7, ethers@^5.5.2: - version "5.6.8" - resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.6.8.tgz#d36b816b4896341a80a8bbd2a44e8cb6e9b98dd4" - integrity sha512-YxIGaltAOdvBFPZwIkyHnXbW40f1r8mHUgapW6dxkO+6t7H6wY8POUn0Kbxrd/N7I4hHxyi7YCddMAH/wmho2w== - dependencies: - "@ethersproject/abi" "5.6.3" - "@ethersproject/abstract-provider" "5.6.1" - "@ethersproject/abstract-signer" "5.6.2" - "@ethersproject/address" "5.6.1" - "@ethersproject/base64" "5.6.1" - "@ethersproject/basex" "5.6.1" - "@ethersproject/bignumber" "5.6.2" - "@ethersproject/bytes" "5.6.1" - "@ethersproject/constants" "5.6.1" - "@ethersproject/contracts" "5.6.2" - "@ethersproject/hash" "5.6.1" - "@ethersproject/hdnode" "5.6.2" - "@ethersproject/json-wallets" "5.6.1" - "@ethersproject/keccak256" "5.6.1" - "@ethersproject/logger" "5.6.0" - "@ethersproject/networks" "5.6.3" - "@ethersproject/pbkdf2" "5.6.1" - "@ethersproject/properties" "5.6.0" - "@ethersproject/providers" "5.6.8" - "@ethersproject/random" "5.6.1" - "@ethersproject/rlp" "5.6.1" - "@ethersproject/sha2" "5.6.1" - "@ethersproject/signing-key" "5.6.2" - "@ethersproject/solidity" "5.6.1" - "@ethersproject/strings" "5.6.1" - "@ethersproject/transactions" "5.6.2" - "@ethersproject/units" "5.6.1" - "@ethersproject/wallet" 
"5.6.2" - "@ethersproject/web" "5.6.1" - "@ethersproject/wordlists" "5.6.1" + version "5.7.2" + resolved "https://registry.yarnpkg.com/ethers/-/ethers-5.7.2.tgz#3a7deeabbb8c030d4126b24f84e525466145872e" + integrity sha512-wswUsmWo1aOK8rR7DIKiWSw9DbLWe6x98Jrn8wcTflTVvaXhAMaB5zGAXy0GYQEQp9iO1iSHWVyARQm11zUtyg== + dependencies: + "@ethersproject/abi" "5.7.0" + "@ethersproject/abstract-provider" "5.7.0" + "@ethersproject/abstract-signer" "5.7.0" + "@ethersproject/address" "5.7.0" + "@ethersproject/base64" "5.7.0" + "@ethersproject/basex" "5.7.0" + "@ethersproject/bignumber" "5.7.0" + "@ethersproject/bytes" "5.7.0" + "@ethersproject/constants" "5.7.0" + "@ethersproject/contracts" "5.7.0" + "@ethersproject/hash" "5.7.0" + "@ethersproject/hdnode" "5.7.0" + "@ethersproject/json-wallets" "5.7.0" + "@ethersproject/keccak256" "5.7.0" + "@ethersproject/logger" "5.7.0" + "@ethersproject/networks" "5.7.1" + "@ethersproject/pbkdf2" "5.7.0" + "@ethersproject/properties" "5.7.0" + "@ethersproject/providers" "5.7.2" + "@ethersproject/random" "5.7.0" + "@ethersproject/rlp" "5.7.0" + "@ethersproject/sha2" "5.7.0" + "@ethersproject/signing-key" "5.7.0" + "@ethersproject/solidity" "5.7.0" + "@ethersproject/strings" "5.7.0" + "@ethersproject/transactions" "5.7.0" + "@ethersproject/units" "5.7.0" + "@ethersproject/wallet" "5.7.0" + "@ethersproject/web" "5.7.1" + "@ethersproject/wordlists" "5.7.0" ethjs-abi@0.1.8: version "0.1.8" @@ -5604,13 +5644,13 @@ expand-brackets@^2.1.4: to-regex "^3.0.1" express@^4.14.0: - version "4.18.1" - resolved "https://registry.yarnpkg.com/express/-/express-4.18.1.tgz#7797de8b9c72c857b9cd0e14a5eea80666267caf" - integrity sha512-zZBcOX9TfehHQhtupq57OF8lFZ3UZi08Y97dwFCkD8p9d/d2Y3M+ykKcwaMDEL+4qyUolgBDX6AblpR3fL212Q== + version "4.18.2" + resolved "https://registry.yarnpkg.com/express/-/express-4.18.2.tgz#3fabe08296e930c796c19e3c516979386ba9fd59" + integrity sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ== dependencies: accepts "~1.3.8" array-flatten "1.1.1" - body-parser "1.20.0" + body-parser "1.20.1" content-disposition "0.5.4" content-type "~1.0.4" cookie "0.5.0" @@ -5629,7 +5669,7 @@ express@^4.14.0: parseurl "~1.3.3" path-to-regexp "0.1.7" proxy-addr "~2.0.7" - qs "6.10.3" + qs "6.11.0" range-parser "~1.2.1" safe-buffer "5.2.1" send "0.18.0" @@ -5641,11 +5681,11 @@ express@^4.14.0: vary "~1.1.2" ext@^1.1.2: - version "1.6.0" - resolved "https://registry.yarnpkg.com/ext/-/ext-1.6.0.tgz#3871d50641e874cc172e2b53f919842d19db4c52" - integrity sha512-sdBImtzkq2HpkdRLtlLWDa6w4DX22ijZLKx8BMPUuKe1c5lbN6xwQDQCxSfxBQnHZ13ls/FH0MQZx/q/gr6FQg== + version "1.7.0" + resolved "https://registry.yarnpkg.com/ext/-/ext-1.7.0.tgz#0ea4383c0103d60e70be99e9a7f11027a33c4f5f" + integrity sha512-6hxeJYaL110a9b5TEJSj0gojyHQAmA2ch5Os+ySCiA1QGdS697XWY1pzsrSjqA9LDEEgdB/KypIlR59RcLuHYw== dependencies: - type "^2.5.0" + type "^2.7.2" extend-shallow@^2.0.1: version "2.0.1" @@ -5712,15 +5752,10 @@ fake-merkle-patricia-tree@^1.0.1: dependencies: checkpoint-store "^1.1.0" -faker@5.5.3: - version "5.5.3" - resolved "https://registry.yarnpkg.com/faker/-/faker-5.5.3.tgz#c57974ee484431b25205c2c8dc09fda861e51e0e" - integrity sha512-wLTv2a28wjUyWkbnX7u/ABZBkUkIF2fCd73V6P2oFqEGEktDfzWx4UxrSqtPRw0xPRAcjeAOIiJWqZm3pP4u3g== - -fast-check@^2.12.1: - version "2.25.0" - resolved "https://registry.yarnpkg.com/fast-check/-/fast-check-2.25.0.tgz#5146601851bf3be0953bd17eb2b7d547936c6561" - integrity 
sha512-wRUT2KD2lAmT75WNIJIHECawoUUMHM0I5jrlLXGtGeqmPL8jl/EldUDjY1VCp6fDY8yflyfUeIOsOBrIbIiArg== +fast-check@3.1.1: + version "3.1.1" + resolved "https://registry.yarnpkg.com/fast-check/-/fast-check-3.1.1.tgz#72c5ae7022a4e86504762e773adfb8a5b0b01252" + integrity sha512-3vtXinVyuUKCKFKYcwXhGE6NtGWkqF8Yh3rvMZNzmwz8EPrgoc/v4pDdLHyLnCyCI5MZpZZkDEwFyXyEONOxpA== dependencies: pure-rand "^5.0.1" @@ -5740,9 +5775,9 @@ fast-diff@^1.1.2: integrity sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w== fast-glob@^3.0.3, fast-glob@^3.2.9: - version "3.2.11" - resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.11.tgz#a1172ad95ceb8a16e20caa5c5e56480e5129c1d9" - integrity sha512-xrO3+1bxSo3ZVHAnqzyuewYT6aMFHRAd4Kcs92MAonjwQZLsK9d0SF1IyQ3k5PoirxTW0Oe/RqFgMQ6TcNE5Ew== + version "3.2.12" + resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.12.tgz#7f39ec99c2e6ab030337142da9e0c18f37afae80" + integrity sha512-DVj4CQIYYow0BlaelwK1pHl5n5cRSJfM60UA0zK891sVInoPri2Ekj7+e1CT3/3qxXenpI+nBBmQAcJPJgaj4w== dependencies: "@nodelib/fs.stat" "^2.0.2" "@nodelib/fs.walk" "^1.2.3" @@ -5760,6 +5795,11 @@ fast-levenshtein@^2.0.6, fast-levenshtein@~2.0.6: resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== +fast-safe-stringify@^2.0.7: + version "2.1.1" + resolved "https://registry.yarnpkg.com/fast-safe-stringify/-/fast-safe-stringify-2.1.1.tgz#c406a83b6e70d9e35ce3b30a81141df30aeba884" + integrity sha512-W+KJc2dmILlPplD/H4K9l9LcAHAfPtP6BY84uVLXQ6Evcz9Lcg33Y2z1IVblT6xdY54PXYVHEv+0Wpq8Io6zkA== + fastq@^1.6.0: version "1.13.0" resolved "https://registry.yarnpkg.com/fastq/-/fastq-1.13.0.tgz#616760f88a7526bdfc596b7cab8c18938c36b98c" @@ -5936,9 +5976,9 @@ flatted@^2.0.0: integrity sha512-r5wGx7YeOwNWNlCA0wQ86zKyDLMQr+/RB8xy74M4hTphfmjlijTSSXGuH8rnvKZnfT9i+75zmd8jcKdMR4O6jA== flatted@^3.1.0: - version "3.2.5" - resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.5.tgz#76c8584f4fc843db64702a6bd04ab7a8bd666da3" - integrity sha512-WIWGi2L3DyTUvUrwRKgGi9TwxQMUEqPOPQBVi71R96jZXJdFskXEmf54BoZaS1kknGODoIGASGEzBUYdyMCBJg== + version "3.2.7" + resolved "https://registry.yarnpkg.com/flatted/-/flatted-3.2.7.tgz#609f39207cb614b89d0765b477cb2d437fbf9787" + integrity sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ== flow-stoplight@^1.0.0: version "1.0.0" @@ -5946,9 +5986,9 @@ flow-stoplight@^1.0.0: integrity sha512-rDjbZUKpN8OYhB0IE/vY/I8UWO/602IIJEU/76Tv4LvYnwHCk0BCsvz4eRr9n+FQcri7L5cyaXOo0+/Kh4HisA== follow-redirects@^1.12.1, follow-redirects@^1.14.0: - version "1.15.1" - resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.1.tgz#0ca6a452306c9b276e4d3127483e29575e207ad5" - integrity sha512-yLAMQs+k0b2m7cVxpS1VKJVvoz7SS9Td1zss3XRwXj+ZDH00RJgnuLx7E44wx02kQLrdM3aOOy+FpzS7+8OizA== + version "1.15.2" + resolved "https://registry.yarnpkg.com/follow-redirects/-/follow-redirects-1.15.2.tgz#b460864144ba63f2681096f274c4e57026da2c13" + integrity sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA== for-each@^0.3.3, for-each@~0.3.3: version "0.3.3" @@ -5967,6 +6007,11 @@ forever-agent@~0.6.1: resolved "https://registry.yarnpkg.com/forever-agent/-/forever-agent-0.6.1.tgz#fbc71f0c41adeb37f96c577ad1ed42d8fdacca91" integrity 
sha512-j0KLYPhm6zeac4lz3oJ3o65qvgQCcPubiyotZrXqEaG4hNagNYO8qdlUrX5vwqv9ohqeT/Z3j6+yW067yWWdUw== +form-data-encoder@1.7.1: + version "1.7.1" + resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.1.tgz#ac80660e4f87ee0d3d3c3638b7da8278ddb8ec96" + integrity sha512-EFRDrsMm/kyqbTQocNvRXMLjc7Es2Vk+IQFx/YW7hkUH1eBl4J1fqiP34l74Yt0pFLCNpc06fkbVk00008mzjg== + form-data@^2.2.0: version "2.5.1" resolved "https://registry.yarnpkg.com/form-data/-/form-data-2.5.1.tgz#f2cbec57b5e59e23716e128fe44d4e5dd23895f4" @@ -6037,6 +6082,15 @@ fs-extra@^0.30.0: path-is-absolute "^1.0.0" rimraf "^2.2.8" +fs-extra@^10.0.0: + version "10.1.0" + resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-10.1.0.tgz#02873cfbc4084dde127eaa5f9905eef2325d1abf" + integrity sha512-oRXApq54ETRj4eMiFzGnHWGy+zo5raudjuxN0b8H7s/RU2oW0Wvsx9O0ACRN/kRq9E8Vu/ReskGB5o3ji+FzHQ== + dependencies: + graceful-fs "^4.2.0" + jsonfile "^6.0.1" + universalify "^2.0.0" + fs-extra@^4.0.2, fs-extra@^4.0.3: version "4.0.3" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-4.0.3.tgz#0d852122e5bc5beb453fb028e9c0c9bf36340c94" @@ -6064,7 +6118,7 @@ fs-extra@^8.1.0: jsonfile "^4.0.0" universalify "^0.1.0" -fs-extra@^9.1.0: +fs-extra@^9.0.0, fs-extra@^9.1.0: version "9.1.0" resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.1.0.tgz#5954460c764a8da2094ba3554bf839e6b9a7c86d" integrity sha512-hcg3ZmepS30/7BSFqRvoo3DOMQu7IjqxO5nCDt+zM9XWjb33Wg7ziNT+Qvqbuc3+gWpzO02JubVyk2G4Zvo1OQ== @@ -6187,10 +6241,10 @@ get-func-name@^2.0.0: resolved "https://registry.yarnpkg.com/get-func-name/-/get-func-name-2.0.0.tgz#ead774abee72e20409433a066366023dd6887a41" integrity sha512-Hm0ixYtaSZ/V7C8FJrtZIuBBI+iSgL+1Aq82zSu8VQNB4S3Gk8e7Qs3VwBDJAhmRZcFqkl3tQu36g/Foh5I5ig== -get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1: - version "1.1.2" - resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.2.tgz#336975123e05ad0b7ba41f152ee4aadbea6cf598" - integrity sha512-Jfm3OyCxHh9DJyc28qGk+JmfkpO41A4XkneDSujN9MDXrm4oDKdHvndhZ2dN94+ERNfkYJWDclW6k2L/ZGHjXA== +get-intrinsic@^1.0.2, get-intrinsic@^1.1.0, get-intrinsic@^1.1.1, get-intrinsic@^1.1.3: + version "1.1.3" + resolved "https://registry.yarnpkg.com/get-intrinsic/-/get-intrinsic-1.1.3.tgz#063c84329ad93e83893c7f4f243ef63ffa351385" + integrity sha512-QJVz1Tj7MS099PevUG5jvnt9tSkXN8K14dxQlikJuPt4uD9hHAHjLyLBiLR5zELelBdD9QNRAXZzsJx0WaDL9A== dependencies: function-bind "^1.1.1" has "^1.0.3" @@ -6206,11 +6260,6 @@ get-port@^5.1.1: resolved "https://registry.yarnpkg.com/get-port/-/get-port-5.1.1.tgz#0469ed07563479de6efb986baf053dcd7d4e3193" integrity sha512-g/Q1aTSDOxFpchXC4i8ZWvxA1lnPqx/JHqcpIw0/LX9T8x/GBbi6YnlN5nhaKIFkT8oFsscUKgDJYxfwfS6QsQ== -get-stream@^3.0.0: - version "3.0.0" - resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14" - integrity sha512-GlhdIUuVakc8SJ6kK0zAFbiGzRFzNnY4jUuEbV9UROo4Y+0Ny4fjvcZFVTeDA4odpFyOQzaw6hXukJSq/f28sQ== - get-stream@^4.0.0, get-stream@^4.1.0: version "4.1.0" resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5" @@ -6225,6 +6274,11 @@ get-stream@^5.1.0: dependencies: pump "^3.0.0" +get-stream@^6.0.1: + version "6.0.1" + resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-6.0.1.tgz#a262d8eef67aced57c2852ad6167526a43cbf7b7" + integrity sha512-ts6Wi+2j3jQjqi70w5AlN8DFnkSwC+MqmxEzdEALB2qXZYV3X/b1CTfgPLGJNMeAWxdPfU8FO1ms3NUfaHCPYg== + get-symbol-description@^1.0.0: version "1.0.0" resolved 
"https://registry.yarnpkg.com/get-symbol-description/-/get-symbol-description-1.0.0.tgz#7fdb81c900101fbd564dd5f1a30af5aadc1e58d6" @@ -6307,7 +6361,7 @@ glob@^5.0.15: once "^1.3.0" path-is-absolute "^1.0.0" -glob@^7.0.0, glob@^7.1.2, glob@^7.1.3, glob@^7.1.6, glob@~7.2.0: +glob@^7.0.0, glob@^7.1.2, glob@^7.1.3, glob@^7.1.6, glob@~7.2.3: version "7.2.3" resolved "https://registry.yarnpkg.com/glob/-/glob-7.2.3.tgz#b8df0fb802bbfa8e89bd1d938b4e16578ed44f2b" integrity sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q== @@ -6349,9 +6403,9 @@ globals@^11.7.0: integrity sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA== globals@^13.6.0, globals@^13.9.0: - version "13.15.0" - resolved "https://registry.yarnpkg.com/globals/-/globals-13.15.0.tgz#38113218c907d2f7e98658af246cef8b77e90bac" - integrity sha512-bpzcOlgDhMG070Av0Vy5Owklpv1I6+j96GhUI7Rh7IzDCKLzboflLrrfqMu8NquDbiR4EOQk7XzJwqVJxicxog== + version "13.17.0" + resolved "https://registry.yarnpkg.com/globals/-/globals-13.17.0.tgz#902eb1e680a41da93945adbdcb5a9f361ba69bd4" + integrity sha512-1C+6nQRb1GwGMKm2dH/E7enFAMxGTmGI7/dEdhy/DNelv85w9B72t3uc5frtMNXIbzrarJJ/lTCjcaZwbLJmyw== dependencies: type-fest "^0.20.2" @@ -6386,6 +6440,25 @@ globby@^11.0.0, globby@^11.0.3, globby@^11.1.0: merge2 "^1.4.1" slash "^3.0.0" +got@12.1.0: + version "12.1.0" + resolved "https://registry.yarnpkg.com/got/-/got-12.1.0.tgz#099f3815305c682be4fd6b0ee0726d8e4c6b0af4" + integrity sha512-hBv2ty9QN2RdbJJMK3hesmSkFTjVIHyIDDbssCKnSmq62edGgImJWD10Eb1k77TiV1bxloxqcFAVK8+9pkhOig== + dependencies: + "@sindresorhus/is" "^4.6.0" + "@szmarczak/http-timer" "^5.0.1" + "@types/cacheable-request" "^6.0.2" + "@types/responselike" "^1.0.0" + cacheable-lookup "^6.0.4" + cacheable-request "^7.0.2" + decompress-response "^6.0.0" + form-data-encoder "1.7.1" + get-stream "^6.0.1" + http2-wrapper "^2.1.10" + lowercase-keys "^3.0.0" + p-cancelable "^3.0.0" + responselike "^2.0.0" + got@9.6.0: version "9.6.0" resolved "https://registry.yarnpkg.com/got/-/got-9.6.0.tgz#edf45e7d67f99545705de1f7bbeeeb121765ed85" @@ -6403,25 +6476,22 @@ got@9.6.0: to-readable-stream "^1.0.0" url-parse-lax "^3.0.0" -got@^7.1.0: - version "7.1.0" - resolved "https://registry.yarnpkg.com/got/-/got-7.1.0.tgz#05450fd84094e6bbea56f451a43a9c289166385a" - integrity sha512-Y5WMo7xKKq1muPsxD+KmrR8DH5auG7fBdDVueZwETwV6VytKyU9OX/ddpq2/1hp1vIPvVb4T81dKQz3BivkNLw== - dependencies: - decompress-response "^3.2.0" - duplexer3 "^0.1.4" - get-stream "^3.0.0" - is-plain-obj "^1.1.0" - is-retry-allowed "^1.0.0" - is-stream "^1.0.0" - isurl "^1.0.0-alpha5" - lowercase-keys "^1.0.0" - p-cancelable "^0.3.0" - p-timeout "^1.1.1" - safe-buffer "^5.0.1" - timed-out "^4.0.0" - url-parse-lax "^1.0.0" - url-to-options "^1.0.1" +got@^11.8.5: + version "11.8.5" + resolved "https://registry.yarnpkg.com/got/-/got-11.8.5.tgz#ce77d045136de56e8f024bebb82ea349bc730046" + integrity sha512-o0Je4NvQObAuZPHLFoRSkdG2lTgtcynqymzg2Vupdx6PorhaT5MCbIyXG6d4D94kk8ZG57QeosgdiqfJWhEhlQ== + dependencies: + "@sindresorhus/is" "^4.0.0" + "@szmarczak/http-timer" "^4.0.5" + "@types/cacheable-request" "^6.0.1" + "@types/responselike" "^1.0.0" + cacheable-lookup "^5.0.3" + cacheable-request "^7.0.2" + decompress-response "^6.0.0" + http2-wrapper "^1.0.0-beta.5.2" + lowercase-keys "^2.0.0" + p-cancelable "^2.0.0" + responselike "^2.0.0" graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.5, graceful-fs@^4.1.6, graceful-fs@^4.1.9, graceful-fs@^4.2.0: version "4.2.10" @@ -6469,12 
+6539,12 @@ hard-rejection@^2.1.0: integrity sha512-VIZB+ibDhx7ObhAe7OVtoEbuP4h/MuOTHJ+J8h/eBXotJYl0fBgR72xDFCKgIh22OJZIOVNxBMWuhAr10r8HdA== hardhat-gas-reporter@^1.0.8: - version "1.0.8" - resolved "https://registry.yarnpkg.com/hardhat-gas-reporter/-/hardhat-gas-reporter-1.0.8.tgz#93ce271358cd748d9c4185dbb9d1d5525ec145e0" - integrity sha512-1G5thPnnhcwLHsFnl759f2tgElvuwdkzxlI65fC9PwxYMEe9cmjkVAAWTf3/3y8uP6ZSPiUiOW8PgZnykmZe0g== + version "1.0.9" + resolved "https://registry.yarnpkg.com/hardhat-gas-reporter/-/hardhat-gas-reporter-1.0.9.tgz#9a2afb354bc3b6346aab55b1c02ca556d0e16450" + integrity sha512-INN26G3EW43adGKBNzYWOlI3+rlLnasXTwW79YNnUhXPDa+yHESgt639dJEs37gCjhkbNKcRRJnomXEuMFBXJg== dependencies: array-uniq "1.0.3" - eth-gas-reporter "^0.2.24" + eth-gas-reporter "^0.2.25" sha1 "^1.1.1" has-ansi@^2.0.0: @@ -6516,23 +6586,11 @@ has-property-descriptors@^1.0.0: dependencies: get-intrinsic "^1.1.1" -has-symbol-support-x@^1.4.1: - version "1.4.2" - resolved "https://registry.yarnpkg.com/has-symbol-support-x/-/has-symbol-support-x-1.4.2.tgz#1409f98bc00247da45da67cee0a36f282ff26455" - integrity sha512-3ToOva++HaW+eCpgqZrCfN51IPB+7bJNVT6CUATzueB5Heb8o6Nam0V3HG5dlDvZU1Gn5QLcbahiKw/XVk5JJw== - has-symbols@^1.0.0, has-symbols@^1.0.1, has-symbols@^1.0.2, has-symbols@^1.0.3: version "1.0.3" resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.3.tgz#bb7b2c4349251dce87b125f7bdf874aa7c8b39f8" integrity sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A== -has-to-string-tag-x@^1.2.0: - version "1.4.1" - resolved "https://registry.yarnpkg.com/has-to-string-tag-x/-/has-to-string-tag-x-1.4.1.tgz#a045ab383d7b4b2012a00148ab0aa5f290044d4d" - integrity sha512-vdbKfmw+3LoOYVr+mtxHaX5a96+0f3DljYd8JOqvOLsf5mw2Otda2qCDT9qRqLAhrjyQ0h7ual5nOiASpsGNFw== - dependencies: - has-symbol-support-x "^1.4.1" - has-tostringtag@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/has-tostringtag/-/has-tostringtag-1.0.0.tgz#7e133818a7d394734f941e73c3d3f9291e658b25" @@ -6715,6 +6773,22 @@ http-signature@~1.2.0: jsprim "^1.2.2" sshpk "^1.7.0" +http2-wrapper@^1.0.0-beta.5.2: + version "1.0.3" + resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-1.0.3.tgz#b8f55e0c1f25d4ebd08b3b0c2c079f9590800b3d" + integrity sha512-V+23sDMr12Wnz7iTcDeJr3O6AIxlnvT/bmaAAAP/Xda35C90p9599p0F1eHR/N1KILWSoWVAiOMFjBBXaXSMxg== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.0.0" + +http2-wrapper@^2.1.10: + version "2.1.11" + resolved "https://registry.yarnpkg.com/http2-wrapper/-/http2-wrapper-2.1.11.tgz#d7c980c7ffb85be3859b6a96c800b2951ae257ef" + integrity sha512-aNAk5JzLturWEUiuhAN73Jcbq96R7rTitAoXV54FYMatvihnpD2+6PUgU4ce3D/m5VDbw+F5CsyKSF176ptitQ== + dependencies: + quick-lru "^5.1.1" + resolve-alpn "^1.2.0" + https-proxy-agent@^5.0.0: version "5.0.1" resolved "https://registry.yarnpkg.com/https-proxy-agent/-/https-proxy-agent-5.0.1.tgz#c59ef224a04fe8b754f3db0063a25ea30d0005d6" @@ -6944,10 +7018,10 @@ is-buffer@^2.0.5, is-buffer@~2.0.3: resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-2.0.5.tgz#ebc252e400d22ff8d77fa09888821a24a658c191" integrity sha512-i2R6zNFDwgEHJyQUtJEk0XFi1i0dPFn/oqjK3/vPCcDeJvW5NQ83V8QbicfF1SupOaB0h8ntgBC2YiE7dfyctQ== -is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.4: - version "1.2.4" - resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.4.tgz#47301d58dd0259407865547853df6d61fe471945" - integrity sha512-nsuwtxZfMX67Oryl9LCQ+upnC0Z0BgpwntpS89m1H/TLF0zNfzfLMV/9Wa/6MZsj0acpEjAO0KF1xT6ZdLl95w== 
+is-callable@^1.1.3, is-callable@^1.1.4, is-callable@^1.2.7: + version "1.2.7" + resolved "https://registry.yarnpkg.com/is-callable/-/is-callable-1.2.7.tgz#3bc2a85ea742d9e36205dcacdd72ca1fdc51b055" + integrity sha512-1BC0BVFhS/p0qtw6enp8e+8OD0UrK0oFLztSjNzhcKA3WDuJxxAPXzPuPtKkjEY9UUoEWlX/8fgKeu2S8i9JTA== is-ci@^2.0.0: version "2.0.0" @@ -6963,10 +7037,10 @@ is-ci@^3.0.1: dependencies: ci-info "^3.2.0" -is-core-module@^2.6.0, is-core-module@^2.8.1: - version "2.9.0" - resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.9.0.tgz#e1c34429cd51c6dd9e09e0799e396e27b19a9c69" - integrity sha512-+5FPy5PnwmO3lvfMb0AsoPaBG+5KHUI0wYFXOtYPnVVVspTFUuMZNfNaNVRt3FZadstu2c8x23vykRW/NBoU6A== +is-core-module@^2.6.0, is-core-module@^2.9.0: + version "2.11.0" + resolved "https://registry.yarnpkg.com/is-core-module/-/is-core-module-2.11.0.tgz#ad4cb3e3863e814523c96f3f58d26cc570ff0144" + integrity sha512-RRjxlvLDkD1YJwDbroBHMb+cukurkDWNyHx7D3oNB5x9rb5ogcksMC5wHCadcXoo67gVr/+3GFySh3134zi6rw== dependencies: has "^1.0.3" @@ -7118,11 +7192,6 @@ is-number@^7.0.0: resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b" integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng== -is-object@^1.0.1: - version "1.0.2" - resolved "https://registry.yarnpkg.com/is-object/-/is-object-1.0.2.tgz#a56552e1c665c9e950b4a025461da87e72f86fcf" - integrity sha512-2rRIahhZr2UWb45fIOuvZGpFtz0TyOZLf32KxBbSoUCeZR495zCKlWUKKUByk3geS2eAs7ZAABt0Y/Rx0GiQGA== - is-plain-obj@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-plain-obj/-/is-plain-obj-1.1.0.tgz#71a50c8429dfca773c92a390a4a03b39fcd51d3e" @@ -7148,11 +7217,6 @@ is-regex@^1.0.4, is-regex@^1.1.4, is-regex@~1.1.4: call-bind "^1.0.2" has-tostringtag "^1.0.0" -is-retry-allowed@^1.0.0: - version "1.2.0" - resolved "https://registry.yarnpkg.com/is-retry-allowed/-/is-retry-allowed-1.2.0.tgz#d778488bd0a4666a3be8a1482b9f2baafedea8b4" - integrity sha512-RUbUeKwvm3XG2VYamhJL1xFktgjvPzL0Hq8C+6yrWIswDy3BIXGqCxhxkc30N9jqK311gVU137K8Ei55/zVJRg== - is-shared-array-buffer@^1.0.2: version "1.0.2" resolved "https://registry.yarnpkg.com/is-shared-array-buffer/-/is-shared-array-buffer-1.0.2.tgz#8f259c573b60b6a32d4058a1a07430c0a7344c79" @@ -7160,7 +7224,7 @@ is-shared-array-buffer@^1.0.2: dependencies: call-bind "^1.0.2" -is-stream@^1.0.0, is-stream@^1.0.1, is-stream@^1.1.0: +is-stream@^1.0.1, is-stream@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/is-stream/-/is-stream-1.1.0.tgz#12d4a3dd4e68e0b79ceb8dbc84173ae80d91ca44" integrity sha512-uQPm8kcs47jx38atAcWTVxyltQYoPT68y9aWYdV6yWXSyW8mzSat0TL6CiWdZeCdF3KrAvpVtnHbTv4RN+rqdQ== @@ -7270,19 +7334,19 @@ isobject@^3.0.0, isobject@^3.0.1: resolved "https://registry.yarnpkg.com/isobject/-/isobject-3.0.1.tgz#4e431e92b11a9731636aa1f9c8d1ccbcfdab78df" integrity sha512-WhB9zCku7EGTj/HQQRz5aUQEUeoQZH2bWcltRErOpymJ4boYE6wL9Tbr23krRPSZ+C5zqNSrSw+Cc7sZZ4b7vg== +isomorphic-fetch@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/isomorphic-fetch/-/isomorphic-fetch-3.0.0.tgz#0267b005049046d2421207215d45d6a262b8b8b4" + integrity sha512-qvUtwJ3j6qwsF3jLxkZ72qCgjMysPzDfeV240JHiGZsANBYd+EEuu35v7dfrJ9Up0Ak07D7GGSkGhCHTqg/5wA== + dependencies: + node-fetch "^2.6.1" + whatwg-fetch "^3.4.1" + isstream@~0.1.2: version "0.1.2" resolved "https://registry.yarnpkg.com/isstream/-/isstream-0.1.2.tgz#47e63f7af55afa6f92e1500e690eb8b8529c099a" integrity 
sha512-Yljz7ffyPbrLpLngrMtZ7NduUgVvi6wG9RJ9IUcyCd59YQ911PBJphODUcbOVbqYfxe1wuYf/LJ8PauMRwsM/g== -isurl@^1.0.0-alpha5: - version "1.0.0" - resolved "https://registry.yarnpkg.com/isurl/-/isurl-1.0.0.tgz#b27f4f49f3cdaa3ea44a0a5b7f3462e6edc39d67" - integrity sha512-1P/yWsxPlDtn7QeRD+ULKQPaIaN6yF368GZ2vDfv0AL0NwpStafjWCDDdn0k8wgFMWpVAqG7oJhxHnlud42i9w== - dependencies: - has-to-string-tag-x "^1.2.0" - is-object "^1.0.1" - jest-changed-files@^24.9.0: version "24.9.0" resolved "https://registry.yarnpkg.com/jest-changed-files/-/jest-changed-files-24.9.0.tgz#08d8c15eb79a7fa3fc98269bc14b451ee82f8039" @@ -7360,6 +7424,11 @@ json-buffer@3.0.0: resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.0.tgz#5b1f397afc75d677bde8bcfc0e47e1f9a3d9a898" integrity sha512-CuUqjv0FUZIdXkHPI8MezCnFCdaTAacej1TZYulLoAg1h/PhwkdXFN4V/gzY4g+fMBCOV2xF+rp7t2XD2ns/NQ== +json-buffer@3.0.1: + version "3.0.1" + resolved "https://registry.yarnpkg.com/json-buffer/-/json-buffer-3.0.1.tgz#9338802a30d3b6605fbe0613e094008ca8c05a13" + integrity sha512-4bV5BfR2mqfQTJm+V5tPPdf+ZpuhiIvTuAB5g8kcrXOZpTT/QwwVRWBywX1ozr6lEuPdbHxwaJlm9G6mI2sfSQ== + json-parse-better-errors@^1.0.1: version "1.0.2" resolved "https://registry.yarnpkg.com/json-parse-better-errors/-/json-parse-better-errors-1.0.2.tgz#bb867cfb3450e69107c131d1c514bab3dc8bcaa9" @@ -7467,9 +7536,9 @@ jsonfile@^6.0.1: graceful-fs "^4.1.6" jsonify@~0.0.0: - version "0.0.0" - resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.0.tgz#2c74b6ee41d93ca51b7b5aaee8f503631d252a73" - integrity sha512-trvBk1ki43VZptdBI5rIlG4YOzyeH/WefQt5rj1grasPn4iiZWKet8nkgc4GlsAylaztn0qZfUYOiTsASJFdNA== + version "0.0.1" + resolved "https://registry.yarnpkg.com/jsonify/-/jsonify-0.0.1.tgz#2aa3111dae3d34a0f151c63f3a45d995d9420978" + integrity sha512-2/Ki0GcmuqSrgFyelQq9M05y7PS0mEwuIzrf3f1fPqkVDVRvZrPZtVSMHxdgo8Aq0sxAOb/cr2aqqA3LeWHVPg== jsonparse@^1.2.0: version "1.3.1" @@ -7513,6 +7582,13 @@ keccak@^3.0.0, keccak@^3.0.2: node-gyp-build "^4.2.0" readable-stream "^3.6.0" +keyv@*, keyv@^4.0.0: + version "4.5.0" + resolved "https://registry.yarnpkg.com/keyv/-/keyv-4.5.0.tgz#dbce9ade79610b6e641a9a65f2f6499ba06b9bc6" + integrity sha512-2YvuMsA+jnFGtBareKqgANOEKe1mk3HKiXu2fRmAfyxG0MJAywNhi5ttWA3PMjl4NmpyjZNbFifR2vNjW1znfA== + dependencies: + json-buffer "3.0.1" + keyv@^3.0.0: version "3.1.0" resolved "https://registry.yarnpkg.com/keyv/-/keyv-3.1.0.tgz#ecc228486f69991e49e9476485a5be1e8fc5c4d9" @@ -7559,9 +7635,9 @@ klaw@^1.0.0: graceful-fs "^4.1.9" kleur@^4.1.4: - version "4.1.4" - resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.4.tgz#8c202987d7e577766d039a8cd461934c01cda04d" - integrity sha512-8QADVssbrFjivHWQU7KkMgptGTl6WAcSdlbBPY4uNF+mWr6DGcKrvY2w4FQJoXch7+fKMjj0dRrL75vk3k23OA== + version "4.1.5" + resolved "https://registry.yarnpkg.com/kleur/-/kleur-4.1.5.tgz#95106101795f7050c6c650f350c683febddb1780" + integrity sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ== lcid@^1.0.0: version "1.0.0" @@ -7835,6 +7911,11 @@ lodash.camelcase@^4.3.0: resolved "https://registry.yarnpkg.com/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz#b28aa6288a2b9fc651035c7711f65ab6190331a6" integrity sha512-TwuEnCnxbc3rAvhf/LbG7tJUDzhqXyFnv3dtzLOPgCG/hODL7WFnsbwktkD7yUV0RrreP/l1PALq/YSg6VvjlA== +lodash.deburr@^4.1.0: + version "4.1.0" + resolved "https://registry.yarnpkg.com/lodash.deburr/-/lodash.deburr-4.1.0.tgz#ddb1bbb3ef07458c0177ba07de14422cb033ff9b" + integrity sha512-m/M1U1f3ddMCs6Hq2tAsYThTBDaAKFDX3dwDo97GEYzamXi9SqUpjWi/Rrj/gf3X2n8ktwgZrlP1z6E3v/IExQ== + 
lodash.get@^4.4.2: version "4.4.2" resolved "https://registry.yarnpkg.com/lodash.get/-/lodash.get-4.4.2.tgz#2d177f652fa31e939b4438d5341499dfa3825e99" @@ -7845,11 +7926,21 @@ lodash.merge@^4.6.2: resolved "https://registry.yarnpkg.com/lodash.merge/-/lodash.merge-4.6.2.tgz#558aa53b43b661e1925a0afdfa36a9a1085fe57a" integrity sha512-0KpjqXRVvrYyCsX1swR/XTK0va6VQkQM6MNo7PqW77ByjAhoARA8EfrP1N4+KlKj8YS0ZUCtRT/YUuhyYDujIQ== +lodash.snakecase@^4.1.1: + version "4.1.1" + resolved "https://registry.yarnpkg.com/lodash.snakecase/-/lodash.snakecase-4.1.1.tgz#39d714a35357147837aefd64b5dcbb16becd8f8d" + integrity sha512-QZ1d4xoBHYUeuouhEq3lk3Uq7ldgyFXGBhg04+oRLnIz8o9T65Eh+8YdroUwn846zchkA9yDsDl5CVVaV2nqYw== + lodash.startcase@^4.4.0: version "4.4.0" resolved "https://registry.yarnpkg.com/lodash.startcase/-/lodash.startcase-4.4.0.tgz#9436e34ed26093ed7ffae1936144350915d9add8" integrity sha512-+WKqsK294HMSc2jEbNgpHpd0JfIBhp7rEV4aqXWqFr6AlXov+SlcgB1Fv01y2kGe3Gc8nMW7VA0SrGuSkRfIEg== +lodash.trim@^4.5.1: + version "4.5.1" + resolved "https://registry.yarnpkg.com/lodash.trim/-/lodash.trim-4.5.1.tgz#36425e7ee90be4aa5e27bcebb85b7d11ea47aa57" + integrity sha512-nJAlRl/K+eiOehWKDzoBVrSMhK0K3A3YQsUNXHQa5yIrKBAhsZgSu3KoAFoFT+mEgiyBHddZ0pRk1ITpIp90Wg== + lodash.truncate@^4.4.2: version "4.4.2" resolved "https://registry.yarnpkg.com/lodash.truncate/-/lodash.truncate-4.4.2.tgz#5a350da0b1113b837ecfffd5812cbe58d6eae193" @@ -7926,6 +8017,11 @@ lowercase-keys@^2.0.0: resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-2.0.0.tgz#2603e78b7b4b0006cbca2fbcc8a3202558ac9479" integrity sha512-tqNXrS78oMOE73NMxK4EMLQsQowWf8jKooH9g7xPavRT706R6bkQJ6DY2Te7QukaZsulxa30wQ7bk0pm4XiHmA== +lowercase-keys@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/lowercase-keys/-/lowercase-keys-3.0.0.tgz#c5e7d442e37ead247ae9db117a9d0a467c89d4f2" + integrity sha512-ozCC6gdQ+glXOQsveKD0YsDy8DSQFjDTz4zyzEHNV5+JP5D62LmfDZ6o1cycFx9ouG940M5dE8C8CTewdj2YWQ== + lru-cache@5.1.1, lru-cache@^5.1.1: version "5.1.1" resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-5.1.1.tgz#1da27e6710271947695daf6848e847f01d84b920" @@ -8201,6 +8297,11 @@ mimic-response@^1.0.0, mimic-response@^1.0.1: resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-1.0.1.tgz#4923538878eef42063cb8a3e3b0798781487ab1b" integrity sha512-j5EctnkH7amfV/q5Hgmoal1g2QHFJRraOtmx0JpIqkxhBhI/lJSl1nMpQ45hVarwNETOoWEimndZ4QK0RHxuxQ== +mimic-response@^3.1.0: + version "3.1.0" + resolved "https://registry.yarnpkg.com/mimic-response/-/mimic-response-3.1.0.tgz#2d1d59af9c1b129815accc2c46a022a5ce1fa3c9" + integrity sha512-z0yWI+4FDrrweS8Zmt4Ej5HdJmky15+L2e6Wgn3+iK5fWzb6T3fhNFq2+MeTRb064c6Wr4N/wv0DzQTjNzHNGQ== + min-document@^2.19.0: version "2.19.0" resolved "https://registry.yarnpkg.com/min-document/-/min-document-2.19.0.tgz#7bd282e3f5842ed295bb748cdd9f1ffa2c824685" @@ -8261,9 +8362,9 @@ minimist-options@^4.0.2: kind-of "^6.0.3" minimist@^1.2.0, minimist@^1.2.5, minimist@^1.2.6, minimist@~1.2.6: - version "1.2.6" - resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.6.tgz#8637a5b759ea0d6e98702cfb3a9283323c93af44" - integrity sha512-Jsjnk4bw3YJqYzbdyBiNsPWHPfO++UGG749Cxs6peCu5Xg4nrena6OVxOYxrQTqww0Jmwt+Ref8rggumkTLz9Q== + version "1.2.7" + resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.7.tgz#daa1c4d91f507390437c6a8bc01078e7000c4d18" + integrity sha512-bzfL1YUZsP41gmu/qjrEk0Q6i2ix/cVeAhbCbqH9u3zYutS1cLg00qhrD0M2MVdCcx4Sc0UpP2eBWo9rotpq6g== minipass@^2.6.0, minipass@^2.9.0: version "2.9.0" @@ -8357,11 +8458,10 @@ 
mocha@7.1.2: yargs-unparser "1.6.0" mocha@^10.0.0: - version "10.0.0" - resolved "https://registry.yarnpkg.com/mocha/-/mocha-10.0.0.tgz#205447d8993ec755335c4b13deba3d3a13c4def9" - integrity sha512-0Wl+elVUD43Y0BqPZBzZt8Tnkw9CMUdNYnUsTfOM1vuhJVZL+kiesFYsqwBkEEuEixaiPe5ZQdqDgX2jddhmoA== + version "10.1.0" + resolved "https://registry.yarnpkg.com/mocha/-/mocha-10.1.0.tgz#dbf1114b7c3f9d0ca5de3133906aea3dfc89ef7a" + integrity sha512-vUF7IYxEoN7XhQpFLxQAEMtE4W91acW4B6En9l97MwE9stL1A9gusXfoHZCLVHDUJ/7V5+lbCM6yMqzo5vNymg== dependencies: - "@ungap/promise-all-settled" "1.1.2" ansi-colors "4.1.1" browser-stdout "1.3.1" chokidar "3.5.3" @@ -8638,7 +8738,7 @@ node-environment-flags@1.0.6: object.getownpropertydescriptors "^2.0.3" semver "^5.7.0" -node-fetch@^2.6.1, node-fetch@^2.6.7: +node-fetch@2.6.7, node-fetch@^2.6.1, node-fetch@^2.6.7: version "2.6.7" resolved "https://registry.yarnpkg.com/node-fetch/-/node-fetch-2.6.7.tgz#24de9fba827e3b4ae44dc8b20256a379160052ad" integrity sha512-ZjMPFEfVx5j+y2yF35Kzx5sF7kDzxuDj6ziH4FFbOp87zKDZNx8yExJIb05OGF4Nlt9IHFIMBkRl41VdvcNdbQ== @@ -8654,9 +8754,9 @@ node-fetch@~1.7.1: is-stream "^1.0.1" node-gyp-build@^4.2.0, node-gyp-build@^4.3.0: - version "4.4.0" - resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.4.0.tgz#42e99687ce87ddeaf3a10b99dc06abc11021f3f4" - integrity sha512-amJnQCcgtRVw9SvoebO3BKGESClrfXGCUTX9hSn1OuGQTQBOZmVd0Z0OlecpuRksKvbsUqALE8jls/ErClAPuQ== + version "4.5.0" + resolved "https://registry.yarnpkg.com/node-gyp-build/-/node-gyp-build-4.5.0.tgz#7a64eefa0b21112f89f58379da128ac177f20e40" + integrity sha512-2iGbaQBV+ITgCz76ZEjmhUKAKVf7xfY1sRl4UiKQspfZMH2h06SyhNsnSVy50cwkFQDGLyif6m/6uFXHkOZ6rg== nofilter@^1.0.4: version "1.0.4" @@ -8690,6 +8790,11 @@ normalize-url@^4.1.0: resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-4.5.1.tgz#0dd90cf1288ee1d1313b87081c9a5932ee48518a" integrity sha512-9UZCFRHQdNrfTpGg8+1INIg93B6zE0aXMVFkw1WFwvO4SlZywU6aLg5Of0Ap/PgcbSw4LNxvMWXMeugwMCX0AA== +normalize-url@^6.0.1: + version "6.1.0" + resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-6.1.0.tgz#40d0885b535deffe3f3147bec877d05fe4c5668a" + integrity sha512-DlL+XwOy3NxAQ8xuC0okPgK46iuVNAK01YN7RueYBqqFeGsBjV9XmCAzAdgt+667bCl5kPh9EqKKDwnaPG1I7A== + npm-run-path@^2.0.0: version "2.0.2" resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f" @@ -8736,7 +8841,7 @@ object-copy@^0.1.0: define-property "^0.2.5" kind-of "^3.0.3" -object-inspect@^1.12.0, object-inspect@^1.9.0, object-inspect@~1.12.0: +object-inspect@^1.12.2, object-inspect@^1.9.0, object-inspect@~1.12.2: version "1.12.2" resolved "https://registry.yarnpkg.com/object-inspect/-/object-inspect-1.12.2.tgz#c0641f26394532f28ab8d796ab954e43c009a8ea" integrity sha512-z+cPxW0QGUp0mcqcsgQyLVRDoXFQbXOwBaqyF7VIgI4TWNQsDHrBpUQslRmIfAoYWdYzs6UlKJtB2XJpTaNSpQ== @@ -8776,14 +8881,14 @@ object.assign@4.1.0: has-symbols "^1.0.0" object-keys "^1.0.11" -object.assign@^4.1.2: - version "4.1.2" - resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.2.tgz#0ed54a342eceb37b38ff76eb831a0e788cb63940" - integrity sha512-ixT2L5THXsApyiUPYKmW+2EHpXXe5Ii3M+f4e+aJFAHao5amFRW6J0OO6c/LU8Be47utCx2GL89hxGB6XSmKuQ== +object.assign@^4.1.4: + version "4.1.4" + resolved "https://registry.yarnpkg.com/object.assign/-/object.assign-4.1.4.tgz#9673c7c7c351ab8c4d0b516f4343ebf4dfb7799f" + integrity sha512-1mxKf0e58bvyjSCtKYY4sRe9itRk3PJpquJOjeIkz885CczcI4IvJJDLPS72oowuSh+pBxUFROpX+TU++hxhZQ== dependencies: - call-bind 
"^1.0.0" - define-properties "^1.1.3" - has-symbols "^1.0.1" + call-bind "^1.0.2" + define-properties "^1.1.4" + has-symbols "^1.0.3" object-keys "^1.1.1" object.getownpropertydescriptors@^2.0.3, object.getownpropertydescriptors@^2.1.1: @@ -8920,16 +9025,21 @@ outdent@^0.5.0: resolved "https://registry.yarnpkg.com/outdent/-/outdent-0.5.0.tgz#9e10982fdc41492bb473ad13840d22f9655be2ff" integrity sha512-/jHxFIzoMXdqPzTaCpFzAAWhpkSjZPF4Vsn6jAfNpmbH/ymsmd7Qc6VE9BGn0L6YMj6uwpQLxCECpus4ukKS9Q== -p-cancelable@^0.3.0: - version "0.3.0" - resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-0.3.0.tgz#b9e123800bcebb7ac13a479be195b507b98d30fa" - integrity sha512-RVbZPLso8+jFeq1MfNvgXtCRED2raz/dKpacfTNxsx6pLEpEomM7gah6VeHSYV3+vo0OAi4MkArtQcWWXuQoyw== - p-cancelable@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-1.1.0.tgz#d078d15a3af409220c886f1d9a0ca2e441ab26cc" integrity sha512-s73XxOZ4zpt1edZYZzvhqFa6uvQc1vwUa0K0BdtIZgQMAJj9IbebH+JkgKZc9h+B05PKHLOTl4ajG1BmNrVZlw== +p-cancelable@^2.0.0: + version "2.1.1" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-2.1.1.tgz#aab7fbd416582fa32a3db49859c122487c5ed2cf" + integrity sha512-BZOr3nRQHOntUjTrH8+Lh54smKHoHyur8We1V8DSMVrl5A2malOOwuJRnKRDjSnkoeBh4at6BwEnb5I7Jl31wg== + +p-cancelable@^3.0.0: + version "3.0.0" + resolved "https://registry.yarnpkg.com/p-cancelable/-/p-cancelable-3.0.0.tgz#63826694b54d61ca1c20ebcb6d3ecf5e14cd8050" + integrity sha512-mlVgR3PGuzlo0MmTdk4cXqXWlwQDLnONTAg6sm62XkMJEiRxN3GL3SffkYvqwonbkJBcrI7Uvv5Zh9yjvn2iUw== + p-defer@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c" @@ -9013,13 +9123,6 @@ p-map@^4.0.0: dependencies: aggregate-error "^3.0.0" -p-timeout@^1.1.1: - version "1.2.1" - resolved "https://registry.yarnpkg.com/p-timeout/-/p-timeout-1.2.1.tgz#5eb3b353b7fce99f101a1038880bb054ebbea386" - integrity sha512-gb0ryzr+K2qFqFv6qi3khoeqMZF/+ajxQipEF6NteZVnvz9tzdsfAVj3lYtn1gAXvH5lfLwfxEII799gt/mRIA== - dependencies: - p-finally "^1.0.0" - p-try@^1.0.0: version "1.0.0" resolved "https://registry.yarnpkg.com/p-try/-/p-try-1.0.0.tgz#cbc79cdbaf8fd4228e13f621f2b1a237c1b207b3" @@ -9109,11 +9212,11 @@ parse5-htmlparser2-tree-adapter@^7.0.0: parse5 "^7.0.0" parse5@^7.0.0: - version "7.0.0" - resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.0.0.tgz#51f74a5257f5fcc536389e8c2d0b3802e1bfa91a" - integrity sha512-y/t8IXSPWTuRZqXc0ajH/UwDj4mnqLEbSttNbThcFhGrZuOyoyvNBO85PBp2jQa55wY9d07PBNjsK8ZP3K5U6g== + version "7.1.1" + resolved "https://registry.yarnpkg.com/parse5/-/parse5-7.1.1.tgz#4649f940ccfb95d8754f37f73078ea20afe0c746" + integrity sha512-kwpuwzB+px5WUg9pyK0IcK/shltJN5/OVhQagxhCQNtT9Y9QRZqNY2e1cmbu/paRh5LMnz/oVTVLBpjFmMZhSg== dependencies: - entities "^4.3.0" + entities "^4.4.0" parseurl@~1.3.3: version "1.3.3" @@ -9359,11 +9462,6 @@ prelude-ls@~1.1.2: resolved "https://registry.yarnpkg.com/prelude-ls/-/prelude-ls-1.1.2.tgz#21932a549f5e52ffd9a827f570e04be62a97da54" integrity sha512-ESF23V4SKG6lVSGZgYNpbsiaAkdab6ZgOxe52p7+Kid3W3u3bxR4Vfd/o21dmN7jSt0IwgZ4v5MUd26FEtXE9w== -prepend-http@^1.0.1: - version "1.0.4" - resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc" - integrity sha512-PhmXi5XmoyKw1Un4E+opM2KcsJInDvKyuOumcjjw3waw86ZNjHwVUOOWLc4bCzLdcKNaWBH9e99sbWzDQsVaYg== - prepend-http@^2.0.0: version "2.0.0" resolved 
"https://registry.yarnpkg.com/prepend-http/-/prepend-http-2.0.0.tgz#e92434bfa5ea8c19f41cdfd401d741a3c819d897" @@ -9381,15 +9479,15 @@ prettier@2.4.1: resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.4.1.tgz#671e11c89c14a4cfc876ce564106c4a6726c9f5c" integrity sha512-9fbDAXSBcc6Bs1mZrDYb3XKzDLm4EXXL9sC1LqKP5rZkT6KRr/rf9amVUcODVXgguK/isJz0d0hP72WeaKWsvA== -prettier@^1.14.3, prettier@^1.19.1: +prettier@^1.14.3: version "1.19.1" resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.19.1.tgz#f7d7f5ff8a9cd872a7be4ca142095956a60797cb" integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew== -prettier@^2.1.2, prettier@^2.3.1: - version "2.7.0" - resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.0.tgz#a4fdae07e5596c51c9857ea676cd41a0163879d6" - integrity sha512-nwoX4GMFgxoPC6diHvSwmK/4yU8FFH3V8XWtLQrbj4IBsK2pkYhG4kf/ljF/haaZ/aii+wNJqISrCDPgxGWDVQ== +prettier@^2.1.2, prettier@^2.3.1, prettier@^2.7.1: + version "2.7.1" + resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.7.1.tgz#e235806850d057f97bb08368a4f7d899f7760c64" + integrity sha512-ujppO+MkdPqoVINuDFDRLClm7D78qbDt0/NR+wp5FqEZOoTNAjPHWj17QRhu7geIHJfcNhRk1XVQmF8Bp3ye+g== pretty-ms@^0.2.1: version "0.2.2" @@ -9427,9 +9525,9 @@ promise-to-callback@^1.0.0: set-immediate-shim "^1.0.1" promise@^8.0.0: - version "8.1.0" - resolved "https://registry.yarnpkg.com/promise/-/promise-8.1.0.tgz#697c25c3dfe7435dd79fcd58c38a135888eaf05e" - integrity sha512-W04AqnILOL/sPRXziNicCjSNRruLAuIHEOVBazepu0545DDNGYHz7ar9ZgZ1fMU8/MA4mVxp5rkBWRi6OXIy3Q== + version "8.2.0" + resolved "https://registry.yarnpkg.com/promise/-/promise-8.2.0.tgz#a1f6280ab67457fbfc8aad2b198c9497e9e5c806" + integrity sha512-+CMAlLHqwRYwBMXKCP+o8ns7DN+xHDUiI+0nArsiJ9y+kJVPLFxEaSw6Ha9s9H0tftxg2Yzl25wqj9G7m5wLZg== dependencies: asap "~2.0.6" @@ -9461,9 +9559,9 @@ pseudomap@^1.0.1, pseudomap@^1.0.2: integrity sha512-b/YwNhb8lk1Zz2+bXXpS/LK9OisiZZ1SNsSLxN1x2OXVEhW2Ckr/7mWE5vrC1ZTiJlD9g19jWszTmJsB+oEpFQ== psl@^1.1.28: - version "1.8.0" - resolved "https://registry.yarnpkg.com/psl/-/psl-1.8.0.tgz#9326f8bcfb013adcc005fdff056acce020e51c24" - integrity sha512-RIdOzyoavK+hA18OGGWDqUTsCLhtA7IcZ/6NCs4fFJaHBDab+pDDmDIByWFRQJq2Cd7r1OoQxBGKOaztq+hjIQ== + version "1.9.0" + resolved "https://registry.yarnpkg.com/psl/-/psl-1.9.0.tgz#d0df2a137f00794565fcaf3b2c00cd09f8d5a5a7" + integrity sha512-E/ZsdU4HLs/68gYzgGTkMicWTLPdAftJLfJFlLUAAKZGkStNU72sZjT66SnMDVOfOWY/YAoiD7Jxa9iHvngcag== public-encrypt@^4.0.0: version "4.0.3" @@ -9557,21 +9655,14 @@ punycode@^2.1.0, punycode@^2.1.1: integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A== pure-rand@^5.0.1: - version "5.0.1" - resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-5.0.1.tgz#97a287b4b4960b2a3448c0932bf28f2405cac51d" - integrity sha512-ksWccjmXOHU2gJBnH0cK1lSYdvSZ0zLoCMSz/nTGh6hDvCSgcRxDyIcOBD6KNxFz3xhMPm/T267Tbe2JRymKEQ== - -qs@6.10.3: - version "6.10.3" - resolved "https://registry.yarnpkg.com/qs/-/qs-6.10.3.tgz#d6cde1b2ffca87b5aa57889816c5f81535e22e8e" - integrity sha512-wr7M2E0OFRfIfJZjKGieI8lBKb7fRCH4Fv5KNPEs7gJ8jadvotdsS08PzOKR7opXhZ/Xkjtt3WF9g38drmyRqQ== - dependencies: - side-channel "^1.0.4" + version "5.0.3" + resolved "https://registry.yarnpkg.com/pure-rand/-/pure-rand-5.0.3.tgz#a2f15dfbc3be8433d1d8ed67ee411aa83fb90406" + integrity sha512-9N8x1h8dptBQpHyC7aZMS+iNOAm97WMGY0AFrguU1cpfW3I5jINkWe5BIY5md0ofy+1TCIELsVcm/GJXZSaPbw== -qs@^6.4.0, qs@^6.7.0: - version "6.10.5" - resolved 
"https://registry.yarnpkg.com/qs/-/qs-6.10.5.tgz#974715920a80ff6a262264acd2c7e6c2a53282b4" - integrity sha512-O5RlPh0VFtR78y79rgcgKK4wbAI0C5zGVLztOIdpWX6ep368q5Hv6XRxDvXuZ9q3C6v+e3n8UfZZJw7IIG27eQ== +qs@6.11.0, qs@^6.4.0, qs@^6.7.0: + version "6.11.0" + resolved "https://registry.yarnpkg.com/qs/-/qs-6.11.0.tgz#fd0d963446f7a65e1367e01abd85429453f0c37a" + integrity sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q== dependencies: side-channel "^1.0.4" @@ -9604,6 +9695,11 @@ quick-lru@^4.0.1: resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-4.0.1.tgz#5b8878f113a58217848c6482026c73e1ba57727f" integrity sha512-ARhCpm70fzdcvNQfPoy49IaanKkTlRWF2JMzqhcJbhSFRZv7nPTvZJdcY7301IPmvW+/p0RgIWnQDLJxifsQ7g== +quick-lru@^5.1.1: + version "5.1.1" + resolved "https://registry.yarnpkg.com/quick-lru/-/quick-lru-5.1.1.tgz#366493e6b3e42a3a6885e2e99d18f80fb7a8c932" + integrity sha512-WuyALRjWPDGtt/wzJiadO5AXY+8hZ80hVpe6MyivgraREW751X3SbhRvG3eLKOYN+8VEvqLcf3wdnt44Z4S4SA== + randombytes@^2.0.0, randombytes@^2.0.1, randombytes@^2.0.5, randombytes@^2.0.6, randombytes@^2.1.0: version "2.1.0" resolved "https://registry.yarnpkg.com/randombytes/-/randombytes-2.1.0.tgz#df6f84372f0270dc65cdf6291349ab7a473d4f2a" @@ -9791,9 +9887,9 @@ regenerator-runtime@^0.11.0: integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg== regenerator-runtime@^0.13.4: - version "0.13.9" - resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.9.tgz#8925742a98ffd90814988d7566ad30ca3b263b52" - integrity sha512-p3VT+cOEgxFsRRA9X4lkI1E+k2/CtnKtU4gcxyaCUreilL/vqI6CdZ3wxVUx3UOUg+gnUOQQcRI7BmSI656MYA== + version "0.13.10" + resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.10.tgz#ed07b19616bcbec5da6274ebc75ae95634bfc2ee" + integrity sha512-KepLsg4dU12hryUO7bp/axHAKvwGOCV0sGloQtpagJ12ai+ojVDqkeGSiRX1zlq+kjIMZ1t7gpze+26QqtdGqw== regenerator-transform@^0.10.0: version "0.10.1" @@ -9950,6 +10046,11 @@ require-main-filename@^2.0.0: resolved "https://registry.yarnpkg.com/require-main-filename/-/require-main-filename-2.0.0.tgz#d0b329ecc7cc0f61649f62215be69af54aa8989b" integrity sha512-NKN5kMDylKuldxYLSUfrbo5Tuzh4hd+2E8NPPX02mZtn1VuREQToYe/ZdlJy+J3uCpfaiGF05e7B8W0iXbQHmg== +resolve-alpn@^1.0.0, resolve-alpn@^1.2.0: + version "1.2.1" + resolved "https://registry.yarnpkg.com/resolve-alpn/-/resolve-alpn-1.2.1.tgz#b7adbdac3546aaaec20b45e7d8265927072726f9" + integrity sha512-0a1F4l73/ZFZOakJnQ3FvkJ2+gSTQWz/r2KE5OdDY0TxPm5h4GkqkWWfM47T7HsbnOtcJVEF4epCVy6u7Q3K+g== + resolve-from@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/resolve-from/-/resolve-from-3.0.0.tgz#b22c7af7d9d6881bc8b6e653335eebcb0a188748" @@ -9982,12 +10083,12 @@ resolve@1.17.0: dependencies: path-parse "^1.0.6" -resolve@^1.1.6, resolve@^1.10.0, resolve@^1.20.0, resolve@^1.8.1, resolve@~1.22.0: - version "1.22.0" - resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.0.tgz#5e0b8c67c15df57a89bdbabe603a002f21731198" - integrity sha512-Hhtrw0nLeSrFQ7phPp4OOcVjLPIeMnRlr5mcnVuMe7M/7eBn98A3hmFRLoFo3DLZkivSYwhRUJTyPyWAk56WLw== +resolve@^1.1.6, resolve@^1.10.0, resolve@^1.20.0, resolve@^1.8.1, resolve@~1.22.1: + version "1.22.1" + resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.22.1.tgz#27cb2ebb53f91abb49470a928bba7558066ac177" + integrity sha512-nBpuuYuY5jFsli/JIs1oldw6fOQCBioohqWZg/2hiaOybXOft4lonv85uDOKXdf8rhyK159cxU5cDcK/NKk8zw== dependencies: - is-core-module "^2.8.1" + is-core-module "^2.9.0" 
path-parse "^1.0.7" supports-preserve-symlinks-flag "^1.0.0" @@ -9998,6 +10099,13 @@ responselike@^1.0.2: dependencies: lowercase-keys "^1.0.0" +responselike@^2.0.0: + version "2.0.1" + resolved "https://registry.yarnpkg.com/responselike/-/responselike-2.0.1.tgz#9a0bc8fdc252f3fb1cca68b016591059ba1422bc" + integrity sha512-4gl03wn3hj1HP3yzgdI7d3lCkF95F21Pz4BPGvKHinyQzALR5CapwC8yIi0Rh58DEMQ/SguC03wFj2k0M/mHhw== + dependencies: + lowercase-keys "^2.0.0" + restore-cursor@^2.0.0: version "2.0.0" resolved "https://registry.yarnpkg.com/restore-cursor/-/restore-cursor-2.0.0.tgz#9f7ee287f82fd326d4fd162923d62129eee0dfaf" @@ -10112,6 +10220,15 @@ safe-event-emitter@^1.0.1: dependencies: events "^3.0.0" +safe-regex-test@^1.0.0: + version "1.0.0" + resolved "https://registry.yarnpkg.com/safe-regex-test/-/safe-regex-test-1.0.0.tgz#793b874d524eb3640d1873aad03596db2d4f2295" + integrity sha512-JBUUzyOgEwXQY1NuPtvcj/qcBDbDmEvWufhlnXZIm75DEHp+afM1r1ujJpJsV/gSM4t59tpDyPi1sd6ZaPFfsA== + dependencies: + call-bind "^1.0.2" + get-intrinsic "^1.1.3" + is-regex "^1.1.4" + safe-regex@^1.1.0: version "1.1.0" resolved "https://registry.yarnpkg.com/safe-regex/-/safe-regex-1.1.0.tgz#40a3669f3b077d1e943d44629e157dd48023bf2e" @@ -10185,15 +10302,22 @@ semaphore@>=1.0.1, semaphore@^1.0.3, semaphore@^1.1.0: resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7" integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ== +semver@7.3.7: + version "7.3.7" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" + integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + dependencies: + lru-cache "^6.0.0" + semver@^6.3.0: version "6.3.0" resolved "https://registry.yarnpkg.com/semver/-/semver-6.3.0.tgz#ee0a64c8af5e8ceea67687b133761e1becbd1d3d" integrity sha512-b39TBaTSfV6yBrapU89p5fKekE2m/NwnDocOVruQFS1/veMgdzuPcnOM34M6CwxW8jH/lxEa5rBoDeUwu5HHTw== semver@^7.2.1, semver@^7.3.4, semver@^7.3.5, semver@^7.3.7: - version "7.3.7" - resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.7.tgz#12c5b649afdbf9049707796e22a4028814ce523f" - integrity sha512-QlYTucUYOews+WeEujDoEGziz4K6c47V/Bd+LjSSYcA94p+DmINdf7ncaUinThfvZyu13lN9OY1XDxt8C0Tw0g== + version "7.3.8" + resolved "https://registry.yarnpkg.com/semver/-/semver-7.3.8.tgz#07a78feafb3f7b32347d725e33de7e2a2df67798" + integrity sha512-NB1ctGL5rlHrPJtFDVIVzTyQylMLu9N9VICA6HSFJo8MCGVTMW6gfpicwKmmK/dAjTOrqu5l63JJOpDSrAis3A== dependencies: lru-cache "^6.0.0" @@ -10531,9 +10655,9 @@ solhint@^2.0.0: prettier "^1.14.3" solidity-coverage@^0.8.1: - version "0.8.1" - resolved "https://registry.yarnpkg.com/solidity-coverage/-/solidity-coverage-0.8.1.tgz#c6ec1209db6461b464782390812fdbf473638264" - integrity sha512-IsXuCmgofoph8gA5x93NWYoqHD1pNJBW/OfSXQsIqHL7g3n7/zKW7k9u4t9wJspJuXu+QcW3+oc/ryw7khoLzw== + version "0.8.2" + resolved "https://registry.yarnpkg.com/solidity-coverage/-/solidity-coverage-0.8.2.tgz#bc39604ab7ce0a3fa7767b126b44191830c07813" + integrity sha512-cv2bWb7lOXPE9/SSleDO6czkFiMHgP4NXPj+iW9W7iEKLBk7Cj0AGBiNmGX3V1totl9wjPrT0gHmABZKZt65rQ== dependencies: "@ethersproject/abi" "^5.0.9" "@solidity-parser/parser" "^0.14.1" @@ -10554,7 +10678,7 @@ solidity-coverage@^0.8.1: sc-istanbul "^0.4.5" semver "^7.3.4" shelljs "^0.8.3" - web3-utils "1.3.0" + web3-utils "^1.3.6" solpp@^0.11.5: version "0.11.5" @@ -10657,9 +10781,9 @@ spdx-expression-parse@^3.0.0: spdx-license-ids "^3.0.0" 
spdx-license-ids@^3.0.0: - version "3.0.11" - resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.11.tgz#50c0d8c40a14ec1bf449bae69a0ea4685a9d9f95" - integrity sha512-Ctl2BrFiM0X3MANYgj3CkygxhRmr9mi6xhejbdO960nF6EDJApTYpn0BQnDKlnNBULKiCN1n3w9EBkHK8ZWg+g== + version "3.0.12" + resolved "https://registry.yarnpkg.com/spdx-license-ids/-/spdx-license-ids-3.0.12.tgz#69077835abe2710b65f03969898b6637b505a779" + integrity sha512-rr+VVSXtRhO4OHbXUiAF7xW3Bo9DuuF6C5jH+q/x15j2jniycgKbxU09Hr0WqlSLUs4i4ltHGXqTe7VHclYWyA== split-ca@^1.0.0: version "1.0.1" @@ -10740,6 +10864,11 @@ stream-transform@^2.1.3: dependencies: mixme "^0.5.1" +streamsearch@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== + strict-uri-encode@^1.0.0: version "1.1.0" resolved "https://registry.yarnpkg.com/strict-uri-encode/-/strict-uri-encode-1.1.0.tgz#279b225df1d582b1f54e65addd4352e18faa0713" @@ -10785,7 +10914,7 @@ string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.3: is-fullwidth-code-point "^3.0.0" strip-ansi "^6.0.1" -string.prototype.trim@~1.2.5: +string.prototype.trim@~1.2.6: version "1.2.6" resolved "https://registry.yarnpkg.com/string.prototype.trim/-/string.prototype.trim-1.2.6.tgz#824960787db37a9e24711802ed0c1d1c0254f83e" integrity sha512-8lMR2m+U0VJTPp6JjvJTtGyc4FIGq9CdRt7O9p6T0e6K4vjU+OP+SQJpbe/SBmRcCUIvNUnjsbmY6lnMp8MhsQ== @@ -10964,15 +11093,15 @@ swap-case@^1.1.0: upper-case "^1.1.1" swarm-js@^0.1.40: - version "0.1.40" - resolved "https://registry.yarnpkg.com/swarm-js/-/swarm-js-0.1.40.tgz#b1bc7b6dcc76061f6c772203e004c11997e06b99" - integrity sha512-yqiOCEoA4/IShXkY3WKwP5PvZhmoOOD8clsKA7EEcRILMkTEYHCQ21HDCAcVpmIxZq4LyZvWeRJ6quIyHk1caA== + version "0.1.42" + resolved "https://registry.yarnpkg.com/swarm-js/-/swarm-js-0.1.42.tgz#497995c62df6696f6e22372f457120e43e727979" + integrity sha512-BV7c/dVlA3R6ya1lMlSSNPLYrntt0LUq4YMgy3iwpCIc6rZnS5W2wUoctarZ5pXlpKtxDDf9hNziEkcfrxdhqQ== dependencies: bluebird "^3.5.0" buffer "^5.0.5" eth-lib "^0.1.26" fs-extra "^4.0.2" - got "^7.1.0" + got "^11.8.5" mime-types "^2.1.16" mkdirp-promise "^5.0.1" mock-fs "^4.1.0" @@ -11028,24 +11157,24 @@ table@^6.0.9, table@^6.8.0: strip-ansi "^6.0.1" tape@^4.6.3: - version "4.15.1" - resolved "https://registry.yarnpkg.com/tape/-/tape-4.15.1.tgz#88fb662965a11f9be1bddb04c11662d7eceb129e" - integrity sha512-k7F5pyr91n9D/yjSJwbLLYDCrTWXxMSXbbmHX2n334lSIc2rxeXyFkaBv4UuUd2gBYMrAOalPutAiCxC6q1qbw== + version "4.16.1" + resolved "https://registry.yarnpkg.com/tape/-/tape-4.16.1.tgz#8d511b3a0be1a30441885972047c1dac822fd9be" + integrity sha512-U4DWOikL5gBYUrlzx+J0oaRedm2vKLFbtA/+BRAXboGWpXO7bMP8ddxlq3Cse2bvXFQ0jZMOj6kk3546mvCdFg== dependencies: call-bind "~1.0.2" deep-equal "~1.1.1" defined "~1.0.0" dotignore "~0.1.2" for-each "~0.3.3" - glob "~7.2.0" + glob "~7.2.3" has "~1.0.3" inherits "~2.0.4" is-regex "~1.1.4" minimist "~1.2.6" - object-inspect "~1.12.0" - resolve "~1.22.0" + object-inspect "~1.12.2" + resolve "~1.22.1" resumer "~0.0.0" - string.prototype.trim "~1.2.5" + string.prototype.trim "~1.2.6" through "~2.3.8" tar-fs@~1.16.3: @@ -11166,7 +11295,7 @@ time-require@^0.1.2: pretty-ms "^0.2.1" text-table "^0.2.0" -timed-out@^4.0.0, timed-out@^4.0.1: +timed-out@^4.0.1: version "4.0.1" resolved "https://registry.yarnpkg.com/timed-out/-/timed-out-4.0.1.tgz#f32eacac5a175bea25d7fab565ab3ed8741ef56f" integrity 
sha512-G7r3AhovYtr5YKOWQkta8RKAPb+J9IsO4uVmzjl8AZwfhs8UcUwTiD6gcJYSgOtzyjvQKrKYn41syHbUWMkafA== @@ -11379,11 +11508,6 @@ tslib@^1.8.1, tslib@^1.9.0, tslib@^1.9.3: resolved "https://registry.yarnpkg.com/tslib/-/tslib-1.14.1.tgz#cf2d38bdc34a134bcaf1091c41f6619e2f672d00" integrity sha512-Xni35NKzjgMrwevysHTCArtLDpPvye8zV/0E4EyYn43P7/7qvQwPh9BGkHewbMulVntbigmcT7rdX3BNo9wRJg== -tslib@^2.4.0: - version "2.4.0" - resolved "https://registry.yarnpkg.com/tslib/-/tslib-2.4.0.tgz#7cecaa7f073ce680a05847aa77be941098f36dc3" - integrity sha512-d6xOpEDfsi2CZVlPQzGeux8XMwLT9hssAsaPYExaQMuYskwb+x1x7J371tWlbBdWHroy99KnVB6qIkUbs5X3UQ== - tsort@0.0.1: version "0.0.1" resolved "https://registry.yarnpkg.com/tsort/-/tsort-0.0.1.tgz#e2280f5e817f8bf4275657fd0f9aebd44f5a2786" @@ -11493,10 +11617,10 @@ type@^1.0.1: resolved "https://registry.yarnpkg.com/type/-/type-1.2.0.tgz#848dd7698dafa3e54a6c479e759c4bc3f18847a0" integrity sha512-+5nt5AAniqsCnu2cEQQdpzCAh33kVx8n0VoFidKpB1dVVLAN/F+bgVOqOJqOnEnrhp222clB5p3vUlD+1QAnfg== -type@^2.5.0: - version "2.6.0" - resolved "https://registry.yarnpkg.com/type/-/type-2.6.0.tgz#3ca6099af5981d36ca86b78442973694278a219f" - integrity sha512-eiDBDOmkih5pMbo9OqsqPRGMljLodLcwd5XD5JbtNB0o89xZAwynY9EdCDsJU7LtcVCClu9DvM7/0Ep1hYX3EQ== +type@^2.7.2: + version "2.7.2" + resolved "https://registry.yarnpkg.com/type/-/type-2.7.2.tgz#2376a15a3a28b1efa0f5350dcf72d24df6ef98d0" + integrity sha512-dzlvlNlt6AXU7EBSfpAscydQ7gXB+pPGsPnfJnZpiNJBDj7IaJzQlBZYGdEi4R9HmPdBv2XmWJ6YUtoTa7lmCw== typechain@^3.0.0: version "3.0.0" @@ -11582,9 +11706,9 @@ typical@^5.2.0: integrity sha512-dvdQgNDNJo+8B2uBQoqdb11eUCE1JQXhvjC/CZtgvZseVd5TYMXnq0+vuUemXbd/Se29cTaUuPX3YIc2xgbvIg== uglify-js@^3.1.4: - version "3.16.0" - resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.16.0.tgz#b778ba0831ca102c1d8ecbdec2d2bdfcc7353190" - integrity sha512-FEikl6bR30n0T3amyBh3LoiBdqHRy/f4H80+My34HOesOKyHfOsxAPAxOoqC0JUnC1amnO0IwkYC3sko51caSw== + version "3.17.4" + resolved "https://registry.yarnpkg.com/uglify-js/-/uglify-js-3.17.4.tgz#61678cf5fa3f5b7eb789bb345df29afb8257c22c" + integrity sha512-T9q82TJI9e/C1TAxYvfb16xO120tMVFZrGA3f9/P4424DNu6ypK103y0GPFVa17yotwSyZW5iYXgjYHkGrJW/g== ultron@~1.1.0: version "1.1.1" @@ -11607,14 +11731,16 @@ underscore@1.9.1: integrity sha512-5/4etnCkd9c8gwgowi5/om/mYO5ajCaOgdzj/oW+0eQV9WxKBDZw5+ycmKmeaTXjInS/W0BzpGLo2xR2aBwZdg== underscore@^1.8.3: - version "1.13.4" - resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.4.tgz#7886b46bbdf07f768e0052f1828e1dcab40c0dee" - integrity sha512-BQFnUDuAQ4Yf/cYY5LNrK9NCJFKriaRbD9uR1fTeXnBeoa97W0i41qkZfGO9pSo8I5KzjAcSY2XYtdf0oKd7KQ== + version "1.13.6" + resolved "https://registry.yarnpkg.com/underscore/-/underscore-1.13.6.tgz#04786a1f589dc6c09f761fc5f45b89e935136441" + integrity sha512-+A5Sja4HP1M08MaXya7p5LvjuM7K6q/2EaC0+iovj/wOcMsTzMvDFbasi/oSapiwOlt252IqsKqPjCl7huKS0A== undici@^5.4.0: - version "5.5.1" - resolved "https://registry.yarnpkg.com/undici/-/undici-5.5.1.tgz#baaf25844a99eaa0b22e1ef8d205bffe587c8f43" - integrity sha512-MEvryPLf18HvlCbLSzCW0U00IMftKGI5udnjrQbC5D4P0Hodwffhv+iGfWuJwg16Y/TK11ZFK8i+BPVW2z/eAw== + version "5.11.0" + resolved "https://registry.yarnpkg.com/undici/-/undici-5.11.0.tgz#1db25f285821828fc09d3804b9e2e934ae86fc13" + integrity sha512-oWjWJHzFet0Ow4YZBkyiJwiK5vWqEYoH7BINzJAJOLedZ++JpAlCbUktW2GQ2DS2FpKmxD/JMtWUUWl1BtghGw== + dependencies: + busboy "^1.6.0" union-value@^1.0.0: version "1.0.1" @@ -11683,13 +11809,6 @@ urix@^0.1.0: resolved 
"https://registry.yarnpkg.com/urix/-/urix-0.1.0.tgz#da937f7a62e21fec1fd18d49b35c2935067a6c72" integrity sha512-Am1ousAhSLBeB9cG/7k7r2R0zj50uDRlZHPGbazid5s9rlF1F/QKYObEKSIunSjIOkJZqwRRLpvewjEkM7pSqg== -url-parse-lax@^1.0.0: - version "1.0.0" - resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-1.0.0.tgz#7af8f303645e9bd79a272e7a14ac68bc0609da73" - integrity sha512-BVA4lR5PIviy2PMseNd2jbFQ+jwSwQGdJejf5ctd1rEXt0Ypd7yanUK9+lYechVlN5VaTJGsu2U/3MDDu6KgBA== - dependencies: - prepend-http "^1.0.1" - url-parse-lax@^3.0.0: version "3.0.0" resolved "https://registry.yarnpkg.com/url-parse-lax/-/url-parse-lax-3.0.0.tgz#16b5cafc07dbe3676c1b1999177823d6503acb0c" @@ -11702,11 +11821,6 @@ url-set-query@^1.0.0: resolved "https://registry.yarnpkg.com/url-set-query/-/url-set-query-1.0.0.tgz#016e8cfd7c20ee05cafe7795e892bd0702faa339" integrity sha512-3AChu4NiXquPfeckE5R5cGdiHCMWJx1dwCWOmWIL4KHAziJNOFIYJlpGFeKDvwLPHovZRCxK3cYlwzqI9Vp+Gg== -url-to-options@^1.0.1: - version "1.0.1" - resolved "https://registry.yarnpkg.com/url-to-options/-/url-to-options-1.0.1.tgz#1505a03a289a48cbd7a434efbaeec5055f5633a9" - integrity sha512-0kQLIzG4fdk/G5NONku64rSH/x32NOA39LVQqlK8Le6lvTF6GGRJpqaQFGgU+CLwySIqBSMdwYM0sYcW9f6P4A== - url@^0.11.0: version "0.11.0" resolved "https://registry.yarnpkg.com/url/-/url-0.11.0.tgz#3838e97cfc60521eb73c525a8e55bfdd9e2e28f1" @@ -11721,9 +11835,9 @@ use@^3.1.0: integrity sha512-cwESVXlO3url9YWlFW/TA9cshCEhtu7IKJ/p5soJ/gGpj7vbvFrAY/eIioQ6Dw23KjZhYgiIo8HOs1nQ2vr/oQ== utf-8-validate@^5.0.2: - version "5.0.9" - resolved "https://registry.yarnpkg.com/utf-8-validate/-/utf-8-validate-5.0.9.tgz#ba16a822fbeedff1a58918f2a6a6b36387493ea3" - integrity sha512-Yek7dAy0v3Kl0orwMlvi7TPtiCNrdfHNd7Gcc/pLq4BLXqfAmd0J7OWMizUQnTTJsyjKn02mU7anqwfmUP4J8Q== + version "5.0.10" + resolved "https://registry.yarnpkg.com/utf-8-validate/-/utf-8-validate-5.0.10.tgz#d7d10ea39318171ca982718b6b96a8d2442571a2" + integrity sha512-Z6czzLq4u8fPOyx7TU6X3dvUZVvoJmxSQ+IcrlmagKhilxlhZgxPK6C5Jqbkw1IDUmFTM+cz9QDnnLTwDz/2gQ== dependencies: node-gyp-build "^4.3.0" @@ -11754,15 +11868,14 @@ util.promisify@^1.0.0: object.getownpropertydescriptors "^2.1.1" util@^0.12.0: - version "0.12.4" - resolved "https://registry.yarnpkg.com/util/-/util-0.12.4.tgz#66121a31420df8f01ca0c464be15dfa1d1850253" - integrity sha512-bxZ9qtSlGUWSOy9Qa9Xgk11kSslpuZwaxCg4sNIDj6FLucDab2JxnHwyNTCpHMtK1MjoQiWQ6DiUMZYbSrO+Sw== + version "0.12.5" + resolved "https://registry.yarnpkg.com/util/-/util-0.12.5.tgz#5f17a6059b73db61a875668781a1c2b136bd6fbc" + integrity sha512-kZf/K6hEIrWHI6XqOFUiiMa+79wE/D8Q+NCNAWclkyg3b4d2k7s0QGepNjiABc+aR3N1PAyHL7p6UcLY6LmrnA== dependencies: inherits "^2.0.3" is-arguments "^1.0.4" is-generator-function "^1.0.7" is-typed-array "^1.1.3" - safe-buffer "^5.1.2" which-typed-array "^1.1.2" utils-merge@1.0.1: @@ -11839,22 +11952,22 @@ web3-bzz@1.2.11: swarm-js "^0.1.40" underscore "1.9.1" -web3-bzz@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-bzz/-/web3-bzz-1.5.3.tgz#e36456905ce051138f9c3ce3623cbc73da088c2b" - integrity sha512-SlIkAqG0eS6cBS9Q2eBOTI1XFzqh83RqGJWnyrNZMDxUwsTVHL+zNnaPShVPvrWQA1Ub5b0bx1Kc5+qJVxsTJg== +web3-bzz@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-bzz/-/web3-bzz-1.7.4.tgz#9419e606e38a9777443d4ce40506ebd796e06075" + integrity sha512-w9zRhyEqTK/yi0LGRHjZMcPCfP24LBjYXI/9YxFw9VqsIZ9/G0CRCnUt12lUx0A56LRAMpF7iQ8eA73aBcO29Q== dependencies: "@types/node" "^12.12.6" got "9.6.0" swarm-js "^0.1.40" -web3-bzz@1.7.3: - version "1.7.3" - resolved 
"https://registry.yarnpkg.com/web3-bzz/-/web3-bzz-1.7.3.tgz#6860a584f748838af5e3932b6798e024ab8ae951" - integrity sha512-y2i2IW0MfSqFc1JBhBSQ59Ts9xE30hhxSmLS13jLKWzie24/An5dnoGarp2rFAy20tevJu1zJVPYrEl14jiL5w== +web3-bzz@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-bzz/-/web3-bzz-1.8.0.tgz#2023676d7c17ea36512bf76eb310755a02a3d464" + integrity sha512-caDtdKeLi7+2Vb+y+cq2yyhkNjnxkFzVW0j1DtemarBg3dycG1iEl75CVQMLNO6Wkg+HH9tZtRnUyFIe5LIUeQ== dependencies: "@types/node" "^12.12.6" - got "9.6.0" + got "12.1.0" swarm-js "^0.1.40" web3-core-helpers@1.2.11: @@ -11866,21 +11979,21 @@ web3-core-helpers@1.2.11: web3-eth-iban "1.2.11" web3-utils "1.2.11" -web3-core-helpers@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-core-helpers/-/web3-core-helpers-1.5.3.tgz#099030235c477aadf39a94199ef40092151d563c" - integrity sha512-Ip1IjB3S8vN7Kf1PPjK41U5gskmMk6IJQlxIVuS8/1U7n/o0jC8krqtpRwiMfAgYyw3TXwBFtxSRTvJtnLyXZw== +web3-core-helpers@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-core-helpers/-/web3-core-helpers-1.7.4.tgz#f8f808928560d3e64e0c8d7bdd163aa4766bcf40" + integrity sha512-F8PH11qIkE/LpK4/h1fF/lGYgt4B6doeMi8rukeV/s4ivseZHHslv1L6aaijLX/g/j4PsFmR42byynBI/MIzFg== dependencies: - web3-eth-iban "1.5.3" - web3-utils "1.5.3" + web3-eth-iban "1.7.4" + web3-utils "1.7.4" -web3-core-helpers@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-helpers/-/web3-core-helpers-1.7.3.tgz#9a8d7830737d0e9c48694b244f4ce0f769ba67b9" - integrity sha512-qS2t6UKLhRV/6C7OFHtMeoHphkcA+CKUr2vfpxy4hubs3+Nj28K9pgiqFuvZiXmtEEwIAE2A28GBOC3RdcSuFg== +web3-core-helpers@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-helpers/-/web3-core-helpers-1.8.0.tgz#5dcfdda1a4ea277041d912003198f1334ca29d7c" + integrity sha512-nMAVwZB3rEp/khHI2BvFy0e/xCryf501p5NGjswmJtEM+Zrd3Biaw52JrB1qAZZIzCA8cmLKaOgdfamoDOpWdw== dependencies: - web3-eth-iban "1.7.3" - web3-utils "1.7.3" + web3-eth-iban "1.8.0" + web3-utils "1.8.0" web3-core-method@1.2.11: version "1.2.11" @@ -11894,28 +12007,27 @@ web3-core-method@1.2.11: web3-core-subscriptions "1.2.11" web3-utils "1.2.11" -web3-core-method@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-core-method/-/web3-core-method-1.5.3.tgz#6cff97ed19fe4ea2e9183d6f703823a079f5132c" - integrity sha512-8wJrwQ2qD9ibWieF9oHXwrJsUGrv3XAtEkNeyvyNMpktNTIjxJ2jaFGQUuLiyUrMubD18XXgLk4JS6PJU4Loeg== +web3-core-method@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-core-method/-/web3-core-method-1.7.4.tgz#3873c6405e1a0a8a1efc1d7b28de8b7550b00c15" + integrity sha512-56K7pq+8lZRkxJyzf5MHQPI9/VL3IJLoy4L/+q8HRdZJ3CkB1DkXYaXGU2PeylG1GosGiSzgIfu1ljqS7CP9xQ== dependencies: - "@ethereumjs/common" "^2.4.0" - "@ethersproject/transactions" "^5.0.0-beta.135" - web3-core-helpers "1.5.3" - web3-core-promievent "1.5.3" - web3-core-subscriptions "1.5.3" - web3-utils "1.5.3" + "@ethersproject/transactions" "^5.6.2" + web3-core-helpers "1.7.4" + web3-core-promievent "1.7.4" + web3-core-subscriptions "1.7.4" + web3-utils "1.7.4" -web3-core-method@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-method/-/web3-core-method-1.7.3.tgz#eb2a4f140448445c939518c0fa6216b3d265c5e9" - integrity sha512-SeF8YL/NVFbj/ddwLhJeS0io8y7wXaPYA2AVT0h2C2ESYkpvOtQmyw2Bc3aXxBmBErKcbOJjE2ABOKdUmLSmMA== +web3-core-method@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-method/-/web3-core-method-1.8.0.tgz#9c2da8896808917d1679c319f19e2174ba17086c" + integrity 
sha512-c94RAzo3gpXwf2rf8rL8C77jOzNWF4mXUoUfZYYsiY35cJFd46jQDPI00CB5+ZbICTiA5mlVzMj4e7jAsTqiLA== dependencies: - "@ethersproject/transactions" "^5.0.0-beta.135" - web3-core-helpers "1.7.3" - web3-core-promievent "1.7.3" - web3-core-subscriptions "1.7.3" - web3-utils "1.7.3" + "@ethersproject/transactions" "^5.6.2" + web3-core-helpers "1.8.0" + web3-core-promievent "1.8.0" + web3-core-subscriptions "1.8.0" + web3-utils "1.8.0" web3-core-promievent@1.2.11: version "1.2.11" @@ -11924,17 +12036,17 @@ web3-core-promievent@1.2.11: dependencies: eventemitter3 "4.0.4" -web3-core-promievent@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-core-promievent/-/web3-core-promievent-1.5.3.tgz#3f11833c3dc6495577c274350b61144e0a4dba01" - integrity sha512-CFfgqvk3Vk6PIAxtLLuX+pOMozxkKCY+/GdGr7weMh033mDXEPvwyVjoSRO1PqIKj668/hMGQsVoIgbyxkJ9Mg== +web3-core-promievent@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-core-promievent/-/web3-core-promievent-1.7.4.tgz#80a75633fdfe21fbaae2f1e38950edb2f134868c" + integrity sha512-o4uxwXKDldN7ER7VUvDfWsqTx9nQSP1aDssi1XYXeYC2xJbVo0n+z6ryKtmcoWoRdRj7uSpVzal3nEmlr480mA== dependencies: eventemitter3 "4.0.4" -web3-core-promievent@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-promievent/-/web3-core-promievent-1.7.3.tgz#2d0eeef694569b61355054c721578f67df925b80" - integrity sha512-+mcfNJLP8h2JqcL/UdMGdRVfTdm+bsoLzAFtLpazE4u9kU7yJUgMMAqnK59fKD3Zpke3DjaUJKwz1TyiGM5wig== +web3-core-promievent@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-promievent/-/web3-core-promievent-1.8.0.tgz#979765fd4d37ab0f158f0ee54037b279b737bd53" + integrity sha512-FGLyjAuOaAQ+ZhV6iuw9tg/9WvIkSZXKHQ4mdTyQ8MxVraOtFivOCbuLLsGgapfHYX+RPxsc1j1YzQjKoupagQ== dependencies: eventemitter3 "4.0.4" @@ -11949,27 +12061,27 @@ web3-core-requestmanager@1.2.11: web3-providers-ipc "1.2.11" web3-providers-ws "1.2.11" -web3-core-requestmanager@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-core-requestmanager/-/web3-core-requestmanager-1.5.3.tgz#b339525815fd40e3a2a81813c864ddc413f7b6f7" - integrity sha512-9k/Bze2rs8ONix5IZR+hYdMNQv+ark2Ek2kVcrFgWO+LdLgZui/rn8FikPunjE+ub7x7pJaKCgVRbYFXjo3ZWg== +web3-core-requestmanager@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-core-requestmanager/-/web3-core-requestmanager-1.7.4.tgz#2dc8a526dab8183dca3fef54658621801b1d0469" + integrity sha512-IuXdAm65BQtPL4aI6LZJJOrKAs0SM5IK2Cqo2/lMNvVMT9Kssq6qOk68Uf7EBDH0rPuINi+ReLP+uH+0g3AnPA== dependencies: util "^0.12.0" - web3-core-helpers "1.5.3" - web3-providers-http "1.5.3" - web3-providers-ipc "1.5.3" - web3-providers-ws "1.5.3" + web3-core-helpers "1.7.4" + web3-providers-http "1.7.4" + web3-providers-ipc "1.7.4" + web3-providers-ws "1.7.4" -web3-core-requestmanager@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-requestmanager/-/web3-core-requestmanager-1.7.3.tgz#226f79d16e546c9157d00908de215e984cae84e9" - integrity sha512-bC+jeOjPbagZi2IuL1J5d44f3zfPcgX+GWYUpE9vicNkPUxFBWRG+olhMo7L+BIcD57cTmukDlnz+1xBULAjFg== +web3-core-requestmanager@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-requestmanager/-/web3-core-requestmanager-1.8.0.tgz#06189df80cf52d24a195a7ef655031afe8192df3" + integrity sha512-2AoYCs3Owl5foWcf4uKPONyqFygSl9T54L8b581U16nsUirjhoTUGK/PBhMDVcLCmW4QQmcY5A8oPFpkQc1TTg== dependencies: util "^0.12.0" - web3-core-helpers "1.7.3" - web3-providers-http "1.7.3" - web3-providers-ipc "1.7.3" - web3-providers-ws "1.7.3" + 
web3-core-helpers "1.8.0" + web3-providers-http "1.8.0" + web3-providers-ipc "1.8.0" + web3-providers-ws "1.8.0" web3-core-subscriptions@1.2.11: version "1.2.11" @@ -11980,21 +12092,21 @@ web3-core-subscriptions@1.2.11: underscore "1.9.1" web3-core-helpers "1.2.11" -web3-core-subscriptions@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-core-subscriptions/-/web3-core-subscriptions-1.5.3.tgz#d7d69c4caad65074212028656e9dc56ca5c2159d" - integrity sha512-L2m9vG1iRN6thvmv/HQwO2YLhOQlmZU8dpLG6GSo9FBN14Uch868Swk0dYVr3rFSYjZ/GETevSXU+O+vhCummA== +web3-core-subscriptions@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-core-subscriptions/-/web3-core-subscriptions-1.7.4.tgz#cfbd3fa71081a8c8c6f1a64577a1a80c5bd9826f" + integrity sha512-VJvKWaXRyxk2nFWumOR94ut9xvjzMrRtS38c4qj8WBIRSsugrZr5lqUwgndtj0qx4F+50JhnU++QEqUEAtKm3g== dependencies: eventemitter3 "4.0.4" - web3-core-helpers "1.5.3" + web3-core-helpers "1.7.4" -web3-core-subscriptions@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core-subscriptions/-/web3-core-subscriptions-1.7.3.tgz#ca456dfe2c219a0696c5cf34c13b03c3599ec5d5" - integrity sha512-/i1ZCLW3SDxEs5mu7HW8KL4Vq7x4/fDXY+yf/vPoDljlpvcLEOnI8y9r7om+0kYwvuTlM6DUHHafvW0221TyRQ== +web3-core-subscriptions@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core-subscriptions/-/web3-core-subscriptions-1.8.0.tgz#ff66ae4467c8cb4716367248bcefb1845c0f8b83" + integrity sha512-7lHVRzDdg0+Gcog55lG6Q3D8JV+jN+4Ly6F8cSn9xFUAwOkdbgdWsjknQG7t7CDWy21DQkvdiY2BJF8S68AqOA== dependencies: eventemitter3 "4.0.4" - web3-core-helpers "1.7.3" + web3-core-helpers "1.8.0" web3-core@1.2.11: version "1.2.11" @@ -12009,31 +12121,31 @@ web3-core@1.2.11: web3-core-requestmanager "1.2.11" web3-utils "1.2.11" -web3-core@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-core/-/web3-core-1.5.3.tgz#59f8728b27c8305b349051326aa262b9b7e907bf" - integrity sha512-ACTbu8COCu+0eUNmd9pG7Q9EVsNkAg2w3Y7SqhDr+zjTgbSHZV01jXKlapm9z+G3AN/BziV3zGwudClJ4u4xXQ== +web3-core@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-core/-/web3-core-1.7.4.tgz#943fff99134baedafa7c65b4a0bbd424748429ff" + integrity sha512-L0DCPlIh9bgIED37tYbe7bsWrddoXYc897ANGvTJ6MFkSNGiMwDkTLWSgYd9Mf8qu8b4iuPqXZHMwIo4atoh7Q== dependencies: - "@types/bn.js" "^4.11.5" + "@types/bn.js" "^5.1.0" "@types/node" "^12.12.6" bignumber.js "^9.0.0" - web3-core-helpers "1.5.3" - web3-core-method "1.5.3" - web3-core-requestmanager "1.5.3" - web3-utils "1.5.3" + web3-core-helpers "1.7.4" + web3-core-method "1.7.4" + web3-core-requestmanager "1.7.4" + web3-utils "1.7.4" -web3-core@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-core/-/web3-core-1.7.3.tgz#2ef25c4cc023997f43af9f31a03b571729ff3cda" - integrity sha512-4RNxueGyevD1XSjdHE57vz/YWRHybpcd3wfQS33fgMyHZBVLFDNwhn+4dX4BeofVlK/9/cmPAokLfBUStZMLdw== +web3-core@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-core/-/web3-core-1.8.0.tgz#90afce527ac1b1dff8cbed2acbc0336530b8aacf" + integrity sha512-9sCA+Z02ci6zoY2bAquFiDjujRwmSKHiSGi4B8IstML8okSytnzXk1izHYSynE7ahIkguhjWAuXFvX76F5rAbA== dependencies: - "@types/bn.js" "^4.11.5" + "@types/bn.js" "^5.1.0" "@types/node" "^12.12.6" bignumber.js "^9.0.0" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-core-requestmanager "1.7.3" - web3-utils "1.7.3" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-core-requestmanager "1.8.0" + web3-utils "1.8.0" web3-eth-abi@1.2.11: version "1.2.11" @@ -12044,21 
+12156,21 @@ web3-eth-abi@1.2.11: underscore "1.9.1" web3-utils "1.2.11" -web3-eth-abi@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-eth-abi/-/web3-eth-abi-1.5.3.tgz#5aea9394d797f99ca0d9bd40c3417eb07241c96c" - integrity sha512-i/qhuFsoNrnV130CSRYX/z4SlCfSQ4mHntti5yTmmQpt70xZKYZ57BsU0R29ueSQ9/P+aQrL2t2rqkQkAloUxg== +web3-eth-abi@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-eth-abi/-/web3-eth-abi-1.7.4.tgz#3fee967bafd67f06b99ceaddc47ab0970f2a614a" + integrity sha512-eMZr8zgTbqyL9MCTCAvb67RbVyN5ZX7DvA0jbLOqRWCiw+KlJKTGnymKO6jPE8n5yjk4w01e165Qb11hTDwHgg== dependencies: - "@ethersproject/abi" "5.0.7" - web3-utils "1.5.3" + "@ethersproject/abi" "^5.6.3" + web3-utils "1.7.4" -web3-eth-abi@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-abi/-/web3-eth-abi-1.7.3.tgz#2a1123c7252c37100eecd0b1fb2fb2c51366071f" - integrity sha512-ZlD8DrJro0ocnbZViZpAoMX44x5aYAb73u2tMq557rMmpiluZNnhcCYF/NnVMy6UIkn7SF/qEA45GXA1ne6Tnw== +web3-eth-abi@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-abi/-/web3-eth-abi-1.8.0.tgz#47fdff00bfdfa72064c9c612ff6369986598196d" + integrity sha512-xPeMb2hS9YLQK/Q5YZpkcmzoRGM+/R8bogSrYHhNC3hjZSSU0YRH+1ZKK0f9YF4qDZaPMI8tKWIMSCDIpjG6fg== dependencies: - "@ethersproject/abi" "5.0.7" - web3-utils "1.7.3" + "@ethersproject/abi" "^5.6.3" + web3-utils "1.8.0" web3-eth-accounts@1.2.11: version "1.2.11" @@ -12077,27 +12189,27 @@ web3-eth-accounts@1.2.11: web3-core-method "1.2.11" web3-utils "1.2.11" -web3-eth-accounts@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-eth-accounts/-/web3-eth-accounts-1.5.3.tgz#076c816ff4d68c9dffebdc7fd2bfaddcfc163d77" - integrity sha512-pdGhXgeBaEJENMvRT6W9cmji3Zz/46ugFSvmnLLw79qi5EH7XJhKISNVb41eWCrs4am5GhI67GLx5d2s2a72iw== +web3-eth-accounts@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-eth-accounts/-/web3-eth-accounts-1.7.4.tgz#7a24a4dfe947f7e9d1bae678529e591aa146167a" + integrity sha512-Y9vYLRKP7VU7Cgq6wG1jFaG2k3/eIuiTKAG8RAuQnb6Cd9k5BRqTm5uPIiSo0AP/u11jDomZ8j7+WEgkU9+Btw== dependencies: - "@ethereumjs/common" "^2.3.0" - "@ethereumjs/tx" "^3.2.1" + "@ethereumjs/common" "^2.5.0" + "@ethereumjs/tx" "^3.3.2" crypto-browserify "3.12.0" eth-lib "0.2.8" ethereumjs-util "^7.0.10" scrypt-js "^3.0.1" uuid "3.3.2" - web3-core "1.5.3" - web3-core-helpers "1.5.3" - web3-core-method "1.5.3" - web3-utils "1.5.3" + web3-core "1.7.4" + web3-core-helpers "1.7.4" + web3-core-method "1.7.4" + web3-utils "1.7.4" -web3-eth-accounts@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-accounts/-/web3-eth-accounts-1.7.3.tgz#cd1789000f13ed3c438e96b3e80ee7be8d3f1a9b" - integrity sha512-aDaWjW1oJeh0LeSGRVyEBiTe/UD2/cMY4dD6pQYa8dOhwgMtNQjxIQ7kacBBXe7ZKhjbIFZDhvXN4mjXZ82R2Q== +web3-eth-accounts@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-accounts/-/web3-eth-accounts-1.8.0.tgz#960d947ee87a49d6c706dc6312334fbfbd6ff812" + integrity sha512-HQ/MDSv4bexwJLvnqsM6xpGE7c2NVOqyhzOZFyMUKXbIwIq85T3TaLnM9pCN7XqMpDcfxqiZ3q43JqQVkzHdmw== dependencies: "@ethereumjs/common" "^2.5.0" "@ethereumjs/tx" "^3.3.2" @@ -12106,10 +12218,10 @@ web3-eth-accounts@1.7.3: ethereumjs-util "^7.0.10" scrypt-js "^3.0.1" uuid "3.3.2" - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-utils "1.7.3" + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-utils "1.8.0" web3-eth-contract@1.2.11: version "1.2.11" @@ -12126,33 +12238,33 @@ web3-eth-contract@1.2.11: 
web3-eth-abi "1.2.11" web3-utils "1.2.11" -web3-eth-contract@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-eth-contract/-/web3-eth-contract-1.5.3.tgz#12b03a4a16ce583a945f874bea2ff2fb4c5b81ad" - integrity sha512-Gdlt1L6cdHe83k7SdV6xhqCytVtOZkjD0kY/15x441AuuJ4JLubCHuqu69k2Dr3tWifHYVys/vG8QE/W16syGg== +web3-eth-contract@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-eth-contract/-/web3-eth-contract-1.7.4.tgz#e5761cfb43d453f57be4777b2e5e7e1082078ff7" + integrity sha512-ZgSZMDVI1pE9uMQpK0T0HDT2oewHcfTCv0osEqf5qyn5KrcQDg1GT96/+S0dfqZ4HKj4lzS5O0rFyQiLPQ8LzQ== dependencies: - "@types/bn.js" "^4.11.5" - web3-core "1.5.3" - web3-core-helpers "1.5.3" - web3-core-method "1.5.3" - web3-core-promievent "1.5.3" - web3-core-subscriptions "1.5.3" - web3-eth-abi "1.5.3" - web3-utils "1.5.3" - -web3-eth-contract@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-contract/-/web3-eth-contract-1.7.3.tgz#c4efc118ed7adafbc1270b633f33e696a39c7fc7" - integrity sha512-7mjkLxCNMWlQrlfM/MmNnlKRHwFk5XrZcbndoMt3KejcqDP6dPHi2PZLutEcw07n/Sk8OMpSamyF3QiGfmyRxw== + "@types/bn.js" "^5.1.0" + web3-core "1.7.4" + web3-core-helpers "1.7.4" + web3-core-method "1.7.4" + web3-core-promievent "1.7.4" + web3-core-subscriptions "1.7.4" + web3-eth-abi "1.7.4" + web3-utils "1.7.4" + +web3-eth-contract@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-contract/-/web3-eth-contract-1.8.0.tgz#58f4ce0bde74e5ce87663502e409a92abad7b2c5" + integrity sha512-6xeXhW2YoCrz2Ayf2Vm4srWiMOB6LawkvxWJDnUWJ8SMATg4Pgu42C/j8rz/enXbYWt2IKuj0kk8+QszxQbK+Q== dependencies: - "@types/bn.js" "^4.11.5" - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-core-promievent "1.7.3" - web3-core-subscriptions "1.7.3" - web3-eth-abi "1.7.3" - web3-utils "1.7.3" + "@types/bn.js" "^5.1.0" + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-core-promievent "1.8.0" + web3-core-subscriptions "1.8.0" + web3-eth-abi "1.8.0" + web3-utils "1.8.0" web3-eth-ens@1.2.11: version "1.2.11" @@ -12169,33 +12281,33 @@ web3-eth-ens@1.2.11: web3-eth-contract "1.2.11" web3-utils "1.2.11" -web3-eth-ens@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-eth-ens/-/web3-eth-ens-1.5.3.tgz#ef6eee1ddf32b1ff9536fc7c599a74f2656bafe1" - integrity sha512-QmGFFtTGElg0E+3xfCIFhiUF+1imFi9eg/cdsRMUZU4F1+MZCC/ee+IAelYLfNTGsEslCqfAusliKOT9DdGGnw== +web3-eth-ens@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-eth-ens/-/web3-eth-ens-1.7.4.tgz#346720305379c0a539e226141a9602f1da7bc0c8" + integrity sha512-Gw5CVU1+bFXP5RVXTCqJOmHn71X2ghNk9VcEH+9PchLr0PrKbHTA3hySpsPco1WJAyK4t8SNQVlNr3+bJ6/WZA== dependencies: content-hash "^2.5.2" eth-ens-namehash "2.0.8" - web3-core "1.5.3" - web3-core-helpers "1.5.3" - web3-core-promievent "1.5.3" - web3-eth-abi "1.5.3" - web3-eth-contract "1.5.3" - web3-utils "1.5.3" - -web3-eth-ens@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-ens/-/web3-eth-ens-1.7.3.tgz#ebc56a4dc7007f4f899259bbae1237d3095e2f3f" - integrity sha512-q7+hFGHIc0mBI3LwgRVcLCQmp6GItsWgUtEZ5bjwdjOnJdbjYddm7PO9RDcTDQ6LIr7hqYaY4WTRnDHZ6BEt5Q== + web3-core "1.7.4" + web3-core-helpers "1.7.4" + web3-core-promievent "1.7.4" + web3-eth-abi "1.7.4" + web3-eth-contract "1.7.4" + web3-utils "1.7.4" + +web3-eth-ens@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-ens/-/web3-eth-ens-1.8.0.tgz#f1937371eac54b087ebe2e871780c2710d39998d" + integrity 
sha512-/eFbQEwvsMOEiOhw9/iuRXCsPkqAmHHWuFOrThQkozRgcnSTRnvxkkRC/b6koiT5/HaKeUs4yQDg+/ixsIxZxA== dependencies: content-hash "^2.5.2" eth-ens-namehash "2.0.8" - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-promievent "1.7.3" - web3-eth-abi "1.7.3" - web3-eth-contract "1.7.3" - web3-utils "1.7.3" + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-promievent "1.8.0" + web3-eth-abi "1.8.0" + web3-eth-contract "1.8.0" + web3-utils "1.8.0" web3-eth-iban@1.2.11: version "1.2.11" @@ -12205,21 +12317,21 @@ web3-eth-iban@1.2.11: bn.js "^4.11.9" web3-utils "1.2.11" -web3-eth-iban@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-eth-iban/-/web3-eth-iban-1.5.3.tgz#91b1475893a877b10eac1de5cce6eb379fb81b5d" - integrity sha512-vMzmGqolYZvRHwP9P4Nf6G8uYM5aTLlQu2a34vz78p0KlDC+eV1th3+90Qeaupa28EG7OO0IT1F0BejiIauOPw== +web3-eth-iban@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-eth-iban/-/web3-eth-iban-1.7.4.tgz#711fb2547fdf0f988060027331b2b6c430505753" + integrity sha512-XyrsgWlZQMv5gRcjXMsNvAoCRvV5wN7YCfFV5+tHUCqN8g9T/o4XUS20vDWD0k4HNiAcWGFqT1nrls02MGZ08w== dependencies: - bn.js "^4.11.9" - web3-utils "1.5.3" + bn.js "^5.2.1" + web3-utils "1.7.4" -web3-eth-iban@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-iban/-/web3-eth-iban-1.7.3.tgz#47433a73380322bba04e17b91fccd4a0e63a390a" - integrity sha512-1GPVWgajwhh7g53mmYDD1YxcftQniIixMiRfOqlnA1w0mFGrTbCoPeVaSQ3XtSf+rYehNJIZAUeDBnONVjXXmg== +web3-eth-iban@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-iban/-/web3-eth-iban-1.8.0.tgz#3af8a0c95b5f7b0b81ab0bcd2075c1e5dda31520" + integrity sha512-4RbvUxcMpo/e5811sE3a6inJ2H4+FFqUVmlRYs0RaXaxiHweahSRBNcpO0UWgmlePTolj0rXqPT2oEr0DuC8kg== dependencies: - bn.js "^4.11.9" - web3-utils "1.7.3" + bn.js "^5.2.1" + web3-utils "1.8.0" web3-eth-personal@1.2.11: version "1.2.11" @@ -12233,29 +12345,29 @@ web3-eth-personal@1.2.11: web3-net "1.2.11" web3-utils "1.2.11" -web3-eth-personal@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-eth-personal/-/web3-eth-personal-1.5.3.tgz#4ebe09e9a77dd49d23d93b36b36cfbf4a6dae713" - integrity sha512-JzibJafR7ak/Icas8uvos3BmUNrZw1vShuNR5Cxjo+vteOC8XMqz1Vr7RH65B4bmlfb3bm9xLxetUHO894+Sew== +web3-eth-personal@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-eth-personal/-/web3-eth-personal-1.7.4.tgz#22c399794cb828a75703df8bb4b3c1331b471546" + integrity sha512-O10C1Hln5wvLQsDhlhmV58RhXo+GPZ5+W76frSsyIrkJWLtYQTCr5WxHtRC9sMD1idXLqODKKgI2DL+7xeZ0/g== dependencies: "@types/node" "^12.12.6" - web3-core "1.5.3" - web3-core-helpers "1.5.3" - web3-core-method "1.5.3" - web3-net "1.5.3" - web3-utils "1.5.3" + web3-core "1.7.4" + web3-core-helpers "1.7.4" + web3-core-method "1.7.4" + web3-net "1.7.4" + web3-utils "1.7.4" -web3-eth-personal@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth-personal/-/web3-eth-personal-1.7.3.tgz#ca2464dca356d4335aa8141cf75a6947f10f45a6" - integrity sha512-iTLz2OYzEsJj2qGE4iXC1Gw+KZN924fTAl0ESBFs2VmRhvVaM7GFqZz/wx7/XESl3GVxGxlRje3gNK0oGIoYYQ== +web3-eth-personal@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth-personal/-/web3-eth-personal-1.8.0.tgz#433c35e2e042844402a12d543c4126ea1494b478" + integrity sha512-L7FT4nR3HmsfZyIAhFpEctKkYGOjRC2h6iFKs9gnFCHZga8yLcYcGaYOBIoYtaKom99MuGBoosayWt/Twh7F5A== dependencies: "@types/node" "^12.12.6" - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-net "1.7.3" - web3-utils "1.7.3" + web3-core 
"1.8.0" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-net "1.8.0" + web3-utils "1.8.0" web3-eth@1.2.11: version "1.2.11" @@ -12276,41 +12388,41 @@ web3-eth@1.2.11: web3-net "1.2.11" web3-utils "1.2.11" -web3-eth@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-eth/-/web3-eth-1.5.3.tgz#d7d1ac7198f816ab8a2088c01e0bf1eda45862fe" - integrity sha512-saFurA1L23Bd7MEf7cBli6/jRdMhD4X/NaMiO2mdMMCXlPujoudlIJf+VWpRWJpsbDFdu7XJ2WHkmBYT5R3p1Q== - dependencies: - web3-core "1.5.3" - web3-core-helpers "1.5.3" - web3-core-method "1.5.3" - web3-core-subscriptions "1.5.3" - web3-eth-abi "1.5.3" - web3-eth-accounts "1.5.3" - web3-eth-contract "1.5.3" - web3-eth-ens "1.5.3" - web3-eth-iban "1.5.3" - web3-eth-personal "1.5.3" - web3-net "1.5.3" - web3-utils "1.5.3" - -web3-eth@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-eth/-/web3-eth-1.7.3.tgz#9e92785ea18d682548b6044551abe7f2918fc0b5" - integrity sha512-BCIRMPwaMlTCbswXyGT6jj9chCh9RirbDFkPtvqozfQ73HGW7kP78TXXf9+Xdo1GjutQfxi/fQ9yPdxtDJEpDA== - dependencies: - web3-core "1.7.3" - web3-core-helpers "1.7.3" - web3-core-method "1.7.3" - web3-core-subscriptions "1.7.3" - web3-eth-abi "1.7.3" - web3-eth-accounts "1.7.3" - web3-eth-contract "1.7.3" - web3-eth-ens "1.7.3" - web3-eth-iban "1.7.3" - web3-eth-personal "1.7.3" - web3-net "1.7.3" - web3-utils "1.7.3" +web3-eth@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-eth/-/web3-eth-1.7.4.tgz#a7c1d3ccdbba4de4a82df7e3c4db716e4a944bf2" + integrity sha512-JG0tTMv0Ijj039emXNHi07jLb0OiWSA9O24MRSk5vToTQyDNXihdF2oyq85LfHuF690lXZaAXrjhtLNlYqb7Ug== + dependencies: + web3-core "1.7.4" + web3-core-helpers "1.7.4" + web3-core-method "1.7.4" + web3-core-subscriptions "1.7.4" + web3-eth-abi "1.7.4" + web3-eth-accounts "1.7.4" + web3-eth-contract "1.7.4" + web3-eth-ens "1.7.4" + web3-eth-iban "1.7.4" + web3-eth-personal "1.7.4" + web3-net "1.7.4" + web3-utils "1.7.4" + +web3-eth@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-eth/-/web3-eth-1.8.0.tgz#006974a5d5e30644d05814111f9e162a72e4a09c" + integrity sha512-hist52os3OT4TQFB/GxPSMxTh3995sz6LPvQpPvj7ktSbpg9RNSFaSsPlCT63wUAHA3PZb1FemkAIeQM5t72Lw== + dependencies: + web3-core "1.8.0" + web3-core-helpers "1.8.0" + web3-core-method "1.8.0" + web3-core-subscriptions "1.8.0" + web3-eth-abi "1.8.0" + web3-eth-accounts "1.8.0" + web3-eth-contract "1.8.0" + web3-eth-ens "1.8.0" + web3-eth-iban "1.8.0" + web3-eth-personal "1.8.0" + web3-net "1.8.0" + web3-utils "1.8.0" web3-net@1.2.11: version "1.2.11" @@ -12321,23 +12433,23 @@ web3-net@1.2.11: web3-core-method "1.2.11" web3-utils "1.2.11" -web3-net@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-net/-/web3-net-1.5.3.tgz#545fee49b8e213b0c55cbe74ffd0295766057463" - integrity sha512-0W/xHIPvgVXPSdLu0iZYnpcrgNnhzHMC888uMlGP5+qMCt8VuflUZHy7tYXae9Mzsg1kxaJAS5lHVNyeNw4CoQ== +web3-net@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-net/-/web3-net-1.7.4.tgz#3153dfd3423262dd6fbec7aae5467202c4cad431" + integrity sha512-d2Gj+DIARHvwIdmxFQ4PwAAXZVxYCR2lET0cxz4KXbE5Og3DNjJi+MoPkX+WqoUXqimu/EOd4Cd+7gefqVAFDg== dependencies: - web3-core "1.5.3" - web3-core-method "1.5.3" - web3-utils "1.5.3" + web3-core "1.7.4" + web3-core-method "1.7.4" + web3-utils "1.7.4" -web3-net@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-net/-/web3-net-1.7.3.tgz#54e35bcc829fdc40cf5001a3870b885d95069810" - integrity 
sha512-zAByK0Qrr71k9XW0Adtn+EOuhS9bt77vhBO6epAeQ2/VKl8rCGLAwrl3GbeEl7kWa8s/su72cjI5OetG7cYR0g== +web3-net@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-net/-/web3-net-1.8.0.tgz#9acff92d7c647d801bc68df0ff4416f104dbe789" + integrity sha512-kX6EAacK7QrOe7DOh0t5yHS5q2kxZmTCxPVwSz9io9xBeE4n4UhmzGJ/VfhP2eM3OPKYeypcR3LEO6zZ8xn2vw== dependencies: - web3-core "1.7.3" - web3-core-method "1.7.3" - web3-utils "1.7.3" + web3-core "1.8.0" + web3-core-method "1.8.0" + web3-utils "1.8.0" web3-provider-engine@14.2.1: version "14.2.1" @@ -12373,21 +12485,23 @@ web3-providers-http@1.2.11: web3-core-helpers "1.2.11" xhr2-cookies "1.1.0" -web3-providers-http@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-providers-http/-/web3-providers-http-1.5.3.tgz#74f170fc3d79eb7941d9fbc34e2a067d61ced0b2" - integrity sha512-5DpUyWGHtDAr2RYmBu34Fu+4gJuBAuNx2POeiJIooUtJ+Mu6pIx4XkONWH6V+Ez87tZAVAsFOkJRTYuzMr3rPw== +web3-providers-http@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-providers-http/-/web3-providers-http-1.7.4.tgz#8209cdcb115db5ccae1f550d1c4e3005e7538d02" + integrity sha512-AU+/S+49rcogUER99TlhW+UBMk0N2DxvN54CJ2pK7alc2TQ7+cprNPLHJu4KREe8ndV0fT6JtWUfOMyTvl+FRA== dependencies: - web3-core-helpers "1.5.3" + web3-core-helpers "1.7.4" xhr2-cookies "1.1.0" -web3-providers-http@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-providers-http/-/web3-providers-http-1.7.3.tgz#8ea5e39f6ceee0b5bc4e45403fae75cad8ff4cf7" - integrity sha512-TQJfMsDQ5Uq9zGMYlu7azx1L7EvxW+Llks3MaWn3cazzr5tnrDbGh6V17x6LN4t8tFDHWx0rYKr3mDPqyTjOZw== +web3-providers-http@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-providers-http/-/web3-providers-http-1.8.0.tgz#3fd1e569ead2095343fac17d53160a3bae674c23" + integrity sha512-/MqxwRzExohBWW97mqlCSW/+NHydGRyoEDUS1bAIF2YjfKFwyRtHgrEzOojzkC9JvB+8LofMvbXk9CcltpZapw== dependencies: - web3-core-helpers "1.7.3" - xhr2-cookies "1.1.0" + abortcontroller-polyfill "^1.7.3" + cross-fetch "^3.1.4" + es6-promise "^4.2.8" + web3-core-helpers "1.8.0" web3-providers-ipc@1.2.11: version "1.2.11" @@ -12398,21 +12512,21 @@ web3-providers-ipc@1.2.11: underscore "1.9.1" web3-core-helpers "1.2.11" -web3-providers-ipc@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-providers-ipc/-/web3-providers-ipc-1.5.3.tgz#4bd7f5e445c2f3c2595fce0929c72bb879320a3f" - integrity sha512-JmeAptugVpmXI39LGxUSAymx0NOFdgpuI1hGQfIhbEAcd4sv7fhfd5D+ZU4oLHbRI8IFr4qfGU0uhR8BXhDzlg== +web3-providers-ipc@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-providers-ipc/-/web3-providers-ipc-1.7.4.tgz#02e85e99e48f432c9d34cee7d786c3685ec9fcfa" + integrity sha512-jhArOZ235dZy8fS8090t60nTxbd1ap92ibQw5xIrAQ9m7LcZKNfmLAQUVsD+3dTFvadRMi6z1vCO7zRi84gWHw== dependencies: oboe "2.1.5" - web3-core-helpers "1.5.3" + web3-core-helpers "1.7.4" -web3-providers-ipc@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-providers-ipc/-/web3-providers-ipc-1.7.3.tgz#a34872103a8d37a03795fa2f9b259e869287dcaa" - integrity sha512-Z4EGdLKzz6I1Bw+VcSyqVN4EJiT2uAro48Am1eRvxUi4vktGoZtge1ixiyfrRIVb6nPe7KnTFl30eQBtMqS0zA== +web3-providers-ipc@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-providers-ipc/-/web3-providers-ipc-1.8.0.tgz#d339a24c4d764e459e425d3ac868a551ac33e3ea" + integrity sha512-tAXHtVXNUOgehaBU8pzAlB3qhjn/PRpjdzEjzHNFqtRRTwzSEKOJxFeEhaUA4FzHnTlbnrs8ujHWUitcp1elfg== dependencies: oboe "2.1.5" - web3-core-helpers "1.7.3" + web3-core-helpers "1.8.0" web3-providers-ws@1.2.11: 
version "1.2.11" @@ -12424,22 +12538,22 @@ web3-providers-ws@1.2.11: web3-core-helpers "1.2.11" websocket "^1.0.31" -web3-providers-ws@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-providers-ws/-/web3-providers-ws-1.5.3.tgz#eec6cfb32bb928a4106de506f13a49070a21eabf" - integrity sha512-6DhTw4Q7nm5CFYEUHOJM0gAb3xFx+9gWpVveg3YxJ/ybR1BUvEWo3bLgIJJtX56cYX0WyY6DS35a7f0LOI1kVg== +web3-providers-ws@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-providers-ws/-/web3-providers-ws-1.7.4.tgz#6e60bcefb456f569a3e766e386d7807a96f90595" + integrity sha512-g72X77nrcHMFU8hRzQJzfgi/072n8dHwRCoTw+WQrGp+XCQ71fsk2qIu3Tp+nlp5BPn8bRudQbPblVm2uT4myQ== dependencies: eventemitter3 "4.0.4" - web3-core-helpers "1.5.3" + web3-core-helpers "1.7.4" websocket "^1.0.32" -web3-providers-ws@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-providers-ws/-/web3-providers-ws-1.7.3.tgz#87564facc47387c9004a043a6686e4881ed6acfe" - integrity sha512-PpykGbkkkKtxPgv7U4ny4UhnkqSZDfLgBEvFTXuXLAngbX/qdgfYkhIuz3MiGplfL7Yh93SQw3xDjImXmn2Rgw== +web3-providers-ws@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-providers-ws/-/web3-providers-ws-1.8.0.tgz#a0a73e0606981ea32bed40d215000a64753899de" + integrity sha512-bcZtSifsqyJxwkfQYamfdIRp4nhj9eJd7cxHg1uUkfLJK125WP96wyJL1xbPt7qt0MpfnTFn8/UuIqIB6nFENg== dependencies: eventemitter3 "4.0.4" - web3-core-helpers "1.7.3" + web3-core-helpers "1.8.0" websocket "^1.0.32" web3-shh@1.2.11: @@ -12452,25 +12566,25 @@ web3-shh@1.2.11: web3-core-subscriptions "1.2.11" web3-net "1.2.11" -web3-shh@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-shh/-/web3-shh-1.5.3.tgz#3c04aa4cda9ba0b746d7225262401160f8e38b13" - integrity sha512-COfEXfsqoV/BkcsNLRxQqnWc1Teb8/9GxdGag5GtPC5gQC/vsN+7hYVJUwNxY9LtJPKYTij2DHHnx6UkITng+Q== +web3-shh@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-shh/-/web3-shh-1.7.4.tgz#bee91cce2737c529fd347274010b548b6ea060f1" + integrity sha512-mlSZxSYcMkuMCxqhTYnZkUdahZ11h+bBv/8TlkXp/IHpEe4/Gg+KAbmfudakq3EzG/04z70XQmPgWcUPrsEJ+A== dependencies: - web3-core "1.5.3" - web3-core-method "1.5.3" - web3-core-subscriptions "1.5.3" - web3-net "1.5.3" + web3-core "1.7.4" + web3-core-method "1.7.4" + web3-core-subscriptions "1.7.4" + web3-net "1.7.4" -web3-shh@1.7.3: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-shh/-/web3-shh-1.7.3.tgz#84e10adf628556798244b58f73cda1447bb7075e" - integrity sha512-bQTSKkyG7GkuULdZInJ0osHjnmkHij9tAySibpev1XjYdjLiQnd0J9YGF4HjvxoG3glNROpuCyTaRLrsLwaZuw== +web3-shh@1.8.0: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-shh/-/web3-shh-1.8.0.tgz#b4abbf4f59d097ce2f74360e61e2e5c0bd6507c7" + integrity sha512-DNRgSa9Jf9xYFUGKSMylrf+zt3MPjhI2qF+UWX07o0y3+uf8zalDGiJOWvIS4upAsdPiKKVJ7co+Neof47OMmg== dependencies: - web3-core "1.7.3" - web3-core-method "1.7.3" - web3-core-subscriptions "1.7.3" - web3-net "1.7.3" + web3-core "1.8.0" + web3-core-method "1.8.0" + web3-core-subscriptions "1.8.0" + web3-net "1.8.0" web3-utils@1.2.11: version "1.2.11" @@ -12486,39 +12600,25 @@ web3-utils@1.2.11: underscore "1.9.1" utf8 "3.0.0" -web3-utils@1.3.0: - version "1.3.0" - resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.3.0.tgz#5bac16e5e0ec9fe7bdcfadb621655e8aa3cf14e1" - integrity sha512-2mS5axFCbkhicmoDRuJeuo0TVGQDgC2sPi/5dblfVC+PMtX0efrb8Xlttv/eGkq7X4E83Pds34FH98TP2WOUZA== - dependencies: - bn.js "^4.11.9" - eth-lib "0.2.8" - ethereum-bloom-filters "^1.0.6" - ethjs-unit "0.1.6" - number-to-bn "1.7.0" - randombytes 
"^2.1.0" - underscore "1.9.1" - utf8 "3.0.0" - -web3-utils@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.5.3.tgz#e914c9320cd663b2a09a5cb920ede574043eb437" - integrity sha512-56nRgA+Ad9SEyCv39g36rTcr5fpsd4L9LgV3FK0aB66nAMazLAA6Qz4lH5XrUKPDyBIPGJIR+kJsyRtwcu2q1Q== +web3-utils@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.7.4.tgz#eb6fa3706b058602747228234453811bbee017f5" + integrity sha512-acBdm6Evd0TEZRnChM/MCvGsMwYKmSh7OaUfNf5OKG0CIeGWD/6gqLOWIwmwSnre/2WrA1nKGId5uW2e5EfluA== dependencies: - bn.js "^4.11.9" - eth-lib "0.2.8" + bn.js "^5.2.1" ethereum-bloom-filters "^1.0.6" + ethereumjs-util "^7.1.0" ethjs-unit "0.1.6" number-to-bn "1.7.0" randombytes "^2.1.0" utf8 "3.0.0" -web3-utils@1.7.3, web3-utils@^1.0.0-beta.31: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.7.3.tgz#b214d05f124530d8694ad364509ac454d05f207c" - integrity sha512-g6nQgvb/bUpVUIxJE+ezVN+rYwYmlFyMvMIRSuqpi1dk6ApDD00YNArrk7sPcZnjvxOJ76813Xs2vIN2rgh4lg== +web3-utils@1.8.0, web3-utils@^1.0.0-beta.31, web3-utils@^1.3.6: + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3-utils/-/web3-utils-1.8.0.tgz#0a506f8c6af9a2ad6ba79689892662769534fc03" + integrity sha512-7nUIl7UWpLVka2f09CMbKOSEvorvHnaugIabU4mj7zfMvm0tSByLcEu3eyV9qgS11qxxLuOkzBIwCstTflhmpQ== dependencies: - bn.js "^4.11.9" + bn.js "^5.2.1" ethereum-bloom-filters "^1.0.6" ethereumjs-util "^7.1.0" ethjs-unit "0.1.6" @@ -12550,18 +12650,18 @@ web3@1.2.11: web3-shh "1.2.11" web3-utils "1.2.11" -web3@1.5.3: - version "1.5.3" - resolved "https://registry.yarnpkg.com/web3/-/web3-1.5.3.tgz#11882679453c645bf33620fbc255a243343075aa" - integrity sha512-eyBg/1K44flfv0hPjXfKvNwcUfIVDI4NX48qHQe6wd7C8nPSdbWqo9vLy6ksZIt9NLa90HjI8HsGYgnMSUxn6w== +web3@1.7.4: + version "1.7.4" + resolved "https://registry.yarnpkg.com/web3/-/web3-1.7.4.tgz#00c9aef8e13ade92fd773d845fff250535828e93" + integrity sha512-iFGK5jO32vnXM/ASaJBaI0+gVR6uHozvYdxkdhaeOCD6HIQ4iIXadbO2atVpE9oc/H8l2MovJ4LtPhG7lIBN8A== dependencies: - web3-bzz "1.5.3" - web3-core "1.5.3" - web3-eth "1.5.3" - web3-eth-personal "1.5.3" - web3-net "1.5.3" - web3-shh "1.5.3" - web3-utils "1.5.3" + web3-bzz "1.7.4" + web3-core "1.7.4" + web3-eth "1.7.4" + web3-eth-personal "1.7.4" + web3-net "1.7.4" + web3-shh "1.7.4" + web3-utils "1.7.4" web3@^0.20.0: version "0.20.7" @@ -12575,17 +12675,17 @@ web3@^0.20.0: xmlhttprequest "*" web3@^1.0.0-beta.34, web3@^1.0.0-beta.36: - version "1.7.3" - resolved "https://registry.yarnpkg.com/web3/-/web3-1.7.3.tgz#30fe786338b2cc775881cb28c056ee5da4be65b8" - integrity sha512-UgBvQnKIXncGYzsiGacaiHtm0xzQ/JtGqcSO/ddzQHYxnNuwI72j1Pb4gskztLYihizV9qPNQYHMSCiBlStI9A== + version "1.8.0" + resolved "https://registry.yarnpkg.com/web3/-/web3-1.8.0.tgz#3ca5f0b32de6a1f626407740411219035b5fde64" + integrity sha512-sldr9stK/SALSJTgI/8qpnDuBJNMGjVR84hJ+AcdQ+MLBGLMGsCDNubCoyO6qgk1/Y9SQ7ignegOI/7BPLoiDA== dependencies: - web3-bzz "1.7.3" - web3-core "1.7.3" - web3-eth "1.7.3" - web3-eth-personal "1.7.3" - web3-net "1.7.3" - web3-shh "1.7.3" - web3-utils "1.7.3" + web3-bzz "1.8.0" + web3-core "1.8.0" + web3-eth "1.8.0" + web3-eth-personal "1.8.0" + web3-net "1.8.0" + web3-shh "1.8.0" + web3-utils "1.8.0" webidl-conversions@^3.0.0: version "3.0.1" @@ -12621,6 +12721,11 @@ whatwg-fetch@^2.0.4: resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-2.0.4.tgz#dde6a5df315f9d39991aa17621853d720b85566f" integrity 
sha512-dcQ1GWpOD/eEQ97k66aiEVpNnapVj90/+R+SXTPYGHpYBBypfKJEQjLrvMZ7YXbKm21gXd4NcuxUTjiv1YtLng== +whatwg-fetch@^3.4.1: + version "3.6.2" + resolved "https://registry.yarnpkg.com/whatwg-fetch/-/whatwg-fetch-3.6.2.tgz#dced24f37f2624ed0281725d51d0e2e3fe677f8c" + integrity sha512-bJlen0FcuU/0EMLrdbJ7zOnW6ITZLrZMIarMUVmdKtsGvZna8vxKYaexICWPfZ8qwf9fzNq+UEIZrnSaApt6RA== + whatwg-url@^5.0.0: version "5.0.0" resolved "https://registry.yarnpkg.com/whatwg-url/-/whatwg-url-5.0.0.tgz#966454e8765462e37644d3626f6742ce8b70965d" @@ -12793,9 +12898,9 @@ ws@^5.1.1: async-limiter "~1.0.0" ws@^7.4.6: - version "7.5.8" - resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.8.tgz#ac2729881ab9e7cbaf8787fe3469a48c5c7f636a" - integrity sha512-ri1Id1WinAX5Jqn9HejiGb8crfRio0Qgu8+MtL36rlTA6RLsMdWt1Az/19A2Qij6uSHUMphEFaTKa4WG+UNHNw== + version "7.5.9" + resolved "https://registry.yarnpkg.com/ws/-/ws-7.5.9.tgz#54fa7db29f4c7cec68b1ddd3a89de099942bb591" + integrity sha512-F+P9Jil7UiSKSkppIiD94dN07AwvFixvLIj1Og1Rl9GGMuNipJnV9JzjD6XuqmAeiswGvUmNLjr5cFuXwNS77Q== wsrun@^5.2.2: version "5.2.4" @@ -12941,9 +13046,9 @@ yargs-parser@^20.2.2: integrity sha512-y11nGElTIV+CT3Zv9t7VKl+Q3hTQoT9a1Qzezhhl6Rp21gJ/IVTW7Z3y9EWXhuUBC2Shnf+DX0antecpAwSP8w== yargs-parser@^21.0.0: - version "21.0.1" - resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.0.1.tgz#0267f286c877a4f0f728fceb6f8a3e4cb95c6e35" - integrity sha512-9BK1jFpLzJROCI5TzwZL/TU4gqjK5xiHV/RfWLOahrjAko/e4DJkRDZQXfvqAsiZzzYhgAzbgz6lg48jcm4GLg== + version "21.1.1" + resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-21.1.1.tgz#9096bceebf990d21bb31fa9516e0ede294a77d35" + integrity sha512-tVpsJW7DdjecAiFpbIB1e3qxIQsE6NoPc5/eTdrbbIC4h0LVsWhnoa3g+m2HclBIujHzsxZ4VJVA+GUuc2/LBw== yargs-unparser@1.6.0: version "1.6.0" @@ -13028,11 +13133,11 @@ yargs@^15.1.0: yargs-parser "^18.1.2" yargs@^17.1.1: - version "17.5.1" - resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.5.1.tgz#e109900cab6fcb7fd44b1d8249166feb0b36e58e" - integrity sha512-t6YAJcxDkNX7NFYiVtKvWUz8l+PaKTLiL63mJYWR2GnHq2gjEWISzsLp9wg3aY36dY1j+gfIEL3pIF+XlJJfbA== + version "17.6.0" + resolved "https://registry.yarnpkg.com/yargs/-/yargs-17.6.0.tgz#e134900fc1f218bc230192bdec06a0a5f973e46c" + integrity sha512-8H/wTDqlSwoSnScvV2N/JHfLWOKuh5MVla9hqLjK3nsfyy6Y4kDSYSvkU5YCUEPOSnRXfIyx3Sq+B/IWudTo4g== dependencies: - cliui "^7.0.2" + cliui "^8.0.1" escalade "^3.1.1" get-caller-file "^2.0.5" require-directory "^2.1.1" From 7b879a6760b85ef9d41ac5d7d7b6b73da0f9011d Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Wed, 2 Nov 2022 21:19:31 +0100 Subject: [PATCH 002/406] Use RunTxResult from rethnet (#3317) Co-authored-by: Wodann --- crates/rethnet_evm/Cargo.toml | 2 +- .../internal/hardhat-network/provider/node.ts | 5 +- .../provider/utils/convertToRethnet.ts | 101 ++++++++++++++++++ 3 files changed, 105 insertions(+), 3 deletions(-) diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index cf60a9df78..44572a3df0 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -9,6 +9,6 @@ bytes = { version = "1.2.1", default-features = false } hashbrown = { version = "0.12.3", default-features = false, features = ["serde"] } log = { version = "0.4.17", default-features = false } primitive-types = { version = "0.11.1", default-features = false, features = ["impl-serde"] } -revm = { git = "https://github.com/bluealloy/revm/", version = "2.1.0", default-features = false, features = ["dev", "k256", "with-serde"] } +revm = { git = "https://github.com/bluealloy/revm/", 
rev = "9f8cdbd", default-features = false, features = ["dev", "k256", "with-serde"] } sha3 = { version = "0.10.4", default-features = false } tokio = { version = "1.21.2", default-features = false, features = ["sync"] } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index ef9a1639d4..10e8cb36fd 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -85,6 +85,7 @@ import { createRethnetFromHardhatDB, ethereumjsTransactionToRethnet, HardhatDB, + rethnetResultToRunTxResult, } from "./utils/convertToRethnet"; import { bloomFilter, Filter, filterLogs, LATEST_BLOCK, Type } from "./filter"; import { ForkBlockchain } from "./fork/ForkBlockchain"; @@ -1846,7 +1847,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu assertEthereumJsAndRethnetResults(rethnetResult.execResult, txResult); traces.push(await this._gatherTraces(txResult.execResult)); - results.push(txResult); + results.push(rethnetResultToRunTxResult(rethnetResult.execResult)); receipts.push(txResult.receipt); } @@ -2468,7 +2469,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu ethereumjsResult ); - return ethereumjsResult; + return rethnetResultToRunTxResult(rethnetResult.execResult); } finally { if (originalCommon !== undefined) { (this._vm as any)._common = originalCommon; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index 1a711f7303..03cbb81930 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -1,4 +1,6 @@ import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; +import { EvmError } from "@nomicfoundation/ethereumjs-evm"; +import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; import { AccessListEIP2930Transaction, @@ -12,12 +14,15 @@ import { bufferToBigInt, setLengthLeft, } from "@nomicfoundation/ethereumjs-util"; +import { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { Account as RethnetAccount, Config, + ExecutionResult, Rethnet, Transaction, } from "rethnet-evm"; + import { HardhatError } from "../../../core/errors"; import { ERRORS } from "../../../core/errors-list"; @@ -182,3 +187,99 @@ export function createRethnetFromHardhatDB( } ); } + +export function rethnetResultToRunTxResult( + rethnetResult: ExecutionResult +): RunTxResult { + // We return an object with only the properties that are used by Hardhat. + // To be extra sure that the other properties are not used, we add getters + // that exit the process if accessed. + return { + totalGasSpent: rethnetResult.gasUsed, + gasRefund: rethnetResult.gasRefunded, + createdAddress: + rethnetResult.output.address !== undefined + ? new Address(rethnetResult.output.address) + : undefined, + execResult: { + exceptionError: mapRethnetExitCodeToEthereumJsExceptionError( + rethnetResult.exitCode + ), + returnValue: rethnetResult.output.output ?? 
Buffer.from([]), + + get runState(): any { + console.trace("execResult.runState not implemented"); + return process.exit(1); + }, + get gas(): any { + console.trace("execResult.gas not implemented"); + return process.exit(1); + }, + get executionGasUsed(): any { + console.trace("execResult.executionGasUsed not implemented"); + return process.exit(1); + }, + get logs(): any { + console.trace("execResult.logs not implemented"); + return process.exit(1); + }, + get selfdestruct(): any { + console.trace("execResult.selfdestruct not implemented"); + return process.exit(1); + }, + get gasRefund(): any { + console.trace("execResult.gasRefund not implemented"); + return process.exit(1); + }, + }, + + get bloom(): any { + console.trace("bloom not implemented"); + return process.exit(1); + }, + get amountSpent(): any { + console.trace("amountSpent not implemented"); + return process.exit(1); + }, + get receipt(): any { + console.trace("receipt not implemented"); + return process.exit(1); + }, + get accessList(): any { + console.trace("accessList not implemented"); + return process.exit(1); + }, + }; +} + +const rethnetExitCodeToEthereumJsError = new Map([ + [0x50, ERROR.OUT_OF_GAS], + [0x57, ERROR.STACK_UNDERFLOW], + [0x58, ERROR.STACK_OVERFLOW], + [0x54, ERROR.INVALID_JUMP], + [0x51, ERROR.INVALID_OPCODE], + [0x53, ERROR.INVALID_OPCODE], + [0x59, ERROR.OUT_OF_RANGE], + [0x20, ERROR.REVERT], + [0x52, ERROR.STATIC_STATE_CHANGE], + [0x60, ERROR.CREATE_COLLISION], + [0x01, ERROR.STOP], + [0x53, ERROR.INVALID_BYTECODE_RESULT], + [0x64, ERROR.INITCODE_SIZE_VIOLATION], +]); + +function mapRethnetExitCodeToEthereumJsExceptionError( + rethnetExitCode: number +): EvmError | undefined { + if (rethnetExitCode <= 0x03) { + return; + } + + const ethereumJsError = rethnetExitCodeToEthereumJsError.get(rethnetExitCode); + if (ethereumJsError === undefined) { + console.trace(`Couldn't map exit code ${rethnetExitCode}`); + process.exit(1); + } + + return new EvmError(ethereumJsError); +} From c867b807b4f9d0e871de95e9a63aaca6dd85be37 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Tue, 15 Nov 2022 19:51:11 +0100 Subject: [PATCH 003/406] Use VMAdapter --- .../hardhat-network/provider/TxPool.ts | 16 +- .../hardhat-network/provider/filter.ts | 2 +- .../hardhat-network/provider/node-types.ts | 2 +- .../internal/hardhat-network/provider/node.ts | 514 ++++-------------- .../hardhat-network/provider/output.ts | 2 +- .../provider/utils/assertions.ts | 2 +- .../provider/utils/convertToRethnet.ts | 19 +- .../provider/vm/block-builder.ts | 221 ++++++++ .../hardhat-network/provider/vm/dual.ts | 237 ++++++++ .../hardhat-network/provider/vm/ethereumjs.ts | 441 +++++++++++++++ .../hardhat-network/provider/vm/rethnet.ts | 269 +++++++++ .../hardhat-network/provider/vm/vm-adapter.ts | 66 +++ .../stack-traces/vm-debug-tracer.ts | 8 +- .../hardhat-network/stack-traces/vm-tracer.ts | 48 +- .../hardhat-network/helpers/retrieveCommon.ts | 2 +- .../hardhat-network/provider/TxPool.ts | 6 +- .../provider/utils/assertEqualBlocks.ts | 4 +- .../provider/utils/runFullBlock.ts | 4 +- .../hardhat-network/stack-traces/execution.ts | 79 ++- .../hardhat-network/stack-traces/test.ts | 28 +- 20 files changed, 1465 insertions(+), 505 deletions(-) create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts create mode 100644 
packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/TxPool.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/TxPool.ts index 85c8f23fb5..d11d956a9e 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/TxPool.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/TxPool.ts @@ -3,8 +3,8 @@ import { TransactionFactory, TypedTransaction, } from "@nomicfoundation/ethereumjs-tx"; -import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; import { + Account, Address, bufferToHex, toBuffer, @@ -101,7 +101,7 @@ export class TxPool { ) => OrderedTransaction; constructor( - private readonly _stateManager: StateManager, + private readonly _getAccount: (address: Address) => Promise, blockGasLimit: bigint, common: Common ) { @@ -278,9 +278,7 @@ export class TxPool { // update pending transactions for (const [address, txs] of newPending) { - const senderAccount = await this._stateManager.getAccount( - Address.fromString(address) - ); + const senderAccount = await this._getAccount(Address.fromString(address)); const senderNonce = senderAccount.nonce; const senderBalance = senderAccount.balance; @@ -316,9 +314,7 @@ export class TxPool { // update queued addresses let newQueued = this._getQueued(); for (const [address, txs] of newQueued) { - const senderAccount = await this._stateManager.getAccount( - Address.fromString(address) - ); + const senderAccount = await this._getAccount(Address.fromString(address)); const senderNonce = senderAccount.nonce; const senderBalance = senderAccount.balance; @@ -444,7 +440,7 @@ export class TxPool { ); } - const senderAccount = await this._stateManager.getAccount(senderAddress); + const senderAccount = await this._getAccount(senderAddress); const senderBalance = senderAccount.balance; const maxFee = "gasPrice" in tx ? 
tx.gasPrice : tx.maxFeePerGas; @@ -590,7 +586,7 @@ export class TxPool { private async _getNextConfirmedNonce( accountAddress: Address ): Promise { - const account = await this._stateManager.getAccount(accountAddress); + const account = await this._getAccount(accountAddress); return account.nonce; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/filter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/filter.ts index c3b66a5390..129c64cd04 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/filter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/filter.ts @@ -1,5 +1,5 @@ +import type { Bloom } from "@nomicfoundation/ethereumjs-vm"; import { bufferToHex, toBuffer } from "@nomicfoundation/ethereumjs-util"; -import { Bloom } from "@nomicfoundation/ethereumjs-vm"; import { RpcLogOutput } from "./output"; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node-types.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node-types.ts index 3a790aa150..ce0effa151 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node-types.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node-types.ts @@ -1,7 +1,7 @@ +import type { RunBlockResult } from "@nomicfoundation/ethereumjs-vm"; import type { ReturnData } from "./return-data"; import { Block } from "@nomicfoundation/ethereumjs-block"; -import { RunBlockResult } from "@nomicfoundation/ethereumjs-vm"; import { HARDHAT_MEMPOOL_SUPPORTED_ORDERS } from "../../constants"; import { BuildInfo, HardhatNetworkChainsConfig } from "../../../types"; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 10e8cb36fd..4e13f3b81a 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -1,3 +1,4 @@ +import type { EVMResult } from "@nomicfoundation/ethereumjs-evm"; import { Block, HeaderData } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; import { @@ -6,6 +7,7 @@ import { Transaction, TypedTransaction, } from "@nomicfoundation/ethereumjs-tx"; +import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; import { Address, ECDSASignature, @@ -16,21 +18,12 @@ import { privateToAddress, setLengthLeft, toBuffer, - bufferToBigInt, } from "@nomicfoundation/ethereumjs-util"; import { Bloom, - EEI, RunBlockResult, RunTxResult, - VM, } from "@nomicfoundation/ethereumjs-vm"; -import { EVM, EVMResult } from "@nomicfoundation/ethereumjs-evm"; -import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; -import { - DefaultStateManager, - StateManager, -} from "@nomicfoundation/ethereumjs-statemanager"; import { SignTypedDataVersion, signTypedData } from "@metamask/eth-sig-util"; import chalk from "chalk"; import debug from "debug"; @@ -73,23 +66,14 @@ import { } from "../stack-traces/solidity-errors"; import { SolidityStackTrace } from "../stack-traces/solidity-stack-trace"; import { SolidityTracer } from "../stack-traces/solidityTracer"; -import { VMDebugTracer } from "../stack-traces/vm-debug-tracer"; import { VmTraceDecoder } from "../stack-traces/vm-trace-decoder"; import { VMTracer } from "../stack-traces/vm-tracer"; import "./ethereumjs-workarounds"; import { rpcQuantityToBigInt } from "../../core/jsonrpc/types/base-types"; import { JsonRpcClient 
} from "../jsonrpc/client"; -import { assertEthereumJsAndRethnetResults } from "./utils/assertions"; -import { - createRethnetFromHardhatDB, - ethereumjsTransactionToRethnet, - HardhatDB, - rethnetResultToRunTxResult, -} from "./utils/convertToRethnet"; import { bloomFilter, Filter, filterLogs, LATEST_BLOCK, Type } from "./filter"; import { ForkBlockchain } from "./fork/ForkBlockchain"; -import { ForkStateManager } from "./fork/ForkStateManager"; import { HardhatBlockchain } from "./HardhatBlockchain"; import { CallParams, @@ -129,6 +113,9 @@ import { makeStateTrie } from "./utils/makeStateTrie"; import { putGenesisBlock } from "./utils/putGenesisBlock"; import { txMapToArray } from "./utils/txMapToArray"; import { RandomBufferGenerator } from "./utils/random"; +import { DualModeAdapter } from "./vm/dual"; +import { VMAdapter } from "./vm/vm-adapter"; +import { BlockBuilder } from "./vm/block-builder"; type ExecResult = EVMResult["execResult"]; @@ -144,7 +131,6 @@ export class HardhatNode extends EventEmitter { automine, genesisAccounts, blockGasLimit, - allowUnlimitedContractSize, tracingConfig, minGasPrice, mempoolOrder, @@ -152,7 +138,6 @@ export class HardhatNode extends EventEmitter { chainId, } = config; - let stateManager: StateManager; let blockchain: HardhatBlockchainInterface; let initialBlockTimeOffset: bigint | undefined; let nextBlockBaseFeePerGas: bigint | undefined; @@ -188,13 +173,6 @@ export class HardhatNode extends EventEmitter { forkNetworkId ); - const forkStateManager = new ForkStateManager( - forkClient, - forkBlockNumber - ); - await forkStateManager.initializeGenesisAccounts(genesisAccounts); - stateManager = forkStateManager; - blockchain = new ForkBlockchain(forkClient, forkBlockNumber, common); initialBlockTimeOffset = BigInt( @@ -225,10 +203,6 @@ export class HardhatNode extends EventEmitter { } else { const stateTrie = await makeStateTrie(genesisAccounts); - stateManager = new DefaultStateManager({ - trie: stateTrie, - }); - const hardhatBlockchain = new HardhatBlockchain(common); const genesisBlockBaseFeePerGas = hardforkGte( @@ -258,26 +232,28 @@ export class HardhatNode extends EventEmitter { blockchain = hardhatBlockchain; } - const txPool = new TxPool(stateManager, BigInt(blockGasLimit), common); - - const eei = new EEI(stateManager, common, blockchain); - const evm = await EVM.create({ - eei, - allowUnlimitedContractSize, - common, - }); - - const vm = await VM.create({ - evm, - activatePrecompiles: true, + const currentHardfork = common.hardfork(); + const vm = await DualModeAdapter.create( common, - stateManager, blockchain, - }); + config, + (blockNumber) => + selectHardfork( + forkBlockNum, + currentHardfork, + hardforkActivations, + blockNumber + ) + ); + + const txPool = new TxPool( + (address) => vm.getAccount(address), + BigInt(blockGasLimit), + common + ); const node = new HardhatNode( vm, - stateManager, blockchain, txPool, automine, @@ -289,9 +265,7 @@ export class HardhatNode extends EventEmitter { networkId, chainId, hardfork, - hardforkActivations, mixHashGenerator, - allowUnlimitedContractSize ?? 
false, tracingConfig, forkNetworkId, forkBlockNum, @@ -359,12 +333,8 @@ Hardhat Network's forking functionality only works with blocks from at least spu // blockNumber => state root private _irregularStatesByBlockNumber: Map = new Map(); - private _hardhatDB; - public _rethnet; - private constructor( - private readonly _vm: VM, - private readonly _stateManager: StateManager, + private readonly _vm: VMAdapter, private readonly _blockchain: HardhatBlockchainInterface, private readonly _txPool: TxPool, private _automine: boolean, @@ -376,9 +346,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu private readonly _configNetworkId: number, private readonly _configChainId: number, public readonly hardfork: HardforkName, - private readonly _hardforkActivations: HardforkHistoryConfig, private _mixHashGenerator: RandomBufferGenerator, - allowUnlimitedContractSize: boolean, tracingConfig?: TracingConfig, private _forkNetworkId?: number, private _forkBlockNumber?: bigint, @@ -393,33 +361,13 @@ Hardhat Network's forking functionality only works with blocks from at least spu this.setUserProvidedNextBlockBaseFeePerGas(nextBlockBaseFee); } - this._vmTracer = new VMTracer( - this._vm, - this._stateManager.getContractCode.bind(this._stateManager), - false - ); + this._vmTracer = new VMTracer(this._vm, false); this._vmTracer.enableTracing(); const contractsIdentifier = new ContractsIdentifier(); this._vmTraceDecoder = new VmTraceDecoder(contractsIdentifier); this._solidityTracer = new SolidityTracer(); - this._hardhatDB = new HardhatDB(this._stateManager, this._blockchain); - - const limitContractCodeSize = allowUnlimitedContractSize - ? 2n ** 64n - 1n - : undefined; - - this._rethnet = createRethnetFromHardhatDB( - { - chainId: BigInt(this._configChainId), - limitContractCodeSize, - disableBlockGasLimit: true, - disableEip3607: true, - }, - this._hardhatDB - ); - if (tracingConfig === undefined || tracingConfig.buildInfos === undefined) { return; } @@ -465,14 +413,14 @@ Hardhat Network's forking functionality only works with blocks from at least spu if ("maxFeePerGas" in txParams) { tx = FeeMarketEIP1559Transaction.fromTxData(txParams, { - common: this._vm._common, + common: this._vm.getCommon(), }); } else if ("accessList" in txParams) { tx = AccessListEIP2930Transaction.fromTxData(txParams, { - common: this._vm._common, + common: this._vm.getCommon(), }); } else { - tx = Transaction.fromTxData(txParams, { common: this._vm._common }); + tx = Transaction.fromTxData(txParams, { common: this._vm.getCommon() }); } return tx.sign(pk); @@ -615,7 +563,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu this._blockchain.reserveBlocks( remainingBlockCount - 1n, interval, - await this._stateManager.getStateRoot(), + await this._vm.getStateRoot(), await this.getBlockTotalDifficulty(latestBlock), (await this.getLatestBlock()).header.baseFeePerGas ); @@ -682,7 +630,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu } const account = await this._runInBlockContext(blockNumberOrPending, () => - this._stateManager.getAccount(address) + this._vm.getAccount(address) ); return account.balance; @@ -693,7 +641,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu blockNumberOrPending: bigint | "pending" ): Promise { const account = await this._runInBlockContext(blockNumberOrPending, () => - this._stateManager.getAccount(address) + this._vm.getAccount(address) ); return account.nonce; @@ -873,7 
+821,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu const key = setLengthLeft(bigIntToBuffer(positionIndex), 32); const data = await this._runInBlockContext(blockNumberOrPending, () => - this._stateManager.getContractStorage(address, key) + this._vm.getContractStorage(address, key) ); const EXPECTED_DATA_SIZE = 32; @@ -926,7 +874,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu blockNumberOrPending: bigint | "pending" ): Promise { return this._runInBlockContext(blockNumberOrPending, () => - this._stateManager.getContractCode(address) + this._vm.getContractCode(address) ); } @@ -1030,7 +978,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu id, date: new Date(), latestBlock: await this.getLatestBlock(), - stateRoot: await this._stateManager.getStateRoot(), + stateRoot: await this._vm.getStateRoot(), txPoolSnapshotId: this._txPool.snapshot(), blockTimeOffsetSeconds: this.getTimeIncrement(), nextBlockTimestamp: this.getNextBlockTimestamp(), @@ -1077,7 +1025,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu const irregularStateOrUndefined = this._irregularStatesByBlockNumber.get( (await this.getLatestBlock()).header.number ); - await this._stateManager.setStateRoot( + await this._vm.restoreContext( irregularStateOrUndefined ?? snapshot.stateRoot ); this.setTimeIncrement(newOffset); @@ -1311,9 +1259,9 @@ Hardhat Network's forking functionality only works with blocks from at least spu address: Address, newBalance: bigint ): Promise { - const account = await this._stateManager.getAccount(address); + const account = await this._vm.getAccount(address); account.balance = newBalance; - await this._stateManager.putAccount(address, account); + await this._vm.putAccount(address, account); await this._persistIrregularWorldState(); } @@ -1321,7 +1269,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu address: Address, newCode: Buffer ): Promise { - await this._stateManager.putContractCode(address, newCode); + await this._vm.putContractCode(address, newCode); await this._persistIrregularWorldState(); } @@ -1334,14 +1282,14 @@ Hardhat Network's forking functionality only works with blocks from at least spu "Cannot set account nonce when the transaction pool is not empty" ); } - const account = await this._stateManager.getAccount(address); + const account = await this._vm.getAccount(address); if (newNonce < account.nonce) { throw new InvalidInputError( `New nonce (${newNonce.toString()}) must not be smaller than the existing nonce (${account.nonce.toString()})` ); } account.nonce = newNonce; - await this._stateManager.putAccount(address, account); + await this._vm.putAccount(address, account); await this._persistIrregularWorldState(); } @@ -1350,7 +1298,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu positionIndex: bigint, value: Buffer ) { - await this._stateManager.putContractStorage( + await this._vm.putContractStorage( address, setLengthLeft(bigIntToBuffer(positionIndex), 32), value @@ -1367,83 +1315,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu } return this._runInBlockContext(block.header.number - 1n, async () => { - const blockNumber = block.header.number; - const blockchain = this._blockchain; - let vm = this._vm; - if ( - blockchain instanceof ForkBlockchain && - blockNumber <= blockchain.getForkBlockNumber() - ) { - assertHardhatInvariant( - 
this._forkNetworkId !== undefined, - "this._forkNetworkId should exist if the blockchain is an instance of ForkBlockchain" - ); - - const common = this._getCommonForTracing( - this._forkNetworkId, - blockNumber - ); - - vm = await VM.create({ - common, - activatePrecompiles: true, - stateManager: this._vm.stateManager, - blockchain: this._vm.blockchain, - }); - } - - // We don't support tracing transactions before the spuriousDragon fork - // to avoid having to distinguish between empty and non-existing accounts. - // We *could* do it during the non-forked mode, but for simplicity we just - // don't support it at all. - const isPreSpuriousDragon = !vm._common.gteHardfork("spuriousDragon"); - if (isPreSpuriousDragon) { - throw new InvalidInputError( - "Tracing is not supported for transactions using hardforks older than Spurious Dragon. " - ); - } - - for (const tx of block.transactions) { - let txWithCommon: TypedTransaction; - const sender = tx.getSenderAddress(); - if (tx.type === 0) { - txWithCommon = new FakeSenderTransaction(sender, tx, { - common: vm._common, - }); - } else if (tx.type === 1) { - txWithCommon = new FakeSenderAccessListEIP2930Transaction( - sender, - tx, - { - common: vm._common, - } - ); - } else if (tx.type === 2) { - txWithCommon = new FakeSenderEIP1559Transaction( - sender, - { ...tx, gasPrice: undefined }, - { - common: vm._common, - } - ); - } else { - throw new InternalError( - "Only legacy, EIP2930, and EIP1559 txs are supported" - ); - } - - const txHash = txWithCommon.hash(); - if (txHash.equals(hash)) { - const vmDebugTracer = new VMDebugTracer(vm); - return vmDebugTracer.trace(async () => { - await vm.runTx({ tx: txWithCommon, block }); - }, config); - } - await vm.runTx({ tx: txWithCommon, block }); - } - throw new TransactionExecutionError( - `Unable to find a transaction in a block that contains that transaction, this should never happen` - ); + return this._vm.traceTransaction(hash, block, config); }); } @@ -1794,11 +1666,11 @@ Hardhat Network's forking functionality only works with blocks from at least spu headerData.baseFeePerGas = await this.getNextBlockBaseFeePerGas(); - const blockBuilder = await this._vm.buildBlock({ + const blockBuilder = new BlockBuilder(this._vm, { parentBlock, headerData, - blockOpts: { calcDifficultyFromHeader: parentBlock.header }, }); + await blockBuilder.startBlock(); try { const traces: GatherTracesResult[] = []; @@ -1818,43 +1690,29 @@ Hardhat Network's forking functionality only works with blocks from at least spu const receipts = []; while ( - blockGasLimit - blockBuilder.gasUsed >= minTxFee && + blockGasLimit - blockBuilder.getGasUsed() >= minTxFee && tx !== undefined ) { if ( !this._isTxMinable(tx, headerData.baseFeePerGas) || - tx.gasLimit > blockGasLimit - blockBuilder.gasUsed + tx.gasLimit > blockGasLimit - blockBuilder.getGasUsed() ) { transactionQueue.removeLastSenderTransactions(); } else { - const rethnetTx = ethereumjsTransactionToRethnet(tx); - const difficulty = this._getBlockEnvDifficulty( - BigIntUtils.fromBigIntLike(headerData.difficulty), - headerData.mixHash !== undefined - ? 
bufferToBigInt(toBuffer(headerData.mixHash)) - : undefined - ); - const rethnetResult = await this._rethnet.dryRun(rethnetTx, { - number: BigInt(parentBlock.header.number) + 1n, - coinbase: coinbase.buf, - timestamp: blockTimestamp, - basefee: headerData.baseFeePerGas, - gasLimit: blockGasLimit, - difficulty, - }); - const txResult = await blockBuilder.addTransaction(tx); - assertEthereumJsAndRethnetResults(rethnetResult.execResult, txResult); traces.push(await this._gatherTraces(txResult.execResult)); - results.push(rethnetResultToRunTxResult(rethnetResult.execResult)); + results.push(txResult); receipts.push(txResult.receipt); } tx = transactionQueue.getNextTransaction(); } - const block = await blockBuilder.build(); + const minerReward = this._vm.getCommon().param("pow", "minerReward"); + await blockBuilder.addRewards([[coinbase, minerReward]]); + const block = await blockBuilder.seal(); + await this._blockchain.putBlock(block); await this._txPool.updatePendingAndQueued(); @@ -1878,7 +1736,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu private _getMinimalTransactionFee(): bigint { // Typically 21_000 gas - return this._vm._common.param("gasPrices", "tx"); + return this._vm.getCommon().param("gasPrices", "tx"); } private async _getFakeTransaction( @@ -1892,18 +1750,18 @@ Hardhat Network's forking functionality only works with blocks from at least spu if ("maxFeePerGas" in txParams && txParams.maxFeePerGas !== undefined) { return new FakeSenderEIP1559Transaction(sender, txParams, { - common: this._vm._common, + common: this._vm.getCommon(), }); } if ("accessList" in txParams && txParams.accessList !== undefined) { return new FakeSenderAccessListEIP2930Transaction(sender, txParams, { - common: this._vm._common, + common: this._vm.getCommon(), }); } return new FakeSenderTransaction(sender, txParams, { - common: this._vm._common, + common: this._vm.getCommon(), }); } @@ -2113,7 +1971,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu const receipts = getRpcReceiptOutputsFromLocalBlockExecution( block, runBlockResult, - shouldShowTransactionTypeForHardfork(this._vm._common) + shouldShowTransactionTypeForHardfork(this._vm.getCommon()) ); this._blockchain.addTransactionReceipts(receipts); @@ -2138,7 +1996,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu getRpcBlock( block, td, - shouldShowTransactionTypeForHardfork(this._vm._common), + shouldShowTransactionTypeForHardfork(this._vm.getCommon()), false ) ); @@ -2203,12 +2061,12 @@ Hardhat Network's forking functionality only works with blocks from at least spu ); } - const currentStateRoot = await this._stateManager.getStateRoot(); + const currentStateRoot = await this._vm.getStateRoot(); await this._setBlockContext(block); try { return await action(); } finally { - await this._restoreBlockContext(currentStateRoot); + await this._vm.restoreContext(currentStateRoot); } } @@ -2227,24 +2085,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu block.header.number ); - if (this._stateManager instanceof ForkStateManager) { - return this._stateManager.setBlockContext( - block.header.stateRoot, - block.header.number, - irregularStateOrUndefined - ); - } - - return this._stateManager.setStateRoot( - irregularStateOrUndefined ?? 
block.header.stateRoot - ); - } - - private async _restoreBlockContext(stateRoot: Buffer) { - if (this._stateManager instanceof ForkStateManager) { - return this._stateManager.restoreForkBlockContext(stateRoot); - } - return this._stateManager.setStateRoot(stateRoot); + await this._vm.setBlockContext(block, irregularStateOrUndefined); } private async _correctInitialEstimation( @@ -2372,110 +2213,27 @@ Hardhat Network's forking functionality only works with blocks from at least spu blockNumberOrPending: bigint | "pending", forceBaseFeeZero = false ): Promise { - const initialStateRoot = await this._stateManager.getStateRoot(); - let blockContext: Block | undefined; - let originalCommon: Common | undefined; - - try { - if (blockNumberOrPending === "pending") { - // the new block has already been mined by _runInBlockContext hence we take latest here - blockContext = await this.getLatestBlock(); - } else { - // We know that this block number exists, because otherwise - // there would be an error in the RPC layer. - const block = await this.getBlockByNumber(blockNumberOrPending); - assertHardhatInvariant( - block !== undefined, - "Tried to run a tx in the context of a non-existent block" - ); - - blockContext = block; - - // we don't need to add the tx to the block because runTx doesn't - // know anything about the txs in the current block - } - - // NOTE: This is a workaround of both an @nomicfoundation/ethereumjs-vm limitation, and - // a bug in Hardhat Network. - // - // See: https://github.com/nomiclabs/hardhat/issues/1666 - // - // If this VM is running with EIP1559 activated, and the block is not - // an EIP1559 one, this will crash, so we create a new one that has - // baseFeePerGas = 0. - // - // We also have an option to force the base fee to be zero, - // we don't want to debit any balance nor fail any tx when running an - // eth_call. This will make the BASEFEE option also return 0, which - // shouldn't. See: https://github.com/nomiclabs/hardhat/issues/1688 - if ( - this.isEip1559Active(blockNumberOrPending) && - (blockContext.header.baseFeePerGas === undefined || forceBaseFeeZero) - ) { - blockContext = Block.fromBlockData(blockContext, { - freeze: false, - common: this._vm._common, - - skipConsensusFormatValidation: true, - }); - - (blockContext.header as any).baseFeePerGas = 0n; - } - - originalCommon = (this._vm as any)._common; - - (this._vm as any)._common = Common.custom( - { - chainId: - this._forkBlockNumber === undefined || - blockContext.header.number >= this._forkBlockNumber - ? this._configChainId - : this._forkNetworkId, - networkId: this._forkNetworkId ?? this._configNetworkId, - }, - { - hardfork: this._selectHardfork(blockContext.header.number), - } - ); - const rethnetTx = ethereumjsTransactionToRethnet(tx); - await this._rethnet.guaranteeTransaction(rethnetTx); - const difficulty = this._getBlockEnvDifficulty( - blockContext.header.difficulty, - bufferToBigInt(blockContext.header.mixHash) + if (blockNumberOrPending === "pending") { + // the new block has already been mined by _runInBlockContext hence we take latest here + blockContext = await this.getLatestBlock(); + } else { + // We know that this block number exists, because otherwise + // there would be an error in the RPC layer. 
+ const block = await this.getBlockByNumber(blockNumberOrPending); + assertHardhatInvariant( + block !== undefined, + "Tried to run a tx in the context of a non-existent block" ); - const rethnetResult = await this._rethnet.dryRun(rethnetTx, { - number: blockContext.header.number, - coinbase: blockContext.header.coinbase.buf, - timestamp: blockContext.header.timestamp, - basefee: blockContext.header.baseFeePerGas, - gasLimit: blockContext.header.gasLimit, - difficulty, - }); - - await this._stateManager.setStateRoot(initialStateRoot); - const ethereumjsResult = await this._vm.runTx({ - block: blockContext, - tx, - skipNonce: true, - skipBalance: true, - skipBlockGasLimitValidation: true, - }); - - assertEthereumJsAndRethnetResults( - rethnetResult.execResult, - ethereumjsResult - ); + blockContext = block; - return rethnetResultToRunTxResult(rethnetResult.execResult); - } finally { - if (originalCommon !== undefined) { - (this._vm as any)._common = originalCommon; - } - await this._stateManager.setStateRoot(initialStateRoot); + // we don't need to add the tx to the block because runTx doesn't + // know anything about the txs in the current block } + + return this._vm.dryRun(tx, blockContext, forceBaseFeeZero); } private async _computeFilterParams( @@ -2535,7 +2293,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu } return this._runInBlockContext(blockNumberOrPending, async () => { - const account = await this._stateManager.getAccount(address); + const account = await this._vm.getAccount(address); return account.nonce; }); @@ -2565,21 +2323,12 @@ Hardhat Network's forking functionality only works with blocks from at least spu private async _persistIrregularWorldState(): Promise { this._irregularStatesByBlockNumber.set( this.getLatestBlockNumber(), - await this._stateManager.getStateRoot() + await this._vm.getStateRoot() ); } public isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean { - if ( - blockNumberOrPending !== undefined && - blockNumberOrPending !== "pending" - ) { - return this._vm._common.hardforkGteHardfork( - this._selectHardfork(blockNumberOrPending), - "london" - ); - } - return this._vm._common.gteHardfork("london"); + return this._vm.isEip1559Active(blockNumberOrPending); } public isPostMergeHardfork(): boolean { @@ -2633,82 +2382,51 @@ Hardhat Network's forking functionality only works with blocks from at least spu return { maxFeePerGas, maxPriorityFeePerGas }; } +} - private _selectHardfork(blockNumber: bigint): string { - if ( - this._forkBlockNumber === undefined || - blockNumber >= this._forkBlockNumber - ) { - return this._vm._common.hardfork() as HardforkName; - } - - if (this._hardforkActivations.size === 0) { - throw new InternalError( - `No known hardfork for execution on historical block ${blockNumber.toString()} (relative to fork block number ${ - this._forkBlockNumber - }). The node was not configured with a hardfork activation history. See http://hardhat.org/custom-hardfork-history` - ); - } +function selectHardfork( + forkBlockNumber: bigint | undefined, + currentHardfork: string, + hardforkActivations: HardforkHistoryConfig, + blockNumber: bigint +): string { + if (forkBlockNumber === undefined || blockNumber >= forkBlockNumber) { + return currentHardfork; + } - /** search this._hardforkActivations for the highest block number that - * isn't higher than blockNumber, and then return that found block number's - * associated hardfork name. 
*/ - const hardforkHistory: Array<[name: string, block: number]> = Array.from( - this._hardforkActivations.entries() + if (hardforkActivations.size === 0) { + throw new InternalError( + `No known hardfork for execution on historical block ${blockNumber.toString()} (relative to fork block number ${forkBlockNumber}). The node was not configured with a hardfork activation history. See http://hardhat.org/custom-hardfork-history` ); - const [hardfork, activationBlock] = hardforkHistory.reduce( - ([highestHardfork, highestBlock], [thisHardfork, thisBlock]) => - thisBlock > highestBlock && thisBlock <= blockNumber - ? [thisHardfork, thisBlock] - : [highestHardfork, highestBlock] - ); - if (hardfork === undefined || blockNumber < activationBlock) { - throw new InternalError( - `Could not find a hardfork to run for block ${blockNumber.toString()}, after having looked for one in the HardhatNode's hardfork activation history, which was: ${JSON.stringify( - hardforkHistory - )}. For more information, see https://hardhat.org/hardhat-network/reference/#config` - ); - } - - if (!HARDHAT_NETWORK_SUPPORTED_HARDFORKS.includes(hardfork)) { - throw new InternalError( - `Tried to run a call or transaction in the context of a block whose hardfork is "${hardfork}", but Hardhat Network only supports the following hardforks: ${HARDHAT_NETWORK_SUPPORTED_HARDFORKS.join( - ", " - )}` - ); - } - - return hardfork; } - private _getCommonForTracing(networkId: number, blockNumber: bigint): Common { - try { - const common = Common.custom( - { - chainId: networkId, - networkId, - }, - { - hardfork: this._selectHardfork(BigInt(blockNumber)), - } - ); - - return common; - } catch { - throw new InternalError( - `Network id ${networkId} does not correspond to a network that Hardhat can trace` - ); - } + /** search this._hardforkActivations for the highest block number that + * isn't higher than blockNumber, and then return that found block number's + * associated hardfork name. */ + const hardforkHistory: Array<[name: string, block: number]> = Array.from( + hardforkActivations.entries() + ); + const [hardfork, activationBlock] = hardforkHistory.reduce( + ([highestHardfork, highestBlock], [thisHardfork, thisBlock]) => + thisBlock > highestBlock && thisBlock <= blockNumber + ? [thisHardfork, thisBlock] + : [highestHardfork, highestBlock] + ); + if (hardfork === undefined || blockNumber < activationBlock) { + throw new InternalError( + `Could not find a hardfork to run for block ${blockNumber.toString()}, after having looked for one in the HardhatNode's hardfork activation history, which was: ${JSON.stringify( + hardforkHistory + )}. 
For more information, see https://hardhat.org/hardhat-network/reference/#config` + ); } - private _getBlockEnvDifficulty( - difficulty: bigint | undefined, - mixHash: bigint | undefined - ): bigint | undefined { - if (this.isPostMergeHardfork()) { - return mixHash; - } - - return difficulty; + if (!HARDHAT_NETWORK_SUPPORTED_HARDFORKS.includes(hardfork)) { + throw new InternalError( + `Tried to run a call or transaction in the context of a block whose hardfork is "${hardfork}", but Hardhat Network only supports the following hardforks: ${HARDHAT_NETWORK_SUPPORTED_HARDFORKS.join( + ", " + )}` + ); } + + return hardfork; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/output.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/output.ts index d59de0ee0d..b8fc0c178f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/output.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/output.ts @@ -1,8 +1,8 @@ +import type { RunBlockResult } from "@nomicfoundation/ethereumjs-vm"; import { Block } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import { bufferToHex } from "@nomicfoundation/ethereumjs-util"; -import { RunBlockResult } from "@nomicfoundation/ethereumjs-vm"; import * as BigIntUtils from "../../util/bigint"; import { assertHardhatInvariant } from "../../core/errors"; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts index 9703473241..cd1c112036 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts @@ -1,5 +1,5 @@ +import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import assert, { AssertionError } from "assert"; -import { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { ExecutionResult } from "rethnet-evm"; import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index 03cbb81930..ccbd3d73e5 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -1,7 +1,11 @@ +import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; import { EvmError } from "@nomicfoundation/ethereumjs-evm"; import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; -import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; +import { + DefaultStateManager, + StateManager, +} from "@nomicfoundation/ethereumjs-statemanager"; import { AccessListEIP2930Transaction, FeeMarketEIP1559Transaction, @@ -14,7 +18,6 @@ import { bufferToBigInt, setLengthLeft, } from "@nomicfoundation/ethereumjs-util"; -import { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { Account as RethnetAccount, Config, @@ -81,8 +84,13 @@ export class HardhatDB { } public async getCodeByHash(codeHash: Buffer) { - const db = (this._stateManager as any)._trie._db; - return db.get(Buffer.concat([Buffer.from("c"), codeHash])); + if 
(this._stateManager instanceof DefaultStateManager) { + // eslint-disable-next-line @typescript-eslint/dot-notation + const db = this._stateManager._trie["_db"]; + return db.get(Buffer.concat([Buffer.from("c"), codeHash])); + } + + return Buffer.from([]); } public async getStorageRoot() { @@ -278,7 +286,8 @@ function mapRethnetExitCodeToEthereumJsExceptionError( const ethereumJsError = rethnetExitCodeToEthereumJsError.get(rethnetExitCode); if (ethereumJsError === undefined) { console.trace(`Couldn't map exit code ${rethnetExitCode}`); - process.exit(1); + // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error + throw new Error(`Couldn't map exit code ${rethnetExitCode}`); } return new EvmError(ethereumJsError); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts new file mode 100644 index 0000000000..98f7cc2544 --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts @@ -0,0 +1,221 @@ +import { Block, HeaderData } from "@nomicfoundation/ethereumjs-block"; +import { RLP } from "@nomicfoundation/ethereumjs-rlp"; +import { Trie } from "@nomicfoundation/ethereumjs-trie"; +import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; +import { + Address, + bigIntToBuffer, + bufArrToArr, + intToBuffer, +} from "@nomicfoundation/ethereumjs-util"; +import { + Bloom, + PostByzantiumTxReceipt, + PreByzantiumTxReceipt, + RunTxResult, + TxReceipt, +} from "@nomicfoundation/ethereumjs-vm"; +import { fromBigIntLike } from "../../../util/bigint"; + +import { VMAdapter } from "./vm-adapter"; + +/* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ + +type Reward = [address: Address, reward: bigint]; + +export interface BuildBlockOpts { + parentBlock: Block; + headerData?: HeaderData; +} + +// ready: can be started +// started: can add txs or rewards +// rewarded: can seal or revert +// sealed: can't do anything +// reverted: can't do anything +type BlockBuilderState = + | "ready" + | "started" + | "rewarded" + | "sealed" + | "reverted"; + +export class BlockBuilder { + private _state: BlockBuilderState = "ready"; + private _gasUsed = 0n; + private _transactions: TypedTransaction[] = []; + private _transactionResults: RunTxResult[] = []; + + constructor(private _vm: VMAdapter, private _opts: BuildBlockOpts) {} + + public async startBlock(): Promise { + await this._vm.startBlock(); + this._state = "started"; + } + + public getGasUsed(): bigint { + return this._gasUsed; + } + + public async addTransaction(tx: TypedTransaction): Promise { + if (this._state !== "started") { + throw new Error( + `BlockBuilder.addTransaction cannot be used in state ${this._state}` + ); + } + + const blockGasLimit = + fromBigIntLike(this._opts.headerData?.gasLimit) ?? 
1_000_000n; + const blockGasRemaining = blockGasLimit - this._gasUsed; + if (tx.gasLimit > blockGasRemaining) { + throw new Error( + "tx has a higher gas limit than the remaining gas in the block" + ); + } + + const header = { + ...this._opts.headerData, + gasUsed: this._gasUsed, + }; + + const blockData = { header, transactions: this._transactions }; + const block = Block.fromBlockData(blockData, { + common: this._vm.getCommon(), + skipConsensusFormatValidation: true, + calcDifficultyFromHeader: this._opts.parentBlock.header, + }); + + const result = await this._vm.runTxInBlock(tx, block); + + this._transactions.push(tx); + this._transactionResults.push(result); + this._gasUsed += result.totalGasSpent; + + return result; + } + + public async addRewards(rewards: Reward[]): Promise { + if (this._state !== "started") { + throw new Error( + `BlockBuilder.addRewards cannot be used in state ${this._state}` + ); + } + + await this._vm.addBlockRewards(rewards); + this._state = "rewarded"; + } + + public async seal(): Promise { + if (this._state !== "rewarded") { + throw new Error( + `BlockBuilder.seal cannot be used in state ${this._state}` + ); + } + + const stateRoot = await this._vm.getStateRoot(); + const transactionsTrie = await this._getTransactionsTrie(); + const receiptTrie = await this._getReceiptsTrie(); + const logsBloom = this._getLogsBloom(); + const gasUsed = this._gasUsed; + const timestamp = + this._opts.headerData?.timestamp ?? Math.round(Date.now() / 1000); + + const headerData = { + ...this._opts.headerData, + stateRoot, + transactionsTrie, + receiptTrie, + logsBloom, + gasUsed, + timestamp, + }; + + const blockData = { + header: { + ...headerData, + parentHash: + this._opts.headerData?.parentHash ?? this._opts.parentBlock.hash(), + number: + this._opts.headerData?.number ?? + this._opts.parentBlock.header.number + BigInt(1), + gasLimit: + this._opts.headerData?.gasLimit ?? + this._opts.parentBlock.header.gasLimit, + }, + transactions: this._transactions, + }; + + const block = Block.fromBlockData(blockData, { + common: this._vm.getCommon(), + skipConsensusFormatValidation: true, + calcDifficultyFromHeader: this._opts.parentBlock.header, + }); + + await this._vm.sealBlock(); + this._state = "sealed"; + + return block; + } + + public async revert(): Promise { + if (this._state !== "started" && this._state !== "rewarded") { + throw new Error( + `BlockBuilder.revert cannot be used in state ${this._state}` + ); + } + + await this._vm.revertBlock(); + this._state = "reverted"; + } + + private async _getTransactionsTrie(): Promise { + const trie = new Trie(); + for (const [i, tx] of this._transactions.entries()) { + await trie.put(Buffer.from(RLP.encode(i)), tx.serialize()); + } + return trie.root(); + } + + private async _getReceiptsTrie(): Promise { + const receiptTrie = new Trie(); + for (const [i, txResult] of this._transactionResults.entries()) { + const tx = this._transactions[i]; + const encodedReceipt = encodeReceipt(txResult.receipt, tx.type); + await receiptTrie.put(Buffer.from(RLP.encode(i)), encodedReceipt); + } + return receiptTrie.root(); + } + + private _getLogsBloom(): Buffer { + const bloom = new Bloom(); + for (const txResult of this._transactionResults) { + // Combine blooms via bitwise OR + bloom.or(txResult.bloom); + } + return bloom.bitvector; + } +} + +function encodeReceipt(receipt: TxReceipt, txType: number) { + const encoded = Buffer.from( + RLP.encode( + bufArrToArr([ + (receipt as PreByzantiumTxReceipt).stateRoot ?? 
+ ((receipt as PostByzantiumTxReceipt).status === 0 + ? Buffer.from([]) + : Buffer.from("01", "hex")), + bigIntToBuffer(receipt.cumulativeBlockGasUsed), + receipt.bitvector, + receipt.logs, + ]) + ) + ); + + if (txType === 0) { + return encoded; + } + + // Serialize receipt according to EIP-2718: + // `typed-receipt = tx-type || receipt-data` + return Buffer.concat([intToBuffer(txType), encoded]); +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts new file mode 100644 index 0000000000..1c6827a16a --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -0,0 +1,237 @@ +import type { Message } from "@nomicfoundation/ethereumjs-evm"; +import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; +import { Block } from "@nomicfoundation/ethereumjs-block"; +import { Common } from "@nomicfoundation/ethereumjs-common"; +import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; +import { Account, Address } from "@nomicfoundation/ethereumjs-util"; + +import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; +import { NodeConfig } from "../node-types"; +import { RpcDebugTraceOutput } from "../output"; +import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; + +import { EthereumJSAdapter } from "./ethereumjs"; +import { RethnetAdapter } from "./rethnet"; +import { VMAdapter } from "./vm-adapter"; + +/* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ +/* eslint-disable @typescript-eslint/restrict-template-expressions */ + +export class DualModeAdapter implements VMAdapter { + constructor( + private _ethereumJSAdapter: VMAdapter, + private _rethnetAdapter: VMAdapter + ) {} + + public static async create( + common: Common, + blockchain: HardhatBlockchainInterface, + config: NodeConfig, + selectHardfork: (blockNumber: bigint) => string + ) { + const ethereumJSAdapter = await EthereumJSAdapter.create( + common, + blockchain, + config, + selectHardfork + ); + + const rethnetAdapter = await RethnetAdapter.create( + // eslint-disable-next-line @typescript-eslint/dot-notation + ethereumJSAdapter["_stateManager"], + blockchain, + config, + selectHardfork + ); + + return new DualModeAdapter(ethereumJSAdapter, rethnetAdapter); + } + + public async dryRun( + tx: TypedTransaction, + blockContext: Block, + forceBaseFeeZero?: boolean + ): Promise { + const ethereumJSResult = await this._ethereumJSAdapter.dryRun( + tx, + blockContext, + forceBaseFeeZero + ); + + const rethnetResult = await this._rethnetAdapter.dryRun( + tx, + blockContext, + forceBaseFeeZero + ); + + assertEqualRunTxResults(ethereumJSResult, rethnetResult); + + return rethnetResult; + } + + public getCommon(): Common { + return this._ethereumJSAdapter.getCommon(); + } + + public async getStateRoot(): Promise { + return this._ethereumJSAdapter.getStateRoot(); + } + + public async getAccount(address: Address): Promise { + return this._ethereumJSAdapter.getAccount(address); + } + + public async getContractStorage( + address: Address, + key: Buffer + ): Promise { + return this._ethereumJSAdapter.getContractStorage(address, key); + } + + public async getContractCode(address: Address): Promise { + return this._ethereumJSAdapter.getContractCode(address); + } + + public async putAccount(address: Address, account: Account): Promise { + return this._ethereumJSAdapter.putAccount(address, account); + } + + public async 
putContractCode(address: Address, value: Buffer): Promise { + return this._ethereumJSAdapter.putContractCode(address, value); + } + + public async putContractStorage( + address: Address, + key: Buffer, + value: Buffer + ): Promise { + return this._ethereumJSAdapter.putContractStorage(address, key, value); + } + + public async restoreContext(stateRoot: Buffer): Promise { + return this._ethereumJSAdapter.restoreContext(stateRoot); + } + + public async traceTransaction( + hash: Buffer, + block: Block, + config: RpcDebugTracingConfig + ): Promise { + return this._ethereumJSAdapter.traceTransaction(hash, block, config); + } + + public enableTracing(callbacks: { + beforeMessage: (message: Message, next: any) => Promise; + step: () => Promise; + afterMessage: () => Promise; + }): void { + return this._ethereumJSAdapter.enableTracing(callbacks); + } + + public disableTracing(): void { + return this._ethereumJSAdapter.disableTracing(); + } + + public async setBlockContext( + block: Block, + irregularStateOrUndefined: Buffer | undefined + ): Promise { + return this._ethereumJSAdapter.setBlockContext( + block, + irregularStateOrUndefined + ); + } + + public isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean { + return this._ethereumJSAdapter.isEip1559Active(blockNumberOrPending); + } + + public async startBlock(): Promise { + return this._ethereumJSAdapter.startBlock(); + } + + public async runTxInBlock( + tx: TypedTransaction, + block: Block + ): Promise { + return this._ethereumJSAdapter.runTxInBlock(tx, block); + } + + public async addBlockRewards( + rewards: Array<[Address, bigint]> + ): Promise { + return this._ethereumJSAdapter.addBlockRewards(rewards); + } + + public async sealBlock(): Promise { + return this._ethereumJSAdapter.sealBlock(); + } + + public async revertBlock(): Promise { + return this._ethereumJSAdapter.revertBlock(); + } +} + +function assertEqualRunTxResults( + ethereumJSResult: RunTxResult, + rethnetResult: RunTxResult +) { + if (ethereumJSResult.totalGasSpent !== rethnetResult.totalGasSpent) { + console.trace( + `Different totalGasSpent: ${ethereumJSResult.totalGasSpent} !== ${rethnetResult.totalGasSpent}` + ); + throw new Error("Different totalGasSpent"); + } + if (ethereumJSResult.gasRefund !== rethnetResult.gasRefund) { + console.trace( + `Different gasRefund: ${ethereumJSResult.gasRefund} !== ${rethnetResult.gasRefund}` + ); + throw new Error("Different gasRefund"); + } + if ( + ethereumJSResult.createdAddress?.toString() !== + rethnetResult.createdAddress?.toString() + ) { + console.trace( + `Different createdAddress: ${ethereumJSResult.createdAddress?.toString()} !== ${rethnetResult.createdAddress?.toString()}` + ); + throw new Error("Different createdAddress"); + } + + if ( + ethereumJSResult.execResult.exceptionError?.error !== + rethnetResult.execResult.exceptionError?.error + ) { + console.trace( + `Different exceptionError.error: ${ethereumJSResult.execResult.exceptionError?.error} !== ${rethnetResult.execResult.exceptionError?.error}` + ); + throw new Error("Different exceptionError.error"); + } + + if ( + ethereumJSResult.execResult.exceptionError?.errorType !== + rethnetResult.execResult.exceptionError?.errorType + ) { + console.trace( + `Different exceptionError.errorType: ${ethereumJSResult.execResult.exceptionError?.errorType} !== ${rethnetResult.execResult.exceptionError?.errorType}` + ); + throw new Error("Different exceptionError.errorType"); + } + + // TODO: we only compare the return values when a contract was *not* created, + // 
because sometimes ethereumjs has the created bytecode in the return value + // and rethnet doesn't + if (ethereumJSResult.createdAddress === undefined) { + if ( + ethereumJSResult.execResult.returnValue.toString("hex") !== + rethnetResult.execResult.returnValue.toString("hex") + ) { + console.trace( + `Different returnValue: ${ethereumJSResult.execResult.returnValue.toString( + "hex" + )} !== ${rethnetResult.execResult.returnValue.toString("hex")}` + ); + throw new Error("Different returnValue"); + } + } +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts new file mode 100644 index 0000000000..c10e929792 --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -0,0 +1,441 @@ +import { Block } from "@nomicfoundation/ethereumjs-block"; +import { Common } from "@nomicfoundation/ethereumjs-common"; +import { + EVM, + EVMResult, + InterpreterStep, + Message, +} from "@nomicfoundation/ethereumjs-evm"; +import { + DefaultStateManager, + StateManager, +} from "@nomicfoundation/ethereumjs-statemanager"; +import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; +import { Account, Address } from "@nomicfoundation/ethereumjs-util"; +import { EEI, RunTxResult, VM } from "@nomicfoundation/ethereumjs-vm"; +import { assertHardhatInvariant } from "../../../core/errors"; +import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; +import { + InternalError, + InvalidInputError, + TransactionExecutionError, +} from "../../../core/providers/errors"; +import { VMDebugTracer } from "../../stack-traces/vm-debug-tracer"; +import { ForkStateManager } from "../fork/ForkStateManager"; +import { isForkedNodeConfig, NodeConfig } from "../node-types"; +import { RpcDebugTraceOutput } from "../output"; +import { FakeSenderAccessListEIP2930Transaction } from "../transactions/FakeSenderAccessListEIP2930Transaction"; +import { FakeSenderEIP1559Transaction } from "../transactions/FakeSenderEIP1559Transaction"; +import { FakeSenderTransaction } from "../transactions/FakeSenderTransaction"; +import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; +import { makeForkClient } from "../utils/makeForkClient"; +import { makeStateTrie } from "../utils/makeStateTrie"; +import { VMAdapter } from "./vm-adapter"; + +/* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ + +interface TracingCallbacks { + beforeMessage: (message: Message, next: any) => Promise; + step: (step: InterpreterStep, next: any) => Promise; + afterMessage: (result: EVMResult, next: any) => Promise; +} + +export class EthereumJSAdapter implements VMAdapter { + private _tracingCallbacks: TracingCallbacks | undefined; + + private _blockStartStateRoot: Buffer | undefined; + + constructor( + private _vm: VM, + private _stateManager: StateManager, + private _blockchain: HardhatBlockchainInterface, + private readonly _configNetworkId: number, + private readonly _configChainId: number, + private readonly _selectHardfork: (blockNumber: bigint) => string, + private _forkNetworkId?: number, + private _forkBlockNumber?: bigint + ) {} + + public static async create( + common: Common, + blockchain: HardhatBlockchainInterface, + config: NodeConfig, + selectHardfork: (blockNumber: bigint) => string + ): Promise { + let stateManager: StateManager; + let forkBlockNum: bigint | undefined; + let forkNetworkId: number | undefined; + + 
if (isForkedNodeConfig(config)) { + const { forkClient, forkBlockNumber } = await makeForkClient( + config.forkConfig, + config.forkCachePath + ); + + forkNetworkId = forkClient.getNetworkId(); + forkBlockNum = forkBlockNumber; + + const forkStateManager = new ForkStateManager( + forkClient, + forkBlockNumber + ); + await forkStateManager.initializeGenesisAccounts(config.genesisAccounts); + + stateManager = forkStateManager; + } else { + const stateTrie = await makeStateTrie(config.genesisAccounts); + + stateManager = new DefaultStateManager({ + trie: stateTrie, + }); + } + + const eei = new EEI(stateManager, common, blockchain); + const evm = await EVM.create({ + eei, + allowUnlimitedContractSize: config.allowUnlimitedContractSize, + common, + }); + + const vm = await VM.create({ + evm, + activatePrecompiles: true, + common, + stateManager, + blockchain, + }); + + return new EthereumJSAdapter( + vm, + stateManager, + blockchain, + config.networkId, + config.chainId, + selectHardfork, + forkNetworkId, + forkBlockNum + ); + } + + public async dryRun( + tx: TypedTransaction, + blockContext: Block, + forceBaseFeeZero = false + ): Promise { + const initialStateRoot = await this.getStateRoot(); + + let originalCommon: Common | undefined; + + try { + // NOTE: This is a workaround of both an @nomicfoundation/ethereumjs-vm limitation, and + // a bug in Hardhat Network. + // + // See: https://github.com/nomiclabs/hardhat/issues/1666 + // + // If this VM is running with EIP1559 activated, and the block is not + // an EIP1559 one, this will crash, so we create a new one that has + // baseFeePerGas = 0. + // + // We also have an option to force the base fee to be zero, + // we don't want to debit any balance nor fail any tx when running an + // eth_call. This will make the BASEFEE option also return 0, which + // shouldn't. See: https://github.com/nomiclabs/hardhat/issues/1688 + if ( + this.isEip1559Active(blockContext.header.number) && + (blockContext.header.baseFeePerGas === undefined || forceBaseFeeZero) + ) { + blockContext = Block.fromBlockData(blockContext, { + freeze: false, + common: this.getCommon(), + + skipConsensusFormatValidation: true, + }); + + (blockContext.header as any).baseFeePerGas = 0n; + } + + originalCommon = (this._vm as any)._common; + + (this._vm as any)._common = Common.custom( + { + chainId: + this._forkBlockNumber === undefined || + blockContext.header.number >= this._forkBlockNumber + ? this._configChainId + : this._forkNetworkId, + networkId: this._forkNetworkId ?? 
this._configNetworkId, + }, + { + hardfork: this._selectHardfork(blockContext.header.number), + } + ); + + return await this._vm.runTx({ + block: blockContext, + tx, + skipNonce: true, + skipBalance: true, + skipBlockGasLimitValidation: true, + }); + } finally { + if (originalCommon !== undefined) { + (this._vm as any)._common = originalCommon; + } + await this._stateManager.setStateRoot(initialStateRoot); + } + } + + public getCommon(): Common { + return this._vm._common; + } + + public async getStateRoot(): Promise { + return this._stateManager.getStateRoot(); + } + + public async getAccount(address: Address): Promise { + return this._stateManager.getAccount(address); + } + + public async getContractStorage( + address: Address, + key: Buffer + ): Promise { + return this._stateManager.getContractStorage(address, key); + } + + public async getContractCode(address: Address): Promise { + return this._stateManager.getContractCode(address); + } + + public async putAccount(address: Address, account: Account): Promise { + return this._stateManager.putAccount(address, account); + } + + public async putContractCode(address: Address, value: Buffer): Promise { + return this._stateManager.putContractCode(address, value); + } + + public async putContractStorage( + address: Address, + key: Buffer, + value: Buffer + ): Promise { + return this._stateManager.putContractStorage(address, key, value); + } + + public async restoreContext(stateRoot: Buffer): Promise { + if (this._stateManager instanceof ForkStateManager) { + return this._stateManager.restoreForkBlockContext(stateRoot); + } + return this._stateManager.setStateRoot(stateRoot); + } + + public enableTracing(callbacks: TracingCallbacks): void { + assertHardhatInvariant( + this._vm.evm.events !== undefined, + "EVM should have an 'events' property" + ); + + this._tracingCallbacks = callbacks; + + this._vm.evm.events.on( + "beforeMessage", + this._tracingCallbacks.beforeMessage + ); + this._vm.evm.events.on("step", this._tracingCallbacks.step); + this._vm.evm.events.on("afterMessage", this._tracingCallbacks.afterMessage); + } + + public disableTracing(): void { + assertHardhatInvariant( + this._vm.evm.events !== undefined, + "EVM should have an 'events' property" + ); + + if (this._tracingCallbacks !== undefined) { + this._vm.evm.events.removeListener( + "beforeMessage", + this._tracingCallbacks.beforeMessage + ); + this._vm.evm.events.removeListener("step", this._tracingCallbacks.step); + this._vm.evm.events.removeListener( + "afterMessage", + this._tracingCallbacks.afterMessage + ); + + this._tracingCallbacks = undefined; + } + } + + public async setBlockContext( + block: Block, + irregularStateOrUndefined: Buffer | undefined + ): Promise { + if (this._stateManager instanceof ForkStateManager) { + return this._stateManager.setBlockContext( + block.header.stateRoot, + block.header.number, + irregularStateOrUndefined + ); + } + + return this._stateManager.setStateRoot( + irregularStateOrUndefined ?? 
block.header.stateRoot + ); + } + + public async traceTransaction( + hash: Buffer, + block: Block, + config: RpcDebugTracingConfig + ): Promise { + const blockNumber = block.header.number; + let vm = this._vm; + if ( + this._forkBlockNumber !== undefined && + blockNumber <= this._forkBlockNumber + ) { + assertHardhatInvariant( + this._forkNetworkId !== undefined, + "this._forkNetworkId should exist if this._forkBlockNumber exists" + ); + + const common = this._getCommonForTracing( + this._forkNetworkId, + blockNumber + ); + + vm = await VM.create({ + common, + activatePrecompiles: true, + stateManager: this._vm.stateManager, + blockchain: this._vm.blockchain, + }); + } + + // We don't support tracing transactions before the spuriousDragon fork + // to avoid having to distinguish between empty and non-existing accounts. + // We *could* do it during the non-forked mode, but for simplicity we just + // don't support it at all. + const isPreSpuriousDragon = !vm._common.gteHardfork("spuriousDragon"); + if (isPreSpuriousDragon) { + throw new InvalidInputError( + "Tracing is not supported for transactions using hardforks older than Spurious Dragon. " + ); + } + + for (const tx of block.transactions) { + let txWithCommon: TypedTransaction; + const sender = tx.getSenderAddress(); + if (tx.type === 0) { + txWithCommon = new FakeSenderTransaction(sender, tx, { + common: vm._common, + }); + } else if (tx.type === 1) { + txWithCommon = new FakeSenderAccessListEIP2930Transaction(sender, tx, { + common: vm._common, + }); + } else if (tx.type === 2) { + txWithCommon = new FakeSenderEIP1559Transaction( + sender, + { ...tx, gasPrice: undefined }, + { + common: vm._common, + } + ); + } else { + throw new InternalError( + "Only legacy, EIP2930, and EIP1559 txs are supported" + ); + } + + const txHash = txWithCommon.hash(); + if (txHash.equals(hash)) { + const vmDebugTracer = new VMDebugTracer(vm); + return vmDebugTracer.trace(async () => { + await vm.runTx({ tx: txWithCommon, block }); + }, config); + } + await vm.runTx({ tx: txWithCommon, block }); + } + throw new TransactionExecutionError( + `Unable to find a transaction in a block that contains that transaction, this should never happen` + ); + } + + public isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean { + if ( + blockNumberOrPending !== undefined && + blockNumberOrPending !== "pending" + ) { + return this.getCommon().hardforkGteHardfork( + this._selectHardfork(blockNumberOrPending), + "london" + ); + } + return this.getCommon().gteHardfork("london"); + } + + public async startBlock(): Promise { + if (this._blockStartStateRoot !== undefined) { + throw new Error("a block is already started"); + } + + this._blockStartStateRoot = await this.getStateRoot(); + } + + public async runTxInBlock( + tx: TypedTransaction, + block: Block + ): Promise { + return this._vm.runTx({ tx, block }); + } + + public async addBlockRewards( + rewards: Array<[Address, bigint]> + ): Promise { + for (const [address, reward] of rewards) { + const account = await this._stateManager.getAccount(address); + account.balance += reward; + await this._stateManager.putAccount(address, account); + } + } + + public async sealBlock(): Promise { + if (this._blockStartStateRoot === undefined) { + throw new Error("Cannot seal a block that wasn't started"); + } + + this._blockStartStateRoot = undefined; + } + + public async revertBlock(): Promise { + if (this._blockStartStateRoot === undefined) { + throw new Error("Cannot revert a block that wasn't started"); + } + + await 
this._stateManager.setStateRoot(this._blockStartStateRoot); + this._blockStartStateRoot = undefined; + } + + private _getCommonForTracing(networkId: number, blockNumber: bigint): Common { + try { + const common = Common.custom( + { + chainId: networkId, + networkId, + }, + { + hardfork: this._selectHardfork(BigInt(blockNumber)), + } + ); + + return common; + } catch { + throw new InternalError( + `Network id ${networkId} does not correspond to a network that Hardhat can trace` + ); + } + } +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts new file mode 100644 index 0000000000..9b5d142ef0 --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -0,0 +1,269 @@ +import type { Message } from "@nomicfoundation/ethereumjs-evm"; +import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; +import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; +import { Block } from "@nomicfoundation/ethereumjs-block"; +import { Common } from "@nomicfoundation/ethereumjs-common"; +import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; +import { + Account, + Address, + bufferToBigInt, +} from "@nomicfoundation/ethereumjs-util"; +import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; +import { Rethnet } from "rethnet-evm"; + +import { NodeConfig } from "../node-types"; +import { + createRethnetFromHardhatDB, + ethereumjsTransactionToRethnet, + HardhatDB, + rethnetResultToRunTxResult, +} from "../utils/convertToRethnet"; +import { hardforkGte, HardforkName } from "../../../util/hardforks"; +import { RpcDebugTraceOutput } from "../output"; +import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; + +import { VMAdapter } from "./vm-adapter"; + +/* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ +/* eslint-disable @typescript-eslint/no-unused-vars */ + +export class RethnetAdapter implements VMAdapter { + constructor( + private _rethnet: Rethnet, + private readonly _selectHardfork: (blockNumber: bigint) => string + ) {} + + public static async create( + stateManager: StateManager, + blockchain: BlockchainInterface, + config: NodeConfig, + selectHardfork: (blockNumber: bigint) => string + ): Promise { + const hardhatDB = new HardhatDB(stateManager, blockchain); + + const limitContractCodeSize = + config.allowUnlimitedContractSize === true ? 2n ** 64n - 1n : undefined; + + const rethnet = createRethnetFromHardhatDB( + { + chainId: BigInt(config.chainId), + limitContractCodeSize, + disableBlockGasLimit: true, + disableEip3607: true, + }, + hardhatDB + ); + + return new RethnetAdapter(rethnet, selectHardfork); + } + + /** + * Run `tx` with the given `blockContext`, without modifying the state. + */ + public async dryRun( + tx: TypedTransaction, + blockContext: Block, + forceBaseFeeZero?: boolean + ): Promise { + const rethnetTx = ethereumjsTransactionToRethnet(tx); + + const difficulty = this._getBlockEnvDifficulty( + blockContext.header.number, + blockContext.header.difficulty, + bufferToBigInt(blockContext.header.mixHash) + ); + + await this._rethnet.guaranteeTransaction(rethnetTx); + const rethnetResult = await this._rethnet.dryRun(rethnetTx, { + number: blockContext.header.number, + coinbase: blockContext.header.coinbase.buf, + timestamp: blockContext.header.timestamp, + basefee: + forceBaseFeeZero === true ? 
0n : blockContext.header.baseFeePerGas, + gasLimit: blockContext.header.gasLimit, + difficulty, + }); + + return rethnetResultToRunTxResult(rethnetResult.execResult); + } + + /** + * Temporary method, will be removed. + */ + public getCommon(): Common { + throw new Error("not implemented"); + } + + /** + * Temporary method, will be removed. + */ + public isEip1559Active(): boolean { + throw new Error("not implemented"); + } + + /** + * Get the account info for the given address. + */ + public async getAccount(address: Address): Promise { + throw new Error("not implemented"); + } + + /** + * Get the storage value at the given address and slot. + */ + public async getContractStorage( + address: Address, + key: Buffer + ): Promise { + throw new Error("not implemented"); + } + + /** + * Get the contract code at the given address. + */ + public async getContractCode(address: Address): Promise { + throw new Error("not implemented"); + } + + /** + * Update the account info for the given address. + */ + public async putAccount(address: Address, account: Account): Promise { + throw new Error("not implemented"); + } + + /** + * Update the contract code for the given address. + */ + public async putContractCode(address: Address, value: Buffer): Promise { + throw new Error("not implemented"); + } + + /** + * Update the value of the given storage slot. + */ + public async putContractStorage( + address: Address, + key: Buffer, + value: Buffer + ): Promise { + throw new Error("not implemented"); + } + + /** + * Get the root of the current state trie. + */ + public async getStateRoot(): Promise { + throw new Error("not implemented"); + } + + /** + * Reset the state trie to the point after `block` was mined. If + * `irregularStateOrUndefined` is passed, use it as the state root. + */ + public async setBlockContext( + block: Block, + irregularStateOrUndefined: Buffer | undefined + ): Promise { + throw new Error("not implemented"); + } + + /** + * Reset the state trie to the point where it had the given state root. + * + * Throw if it can't. + */ + public async restoreContext(stateRoot: Buffer): Promise { + throw new Error("not implemented"); + } + + /** + * Start a new block and accept transactions sent with `runTxInBlock`. + */ + public async startBlock(): Promise { + throw new Error("not implemented"); + } + + /** + * Must be called after `startBlock`, and before `addBlockRewards`. + */ + public async runTxInBlock( + tx: TypedTransaction, + block: Block + ): Promise { + throw new Error("not implemented"); + } + + /** + * Must be called after `startBlock` and all `runTxInBlock` calls. + */ + public async addBlockRewards( + rewards: Array<[Address, bigint]> + ): Promise { + throw new Error("not implemented"); + } + + /** + * Finish the block successfully. Must be called after `addBlockRewards`. + */ + public async sealBlock(): Promise { + throw new Error("not implemented"); + } + + /** + * Revert the block and discard the changes to the state. Can be called + * at any point after `startBlock`. + */ + public async revertBlock(): Promise { + throw new Error("not implemented"); + } + + /** + * Re-execute the transactions in the block up until the transaction with the + * given hash, and trace the execution of that transaction. + */ + public async traceTransaction( + hash: Buffer, + block: Block, + config: RpcDebugTracingConfig + ): Promise { + throw new Error("not implemented"); + } + + /** + * Start tracing the VM execution with the given callbacks. 
+ */ + public enableTracing(callbacks: { + beforeMessage: (message: Message, next: any) => Promise; + step: () => Promise; + afterMessage: () => Promise; + }): void { + throw new Error("not implemented"); + } + + /** + * Stop tracing the execution. + */ + public disableTracing(): void { + throw new Error("not implemented"); + } + + private _getBlockEnvDifficulty( + blockNumber: bigint, + difficulty: bigint | undefined, + mixHash: bigint | undefined + ): bigint | undefined { + const hardfork = this._selectHardfork(blockNumber); + const isPostMergeHardfork = hardforkGte( + hardfork as HardforkName, + HardforkName.MERGE + ); + + if (isPostMergeHardfork) { + return mixHash; + } + + return difficulty; + } +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts new file mode 100644 index 0000000000..36a8317623 --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -0,0 +1,66 @@ +import type { + EVMResult, + InterpreterStep, + Message, +} from "@nomicfoundation/ethereumjs-evm"; +import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; +import type { Block } from "@nomicfoundation/ethereumjs-block"; +import type { Common } from "@nomicfoundation/ethereumjs-common"; +import type { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; +import type { Account, Address } from "@nomicfoundation/ethereumjs-util"; +import type { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; +import type { RpcDebugTraceOutput } from "../output"; + +export interface VMAdapter { + dryRun( + tx: TypedTransaction, + blockContext: Block, + forceBaseFeeZero?: boolean + ): Promise; + + // temporary methods + getCommon(): Common; + isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean; + + // getters + getAccount(address: Address): Promise; + getContractStorage(address: Address, key: Buffer): Promise; + getContractCode(address: Address): Promise; + + // setters + putAccount(address: Address, account: Account): Promise; + putContractCode(address: Address, value: Buffer): Promise; + putContractStorage( + address: Address, + key: Buffer, + value: Buffer + ): Promise; + + // getters/setters for the whole state + getStateRoot(): Promise; + setBlockContext( + block: Block, + irregularStateOrUndefined: Buffer | undefined + ): Promise; + restoreContext(stateRoot: Buffer): Promise; + + // methods for block-building + startBlock(): Promise; + runTxInBlock(tx: TypedTransaction, block: Block): Promise; + addBlockRewards(rewards: Array<[Address, bigint]>): Promise; + sealBlock(): Promise; + revertBlock(): Promise; + + // methods for tracing + traceTransaction( + hash: Buffer, + block: Block, + config: RpcDebugTracingConfig + ): Promise; + enableTracing(callbacks: { + beforeMessage: (message: Message, next: any) => Promise; + step: (step: InterpreterStep, next: any) => Promise; + afterMessage: (result: EVMResult, next: any) => Promise; + }): void; + disableTracing(): void; +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-debug-tracer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-debug-tracer.ts index 63c3431466..de88e7deca 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-debug-tracer.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-debug-tracer.ts @@ -1,8 +1,8 @@ +import type { AfterTxEvent, VM } from 
"@nomicfoundation/ethereumjs-vm"; +import type { EVMResult } from "@nomicfoundation/ethereumjs-evm"; +import type { InterpreterStep } from "@nomicfoundation/ethereumjs-evm/dist/interpreter"; +import type { Message } from "@nomicfoundation/ethereumjs-evm/dist/message"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; -import { AfterTxEvent, VM } from "@nomicfoundation/ethereumjs-vm"; -import { EVMResult } from "@nomicfoundation/ethereumjs-evm"; -import { InterpreterStep } from "@nomicfoundation/ethereumjs-evm/dist/interpreter"; -import { Message } from "@nomicfoundation/ethereumjs-evm/dist/message"; import { Address, bufferToBigInt, diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts index 5af71de205..017979cad3 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts @@ -1,12 +1,11 @@ +import type { InterpreterStep } from "@nomicfoundation/ethereumjs-evm/dist/interpreter"; +import type { Message } from "@nomicfoundation/ethereumjs-evm/dist/message"; import { EVMResult, getActivePrecompiles, } from "@nomicfoundation/ethereumjs-evm"; -import { InterpreterStep } from "@nomicfoundation/ethereumjs-evm/dist/interpreter"; -import { Message } from "@nomicfoundation/ethereumjs-evm/dist/message"; -import { Address, bufferToBigInt } from "@nomicfoundation/ethereumjs-util"; -import { VM } from "@nomicfoundation/ethereumjs-vm"; -import { assertHardhatInvariant } from "../../core/errors"; +import { bufferToBigInt } from "@nomicfoundation/ethereumjs-util"; +import { VMAdapter } from "../provider/vm/vm-adapter"; import { CallMessageTrace, @@ -26,11 +25,11 @@ export class VMTracer { private _messageTraces: MessageTrace[] = []; private _enabled = false; private _lastError: Error | undefined; - private _maxPrecompileNumber = getActivePrecompiles(this._vm._common).size; + private _maxPrecompileNumber = getActivePrecompiles(this._vm.getCommon()) + .size; constructor( - private readonly _vm: VM, - private readonly _getContractCode: (address: Address) => Promise, + private readonly _vm: VMAdapter, private readonly _throwErrors = true ) { this._beforeMessageHandler = this._beforeMessageHandler.bind(this); @@ -42,14 +41,13 @@ export class VMTracer { if (this._enabled) { return; } - assertHardhatInvariant( - this._vm.evm.events !== undefined, - "EVM should have an 'events' property" - ); - - this._vm.evm.events.on("beforeMessage", this._beforeMessageHandler); - this._vm.evm.events.on("step", this._stepHandler); - this._vm.evm.events.on("afterMessage", this._afterMessageHandler); + + this._vm.enableTracing({ + beforeMessage: this._beforeMessageHandler, + step: this._stepHandler, + afterMessage: this._afterMessageHandler, + }); + this._enabled = true; } @@ -58,20 +56,8 @@ export class VMTracer { return; } - assertHardhatInvariant( - this._vm.evm.events !== undefined, - "EVM should have an 'events' property" - ); - - this._vm.evm.events.removeListener( - "beforeMessage", - this._beforeMessageHandler - ); - this._vm.evm.events.removeListener("step", this._stepHandler); - this._vm.evm.events.removeListener( - "afterMessage", - this._afterMessageHandler - ); + this._vm.disableTracing(); + this._enabled = false; } @@ -142,7 +128,7 @@ export class VMTracer { } else { const codeAddress = message.codeAddress; - const code = await this._getContractCode(codeAddress); + const code 
= await this._vm.getContractCode(codeAddress); const callTrace: CallMessageTrace = { code, diff --git a/packages/hardhat-core/test/internal/hardhat-network/helpers/retrieveCommon.ts b/packages/hardhat-core/test/internal/hardhat-network/helpers/retrieveCommon.ts index 8c787ff243..6967d0b135 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/helpers/retrieveCommon.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/helpers/retrieveCommon.ts @@ -9,7 +9,7 @@ export async function retrieveCommon( if (provider["_node"] === undefined) { await provider["_init"](); } - const common = provider["_node"]?.["_vm"]._common; + const common = provider["_node"]?.["_vm"].getCommon(); if (common === undefined) { throw new Error("Failed to retrieve common from HardhatNetworkProvider"); } diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/TxPool.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/TxPool.ts index d62a3bf081..8f8aba0dde 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/TxPool.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/TxPool.ts @@ -36,7 +36,11 @@ describe("Tx Pool", () => { beforeEach(() => { stateManager = new DefaultStateManager(); const common = new Common({ chain: "mainnet" }); - txPool = new TxPool(stateManager, blockGasLimit, common); + txPool = new TxPool( + (address) => stateManager.getAccount(address), + blockGasLimit, + common + ); }); describe("addTransaction", () => { diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/assertEqualBlocks.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/assertEqualBlocks.ts index 9200cb68a3..fc7eb50178 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/assertEqualBlocks.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/assertEqualBlocks.ts @@ -1,8 +1,8 @@ -import { Block } from "@nomicfoundation/ethereumjs-block"; -import { +import type { AfterBlockEvent, PostByzantiumTxReceipt, } from "@nomicfoundation/ethereumjs-vm"; +import { Block } from "@nomicfoundation/ethereumjs-block"; import { assert } from "chai"; import { bufferToHex } from "@nomicfoundation/ethereumjs-util"; diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts index ecfd523d2d..5a16a8182b 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts @@ -1,9 +1,9 @@ -import { Block } from "@nomicfoundation/ethereumjs-block"; -import { +import type { AfterBlockEvent, RunBlockOpts, VM, } from "@nomicfoundation/ethereumjs-vm"; +import { Block } from "@nomicfoundation/ethereumjs-block"; import { assert } from "chai"; import { defaultHardhatNetworkParams } from "../../../../../src/internal/core/config/default-config"; diff --git a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts index 9d246e504e..c0c49cbc5d 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts @@ -1,3 +1,5 @@ +import { Block } from "@nomicfoundation/ethereumjs-block"; +import { Common } from "@nomicfoundation/ethereumjs-common"; import { 
Transaction, TxData } from "@nomicfoundation/ethereumjs-tx"; import { Account, @@ -5,14 +7,15 @@ import { privateToAddress, bigIntToBuffer, } from "@nomicfoundation/ethereumjs-util"; -import { VM } from "@nomicfoundation/ethereumjs-vm"; import abi from "ethereumjs-abi"; -import { Rethnet } from "rethnet-evm"; +import { HardhatBlockchain } from "../../../../src/internal/hardhat-network/provider/HardhatBlockchain"; -import { assertEthereumJsAndRethnetResults } from "../../../../src/internal/hardhat-network/provider/utils/assertions"; -import { ethereumjsTransactionToRethnet } from "../../../../src/internal/hardhat-network/provider/utils/convertToRethnet"; +import { VMAdapter } from "../../../../src/internal/hardhat-network/provider/vm/vm-adapter"; +import { DualModeAdapter } from "../../../../src/internal/hardhat-network/provider/vm/dual"; import { MessageTrace } from "../../../../src/internal/hardhat-network/stack-traces/message-trace"; import { VMTracer } from "../../../../src/internal/hardhat-network/stack-traces/vm-tracer"; +import { defaultHardhatNetworkParams } from "../../../../src/internal/core/config/default-config"; +import { BlockBuilder } from "../../../../src/internal/hardhat-network/provider/vm/block-builder"; const senderPrivateKey = Buffer.from( "e331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109", @@ -20,12 +23,39 @@ const senderPrivateKey = Buffer.from( ); const senderAddress = privateToAddress(senderPrivateKey); -export async function instantiateVm(): Promise { +export async function instantiateVm(): Promise { const account = Account.fromAccountData({ balance: 1e15 }); - const vm = await VM.create({ activatePrecompiles: true }); + const common = new Common({ chain: "mainnet" }); + const blockchain = new HardhatBlockchain(common); + await blockchain.addBlock( + Block.fromBlockData({ + header: { + number: 0n, + }, + }) + ); + + const vm = await DualModeAdapter.create( + common, + blockchain, + { + automine: true, + blockGasLimit: 1_000_000, + chainId: 1, + genesisAccounts: [], + hardfork: "london", + minGasPrice: 0n, + networkId: 1, + networkName: "mainnet", + mempoolOrder: "priority", + coinbase: "0x0000000000000000000000000000000000000000", + chains: defaultHardhatNetworkParams.chains, + }, + () => "london" + ); - await vm.stateManager.putAccount(new Address(senderAddress), account); + await vm.putAccount(new Address(senderAddress), account); return vm; } @@ -61,8 +91,7 @@ export function encodeCall( } export async function traceTransaction( - vm: VM, - rethnet: Rethnet, + vm: VMAdapter, txData: TxData ): Promise { const tx = new Transaction({ @@ -76,23 +105,25 @@ export async function traceTransaction( const signedTx = tx.sign(senderPrivateKey); - const getContractCode = vm.stateManager.getContractCode.bind(vm.stateManager); - - const vmTracer = new VMTracer(vm, getContractCode); + const vmTracer = new VMTracer(vm as any); vmTracer.enableTracing(); try { - const rethnetTx = ethereumjsTransactionToRethnet(signedTx); - - const rethnetResult = await rethnet.dryRun(rethnetTx, { - number: 0n, - coinbase: Buffer.from("0000000000000000000000000000000000000000", "hex"), - timestamp: BigInt(Math.floor(Date.now() / 1000)), - gasLimit: 4000000n, + const blockBuilder = new BlockBuilder(vm, { + parentBlock: Block.fromBlockData( + {}, + { + skipConsensusFormatValidation: true, + } + ), + headerData: { + gasLimit: 10_000_000n, + }, }); - - const txResult = await vm.runTx({ tx: signedTx }); - assertEthereumJsAndRethnetResults(rethnetResult.execResult, txResult); + await 
blockBuilder.startBlock(); + await blockBuilder.addTransaction(signedTx); + await blockBuilder.addRewards([]); + await blockBuilder.seal(); const messageTrace = vmTracer.getLastTopLevelMessageTrace(); if (messageTrace === undefined) { @@ -105,7 +136,7 @@ export async function traceTransaction( } } -async function getNextPendingNonce(vm: VM): Promise { - const acc = await vm.stateManager.getAccount(new Address(senderAddress)); +async function getNextPendingNonce(vm: VMAdapter): Promise { + const acc = await vm.getAccount(new Address(senderAddress)); return bigIntToBuffer(acc.nonce); } diff --git a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts index db7fd9b60a..a3df04ddeb 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts @@ -1,13 +1,12 @@ import { toBuffer } from "@nomicfoundation/ethereumjs-util"; -import { VM } from "@nomicfoundation/ethereumjs-vm"; import { assert } from "chai"; import fs from "fs"; import fsExtra from "fs-extra"; import path from "path"; -import { Rethnet } from "rethnet-evm"; import semver from "semver"; import { ReturnData } from "../../../../src/internal/hardhat-network/provider/return-data"; +import { VMAdapter } from "../../../../src/internal/hardhat-network/provider/vm/vm-adapter"; import { createModelsAndDecodeBytecodes } from "../../../../src/internal/hardhat-network/stack-traces/compiler-to-model"; import { ConsoleLogger, @@ -37,10 +36,6 @@ import { } from "../../../../src/types"; import { setCWD } from "../helpers/cwd"; -import { - createRethnetFromHardhatDB, - HardhatDB, -} from "../../../../src/internal/hardhat-network/provider/utils/convertToRethnet"; import { SUPPORTED_SOLIDITY_VERSION_RANGE } from "../../../../src/internal/hardhat-network/stack-traces/constants"; import { compileFiles, @@ -438,15 +433,6 @@ async function runTest( const logger = new ConsoleLogger(); const vm = await instantiateVm(); - const hardhatDB = new HardhatDB(vm.stateManager, vm.blockchain); - - const rethnet = createRethnetFromHardhatDB( - { - chainId: vm._common.chainId(), - limitContractCodeSize: 2n ** 64n - 1n, - }, - hardhatDB - ); const txIndexToContract: Map = new Map(); @@ -458,7 +444,6 @@ async function runTest( txIndex, tx, vm, - rethnet, compilerOutput, txIndexToContract ); @@ -482,7 +467,6 @@ async function runTest( txIndex, tx, vm, - rethnet, compilerOutput, contract! 
); @@ -589,8 +573,7 @@ function linkBytecode( async function runDeploymentTransactionTest( txIndex: number, tx: DeploymentTransaction, - vm: VM, - rethnet: Rethnet, + vm: VMAdapter, compilerOutput: CompilerOutput, txIndexToContract: Map ): Promise { @@ -622,7 +605,7 @@ async function runDeploymentTransactionTest( const data = Buffer.concat([deploymentBytecode, params]); - const trace = await traceTransaction(vm, rethnet, { + const trace = await traceTransaction(vm, { value: tx.value, data, gasLimit: tx.gas, @@ -634,8 +617,7 @@ async function runDeploymentTransactionTest( async function runCallTransactionTest( txIndex: number, tx: CallTransaction, - vm: VM, - rethnet: Rethnet, + vm: VMAdapter, compilerOutput: CompilerOutput, contract: DeployedContract ): Promise { @@ -656,7 +638,7 @@ async function runCallTransactionTest( data = Buffer.from([]); } - const trace = await traceTransaction(vm, rethnet, { + const trace = await traceTransaction(vm, { to: contract.address, value: tx.value, data, From 9a0a10a4cb645010064b8598cb150385dc44f9f9 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Tue, 15 Nov 2022 20:19:41 +0100 Subject: [PATCH 004/406] Make getCodeByHash behavior more explicit --- .../provider/utils/convertToRethnet.ts | 12 ++++++++++-- 1 file changed, 10 insertions(+), 2 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index ccbd3d73e5..4c126da640 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -87,10 +87,18 @@ export class HardhatDB { if (this._stateManager instanceof DefaultStateManager) { // eslint-disable-next-line @typescript-eslint/dot-notation const db = this._stateManager._trie["_db"]; - return db.get(Buffer.concat([Buffer.from("c"), codeHash])); + const code = await db.get(Buffer.concat([Buffer.from("c"), codeHash])); + + if (code === null) { + // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error + throw new Error("returning null in getCodeByHash is not supported"); + } + + return code; } - return Buffer.from([]); + // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error + throw new Error("getCodeByHash not implemented for ForkStateManager"); } public async getStorageRoot() { From d94dd3f47b935bfea3a19f1f6a3d6c7c5089da91 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Fri, 18 Nov 2022 22:38:09 +0100 Subject: [PATCH 005/406] Remove getCommon from VMAdapter --- .../internal/hardhat-network/provider/node.ts | 26 ++++++++++--------- .../provider/vm/block-builder.ts | 11 +++++--- .../hardhat-network/provider/vm/dual.ts | 4 --- .../hardhat-network/provider/vm/ethereumjs.ts | 22 +++++++--------- .../hardhat-network/provider/vm/rethnet.ts | 8 ------ .../hardhat-network/provider/vm/vm-adapter.ts | 2 -- .../hardhat-network/stack-traces/vm-tracer.ts | 6 +++-- .../hardhat-network/helpers/retrieveCommon.ts | 2 +- .../hardhat-network/stack-traces/execution.ts | 9 ++++--- .../hardhat-network/stack-traces/test.ts | 11 +++++--- 10 files changed, 50 insertions(+), 51 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 4e13f3b81a..2baa095168 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ 
b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -256,6 +256,7 @@ export class HardhatNode extends EventEmitter { vm, blockchain, txPool, + common, automine, minGasPrice, initialBlockTimeOffset, @@ -337,6 +338,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu private readonly _vm: VMAdapter, private readonly _blockchain: HardhatBlockchainInterface, private readonly _txPool: TxPool, + private readonly _common: Common, private _automine: boolean, private _minGasPrice: bigint, private _blockTimeOffsetSeconds: bigint = 0n, @@ -361,7 +363,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu this.setUserProvidedNextBlockBaseFeePerGas(nextBlockBaseFee); } - this._vmTracer = new VMTracer(this._vm, false); + this._vmTracer = new VMTracer(this._vm, this._common, false); this._vmTracer.enableTracing(); const contractsIdentifier = new ContractsIdentifier(); @@ -413,14 +415,14 @@ Hardhat Network's forking functionality only works with blocks from at least spu if ("maxFeePerGas" in txParams) { tx = FeeMarketEIP1559Transaction.fromTxData(txParams, { - common: this._vm.getCommon(), + common: this._common, }); } else if ("accessList" in txParams) { tx = AccessListEIP2930Transaction.fromTxData(txParams, { - common: this._vm.getCommon(), + common: this._common, }); } else { - tx = Transaction.fromTxData(txParams, { common: this._vm.getCommon() }); + tx = Transaction.fromTxData(txParams, { common: this._common }); } return tx.sign(pk); @@ -1666,7 +1668,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu headerData.baseFeePerGas = await this.getNextBlockBaseFeePerGas(); - const blockBuilder = new BlockBuilder(this._vm, { + const blockBuilder = new BlockBuilder(this._vm, this._common, { parentBlock, headerData, }); @@ -1709,7 +1711,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu tx = transactionQueue.getNextTransaction(); } - const minerReward = this._vm.getCommon().param("pow", "minerReward"); + const minerReward = this._common.param("pow", "minerReward"); await blockBuilder.addRewards([[coinbase, minerReward]]); const block = await blockBuilder.seal(); await this._blockchain.putBlock(block); @@ -1736,7 +1738,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu private _getMinimalTransactionFee(): bigint { // Typically 21_000 gas - return this._vm.getCommon().param("gasPrices", "tx"); + return this._common.param("gasPrices", "tx"); } private async _getFakeTransaction( @@ -1750,18 +1752,18 @@ Hardhat Network's forking functionality only works with blocks from at least spu if ("maxFeePerGas" in txParams && txParams.maxFeePerGas !== undefined) { return new FakeSenderEIP1559Transaction(sender, txParams, { - common: this._vm.getCommon(), + common: this._common, }); } if ("accessList" in txParams && txParams.accessList !== undefined) { return new FakeSenderAccessListEIP2930Transaction(sender, txParams, { - common: this._vm.getCommon(), + common: this._common, }); } return new FakeSenderTransaction(sender, txParams, { - common: this._vm.getCommon(), + common: this._common, }); } @@ -1971,7 +1973,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu const receipts = getRpcReceiptOutputsFromLocalBlockExecution( block, runBlockResult, - shouldShowTransactionTypeForHardfork(this._vm.getCommon()) + shouldShowTransactionTypeForHardfork(this._common) ); 
this._blockchain.addTransactionReceipts(receipts); @@ -1996,7 +1998,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu getRpcBlock( block, td, - shouldShowTransactionTypeForHardfork(this._vm.getCommon()), + shouldShowTransactionTypeForHardfork(this._common), false ) ); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts index 98f7cc2544..878d840362 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts @@ -1,3 +1,4 @@ +import type { Common } from "@nomicfoundation/ethereumjs-common"; import { Block, HeaderData } from "@nomicfoundation/ethereumjs-block"; import { RLP } from "@nomicfoundation/ethereumjs-rlp"; import { Trie } from "@nomicfoundation/ethereumjs-trie"; @@ -46,7 +47,11 @@ export class BlockBuilder { private _transactions: TypedTransaction[] = []; private _transactionResults: RunTxResult[] = []; - constructor(private _vm: VMAdapter, private _opts: BuildBlockOpts) {} + constructor( + private _vm: VMAdapter, + private _common: Common, + private _opts: BuildBlockOpts + ) {} public async startBlock(): Promise { await this._vm.startBlock(); @@ -80,7 +85,7 @@ export class BlockBuilder { const blockData = { header, transactions: this._transactions }; const block = Block.fromBlockData(blockData, { - common: this._vm.getCommon(), + common: this._common, skipConsensusFormatValidation: true, calcDifficultyFromHeader: this._opts.parentBlock.header, }); @@ -146,7 +151,7 @@ export class BlockBuilder { }; const block = Block.fromBlockData(blockData, { - common: this._vm.getCommon(), + common: this._common, skipConsensusFormatValidation: true, calcDifficultyFromHeader: this._opts.parentBlock.header, }); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index 1c6827a16a..7836c38fca 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -69,10 +69,6 @@ export class DualModeAdapter implements VMAdapter { return rethnetResult; } - public getCommon(): Common { - return this._ethereumJSAdapter.getCommon(); - } - public async getStateRoot(): Promise { return this._ethereumJSAdapter.getStateRoot(); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index c10e929792..4f13d243d5 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -46,14 +46,15 @@ export class EthereumJSAdapter implements VMAdapter { private _blockStartStateRoot: Buffer | undefined; constructor( - private _vm: VM, - private _stateManager: StateManager, - private _blockchain: HardhatBlockchainInterface, + private readonly _vm: VM, + private readonly _stateManager: StateManager, + private readonly _blockchain: HardhatBlockchainInterface, + private readonly _common: Common, private readonly _configNetworkId: number, private readonly _configChainId: number, private readonly _selectHardfork: (blockNumber: bigint) => string, - private _forkNetworkId?: number, - private _forkBlockNumber?: bigint + private readonly 
_forkNetworkId?: number, + private readonly _forkBlockNumber?: bigint ) {} public static async create( @@ -109,6 +110,7 @@ export class EthereumJSAdapter implements VMAdapter { vm, stateManager, blockchain, + common, config.networkId, config.chainId, selectHardfork, @@ -146,7 +148,7 @@ export class EthereumJSAdapter implements VMAdapter { ) { blockContext = Block.fromBlockData(blockContext, { freeze: false, - common: this.getCommon(), + common: this._common, skipConsensusFormatValidation: true, }); @@ -185,10 +187,6 @@ export class EthereumJSAdapter implements VMAdapter { } } - public getCommon(): Common { - return this._vm._common; - } - public async getStateRoot(): Promise { return this._stateManager.getStateRoot(); } @@ -369,12 +367,12 @@ export class EthereumJSAdapter implements VMAdapter { blockNumberOrPending !== undefined && blockNumberOrPending !== "pending" ) { - return this.getCommon().hardforkGteHardfork( + return this._common.hardforkGteHardfork( this._selectHardfork(blockNumberOrPending), "london" ); } - return this.getCommon().gteHardfork("london"); + return this._common.gteHardfork("london"); } public async startBlock(): Promise { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 9b5d142ef0..f6e4c8706e 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -2,7 +2,6 @@ import type { Message } from "@nomicfoundation/ethereumjs-evm"; import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; import { Block } from "@nomicfoundation/ethereumjs-block"; -import { Common } from "@nomicfoundation/ethereumjs-common"; import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; import { Account, @@ -88,13 +87,6 @@ export class RethnetAdapter implements VMAdapter { return rethnetResultToRunTxResult(rethnetResult.execResult); } - /** - * Temporary method, will be removed. - */ - public getCommon(): Common { - throw new Error("not implemented"); - } - /** * Temporary method, will be removed. 
*/ diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index 36a8317623..bb4d0fbab5 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -5,7 +5,6 @@ import type { } from "@nomicfoundation/ethereumjs-evm"; import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import type { Block } from "@nomicfoundation/ethereumjs-block"; -import type { Common } from "@nomicfoundation/ethereumjs-common"; import type { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import type { Account, Address } from "@nomicfoundation/ethereumjs-util"; import type { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; @@ -19,7 +18,6 @@ export interface VMAdapter { ): Promise; // temporary methods - getCommon(): Common; isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean; // getters diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts index 017979cad3..9c578d9a3f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts @@ -1,3 +1,4 @@ +import type { Common } from "@nomicfoundation/ethereumjs-common"; import type { InterpreterStep } from "@nomicfoundation/ethereumjs-evm/dist/interpreter"; import type { Message } from "@nomicfoundation/ethereumjs-evm/dist/message"; import { @@ -25,16 +26,17 @@ export class VMTracer { private _messageTraces: MessageTrace[] = []; private _enabled = false; private _lastError: Error | undefined; - private _maxPrecompileNumber = getActivePrecompiles(this._vm.getCommon()) - .size; + private _maxPrecompileNumber; constructor( private readonly _vm: VMAdapter, + common: Common, private readonly _throwErrors = true ) { this._beforeMessageHandler = this._beforeMessageHandler.bind(this); this._stepHandler = this._stepHandler.bind(this); this._afterMessageHandler = this._afterMessageHandler.bind(this); + this._maxPrecompileNumber = getActivePrecompiles(common).size; } public enableTracing() { diff --git a/packages/hardhat-core/test/internal/hardhat-network/helpers/retrieveCommon.ts b/packages/hardhat-core/test/internal/hardhat-network/helpers/retrieveCommon.ts index 6967d0b135..01a1c324a8 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/helpers/retrieveCommon.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/helpers/retrieveCommon.ts @@ -9,7 +9,7 @@ export async function retrieveCommon( if (provider["_node"] === undefined) { await provider["_init"](); } - const common = provider["_node"]?.["_vm"].getCommon(); + const common = provider["_node"]?.["_common"]; if (common === undefined) { throw new Error("Failed to retrieve common from HardhatNetworkProvider"); } diff --git a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts index c0c49cbc5d..0eca7effe6 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts @@ -23,7 +23,7 @@ const senderPrivateKey = Buffer.from( ); const senderAddress = privateToAddress(senderPrivateKey); 
-export async function instantiateVm(): Promise { +export async function instantiateVm(): Promise<[VMAdapter, Common]> { const account = Account.fromAccountData({ balance: 1e15 }); const common = new Common({ chain: "mainnet" }); @@ -57,7 +57,7 @@ export async function instantiateVm(): Promise { await vm.putAccount(new Address(senderAddress), account); - return vm; + return [vm, common]; } export function encodeConstructorParams( @@ -92,6 +92,7 @@ export function encodeCall( export async function traceTransaction( vm: VMAdapter, + common: Common, txData: TxData ): Promise { const tx = new Transaction({ @@ -105,11 +106,11 @@ export async function traceTransaction( const signedTx = tx.sign(senderPrivateKey); - const vmTracer = new VMTracer(vm as any); + const vmTracer = new VMTracer(vm as any, common); vmTracer.enableTracing(); try { - const blockBuilder = new BlockBuilder(vm, { + const blockBuilder = new BlockBuilder(vm, common, { parentBlock: Block.fromBlockData( {}, { diff --git a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts index 73fa66283c..c5c0e606ce 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts @@ -1,3 +1,4 @@ +import { Common } from "@nomicfoundation/ethereumjs-common"; import { toBuffer } from "@nomicfoundation/ethereumjs-util"; import { assert } from "chai"; import fs from "fs"; @@ -436,7 +437,7 @@ async function runTest( const tracer = new SolidityTracer(); const logger = new ConsoleLogger(); - const vm = await instantiateVm(); + const [vm, common] = await instantiateVm(); const txIndexToContract: Map = new Map(); @@ -448,6 +449,7 @@ async function runTest( txIndex, tx, vm, + common, compilerOutput, txIndexToContract ); @@ -471,6 +473,7 @@ async function runTest( txIndex, tx, vm, + common, compilerOutput, contract! 
); @@ -578,6 +581,7 @@ async function runDeploymentTransactionTest( txIndex: number, tx: DeploymentTransaction, vm: VMAdapter, + common: Common, compilerOutput: CompilerOutput, txIndexToContract: Map ): Promise { @@ -609,7 +613,7 @@ async function runDeploymentTransactionTest( const data = Buffer.concat([deploymentBytecode, params]); - const trace = await traceTransaction(vm, { + const trace = await traceTransaction(vm, common, { value: tx.value, data, gasLimit: tx.gas, @@ -622,6 +626,7 @@ async function runCallTransactionTest( txIndex: number, tx: CallTransaction, vm: VMAdapter, + common: Common, compilerOutput: CompilerOutput, contract: DeployedContract ): Promise { @@ -642,7 +647,7 @@ async function runCallTransactionTest( data = Buffer.from([]); } - const trace = await traceTransaction(vm, { + const trace = await traceTransaction(vm, common, { to: contract.address, value: tx.value, data, From a63b68fd903f55cf585449e371ebb9c4fe473aa2 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Fri, 18 Nov 2022 22:49:36 +0100 Subject: [PATCH 006/406] Remove isEip1559Active from VMAdapter --- .../internal/hardhat-network/provider/node.ts | 18 +++++++++++- .../hardhat-network/provider/vm/dual.ts | 4 --- .../hardhat-network/provider/vm/ethereumjs.ts | 28 +++++++++---------- .../hardhat-network/provider/vm/rethnet.ts | 7 ----- .../hardhat-network/provider/vm/vm-adapter.ts | 3 -- 5 files changed, 31 insertions(+), 29 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 2baa095168..4493f52b8d 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -257,6 +257,7 @@ export class HardhatNode extends EventEmitter { blockchain, txPool, common, + hardforkActivations, automine, minGasPrice, initialBlockTimeOffset, @@ -339,6 +340,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu private readonly _blockchain: HardhatBlockchainInterface, private readonly _txPool: TxPool, private readonly _common: Common, + private _hardforkActivations: HardforkHistoryConfig, private _automine: boolean, private _minGasPrice: bigint, private _blockTimeOffsetSeconds: bigint = 0n, @@ -2330,7 +2332,21 @@ Hardhat Network's forking functionality only works with blocks from at least spu } public isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean { - return this._vm.isEip1559Active(blockNumberOrPending); + if ( + blockNumberOrPending !== undefined && + blockNumberOrPending !== "pending" + ) { + return this._common.hardforkGteHardfork( + selectHardfork( + this._forkBlockNumber, + this._common.hardfork(), + this._hardforkActivations, + blockNumberOrPending + ), + "london" + ); + } + return this._common.gteHardfork("london"); } public isPostMergeHardfork(): boolean { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index 7836c38fca..e58c5dd835 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -138,10 +138,6 @@ export class DualModeAdapter implements VMAdapter { ); } - public isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean { - return this._ethereumJSAdapter.isEip1559Active(blockNumberOrPending); - } - public async startBlock(): Promise 
{ return this._ethereumJSAdapter.startBlock(); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index 4f13d243d5..0ce2920572 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -143,7 +143,7 @@ export class EthereumJSAdapter implements VMAdapter { // eth_call. This will make the BASEFEE option also return 0, which // shouldn't. See: https://github.com/nomiclabs/hardhat/issues/1688 if ( - this.isEip1559Active(blockContext.header.number) && + this._isEip1559Active(blockContext.header.number) && (blockContext.header.baseFeePerGas === undefined || forceBaseFeeZero) ) { blockContext = Block.fromBlockData(blockContext, { @@ -362,19 +362,6 @@ export class EthereumJSAdapter implements VMAdapter { ); } - public isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean { - if ( - blockNumberOrPending !== undefined && - blockNumberOrPending !== "pending" - ) { - return this._common.hardforkGteHardfork( - this._selectHardfork(blockNumberOrPending), - "london" - ); - } - return this._common.gteHardfork("london"); - } - public async startBlock(): Promise { if (this._blockStartStateRoot !== undefined) { throw new Error("a block is already started"); @@ -436,4 +423,17 @@ export class EthereumJSAdapter implements VMAdapter { ); } } + + private _isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean { + if ( + blockNumberOrPending !== undefined && + blockNumberOrPending !== "pending" + ) { + return this._common.hardforkGteHardfork( + this._selectHardfork(blockNumberOrPending), + "london" + ); + } + return this._common.gteHardfork("london"); + } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index f6e4c8706e..47de62a2a1 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -87,13 +87,6 @@ export class RethnetAdapter implements VMAdapter { return rethnetResultToRunTxResult(rethnetResult.execResult); } - /** - * Temporary method, will be removed. - */ - public isEip1559Active(): boolean { - throw new Error("not implemented"); - } - /** * Get the account info for the given address. 
*/ diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index bb4d0fbab5..61a78f41df 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -17,9 +17,6 @@ export interface VMAdapter { forceBaseFeeZero?: boolean ): Promise; - // temporary methods - isEip1559Active(blockNumberOrPending?: bigint | "pending"): boolean; - // getters getAccount(address: Address): Promise; getContractStorage(address: Address, key: Buffer): Promise; From 206c09dae311318cff6e1158453f93224be8cdd8 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Fri, 18 Nov 2022 22:59:20 +0100 Subject: [PATCH 007/406] Don't pass blockchain instance to RethnetAdapter --- .../provider/utils/convertToRethnet.ts | 18 +-- .../hardhat-network/provider/vm/dual.ts | 13 +- .../hardhat-network/provider/vm/rethnet.ts | 7 +- .../provider/utils/HardhatDb.ts | 127 ------------------ 4 files changed, 19 insertions(+), 146 deletions(-) delete mode 100644 packages/hardhat-core/test/internal/hardhat-network/provider/utils/HardhatDb.ts diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index 4c126da640..a37edbb108 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -26,16 +26,15 @@ import { Transaction, } from "rethnet-evm"; -import { HardhatError } from "../../../core/errors"; -import { ERRORS } from "../../../core/errors-list"; - export class HardhatDB { private _stateManager: StateManager; private _blockchain: BlockchainInterface | undefined; - constructor(stateManager: StateManager, blockchain?: BlockchainInterface) { + constructor( + stateManager: StateManager, + private _getBlockHash: (blockNumber: bigint) => Promise + ) { this._stateManager = stateManager; - this._blockchain = blockchain; } public async commit() { @@ -73,14 +72,7 @@ export class HardhatDB { } public async getBlockHash(blockNumber: bigint) { - const block = await this._blockchain?.getBlock(blockNumber); - if (block === undefined || block === null) { - throw new HardhatError(ERRORS.GENERAL.UNSUPPORTED_OPERATION, { - error: "Block not found", - }); - } - - return block.header.hash(); + return this._getBlockHash(blockNumber); } public async getCodeByHash(codeHash: Buffer) { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index e58c5dd835..e8e13f6654 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -5,6 +5,7 @@ import { Common } from "@nomicfoundation/ethereumjs-common"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import { Account, Address } from "@nomicfoundation/ethereumjs-util"; +import { assertHardhatInvariant } from "../../../core/errors"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; import { NodeConfig } from "../node-types"; import { RpcDebugTraceOutput } from "../output"; @@ -39,9 +40,17 @@ export class DualModeAdapter implements VMAdapter { const rethnetAdapter = await 
RethnetAdapter.create( // eslint-disable-next-line @typescript-eslint/dot-notation ethereumJSAdapter["_stateManager"], - blockchain, config, - selectHardfork + selectHardfork, + async (blockNumber) => { + const block = await blockchain.getBlock(blockNumber); + assertHardhatInvariant( + block !== undefined && block !== null, + "Should be able to get block" + ); + + return block.header.hash(); + } ); return new DualModeAdapter(ethereumJSAdapter, rethnetAdapter); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 47de62a2a1..d9c84b47ce 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -1,6 +1,5 @@ import type { Message } from "@nomicfoundation/ethereumjs-evm"; import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; -import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; import { Block } from "@nomicfoundation/ethereumjs-block"; import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; import { @@ -35,11 +34,11 @@ export class RethnetAdapter implements VMAdapter { public static async create( stateManager: StateManager, - blockchain: BlockchainInterface, config: NodeConfig, - selectHardfork: (blockNumber: bigint) => string + selectHardfork: (blockNumber: bigint) => string, + getBlockHash: (blockNumber: bigint) => Promise ): Promise { - const hardhatDB = new HardhatDB(stateManager, blockchain); + const hardhatDB = new HardhatDB(stateManager, getBlockHash); const limitContractCodeSize = config.allowUnlimitedContractSize === true ? 2n ** 64n - 1n : undefined; diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/HardhatDb.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/HardhatDb.ts deleted file mode 100644 index 7a748fc539..0000000000 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/HardhatDb.ts +++ /dev/null @@ -1,127 +0,0 @@ -import { assert } from "chai"; -import { DefaultStateManager } from "@nomicfoundation/ethereumjs-statemanager"; -import { Address } from "@nomicfoundation/ethereumjs-util"; - -import { Block, Config, Rethnet, Transaction } from "rethnet-evm"; -import { HardhatDB } from "../../../../../src/internal/hardhat-network/provider/utils/convertToRethnet"; - -describe("Hardhat DB", () => { - const caller = Address.fromString( - "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" - ); - const receiver = Address.fromString( - "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" - ); - - let db: HardhatDB; - let rethnet: Rethnet; - - beforeEach(function () { - db = new HardhatDB(new DefaultStateManager()); - - const cfg: Config = { - chainId: BigInt(0), - limitContractCodeSize: 2n ** 64n - 1n, - disableEip3607: true, - }; - rethnet = Rethnet.withCallbacks( - cfg, - { - getAccountByAddressFn: HardhatDB.prototype.getAccountByAddress.bind(db), - getAccountStorageSlotFn: - HardhatDB.prototype.getAccountStorageSlot.bind(db), - getBlockHashFn: HardhatDB.prototype.getBlockHash.bind(db), - getCodeByHashFn: HardhatDB.prototype.getCodeByHash.bind(db), - }, - { - commitFn: HardhatDB.prototype.commit.bind(db), - }, - { - checkpointFn: HardhatDB.prototype.checkpoint.bind(db), - revertFn: HardhatDB.prototype.revert.bind(db), - getStorageRootFn: HardhatDB.prototype.getStorageRoot.bind(db), - insertAccountFn: HardhatDB.prototype.insertAccount.bind(db), - 
setAccountBalanceFn: HardhatDB.prototype.setAccountBalance.bind(db), - setAccountCodeFn: HardhatDB.prototype.setAccountCode.bind(db), - setAccountNonceFn: HardhatDB.prototype.setAccountNonce.bind(db), - setAccountStorageSlotFn: - HardhatDB.prototype.setAccountStorageSlot.bind(db), - } - ); - }); - - // TODO: insertBlock, setAccountCode, setAccountStorageSlot - it("getAccountByAddress", async () => { - await rethnet.insertAccount(caller.buf); - const account = await rethnet.getAccountByAddress(caller.buf); - - assert.equal(account?.balance, 0n); - assert.equal(account?.nonce, 0n); - }); - it("setAccountBalance", async () => { - await rethnet.insertAccount(caller.buf); - await rethnet.setAccountBalance(caller.buf, 100n); - - const account = await rethnet.getAccountByAddress(caller.buf); - - assert.equal(account?.balance, 100n); - assert.equal(account?.nonce, 0n); - }); - it("setAccountNonce", async () => { - await rethnet.insertAccount(caller.buf); - await rethnet.setAccountNonce(caller.buf, 5n); - - const account = await rethnet.getAccountByAddress(caller.buf); - - assert.equal(account?.balance, 0n); - assert.equal(account?.nonce, 5n); - }); - it("call", async () => { - // Add funds to caller - await rethnet.insertAccount(caller.buf); - await rethnet.setAccountBalance(caller.buf, BigInt("0xffffffff")); - - // send some value - const sendValue: Transaction = { - from: caller.buf, - to: receiver.buf, - gasLimit: BigInt(1000000), - value: 100n, - }; - - const block: Block = { - number: BigInt(1), - timestamp: BigInt(Math.ceil(new Date().getTime() / 1000)), - }; - const sendValueChanges = await rethnet.dryRun(sendValue, block); - - // receiver should have 100 (0x64) wei - assert.equal( - sendValueChanges.state["0x70997970c51812dc3a010c7d01b50e0d17dc79c8"].info - .balance, - "0x64" - ); - - // create a contract - const createContract: Transaction = { - from: caller.buf, - - gasLimit: BigInt(1000000), - - // minimal creation bytecode - input: Buffer.from("3859818153F3", "hex"), - }; - - const createContractChanges = await rethnet.dryRun(createContract, block); - - assert.exists( - createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] - ); - // check that the code hash is not the null hash (i.e., the address has code) - assert.notEqual( - createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] - .info.code_hash, - "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" - ); - }); -}); From 6aa4b14ff4179b71ede8f6da45e07cf11a5126c3 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Wed, 23 Nov 2022 21:20:23 +0100 Subject: [PATCH 008/406] Vendor bloom implementation --- .../provider/BlockchainData.ts | 2 +- .../hardhat-network/provider/filter.ts | 3 +- .../internal/hardhat-network/provider/node.ts | 7 +- .../hardhat-network/provider/utils/bloom.ts | 84 ++++++++++++++ .../provider/vm/block-builder.ts | 2 +- .../hardhat-network/provider/utils/bloom.ts | 108 ++++++++++++++++++ 6 files changed, 198 insertions(+), 8 deletions(-) create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/utils/bloom.ts create mode 100644 packages/hardhat-core/test/internal/hardhat-network/provider/utils/bloom.ts diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/BlockchainData.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/BlockchainData.ts index a9f0b8d381..d4be557f00 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/BlockchainData.ts +++ 
b/packages/hardhat-core/src/internal/hardhat-network/provider/BlockchainData.ts @@ -2,12 +2,12 @@ import { Block } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import { bufferToHex } from "@nomicfoundation/ethereumjs-util"; -import { Bloom } from "@nomicfoundation/ethereumjs-vm"; import { assertHardhatInvariant } from "../../core/errors"; import { bloomFilter, filterLogs } from "./filter"; import { FilterParams } from "./node-types"; import { RpcLogOutput, RpcReceiptOutput } from "./output"; +import { Bloom } from "./utils/bloom"; interface Reservation { first: bigint; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/filter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/filter.ts index 129c64cd04..f0e6e45b12 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/filter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/filter.ts @@ -1,4 +1,5 @@ -import type { Bloom } from "@nomicfoundation/ethereumjs-vm"; +import type { Bloom } from "./utils/bloom"; + import { bufferToHex, toBuffer } from "@nomicfoundation/ethereumjs-util"; import { RpcLogOutput } from "./output"; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 4e13f3b81a..3a8e29c3ae 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -19,11 +19,7 @@ import { setLengthLeft, toBuffer, } from "@nomicfoundation/ethereumjs-util"; -import { - Bloom, - RunBlockResult, - RunTxResult, -} from "@nomicfoundation/ethereumjs-vm"; +import { RunBlockResult, RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { SignTypedDataVersion, signTypedData } from "@metamask/eth-sig-util"; import chalk from "chalk"; import debug from "debug"; @@ -106,6 +102,7 @@ import { FakeSenderTransaction } from "./transactions/FakeSenderTransaction"; import { TxPool } from "./TxPool"; import { TransactionQueue } from "./TransactionQueue"; import { HardhatBlockchainInterface } from "./types/HardhatBlockchainInterface"; +import { Bloom } from "./utils/bloom"; import { getCurrentTimestamp } from "./utils/getCurrentTimestamp"; import { makeCommon } from "./utils/makeCommon"; import { makeForkClient } from "./utils/makeForkClient"; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/bloom.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/bloom.ts new file mode 100644 index 0000000000..532843c45d --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/bloom.ts @@ -0,0 +1,84 @@ +// This code was adapted from ethereumjs and is distributed under their license: https://github.com/ethereumjs/ethereumjs-monorepo/blob/161a4029c2fc24e5d04da6ad3aab4ac3c72af0f8/packages/vm/LICENSE +// For the original context see: https://github.com/ethereumjs/ethereumjs-monorepo/blob/161a4029c2fc24e5d04da6ad3aab4ac3c72af0f8/packages/vm/src/bloom/index.ts + +import { zeros } from "@nomicfoundation/ethereumjs-util"; +import { keccak256 } from "ethereum-cryptography/keccak"; + +import { assertHardhatInvariant } from "../../../core/errors"; + +/* eslint-disable no-bitwise */ + +const BYTE_SIZE = 256; + +export class Bloom { + bitvector: Buffer; + + /** + * Represents a Bloom filter. 
+ */ + constructor(bitvector?: Buffer) { + if (bitvector === undefined) { + this.bitvector = zeros(BYTE_SIZE); + } else { + assertHardhatInvariant( + bitvector.length === BYTE_SIZE, + "bitvectors must be 2048 bits long" + ); + this.bitvector = bitvector; + } + } + + /** + * Adds an element to a bit vector of a 64 byte bloom filter. + * @param e - The element to add + */ + public add(e: Buffer) { + e = Buffer.from(keccak256(e)); + const mask = 2047; // binary 11111111111 + + for (let i = 0; i < 3; i++) { + const first2bytes = e.readUInt16BE(i * 2); + const loc = mask & first2bytes; + const byteLoc = loc >> 3; + const bitLoc = 1 << loc % 8; + this.bitvector[BYTE_SIZE - byteLoc - 1] |= bitLoc; + } + } + + /** + * Checks if an element is in the bloom. + * @param e - The element to check + */ + public check(e: Buffer): boolean { + e = Buffer.from(keccak256(e)); + const mask = 2047; // binary 11111111111 + let match = true; + + for (let i = 0; i < 3 && match; i++) { + const first2bytes = e.readUInt16BE(i * 2); + const loc = mask & first2bytes; + const byteLoc = loc >> 3; + const bitLoc = 1 << loc % 8; + match = (this.bitvector[BYTE_SIZE - byteLoc - 1] & bitLoc) !== 0; + } + + return Boolean(match); + } + + /** + * Checks if multiple topics are in a bloom. + * @returns `true` if every topic is in the bloom + */ + public multiCheck(topics: Buffer[]): boolean { + return topics.every((t: Buffer) => this.check(t)); + } + + /** + * Bitwise or blooms together. + */ + public or(bloom: Bloom) { + for (let i = 0; i <= BYTE_SIZE; i++) { + this.bitvector[i] = this.bitvector[i] | bloom.bitvector[i]; + } + } +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts index 98f7cc2544..26c090ad84 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts @@ -9,13 +9,13 @@ import { intToBuffer, } from "@nomicfoundation/ethereumjs-util"; import { - Bloom, PostByzantiumTxReceipt, PreByzantiumTxReceipt, RunTxResult, TxReceipt, } from "@nomicfoundation/ethereumjs-vm"; import { fromBigIntLike } from "../../../util/bigint"; +import { Bloom } from "../utils/bloom"; import { VMAdapter } from "./vm-adapter"; diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/bloom.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/bloom.ts new file mode 100644 index 0000000000..24125e69ac --- /dev/null +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/bloom.ts @@ -0,0 +1,108 @@ +// This code was adapted from ethereumjs and is distributed under their license: https://github.com/ethereumjs/ethereumjs-monorepo/blob/161a4029c2fc24e5d04da6ad3aab4ac3c72af0f8/packages/vm/LICENSE +// For the original context see: https://github.com/ethereumjs/ethereumjs-monorepo/blob/161a4029c2fc24e5d04da6ad3aab4ac3c72af0f8/packages/vm/test/api/bloom.spec.ts + +import { assert } from "chai"; +import { Bloom } from "../../../../../src/internal/hardhat-network/provider/utils/bloom"; + +const byteSize = 256; + +function zeros(size: number): Buffer { + return Buffer.allocUnsafe(size).fill(0); +} + +describe("bloom", () => { + it("should initialize without params", () => { + const b = new Bloom(); + assert.deepEqual(b.bitvector, zeros(byteSize), "should be empty"); + }); + + it("shouldnt initialize with invalid bitvector", () => { + assert.throws( + () => new 
Bloom(zeros(byteSize / 2)), + /bitvectors must be 2048 bits long/, + "should fail for invalid length" + ); + }); + + it("should contain values of hardcoded bitvector", () => { + const hex = + "00000000000000000000000000000000000000000000000000000000000000000100000000000000000000000000010000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000200000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000002000000000000000000000000000000000000000000000000000000000000000000"; + const vector = Buffer.from(hex, "hex"); + + const b = new Bloom(vector); + assert.isTrue( + b.check(Buffer.from("value 1", "utf8")), + 'should contain string "value 1"' + ); + assert.isTrue( + b.check(Buffer.from("value 2", "utf8")), + 'should contain string "value 2"' + ); + }); + + it("check shouldnt be tautology", () => { + const b = new Bloom(); + assert.isFalse( + b.check(Buffer.from("random value", "utf8")), + 'should not contain string "random value"' + ); + }); + + it("should correctly add value", () => { + const b = new Bloom(); + b.add(Buffer.from("value", "utf8")); + const found = b.check(Buffer.from("value", "utf8")); + assert.isTrue(found, "should contain added value"); + }); + + it("should check multiple values", () => { + const b = new Bloom(); + b.add(Buffer.from("value 1", "utf8")); + b.add(Buffer.from("value 2", "utf8")); + const found = b.multiCheck([ + Buffer.from("value 1"), + Buffer.from("value 2"), + ]); + assert.isTrue(found, "should contain both values"); + }); + + it("should or two filters", () => { + const b1 = new Bloom(); + b1.add(Buffer.from("value 1", "utf8")); + const b2 = new Bloom(); + b2.add(Buffer.from("value 2", "utf8")); + + b1.or(b2); + assert.isTrue( + b1.check(Buffer.from("value 2", "utf-8")), + 'should contain "value 2" after or' + ); + }); + + it("should generate the correct bloom filter value", () => { + const bloom = new Bloom(); + bloom.add(Buffer.from("1d7022f5b17d2f8b695918fb48fa1089c9f85401", "hex")); + bloom.add( + Buffer.from( + "8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925", + "hex" + ) + ); + bloom.add( + Buffer.from( + "0000000000000000000000005409ed021d9299bf6814279a6a1411a7e866a631", + "hex" + ) + ); + bloom.add( + Buffer.from( + "0000000000000000000000001dc4c1cefef38a777b15aa20260a54e584b16c48", + "hex" + ) + ); + assert.equal( + bloom.bitvector.toString("hex"), + "00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001000081100200000000000000000000000000000000000000000000000000000000008000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000080000000000000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000010000000000000000000002000000000000000004000000000000000000000" + ); + }); +}); From 00cd2199987ebb296c60e8acb2e1944ac797b824 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Thu, 24 Nov 2022 15:59:36 +0100 Subject: [PATCH 009/406] feat: print debug traces if results don't match (#3373) --- .../internal/hardhat-network/provider/node.ts | 3 +- .../provider/vm/block-builder.ts | 2 +- .../hardhat-network/provider/vm/dual.ts | 39 +++++++++----- .../hardhat-network/provider/vm/ethereumjs.ts | 52 +++++++++++++++---- 
.../hardhat-network/provider/vm/rethnet.ts | 10 ++-- .../hardhat-network/provider/vm/vm-adapter.ts | 9 +++- 6 files changed, 84 insertions(+), 31 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index bc18e0629e..a20d266d34 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -2234,7 +2234,8 @@ Hardhat Network's forking functionality only works with blocks from at least spu // know anything about the txs in the current block } - return this._vm.dryRun(tx, blockContext, forceBaseFeeZero); + const [result] = await this._vm.dryRun(tx, blockContext, forceBaseFeeZero); + return result; } private async _computeFilterParams( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts index 06ec5a301b..9ba580ecbb 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts @@ -90,7 +90,7 @@ export class BlockBuilder { calcDifficultyFromHeader: this._opts.parentBlock.header, }); - const result = await this._vm.runTxInBlock(tx, block); + const [result] = await this._vm.runTxInBlock(tx, block); this._transactions.push(tx); this._transactionResults.push(result); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index e8e13f6654..b904cb663a 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -13,11 +13,19 @@ import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface" import { EthereumJSAdapter } from "./ethereumjs"; import { RethnetAdapter } from "./rethnet"; -import { VMAdapter } from "./vm-adapter"; +import { Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/restrict-template-expressions */ +function printEthereumJSTrace(trace: any) { + console.log(JSON.stringify(trace, null, 2)); +} + +function printRethnetTrace(_trace: any) { + // not implemented +} + export class DualModeAdapter implements VMAdapter { constructor( private _ethereumJSAdapter: VMAdapter, @@ -60,22 +68,29 @@ export class DualModeAdapter implements VMAdapter { tx: TypedTransaction, blockContext: Block, forceBaseFeeZero?: boolean - ): Promise { - const ethereumJSResult = await this._ethereumJSAdapter.dryRun( - tx, - blockContext, - forceBaseFeeZero - ); + ): Promise<[RunTxResult, Trace]> { + const [ethereumJSResult, ethereumJSTrace] = + await this._ethereumJSAdapter.dryRun(tx, blockContext, forceBaseFeeZero); - const rethnetResult = await this._rethnetAdapter.dryRun( + const [rethnetResult, rethnetTrace] = await this._rethnetAdapter.dryRun( tx, blockContext, forceBaseFeeZero ); - assertEqualRunTxResults(ethereumJSResult, rethnetResult); - - return rethnetResult; + try { + assertEqualRunTxResults(ethereumJSResult, rethnetResult); + return [rethnetResult, null]; + } catch (e) { + // if the results didn't match, print the traces + console.log("EthereumJS trace"); + printEthereumJSTrace(ethereumJSTrace); + console.log(); + console.log("Rethnet trace"); + 
printRethnetTrace(rethnetTrace); + + throw e; + } } public async getStateRoot(): Promise { @@ -154,7 +169,7 @@ export class DualModeAdapter implements VMAdapter { public async runTxInBlock( tx: TypedTransaction, block: Block - ): Promise { + ): Promise<[RunTxResult, Trace]> { return this._ethereumJSAdapter.runTxInBlock(tx, block); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index 0ce2920572..6164b9dae9 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -30,7 +30,7 @@ import { FakeSenderTransaction } from "../transactions/FakeSenderTransaction"; import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; import { makeForkClient } from "../utils/makeForkClient"; import { makeStateTrie } from "../utils/makeStateTrie"; -import { VMAdapter } from "./vm-adapter"; +import { Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ @@ -123,7 +123,7 @@ export class EthereumJSAdapter implements VMAdapter { tx: TypedTransaction, blockContext: Block, forceBaseFeeZero = false - ): Promise { + ): Promise<[RunTxResult, Trace]> { const initialStateRoot = await this.getStateRoot(); let originalCommon: Common | undefined; @@ -172,13 +172,28 @@ export class EthereumJSAdapter implements VMAdapter { } ); - return await this._vm.runTx({ - block: blockContext, - tx, - skipNonce: true, - skipBalance: true, - skipBlockGasLimitValidation: true, - }); + const vmDebugTracer = new VMDebugTracer(this._vm); + let result: RunTxResult | undefined; + const trace = await vmDebugTracer.trace( + async () => { + result = await this._vm.runTx({ + block: blockContext, + tx, + skipNonce: true, + skipBalance: true, + skipBlockGasLimitValidation: true, + }); + }, + { + disableStorage: true, + disableMemory: true, + disableStack: true, + } + ); + + assertHardhatInvariant(result !== undefined, "Should have a result"); + + return [result, trace]; } finally { if (originalCommon !== undefined) { (this._vm as any)._common = originalCommon; @@ -373,8 +388,23 @@ export class EthereumJSAdapter implements VMAdapter { public async runTxInBlock( tx: TypedTransaction, block: Block - ): Promise { - return this._vm.runTx({ tx, block }); + ): Promise<[RunTxResult, Trace]> { + const vmTracer = new VMDebugTracer(this._vm); + let result: RunTxResult | undefined; + const trace = await vmTracer.trace( + async () => { + result = await this._vm.runTx({ tx, block }); + }, + { + disableStorage: true, + disableMemory: true, + disableStack: true, + } + ); + + assertHardhatInvariant(result !== undefined, "Should have a result"); + + return [result, trace]; } public async addBlockRewards( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index d9c84b47ce..700637f98f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -21,7 +21,7 @@ import { hardforkGte, HardforkName } from "../../../util/hardforks"; import { RpcDebugTraceOutput } from "../output"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; -import { VMAdapter } from "./vm-adapter"; +import { Trace, 
VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/no-unused-vars */ @@ -63,7 +63,7 @@ export class RethnetAdapter implements VMAdapter { tx: TypedTransaction, blockContext: Block, forceBaseFeeZero?: boolean - ): Promise { + ): Promise<[RunTxResult, Trace]> { const rethnetTx = ethereumjsTransactionToRethnet(tx); const difficulty = this._getBlockEnvDifficulty( @@ -83,7 +83,9 @@ export class RethnetAdapter implements VMAdapter { difficulty, }); - return rethnetResultToRunTxResult(rethnetResult.execResult); + const result = rethnetResultToRunTxResult(rethnetResult.execResult); + + return [result, null]; } /** @@ -175,7 +177,7 @@ export class RethnetAdapter implements VMAdapter { public async runTxInBlock( tx: TypedTransaction, block: Block - ): Promise { + ): Promise<[RunTxResult, Trace]> { throw new Error("not implemented"); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index 61a78f41df..536c826b9c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -10,12 +10,14 @@ import type { Account, Address } from "@nomicfoundation/ethereumjs-util"; import type { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; import type { RpcDebugTraceOutput } from "../output"; +export type Trace = any; + export interface VMAdapter { dryRun( tx: TypedTransaction, blockContext: Block, forceBaseFeeZero?: boolean - ): Promise; + ): Promise<[RunTxResult, Trace]>; // getters getAccount(address: Address): Promise; @@ -41,7 +43,10 @@ export interface VMAdapter { // methods for block-building startBlock(): Promise; - runTxInBlock(tx: TypedTransaction, block: Block): Promise; + runTxInBlock( + tx: TypedTransaction, + block: Block + ): Promise<[RunTxResult, Trace]>; addBlockRewards(rewards: Array<[Address, bigint]>): Promise; sealBlock(): Promise; revertBlock(): Promise; From c3f370b55fcd0b25cb389b3bc8c544ab0ec565ed Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Mon, 28 Nov 2022 11:55:01 +0100 Subject: [PATCH 010/406] Temporary "as any"s in runFullBlock.ts module --- .../internal/hardhat-network/provider/utils/runFullBlock.ts | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts index 5a16a8182b..cbf529c1c1 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts @@ -72,7 +72,8 @@ export async function runFullBlock( forkedNode["_vmTracer"].disableTracing(); const afterBlockEvent = await runBlockAndGetAfterBlockEvent( - forkedNode["_vm"], + // TODO remove "as any" and make this work with VMAdapter + forkedNode["_vm"] as any, { block, generate: true, @@ -82,7 +83,8 @@ export async function runFullBlock( const modifiedBlock = afterBlockEvent.block; - await forkedNode["_vm"].blockchain.putBlock(modifiedBlock); + // TODO remove "as any" and make this work with VMAdapter + await (forkedNode["_vm"] as any).blockchain.putBlock(modifiedBlock); await forkedNode["_saveBlockAsSuccessfullyRun"]( modifiedBlock, afterBlockEvent From 
f652941f12344012ea101e995963d50f30c22dd3 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Mon, 5 Dec 2022 18:17:54 +0100 Subject: [PATCH 011/406] Define minimal types involved in tracing (#3377) --- crates/rethnet_evm_napi/src/lib.rs | 38 ++++++ .../hardhat-network/provider/modules/eth.ts | 4 +- .../provider/modules/logger.ts | 8 +- .../hardhat-network/provider/node-types.ts | 2 +- .../internal/hardhat-network/provider/node.ts | 52 +++---- .../hardhat-network/provider/output.ts | 6 +- .../provider/utils/assertions.ts | 109 --------------- .../provider/utils/convertToRethnet.ts | 85 +----------- .../provider/vm/block-builder.ts | 5 +- .../hardhat-network/provider/vm/dual.ts | 51 ++----- .../hardhat-network/provider/vm/ethereumjs.ts | 127 ++++++++++++++---- .../hardhat-network/provider/vm/exit.ts | 124 +++++++++++++++++ .../hardhat-network/provider/vm/rethnet.ts | 10 +- .../hardhat-network/provider/vm/vm-adapter.ts | 45 +++++-- .../hardhat-network/stack-traces/debug.ts | 12 +- .../stack-traces/error-inferrer.ts | 15 +-- .../stack-traces/message-trace.ts | 6 +- .../stack-traces/solidityTracer.ts | 10 +- .../hardhat-network/stack-traces/vm-tracer.ts | 53 +++++--- .../hardhat-network/stack-traces/test.ts | 8 +- 20 files changed, 413 insertions(+), 357 deletions(-) create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index 036c9b3769..8824ef1919 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -451,6 +451,44 @@ pub struct DatabaseDebugCallbacks { pub set_account_storage_slot_fn: JsFunction, } +#[napi(object)] +pub struct TracingMessage { + /// Recipient address. None if it is a Create message. + #[napi(readonly)] + pub to: Option, + + /// Depth of the message + #[napi(readonly)] + pub depth: u8, + + /// Input data of the message + #[napi(readonly)] + pub data: Buffer, + + /// Value sent in the message + #[napi(readonly)] + pub value: BigInt, + + /// Address of the code that is being executed. Can be different from `to` if a delegate call + /// is being done. 
+ #[napi(readonly)] + pub code_address: Option, +} + +#[napi(object)] +pub struct TracingStep { + /// Program counter + #[napi(readonly)] + pub pc: BigInt, +} + +#[napi(object)] +pub struct TracingMessageResult { + /// Execution result + #[napi(readonly)] + pub execution_result: ExecutionResult, +} + #[napi] pub struct Rethnet { client: Client, diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/modules/eth.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/modules/eth.ts index 9e52201690..5a391fe027 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/modules/eth.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/modules/eth.ts @@ -1565,7 +1565,7 @@ export class EthModule { if (singleTransactionMined) { const block = results[0].block; const tx = block.transactions[0]; - const txGasUsed = results[0].blockResult.results[0].totalGasSpent; + const txGasUsed = results[0].blockResult.results[0].gasUsed; const trace = results[0].traces[0]; await this._logSingleTransaction(tx, block, txGasUsed, trace); @@ -1591,7 +1591,7 @@ export class EthModule { ); const { block, blockResult } = sentTxResult; - const gasUsed = blockResult.results[sentTxIndex].totalGasSpent; + const gasUsed = blockResult.results[sentTxIndex].gasUsed; this._logger.logCurrentlySentTransaction( sentTx, gasUsed, diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/modules/logger.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/modules/logger.ts index c3b3f5df5d..8471a11ce8 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/modules/logger.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/modules/logger.ts @@ -109,7 +109,7 @@ export class ModulesLogger { for (let i = 0; i < block.transactions.length; i++) { const tx = block.transactions[i]; - const txGasUsed = results[i].totalGasSpent; + const txGasUsed = results[i].gasUsed; const txTrace = traces[i]; const code = codes[i]; @@ -156,7 +156,7 @@ export class ModulesLogger { for (let i = 0; i < block.transactions.length; i++) { const tx = block.transactions[i]; - const txGasUsed = results[i].totalGasSpent; + const txGasUsed = results[i].gasUsed; const txTrace = traces[i]; const code = codes[i]; @@ -188,7 +188,7 @@ export class ModulesLogger { for (let i = 0; i < block.transactions.length; i++) { const tx = block.transactions[i]; - const txGasUsed = results[i].totalGasSpent; + const txGasUsed = results[i].gasUsed; const txTrace = traces[i]; const code = codes[i]; @@ -661,7 +661,7 @@ export class ModulesLogger { this._logWithTitle("Contract deployment", trace.bytecode.contract.name); } - if (trace.deployedContract !== undefined && trace.error === undefined) { + if (trace.deployedContract !== undefined && !trace.exit.isError()) { this._logWithTitle( "Contract address", bufferToHex(trace.deployedContract) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node-types.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node-types.ts index ce0effa151..ab8b5a78d2 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node-types.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node-types.ts @@ -1,5 +1,5 @@ -import type { RunBlockResult } from "@nomicfoundation/ethereumjs-vm"; import type { ReturnData } from "./return-data"; +import type { RunBlockResult } from "./vm/vm-adapter"; import { Block } from "@nomicfoundation/ethereumjs-block"; diff --git 
a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index a20d266d34..08ea4fcb6c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -1,4 +1,3 @@ -import type { EVMResult } from "@nomicfoundation/ethereumjs-evm"; import { Block, HeaderData } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; import { @@ -7,7 +6,6 @@ import { Transaction, TypedTransaction, } from "@nomicfoundation/ethereumjs-tx"; -import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; import { Address, ECDSASignature, @@ -19,7 +17,6 @@ import { setLengthLeft, toBuffer, } from "@nomicfoundation/ethereumjs-util"; -import { RunBlockResult, RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { SignTypedDataVersion, signTypedData } from "@metamask/eth-sig-util"; import chalk from "chalk"; import debug from "debug"; @@ -111,10 +108,9 @@ import { putGenesisBlock } from "./utils/putGenesisBlock"; import { txMapToArray } from "./utils/txMapToArray"; import { RandomBufferGenerator } from "./utils/random"; import { DualModeAdapter } from "./vm/dual"; -import { VMAdapter } from "./vm/vm-adapter"; +import { RunBlockResult, RunTxResult, VMAdapter } from "./vm/vm-adapter"; import { BlockBuilder } from "./vm/block-builder"; - -type ExecResult = EVMResult["execResult"]; +import { ExitCode, Exit } from "./vm/exit"; const log = debug("hardhat:core:hardhat-network:node"); @@ -614,11 +610,11 @@ Hardhat Network's forking functionality only works with blocks from at least spu async () => this._runTxAndRevertMutations(tx, blockNumberOrPending, true) ); - const traces = await this._gatherTraces(result.execResult); + const traces = await this._gatherTraces(result); return { ...traces, - result: new ReturnData(result.execResult.returnValue), + result: new ReturnData(result.returnValue), }; } @@ -768,20 +764,16 @@ Hardhat Network's forking functionality only works with blocks from at least spu // This is only considered if the call to _runTxAndRevertMutations doesn't // manage errors - if (result.execResult.exceptionError !== undefined) { + if (result.exit.isError()) { return { estimation: this.getBlockGasLimit(), trace: vmTrace, - error: await this._manageErrors( - result.execResult, - vmTrace, - vmTracerError - ), + error: await this._manageErrors(result, vmTrace, vmTracerError), consoleLogMessages, }; } - const initialEstimation = result.totalGasSpent; + const initialEstimation = result.gasUsed; return { estimation: await this._correctInitialEstimation( @@ -1562,7 +1554,9 @@ Hardhat Network's forking functionality only works with blocks from at least spu return results; } - private async _gatherTraces(result: ExecResult): Promise { + private async _gatherTraces( + result: RunTxResult + ): Promise { let vmTrace = this._vmTracer.getLastTopLevelMessageTrace(); const vmTracerError = this._vmTracer.getLastError(); this._vmTracer.clearLastError(); @@ -1702,7 +1696,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu } else { const txResult = await blockBuilder.addTransaction(tx); - traces.push(await this._gatherTraces(txResult.execResult)); + traces.push(await this._gatherTraces(txResult)); results.push(txResult); receipts.push(txResult.receipt); } @@ -1813,11 +1807,11 @@ Hardhat Network's forking functionality only works with blocks from at least spu } 
private async _manageErrors( - vmResult: ExecResult, + vmResult: RunTxResult, vmTrace: MessageTrace | undefined, vmTracerError: Error | undefined ): Promise { - if (vmResult.exceptionError === undefined) { + if (!vmResult.exit.isError()) { return undefined; } @@ -1837,20 +1831,18 @@ Hardhat Network's forking functionality only works with blocks from at least spu ); } - const error = vmResult.exceptionError; + const exitCode = vmResult.exit; - // we don't use `instanceof` in case someone uses a different VM dependency - // see https://github.com/nomiclabs/hardhat/issues/1317 - const isVmError = "error" in error && typeof error.error === "string"; + const isExitCode = exitCode instanceof Exit; // If this is not a VM error, or if it's an internal VM error, we just // rethrow. An example of a non-VmError being thrown here is an HTTP error // coming from the ForkedStateManager. - if (!isVmError || error.error === ERROR.INTERNAL_ERROR) { - throw error; + if (!isExitCode || exitCode.kind === ExitCode.INTERNAL_ERROR) { + throw exitCode; } - if (error.error === ERROR.CODESIZE_EXCEEDS_MAXIMUM) { + if (exitCode.kind === ExitCode.CODESIZE_EXCEEDS_MAXIMUM) { if (stackTrace !== undefined) { return encodeSolidityStackTrace( "Transaction ran out of gas", @@ -1861,7 +1853,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu return new TransactionExecutionError("Transaction ran out of gas"); } - if (error.error === ERROR.OUT_OF_GAS) { + if (exitCode.kind === ExitCode.OUT_OF_GAS) { // if the error is an out of gas, we ignore the inferred error in the // trace return new TransactionExecutionError("Transaction ran out of gas"); @@ -1881,7 +1873,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu returnDataExplanation = "with unrecognized return data or custom error"; } - if (error.error === ERROR.REVERT) { + if (exitCode.kind === ExitCode.REVERT) { const fallbackMessage = `VM Exception while processing transaction: revert ${returnDataExplanation}`; if (stackTrace !== undefined) { @@ -2112,7 +2104,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu this._runTxAndRevertMutations(tx, blockNumberOrPending) ); - if (result.execResult.exceptionError === undefined) { + if (!result.exit.isError()) { return initialEstimation; } @@ -2186,7 +2178,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu this._runTxAndRevertMutations(tx, blockNumberOrPending) ); - if (result.execResult.exceptionError === undefined) { + if (!result.exit.isError()) { return this._binarySearchEstimation( blockNumberOrPending, txParams, diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/output.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/output.ts index 500156ff50..ef9e712a41 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/output.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/output.ts @@ -1,4 +1,4 @@ -import type { RunBlockResult } from "@nomicfoundation/ethereumjs-vm"; +import type { RunBlockResult } from "./vm/vm-adapter"; import { Block } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; @@ -295,7 +295,7 @@ export function getRpcReceiptOutputsFromLocalBlockExecution( for (let i = 0; i < runBlockResult.results.length; i += 1) { const tx = block.transactions[i]; - const { createdAddress, totalGasSpent } = 
runBlockResult.results[i]; + const { createdAddress, gasUsed } = runBlockResult.results[i]; const receipt = runBlockResult.receipts[i]; const logs = receipt.logs.map((log) => { @@ -312,7 +312,7 @@ export function getRpcReceiptOutputsFromLocalBlockExecution( from: bufferToRpcData(tx.getSenderAddress().toBuffer()), to: tx.to === undefined ? null : bufferToRpcData(tx.to.toBuffer()), cumulativeGasUsed: numberToRpcQuantity(receipt.cumulativeBlockGasUsed), - gasUsed: numberToRpcQuantity(totalGasSpent), + gasUsed: numberToRpcQuantity(gasUsed), contractAddress: createdAddress !== undefined ? bufferToRpcData(createdAddress.toBuffer()) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts index cd1c112036..7dfad757c0 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/assertions.ts @@ -1,8 +1,3 @@ -import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; -import assert, { AssertionError } from "assert"; -import { ExecutionResult } from "rethnet-evm"; -import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; - import { InternalError } from "../../../core/providers/errors"; export function assertHardhatNetworkInvariant( @@ -16,107 +11,3 @@ export function assertHardhatNetworkInvariant( ); } } - -export function assertEthereumJsAndRethnetResults( - rethnetResult: ExecutionResult, - ethereumjsResult: RunTxResult -): asserts rethnetResult { - assertEthereumJsAndRethnetExitCodes( - rethnetResult.exitCode, - ethereumjsResult.execResult.exceptionError?.error - ); - assertEqual( - rethnetResult.gasRefunded, - ethereumjsResult.gasRefund, - "Gas refunded" - ); - - assertEqual( - rethnetResult.gasUsed, - ethereumjsResult.totalGasSpent, - "Gas used" - ); - - const rethnetCreatedAddress = rethnetResult.output.address?.toString("hex"); - const ethereumjsCreatedAddress = ethereumjsResult.createdAddress - ?.toString() - .slice(2); // remove the 0x prefix - - assertEqual( - rethnetCreatedAddress, - ethereumjsCreatedAddress, - "Created address" - ); - - if (ethereumjsResult.createdAddress === undefined) { - assertEqual( - rethnetResult.output.output?.toString("hex"), - ethereumjsResult.execResult.returnValue.toString("hex"), - "Return value" - ); - } - // TODO: Compare logs? -} - -function assertEthereumJsAndRethnetExitCodes( - rethnetExitCode: number, - ethereumjsExitCode: ERROR | undefined -) { - // assert(ethereumjsExitCode === undefined && !( - // rethnetExitCode === 0x00 || - // rethnetExitCode === 0x02 || - // rethnetExitCode === 0x03), "Expected a successful exit code"); - - const mapping = new Map([ - [ERROR.OUT_OF_GAS, [0x50]], - [ERROR.CODESTORE_OUT_OF_GAS, undefined], - [ERROR.CODESIZE_EXCEEDS_MAXIMUM, undefined], - [ERROR.STACK_UNDERFLOW, [0x57]], - [ERROR.STACK_OVERFLOW, [0x58]], - [ERROR.INVALID_JUMP, [0x54]], - [ERROR.INVALID_OPCODE, [0x51, 0x53]], - [ERROR.OUT_OF_RANGE, [0x59]], // ? - [ERROR.REVERT, [0x20]], - [ERROR.STATIC_STATE_CHANGE, [0x52]], // ? - [ERROR.INTERNAL_ERROR, undefined], - [ERROR.CREATE_COLLISION, [0x60]], - [ERROR.STOP, [0x01]], - [ERROR.REFUND_EXHAUSTED, undefined], - [ERROR.VALUE_OVERFLOW, undefined], - [ERROR.INSUFFICIENT_BALANCE, undefined], - [ERROR.INVALID_BEGINSUB, undefined], - [ERROR.INVALID_RETURNSUB, undefined], - [ERROR.INVALID_JUMPSUB, undefined], - [ERROR.INVALID_BYTECODE_RESULT, [0x53]], // ? 
- [ERROR.INVALID_EOF_FORMAT, undefined], - [ERROR.INITCODE_SIZE_VIOLATION, [0x64]], // ? - [ERROR.AUTHCALL_UNSET, undefined], - [ERROR.AUTHCALL_NONZERO_VALUEEXT, undefined], - [ERROR.AUTH_INVALID_S, undefined], - [ERROR.BLS_12_381_INVALID_INPUT_LENGTH, undefined], - [ERROR.BLS_12_381_POINT_NOT_ON_CURVE, undefined], - [ERROR.BLS_12_381_INPUT_EMPTY, undefined], - [ERROR.BLS_12_381_FP_NOT_IN_FIELD, undefined], - ]); - - if (ethereumjsExitCode !== undefined) { - const expected = mapping.get(ethereumjsExitCode); - if (expected !== undefined) { - assert( - expected.includes(rethnetExitCode), - `Expected rethnet's exit code ${rethnetExitCode} to be included in ${expected.join( - ", " - )}` - ); - } - } -} - -function assertEqual(rethnetValue: any, ethereumJsValue: any, field: string) { - if (rethnetValue !== ethereumJsValue) { - // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error - throw new AssertionError({ - message: `Expected '${field}' to match, but rethnet returned ${rethnetValue} and ethereumjs returned ${ethereumJsValue}`, - }); - } -} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index a37edbb108..69939350d0 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -1,7 +1,4 @@ -import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; -import { EvmError } from "@nomicfoundation/ethereumjs-evm"; -import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; import { DefaultStateManager, StateManager, @@ -25,6 +22,8 @@ import { Rethnet, Transaction, } from "rethnet-evm"; +import { Exit } from "../vm/exit"; +import { RunTxResult } from "../vm/vm-adapter"; export class HardhatDB { private _stateManager: StateManager; @@ -199,96 +198,26 @@ export function createRethnetFromHardhatDB( export function rethnetResultToRunTxResult( rethnetResult: ExecutionResult ): RunTxResult { + const vmError = Exit.fromRethnetExitCode(rethnetResult.exitCode); // We return an object with only the properties that are used by Hardhat. // To be extra sure that the other properties are not used, we add getters // that exit the process if accessed. + return { - totalGasSpent: rethnetResult.gasUsed, - gasRefund: rethnetResult.gasRefunded, + gasUsed: rethnetResult.gasUsed, createdAddress: rethnetResult.output.address !== undefined ? new Address(rethnetResult.output.address) : undefined, - execResult: { - exceptionError: mapRethnetExitCodeToEthereumJsExceptionError( - rethnetResult.exitCode - ), - returnValue: rethnetResult.output.output ?? 
Buffer.from([]), - - get runState(): any { - console.trace("execResult.runState not implemented"); - return process.exit(1); - }, - get gas(): any { - console.trace("execResult.gas not implemented"); - return process.exit(1); - }, - get executionGasUsed(): any { - console.trace("execResult.executionGasUsed not implemented"); - return process.exit(1); - }, - get logs(): any { - console.trace("execResult.logs not implemented"); - return process.exit(1); - }, - get selfdestruct(): any { - console.trace("execResult.selfdestruct not implemented"); - return process.exit(1); - }, - get gasRefund(): any { - console.trace("execResult.gasRefund not implemented"); - return process.exit(1); - }, - }, - + exit: vmError, + returnValue: rethnetResult.output.output ?? Buffer.from([]), get bloom(): any { console.trace("bloom not implemented"); return process.exit(1); }, - get amountSpent(): any { - console.trace("amountSpent not implemented"); - return process.exit(1); - }, get receipt(): any { console.trace("receipt not implemented"); return process.exit(1); }, - get accessList(): any { - console.trace("accessList not implemented"); - return process.exit(1); - }, }; } - -const rethnetExitCodeToEthereumJsError = new Map([ - [0x50, ERROR.OUT_OF_GAS], - [0x57, ERROR.STACK_UNDERFLOW], - [0x58, ERROR.STACK_OVERFLOW], - [0x54, ERROR.INVALID_JUMP], - [0x51, ERROR.INVALID_OPCODE], - [0x53, ERROR.INVALID_OPCODE], - [0x59, ERROR.OUT_OF_RANGE], - [0x20, ERROR.REVERT], - [0x52, ERROR.STATIC_STATE_CHANGE], - [0x60, ERROR.CREATE_COLLISION], - [0x01, ERROR.STOP], - [0x53, ERROR.INVALID_BYTECODE_RESULT], - [0x64, ERROR.INITCODE_SIZE_VIOLATION], -]); - -function mapRethnetExitCodeToEthereumJsExceptionError( - rethnetExitCode: number -): EvmError | undefined { - if (rethnetExitCode <= 0x03) { - return; - } - - const ethereumJsError = rethnetExitCodeToEthereumJsError.get(rethnetExitCode); - if (ethereumJsError === undefined) { - console.trace(`Couldn't map exit code ${rethnetExitCode}`); - // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error - throw new Error(`Couldn't map exit code ${rethnetExitCode}`); - } - - return new EvmError(ethereumJsError); -} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts index 9ba580ecbb..0af88fee5a 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts @@ -12,13 +12,12 @@ import { import { PostByzantiumTxReceipt, PreByzantiumTxReceipt, - RunTxResult, TxReceipt, } from "@nomicfoundation/ethereumjs-vm"; import { fromBigIntLike } from "../../../util/bigint"; import { Bloom } from "../utils/bloom"; -import { VMAdapter } from "./vm-adapter"; +import { RunTxResult, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ @@ -94,7 +93,7 @@ export class BlockBuilder { this._transactions.push(tx); this._transactionResults.push(result); - this._gasUsed += result.totalGasSpent; + this._gasUsed += result.gasUsed; return result; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index b904cb663a..f2cd944057 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -1,5 +1,3 
@@ -import type { Message } from "@nomicfoundation/ethereumjs-evm"; -import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { Block } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; @@ -13,7 +11,7 @@ import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface" import { EthereumJSAdapter } from "./ethereumjs"; import { RethnetAdapter } from "./rethnet"; -import { Trace, VMAdapter } from "./vm-adapter"; +import { RunTxResult, Trace, TracingCallbacks, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/restrict-template-expressions */ @@ -52,10 +50,7 @@ export class DualModeAdapter implements VMAdapter { selectHardfork, async (blockNumber) => { const block = await blockchain.getBlock(blockNumber); - assertHardhatInvariant( - block !== undefined && block !== null, - "Should be able to get block" - ); + assertHardhatInvariant(block !== null, "Should be able to get block"); return block.header.hash(); } @@ -140,11 +135,7 @@ export class DualModeAdapter implements VMAdapter { return this._ethereumJSAdapter.traceTransaction(hash, block, config); } - public enableTracing(callbacks: { - beforeMessage: (message: Message, next: any) => Promise; - step: () => Promise; - afterMessage: () => Promise; - }): void { + public enableTracing(callbacks: TracingCallbacks): void { return this._ethereumJSAdapter.enableTracing(callbacks); } @@ -192,18 +183,13 @@ function assertEqualRunTxResults( ethereumJSResult: RunTxResult, rethnetResult: RunTxResult ) { - if (ethereumJSResult.totalGasSpent !== rethnetResult.totalGasSpent) { + if (ethereumJSResult.gasUsed !== rethnetResult.gasUsed) { console.trace( - `Different totalGasSpent: ${ethereumJSResult.totalGasSpent} !== ${rethnetResult.totalGasSpent}` + `Different totalGasSpent: ${ethereumJSResult.gasUsed} !== ${rethnetResult.gasUsed}` ); throw new Error("Different totalGasSpent"); } - if (ethereumJSResult.gasRefund !== rethnetResult.gasRefund) { - console.trace( - `Different gasRefund: ${ethereumJSResult.gasRefund} !== ${rethnetResult.gasRefund}` - ); - throw new Error("Different gasRefund"); - } + if ( ethereumJSResult.createdAddress?.toString() !== rethnetResult.createdAddress?.toString() @@ -214,38 +200,25 @@ function assertEqualRunTxResults( throw new Error("Different createdAddress"); } - if ( - ethereumJSResult.execResult.exceptionError?.error !== - rethnetResult.execResult.exceptionError?.error - ) { + if (ethereumJSResult.exit.kind !== rethnetResult.exit.kind) { console.trace( - `Different exceptionError.error: ${ethereumJSResult.execResult.exceptionError?.error} !== ${rethnetResult.execResult.exceptionError?.error}` + `Different exceptionError.error: ${ethereumJSResult.exit.kind} !== ${rethnetResult.exit.kind}` ); throw new Error("Different exceptionError.error"); } - if ( - ethereumJSResult.execResult.exceptionError?.errorType !== - rethnetResult.execResult.exceptionError?.errorType - ) { - console.trace( - `Different exceptionError.errorType: ${ethereumJSResult.execResult.exceptionError?.errorType} !== ${rethnetResult.execResult.exceptionError?.errorType}` - ); - throw new Error("Different exceptionError.errorType"); - } - // TODO: we only compare the return values when a contract was *not* created, // because sometimes ethereumjs has the created bytecode in the return value // and rethnet doesn't if 
(ethereumJSResult.createdAddress === undefined) { if ( - ethereumJSResult.execResult.returnValue.toString("hex") !== - rethnetResult.execResult.returnValue.toString("hex") + ethereumJSResult.returnValue.toString("hex") !== + rethnetResult.returnValue.toString("hex") ) { console.trace( - `Different returnValue: ${ethereumJSResult.execResult.returnValue.toString( + `Different returnValue: ${ethereumJSResult.returnValue.toString( "hex" - )} !== ${rethnetResult.execResult.returnValue.toString("hex")}` + )} !== ${rethnetResult.returnValue.toString("hex")}` ); throw new Error("Different returnValue"); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index 6164b9dae9..4410e85156 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -12,7 +12,11 @@ import { } from "@nomicfoundation/ethereumjs-statemanager"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import { Account, Address } from "@nomicfoundation/ethereumjs-util"; -import { EEI, RunTxResult, VM } from "@nomicfoundation/ethereumjs-vm"; +import { + EEI, + RunTxResult as EthereumJSRunTxResult, + VM, +} from "@nomicfoundation/ethereumjs-vm"; import { assertHardhatInvariant } from "../../../core/errors"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; import { @@ -30,16 +34,11 @@ import { FakeSenderTransaction } from "../transactions/FakeSenderTransaction"; import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; import { makeForkClient } from "../utils/makeForkClient"; import { makeStateTrie } from "../utils/makeStateTrie"; -import { Trace, VMAdapter } from "./vm-adapter"; +import { Exit } from "./exit"; +import { RunTxResult, Trace, TracingCallbacks, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ -interface TracingCallbacks { - beforeMessage: (message: Message, next: any) => Promise; - step: (step: InterpreterStep, next: any) => Promise; - afterMessage: (result: EVMResult, next: any) => Promise; -} - export class EthereumJSAdapter implements VMAdapter { private _tracingCallbacks: TracingCallbacks | undefined; @@ -173,10 +172,10 @@ export class EthereumJSAdapter implements VMAdapter { ); const vmDebugTracer = new VMDebugTracer(this._vm); - let result: RunTxResult | undefined; + let ethereumJSResult: EthereumJSRunTxResult | undefined; const trace = await vmDebugTracer.trace( async () => { - result = await this._vm.runTx({ + ethereumJSResult = await this._vm.runTx({ block: blockContext, tx, skipNonce: true, @@ -191,7 +190,20 @@ export class EthereumJSAdapter implements VMAdapter { } ); - assertHardhatInvariant(result !== undefined, "Should have a result"); + assertHardhatInvariant( + ethereumJSResult !== undefined, + "Should have a result" + ); + + const ethereumJSError = ethereumJSResult.execResult.exceptionError; + const result: RunTxResult = { + bloom: ethereumJSResult.bloom, + gasUsed: ethereumJSResult.totalGasSpent, + receipt: ethereumJSResult.receipt, + returnValue: ethereumJSResult.execResult.returnValue, + createdAddress: ethereumJSResult.createdAddress, + exit: Exit.fromEthereumJSEvmError(ethereumJSError), + }; return [result, trace]; } finally { @@ -252,12 +264,9 @@ export class EthereumJSAdapter implements VMAdapter { this._tracingCallbacks = 
callbacks; - this._vm.evm.events.on( - "beforeMessage", - this._tracingCallbacks.beforeMessage - ); - this._vm.evm.events.on("step", this._tracingCallbacks.step); - this._vm.evm.events.on("afterMessage", this._tracingCallbacks.afterMessage); + this._vm.evm.events.on("beforeMessage", this._beforeMessageHandler); + this._vm.evm.events.on("step", this._stepHandler); + this._vm.evm.events.on("afterMessage", this._afterMessageHandler); } public disableTracing(): void { @@ -269,12 +278,12 @@ export class EthereumJSAdapter implements VMAdapter { if (this._tracingCallbacks !== undefined) { this._vm.evm.events.removeListener( "beforeMessage", - this._tracingCallbacks.beforeMessage + this._beforeMessageHandler ); - this._vm.evm.events.removeListener("step", this._tracingCallbacks.step); + this._vm.evm.events.removeListener("step", this._stepHandler); this._vm.evm.events.removeListener( "afterMessage", - this._tracingCallbacks.afterMessage + this._afterMessageHandler ); this._tracingCallbacks = undefined; @@ -390,10 +399,10 @@ export class EthereumJSAdapter implements VMAdapter { block: Block ): Promise<[RunTxResult, Trace]> { const vmTracer = new VMDebugTracer(this._vm); - let result: RunTxResult | undefined; + let ethereumJSResult: EthereumJSRunTxResult | undefined; const trace = await vmTracer.trace( async () => { - result = await this._vm.runTx({ tx, block }); + ethereumJSResult = await this._vm.runTx({ tx, block }); }, { disableStorage: true, @@ -402,7 +411,20 @@ export class EthereumJSAdapter implements VMAdapter { } ); - assertHardhatInvariant(result !== undefined, "Should have a result"); + assertHardhatInvariant( + ethereumJSResult !== undefined, + "Should have a result" + ); + + const ethereumJSError = ethereumJSResult.execResult.exceptionError; + const result: RunTxResult = { + bloom: ethereumJSResult.bloom, + gasUsed: ethereumJSResult.totalGasSpent, + receipt: ethereumJSResult.receipt, + returnValue: ethereumJSResult.execResult.returnValue, + createdAddress: ethereumJSResult.createdAddress, + exit: Exit.fromEthereumJSEvmError(ethereumJSError), + }; return [result, trace]; } @@ -466,4 +488,63 @@ export class EthereumJSAdapter implements VMAdapter { } return this._common.gteHardfork("london"); } + + private _beforeMessageHandler = (message: Message, next: any) => { + if (this._tracingCallbacks !== undefined) { + return this._tracingCallbacks.beforeMessage( + { + ...message, + to: message.to?.toBuffer(), + codeAddress: + message.to !== undefined + ? message.codeAddress.toBuffer() + : undefined, + }, + next + ); + } + + next(); + }; + + private _stepHandler = (step: InterpreterStep, next: any) => { + if (this._tracingCallbacks !== undefined) { + return this._tracingCallbacks.step( + { + pc: BigInt(step.pc), + }, + next + ); + } + + next(); + }; + + private _afterMessageHandler = (result: EVMResult, next: any) => { + if (this._tracingCallbacks !== undefined) { + const vmError = Exit.fromEthereumJSEvmError( + result.execResult.exceptionError + ); + + const rethnetExitCode = vmError.getRethnetExitCode(); + + return this._tracingCallbacks.afterMessage( + { + executionResult: { + exitCode: rethnetExitCode, + output: { + address: result.createdAddress?.toBuffer(), + output: result.execResult.returnValue, + }, + gasUsed: result.execResult.executionGasUsed, + gasRefunded: result.execResult.gasRefund ?? 0n, + logs: result.execResult.logs ?? 
[], + }, + }, + next + ); + } + + next(); + }; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts new file mode 100644 index 0000000000..feff8d1309 --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts @@ -0,0 +1,124 @@ +import { EvmError } from "@nomicfoundation/ethereumjs-evm"; +import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; + +export enum ExitCode { + SUCCESS, + REVERT, + OUT_OF_GAS, + INTERNAL_ERROR, + INVALID_OPCODE, + CODESIZE_EXCEEDS_MAXIMUM, +} + +const exitCodeToRethnetExitCode: Record = { + [ExitCode.SUCCESS]: 0x00, + [ExitCode.REVERT]: 0x20, + [ExitCode.OUT_OF_GAS]: 0x50, + [ExitCode.INTERNAL_ERROR]: 0x20, + [ExitCode.INVALID_OPCODE]: 0x53, + [ExitCode.CODESIZE_EXCEEDS_MAXIMUM]: 0x65, +}; + +export class Exit { + public static fromRethnetExitCode(rethnetExitCode: number): Exit { + switch (rethnetExitCode) { + case 0x00: + case 0x01: + case 0x02: + case 0x03: + return new Exit(ExitCode.SUCCESS); + case 0x20: + return new Exit(ExitCode.REVERT); + case 0x50: + return new Exit(ExitCode.OUT_OF_GAS); + case 0x51: + case 0x53: + return new Exit(ExitCode.INVALID_OPCODE); + case 0x65: + return new Exit(ExitCode.CODESIZE_EXCEEDS_MAXIMUM); + default: { + // TODO temporary, should be removed in production + // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error + throw new Error(`Unmatched rethnet exit code: ${rethnetExitCode}`); + } + } + } + + public static fromEthereumJSEvmError(evmError: EvmError | undefined): Exit { + if (evmError === undefined) { + return new Exit(ExitCode.SUCCESS); + } + + if (evmError.error === ERROR.REVERT) { + return new Exit(ExitCode.REVERT); + } + + if (evmError.error === ERROR.OUT_OF_GAS) { + return new Exit(ExitCode.OUT_OF_GAS); + } + + if (evmError.error === ERROR.INTERNAL_ERROR) { + return new Exit(ExitCode.INTERNAL_ERROR); + } + + if (evmError.error === ERROR.INVALID_OPCODE) { + return new Exit(ExitCode.INVALID_OPCODE); + } + + if (evmError.error === ERROR.CODESIZE_EXCEEDS_MAXIMUM) { + return new Exit(ExitCode.CODESIZE_EXCEEDS_MAXIMUM); + } + + // TODO temporary, should be removed in production + // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error + throw new Error(`Unmatched rethnet exit code: ${evmError.error}`); + } + + constructor(public kind: ExitCode) {} + + public isError(): boolean { + return this.kind !== ExitCode.SUCCESS; + } + + public getReason(): string { + switch (this.kind) { + case ExitCode.SUCCESS: + return "Success"; + case ExitCode.REVERT: + return "Reverted"; + case ExitCode.OUT_OF_GAS: + return "Out of gas"; + case ExitCode.INTERNAL_ERROR: + return "Internal error"; + case ExitCode.INVALID_OPCODE: + return "Invalid opcode"; + case ExitCode.CODESIZE_EXCEEDS_MAXIMUM: + return "Codesize exceeds maximum"; + } + + const _exhaustiveCheck: never = this.kind; + } + + public getEthereumJSError(): EvmError | undefined { + switch (this.kind) { + case ExitCode.SUCCESS: + return; + case ExitCode.REVERT: + return new EvmError(ERROR.REVERT); + case ExitCode.OUT_OF_GAS: + return new EvmError(ERROR.OUT_OF_GAS); + case ExitCode.INTERNAL_ERROR: + return new EvmError(ERROR.INTERNAL_ERROR); + case ExitCode.INVALID_OPCODE: + return new EvmError(ERROR.INVALID_OPCODE); + case ExitCode.CODESIZE_EXCEEDS_MAXIMUM: + return new EvmError(ERROR.CODESIZE_EXCEEDS_MAXIMUM); + } + + const _exhaustiveCheck: never = this.kind; + } + + public 
getRethnetExitCode(): number { + return exitCodeToRethnetExitCode[this.kind]; + } +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 700637f98f..99e0bc0f68 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -1,5 +1,3 @@ -import type { Message } from "@nomicfoundation/ethereumjs-evm"; -import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import { Block } from "@nomicfoundation/ethereumjs-block"; import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; import { @@ -21,7 +19,7 @@ import { hardforkGte, HardforkName } from "../../../util/hardforks"; import { RpcDebugTraceOutput } from "../output"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; -import { Trace, VMAdapter } from "./vm-adapter"; +import { RunTxResult, Trace, TracingCallbacks, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/no-unused-vars */ @@ -220,11 +218,7 @@ export class RethnetAdapter implements VMAdapter { /** * Start tracing the VM execution with the given callbacks. */ - public enableTracing(callbacks: { - beforeMessage: (message: Message, next: any) => Promise; - step: () => Promise; - afterMessage: () => Promise; - }): void { + public enableTracing(callbacks: TracingCallbacks): void { throw new Error("not implemented"); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index 536c826b9c..5032bf438d 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -1,17 +1,44 @@ -import type { - EVMResult, - InterpreterStep, - Message, -} from "@nomicfoundation/ethereumjs-evm"; -import type { RunTxResult } from "@nomicfoundation/ethereumjs-vm"; import type { Block } from "@nomicfoundation/ethereumjs-block"; import type { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import type { Account, Address } from "@nomicfoundation/ethereumjs-util"; +import type { TxReceipt } from "@nomicfoundation/ethereumjs-vm"; +import type { + TracingMessage, + TracingMessageResult, + TracingStep, +} from "rethnet-evm"; import type { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; import type { RpcDebugTraceOutput } from "../output"; +import { Bloom } from "../utils/bloom"; + +import { Exit } from "./exit"; export type Trace = any; +export interface RunTxResult { + bloom: Bloom; + createdAddress?: Address; + gasUsed: bigint; + returnValue: Buffer; + exit: Exit; + receipt: TxReceipt; +} + +export interface RunBlockResult { + results: RunTxResult[]; + receipts: TxReceipt[]; + stateRoot: Buffer; + logsBloom: Buffer; + receiptsRoot: Buffer; + gasUsed: bigint; +} + +export interface TracingCallbacks { + beforeMessage: (message: TracingMessage, next: any) => Promise; + step: (step: TracingStep, next: any) => Promise; + afterMessage: (result: TracingMessageResult, next: any) => Promise; +} + export interface VMAdapter { dryRun( tx: TypedTransaction, @@ -57,10 +84,6 @@ export interface VMAdapter { block: Block, config: RpcDebugTracingConfig ): Promise; - 
enableTracing(callbacks: { - beforeMessage: (message: Message, next: any) => Promise; - step: (step: InterpreterStep, next: any) => Promise; - afterMessage: (result: EVMResult, next: any) => Promise; - }): void; + enableTracing(callbacks: TracingCallbacks): void; disableTracing(): void; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/debug.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/debug.ts index 9a271ebd28..5af2def916 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/debug.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/debug.ts @@ -57,8 +57,8 @@ export function printCreateTrace(trace: CreateMessageTrace, depth: number) { ); } - if (trace.error !== undefined) { - console.log(`${margin} error: ${trace.error.error}`); + if (trace.exit.isError()) { + console.log(`${margin} error: ${trace.exit.getReason()}`); // The return data is the deployed-bytecode if there was no error, so we don't show it console.log(`${margin} returnData: ${bufferToHex(trace.returnData)}`); @@ -78,8 +78,8 @@ export function printPrecompileTrace( console.log(`${margin} value: ${trace.value.toString(10)}`); console.log(`${margin} calldata: ${bufferToHex(trace.calldata)}`); - if (trace.error !== undefined) { - console.log(`${margin} error: ${trace.error.error}`); + if (trace.exit.isError()) { + console.log(`${margin} error: ${trace.exit.getReason()}`); } console.log(`${margin} returnData: ${bufferToHex(trace.returnData)}`); @@ -103,8 +103,8 @@ export function printCallTrace(trace: CallMessageTrace, depth: number) { console.log(`${margin} value: ${trace.value.toString(10)}`); console.log(`${margin} calldata: ${bufferToHex(trace.calldata)}`); - if (trace.error !== undefined) { - console.log(`${margin} error: ${trace.error.error}`); + if (trace.exit.isError()) { + console.log(`${margin} error: ${trace.exit.getReason()}`); } console.log(`${margin} returnData: ${bufferToHex(trace.returnData)}`); diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/error-inferrer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/error-inferrer.ts index 76cf11ff02..3884f6847f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/error-inferrer.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/error-inferrer.ts @@ -1,9 +1,9 @@ -import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; import { defaultAbiCoder as abi } from "@ethersproject/abi"; import semver from "semver"; import { AbiHelpers } from "../../util/abi-helpers"; import { ReturnData } from "../provider/return-data"; +import { ExitCode } from "../provider/vm/exit"; import { DecodedCallMessageTrace, @@ -268,8 +268,7 @@ export class ErrorInferrer { callInst ); - const lastMessageFailed = - lastSubmessageData.messageTrace.error !== undefined; + const lastMessageFailed = lastSubmessageData.messageTrace.exit.isError(); if (lastMessageFailed) { // add the call/create that generated the message to the stack trace inferredStacktrace.push(callStackFrame); @@ -1374,7 +1373,7 @@ export class ErrorInferrer { } private _isContractTooLargeError(trace: DecodedCreateMessageTrace) { - return trace.error?.error === ERROR.CODESIZE_EXCEEDS_MAXIMUM; + return trace.exit.kind === ExitCode.CODESIZE_EXCEEDS_MAXIMUM; } private _solidity063CorrectLineNumber( @@ -1555,8 +1554,8 @@ export class ErrorInferrer { } if ( - trace.error?.error === ERROR.OUT_OF_GAS && - call.error?.error === ERROR.OUT_OF_GAS + 
trace.exit.kind === ExitCode.OUT_OF_GAS && + call.exit.kind === ExitCode.OUT_OF_GAS ) { return true; } @@ -1645,12 +1644,12 @@ export class ErrorInferrer { return false; } - if (trace.error?.error !== ERROR.REVERT) { + if (trace.exit.kind !== ExitCode.REVERT) { return false; } const call = trace.steps[callStepIndex] as MessageTrace; - if (call.error?.error !== ERROR.OUT_OF_GAS) { + if (call.exit.kind !== ExitCode.OUT_OF_GAS) { return false; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/message-trace.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/message-trace.ts index 6c50e26194..1faad79f83 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/message-trace.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/message-trace.ts @@ -1,6 +1,5 @@ -import type { EvmError } from "@nomicfoundation/ethereumjs-evm"; - import type { Bytecode } from "./model"; +import type { Exit } from "../provider/vm/exit"; export type MessageTrace = | CreateMessageTrace @@ -16,7 +15,7 @@ export type DecodedEvmMessageTrace = export interface BaseMessageTrace { value: bigint; returnData: Buffer; - error?: EvmError; + exit: Exit; gasUsed: bigint; depth: number; } @@ -30,7 +29,6 @@ export interface BaseEvmMessageTrace extends BaseMessageTrace { code: Buffer; value: bigint; returnData: Buffer; - error?: EvmError; steps: MessageTraceStep[]; bytecode?: Bytecode; // The following is just an optimization: When processing this traces it's useful to know ahead of diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/solidityTracer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/solidityTracer.ts index d9353352e0..f7c877fe51 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/solidityTracer.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/solidityTracer.ts @@ -1,5 +1,5 @@ -import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; import { ReturnData } from "../provider/return-data"; +import { ExitCode } from "../provider/vm/exit"; import { ErrorInferrer, @@ -38,7 +38,7 @@ export class SolidityTracer { public getStackTrace( maybeDecodedMessageTrace: MessageTrace ): SolidityStackTrace { - if (maybeDecodedMessageTrace.error === undefined) { + if (!maybeDecodedMessageTrace.exit.isError()) { return []; } @@ -79,7 +79,7 @@ export class SolidityTracer { // This is not a very exact heuristic, but most of the time it will be right, as solidity // reverts if a call fails, and most contracts are in solidity if ( - subtrace.error !== undefined && + subtrace.exit.isError() && trace.returnData.equals(subtrace.returnData) ) { let unrecognizedEntry: SolidityStackTraceEntry; @@ -99,7 +99,7 @@ export class SolidityTracer { } } - if (trace.error?.error === ERROR.CODESIZE_EXCEEDS_MAXIMUM) { + if (trace.exit.kind === ExitCode.CODESIZE_EXCEEDS_MAXIMUM) { return [ { type: StackTraceEntryType.CONTRACT_TOO_LARGE_ERROR, @@ -107,7 +107,7 @@ export class SolidityTracer { ]; } - const isInvalidOpcodeError = trace.error?.error === ERROR.INVALID_OPCODE; + const isInvalidOpcodeError = trace.exit.kind === ExitCode.INVALID_OPCODE; if (isCreateTrace(trace)) { return [ diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts index 9c578d9a3f..2882b7214e 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts +++ 
b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts @@ -1,11 +1,15 @@ import type { Common } from "@nomicfoundation/ethereumjs-common"; -import type { InterpreterStep } from "@nomicfoundation/ethereumjs-evm/dist/interpreter"; -import type { Message } from "@nomicfoundation/ethereumjs-evm/dist/message"; -import { - EVMResult, - getActivePrecompiles, -} from "@nomicfoundation/ethereumjs-evm"; -import { bufferToBigInt } from "@nomicfoundation/ethereumjs-util"; +import type { + TracingMessage, + TracingMessageResult, + TracingStep, +} from "rethnet-evm"; + +import { getActivePrecompiles } from "@nomicfoundation/ethereumjs-evm"; +import { Address, bufferToBigInt } from "@nomicfoundation/ethereumjs-util"; + +import { assertHardhatInvariant } from "../../core/errors"; +import { Exit, ExitCode } from "../provider/vm/exit"; import { VMAdapter } from "../provider/vm/vm-adapter"; import { @@ -87,7 +91,7 @@ export class VMTracer { return this._throwErrors || this._lastError === undefined; } - private async _beforeMessageHandler(message: Message, next: any) { + private async _beforeMessageHandler(message: TracingMessage, next: any) { if (!this._shouldKeepTracing()) { next(); return; @@ -105,6 +109,7 @@ export class VMTracer { code: message.data, steps: [], value: message.value, + exit: new Exit(ExitCode.SUCCESS), returnData: DUMMY_RETURN_DATA, numberOfSubtraces: 0, depth: message.depth, @@ -114,13 +119,14 @@ export class VMTracer { trace = createTrace; } else { - const toAsBigInt = bufferToBigInt(message.to.toBuffer()); + const toAsBigInt = bufferToBigInt(message.to); if (toAsBigInt > 0 && toAsBigInt <= this._maxPrecompileNumber) { const precompileTrace: PrecompileMessageTrace = { precompile: Number(toAsBigInt), calldata: message.data, value: message.value, + exit: new Exit(ExitCode.SUCCESS), returnData: DUMMY_RETURN_DATA, depth: message.depth, gasUsed: DUMMY_GAS_USED, @@ -130,19 +136,27 @@ export class VMTracer { } else { const codeAddress = message.codeAddress; - const code = await this._vm.getContractCode(codeAddress); + // if we enter here, then `to` is not undefined, therefore + // `codeAddress` should be defined + assertHardhatInvariant( + codeAddress !== undefined, + "codeAddress should be defined" + ); + + const code = await this._vm.getContractCode(new Address(codeAddress)); const callTrace: CallMessageTrace = { code, calldata: message.data, steps: [], value: message.value, + exit: new Exit(ExitCode.SUCCESS), returnData: DUMMY_RETURN_DATA, - address: message.to.toBuffer(), + address: message.to, numberOfSubtraces: 0, depth: message.depth, gasUsed: DUMMY_GAS_USED, - codeAddress: codeAddress.toBuffer(), + codeAddress, }; trace = callTrace; @@ -174,7 +188,7 @@ export class VMTracer { } } - private async _stepHandler(step: InterpreterStep, next: any) { + private async _stepHandler(step: TracingStep, next: any) { if (!this._shouldKeepTracing()) { next(); return; @@ -189,7 +203,7 @@ export class VMTracer { ); } - trace.steps.push({ pc: step.pc }); + trace.steps.push({ pc: Number(step.pc) }); next(); } catch (error) { if (this._throwErrors) { @@ -201,7 +215,7 @@ export class VMTracer { } } - private async _afterMessageHandler(result: EVMResult, next: any) { + private async _afterMessageHandler(result: TracingMessageResult, next: any) { if (!this._shouldKeepTracing()) { next(); return; @@ -210,12 +224,13 @@ export class VMTracer { try { const trace = this._messageTraces[this._messageTraces.length - 1]; - trace.error = result.execResult.exceptionError; - trace.returnData = 
result.execResult.returnValue; - trace.gasUsed = result.execResult.executionGasUsed; + trace.exit = Exit.fromRethnetExitCode(result.executionResult.exitCode); + trace.returnData = + result.executionResult.output.output ?? Buffer.from([]); + trace.gasUsed = result.executionResult.gasUsed; if (isCreateTrace(trace)) { - trace.deployedContract = result?.createdAddress?.toBuffer(); + trace.deployedContract = result.executionResult.output.address; } if (this._messageTraces.length > 1) { diff --git a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts index c5c0e606ce..7d8b01b89d 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/test.ts @@ -485,13 +485,13 @@ async function runTest( try { if (tx.stackTrace === undefined) { - assert.isUndefined( - trace.error, + assert.isFalse( + trace.exit.isError(), `Transaction ${txIndex} shouldn't have failed` ); } else { assert.isDefined( - trace.error, + trace.exit.isError(), `Transaction ${txIndex} should have failed` ); } @@ -501,7 +501,7 @@ async function runTest( throw error; } - if (trace.error !== undefined) { + if (trace.exit.isError()) { const stackTrace = tracer.getStackTrace(decodedTrace); try { From 902ed1605f91c54ea87d80992c5cbf5d5bc9a4ec Mon Sep 17 00:00:00 2001 From: Wodann Date: Wed, 14 Dec 2022 15:28:58 -0600 Subject: [PATCH 012/406] feat: foundational Eth types (#3357) --- crates/rethnet_eth/Cargo.toml | 28 + crates/rethnet_eth/src/access_list.rs | 42 + crates/rethnet_eth/src/account.rs | 61 ++ crates/rethnet_eth/src/block.rs | 524 +++++++++++ crates/rethnet_eth/src/lib.rs | 20 + crates/rethnet_eth/src/receipt.rs | 371 ++++++++ crates/rethnet_eth/src/signature.rs | 332 +++++++ crates/rethnet_eth/src/state.rs | 63 ++ crates/rethnet_eth/src/transaction.rs | 1237 +++++++++++++++++++++++++ crates/rethnet_eth/src/trie.rs | 62 ++ crates/rethnet_eth/src/utils.rs | 42 + 11 files changed, 2782 insertions(+) create mode 100644 crates/rethnet_eth/Cargo.toml create mode 100644 crates/rethnet_eth/src/access_list.rs create mode 100644 crates/rethnet_eth/src/account.rs create mode 100644 crates/rethnet_eth/src/block.rs create mode 100644 crates/rethnet_eth/src/lib.rs create mode 100644 crates/rethnet_eth/src/receipt.rs create mode 100644 crates/rethnet_eth/src/signature.rs create mode 100644 crates/rethnet_eth/src/state.rs create mode 100644 crates/rethnet_eth/src/transaction.rs create mode 100644 crates/rethnet_eth/src/trie.rs create mode 100644 crates/rethnet_eth/src/utils.rs diff --git a/crates/rethnet_eth/Cargo.toml b/crates/rethnet_eth/Cargo.toml new file mode 100644 index 0000000000..e23c5d1eb8 --- /dev/null +++ b/crates/rethnet_eth/Cargo.toml @@ -0,0 +1,28 @@ +[package] +name = "rethnet_eth" +version = "0.1.0-dev" +edition = "2021" + +[dependencies] +bytes = { version = "1.2.1", default-features = false } +ethbloom = { version = "0.13.0", default-features = false, features = ["rlp"] } +hash-db = { version = "0.15.2", default-features = false } +hash256-std-hasher = { version = "0.15.2", default-features = false } +hashbrown = { version = "0.12.3", default-features = false } +hex = { version = "0.4.3", default-features = false, features = ["alloc"] } +open-fastrlp = { version = "0.1.2", default-features = false, features = ["derive"], optional = true } +primitive-types = { version = "0.11.1", default-features = false, features = ["rlp"] } +revm = { git 
= "https://github.com/wodann/revm", branch = "feat/optional-balance-check", version = "2.1.0", default-features = false } +rlp = { version = "0.5.2", default-features = false, features = ["derive"] } +ruint = { version = "1.7.0", default-features = false } +secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc", "recovery"] } +serde = { version = "1.0.147", default-features = false, features = ["derive"], optional = true } +sha3 = { version = "0.10.6", default-features = false } +thiserror = { version = "1.0.37", default-features = false } +triehash = { version = "0.8.4", default-features = false } + +[features] +default = ["std"] +# fastrlp = ["dep:open-fastrlp", "ruint/fastrlp"] Broken due to lack of support for fastrlp in primitive-types +serde = ["dep:serde", "bytes/serde", "ethbloom/serialize", "hashbrown/serde", "primitive-types/serde", "ruint/serde"] +std = ["bytes/std", "ethbloom/std", "hash256-std-hasher/std", "hash-db/std", "hex/std", "open-fastrlp?/std", "primitive-types/std", "rlp/std", "secp256k1/std", "serde?/std", "sha3/std", "triehash/std"] diff --git a/crates/rethnet_eth/src/access_list.rs b/crates/rethnet_eth/src/access_list.rs new file mode 100644 index 0000000000..ebfb30149b --- /dev/null +++ b/crates/rethnet_eth/src/access_list.rs @@ -0,0 +1,42 @@ +// Part of this code was adapted from ethers-rs and is distributed under their licenss: +// - https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/LICENSE-APACHE +// - https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/LICENSE-MIT +// For the original context see: https://github.com/gakonst/ethers-rs/blob/3d9c3290d42b77c510e5b5d0b6f7a2f72913bfff/ethers-core/src/types/transaction/eip2930.rs + +use ruint::aliases::U256; + +use crate::Address; + +/// Access list +// NB: Need to use `RlpEncodableWrapper` else we get an extra [] in the output +// https://github.com/gakonst/ethers-rs/pull/353#discussion_r680683869 +#[derive( + Debug, Default, Clone, PartialEq, Eq, Hash, rlp::RlpEncodableWrapper, rlp::RlpDecodableWrapper, +)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodableWrapper, open_fastrlp::RlpDecodableWrapper) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct AccessList(pub Vec); + +impl From> for AccessList { + fn from(src: Vec) -> AccessList { + AccessList(src) + } +} + +/// Access list item +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash, rlp::RlpEncodable, rlp::RlpDecodable)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct AccessListItem { + /// Accessed address + pub address: Address, + /// Accessed storage keys + pub storage_keys: Vec, +} diff --git a/crates/rethnet_eth/src/account.rs b/crates/rethnet_eth/src/account.rs new file mode 100644 index 0000000000..74842dd89d --- /dev/null +++ b/crates/rethnet_eth/src/account.rs @@ -0,0 +1,61 @@ +// Part of this code was adapted from foundry and is distributed under their licenss: +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-APACHE +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT +// For the original context see: 
https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/proof.rs + +//! Return types for `eth_getProof` + +use crate::trie::KECCAK_NULL_RLP; +use primitive_types::H256; +use revm::KECCAK_EMPTY; +use ruint::aliases::U256; + +/// Basic account type. +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +pub struct BasicAccount { + /// Nonce of the account. + pub nonce: U256, + /// Balance of the account. + pub balance: U256, + /// Storage root of the account. + pub storage_root: H256, + /// Code hash of the account. + pub code_hash: H256, +} + +impl Default for BasicAccount { + fn default() -> Self { + BasicAccount { + balance: U256::ZERO, + nonce: U256::ZERO, + code_hash: KECCAK_EMPTY, + storage_root: KECCAK_NULL_RLP, + } + } +} + +impl rlp::Encodable for BasicAccount { + fn rlp_append(&self, stream: &mut rlp::RlpStream) { + stream.begin_list(4); + stream.append(&self.nonce); + stream.append(&self.balance); + stream.append(&self.storage_root); + stream.append(&self.code_hash); + } +} + +impl rlp::Decodable for BasicAccount { + fn decode(rlp: &rlp::Rlp) -> Result { + let result = BasicAccount { + nonce: rlp.val_at(0)?, + balance: rlp.val_at(1)?, + storage_root: rlp.val_at(2)?, + code_hash: rlp.val_at(3)?, + }; + Ok(result) + } +} diff --git a/crates/rethnet_eth/src/block.rs b/crates/rethnet_eth/src/block.rs new file mode 100644 index 0000000000..c1f3648465 --- /dev/null +++ b/crates/rethnet_eth/src/block.rs @@ -0,0 +1,524 @@ +// Part of this code was adapted from foundry and is distributed under their licenss: +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-APACHE +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT +// For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/block.rs + +use primitive_types::H256; +use revm::common::keccak256; +use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; +use ruint::aliases::{B64, U256}; + +use crate::{transaction::SignedTransaction, trie, Address, Bloom, Bytes}; + +/// Ethereum block +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Block { + pub header: Header, + pub transactions: Vec, + pub ommers: Vec
<Header>, +} + +impl Block { + pub fn new( + partial_header: PartialHeader, + transactions: Vec<SignedTransaction>, + ommers: Vec<Header>
, + ) -> Self { + let ommers_hash = keccak256(&rlp::encode_list(&ommers)[..]); + let transactions_root = + trie::ordered_trie_root(transactions.iter().map(|r| rlp::encode(r).freeze())); + + Self { + header: Header::new(partial_header, ommers_hash, transactions_root), + transactions, + ommers, + } + } +} + +impl Encodable for Block { + fn rlp_append(&self, s: &mut RlpStream) { + s.begin_list(3); + s.append(&self.header); + s.append_list(&self.transactions); + s.append_list(&self.ommers); + } +} + +impl Decodable for Block { + fn decode(rlp: &Rlp) -> Result { + Ok(Self { + header: rlp.val_at(0)?, + transactions: rlp.list_at(1)?, + ommers: rlp.list_at(2)?, + }) + } +} + +/// ethereum block header +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct Header { + pub parent_hash: H256, + pub ommers_hash: H256, + pub beneficiary: Address, + pub state_root: H256, + pub transactions_root: H256, + pub receipts_root: H256, + pub logs_bloom: Bloom, + pub difficulty: U256, + pub number: U256, + pub gas_limit: U256, + pub gas_used: U256, + pub timestamp: u64, + pub extra_data: Bytes, + pub mix_hash: H256, + #[cfg_attr(feature = "serde", serde(with = "B64Def"))] + pub nonce: B64, + /// BaseFee was added by EIP-1559 and is ignored in legacy headers. + pub base_fee_per_gas: Option, +} + +#[cfg(feature = "serde")] +#[derive(serde::Serialize, serde::Deserialize)] +#[serde(remote = "B64")] +struct B64Def(#[serde(getter = "B64::as_uint")] ruint::aliases::U64); + +#[cfg(feature = "serde")] +impl From for B64 { + fn from(def: B64Def) -> Self { + def.0.into() + } +} + +impl Header { + pub fn new(partial_header: PartialHeader, ommers_hash: H256, transactions_root: H256) -> Self { + Self { + parent_hash: partial_header.parent_hash, + ommers_hash, + beneficiary: partial_header.beneficiary, + state_root: partial_header.state_root, + transactions_root, + receipts_root: partial_header.receipts_root, + logs_bloom: partial_header.logs_bloom, + difficulty: partial_header.difficulty, + number: partial_header.number, + gas_limit: partial_header.gas_limit, + gas_used: partial_header.gas_used, + timestamp: partial_header.timestamp, + extra_data: partial_header.extra_data, + mix_hash: partial_header.mix_hash, + nonce: partial_header.nonce, + base_fee_per_gas: partial_header.base_fee, + } + } + + pub fn hash(&self) -> H256 { + keccak256(&rlp::encode(self)) + } + + /// Returns the rlp length of the Header body, _not including_ trailing EIP155 fields or the + /// rlp list header + /// To get the length including the rlp list header, refer to the Encodable implementation. 
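As a rough usage sketch (not part of this patch), assuming the `rethnet_eth` crate builds as defined in this commit and that the caller also depends on the `rlp` crate (as the crate's own tests do), the new block types compose as follows; the function name is illustrative only.

use rethnet_eth::block::{Block, PartialHeader};

fn example_block_roundtrip() {
    // Build a block with no transactions or ommers; `Block::new` derives the
    // ommers hash and the transactions trie root from the (empty) lists.
    let block = Block::new(PartialHeader::default(), Vec::new(), Vec::new());

    // The block hash is the keccak256 of the RLP-encoded header.
    let block_hash = block.header.hash();

    // The whole block round-trips through RLP, mirroring the header tests below.
    let encoded = rlp::encode(&block);
    let decoded: Block = rlp::decode(encoded.as_ref()).unwrap();
    assert_eq!(block, decoded);
    assert_eq!(block_hash, decoded.header.hash());
}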
+ #[cfg(feature = "fastrlp")] + pub(crate) fn header_payload_length(&self) -> usize { + use open_fastrlp::Encodable; + + let mut length = 0; + length += self.parent_hash.length(); + length += self.ommers_hash.length(); + length += self.beneficiary.length(); + length += self.state_root.length(); + length += self.transactions_root.length(); + length += self.receipts_root.length(); + length += self.logs_bloom.length(); + length += self.difficulty.length(); + length += self.number.length(); + length += self.gas_limit.length(); + length += self.gas_used.length(); + length += self.timestamp.length(); + length += self.extra_data.length(); + length += self.mix_hash.length(); + length += self.nonce.length(); + length += self + .base_fee_per_gas + .map(|fee| fee.length()) + .unwrap_or_default(); + length + } +} + +impl rlp::Encodable for Header { + fn rlp_append(&self, s: &mut rlp::RlpStream) { + if self.base_fee_per_gas.is_none() { + s.begin_list(15); + } else { + s.begin_list(16); + } + s.append(&self.parent_hash); + s.append(&self.ommers_hash); + s.append(&self.beneficiary); + s.append(&self.state_root); + s.append(&self.transactions_root); + s.append(&self.receipts_root); + s.append(&self.logs_bloom); + s.append(&self.difficulty); + s.append(&self.number); + s.append(&self.gas_limit); + s.append(&self.gas_used); + s.append(&self.timestamp); + s.append(&self.extra_data.as_ref()); + s.append(&self.mix_hash); + s.append(&self.nonce); + if let Some(ref base_fee) = self.base_fee_per_gas { + s.append(base_fee); + } + } +} + +impl rlp::Decodable for Header { + fn decode(rlp: &rlp::Rlp) -> Result { + let result = Header { + parent_hash: rlp.val_at(0)?, + ommers_hash: rlp.val_at(1)?, + beneficiary: rlp.val_at(2)?, + state_root: rlp.val_at(3)?, + transactions_root: rlp.val_at(4)?, + receipts_root: rlp.val_at(5)?, + logs_bloom: rlp.val_at(6)?, + difficulty: rlp.val_at(7)?, + number: rlp.val_at(8)?, + gas_limit: rlp.val_at(9)?, + gas_used: rlp.val_at(10)?, + timestamp: rlp.val_at(11)?, + extra_data: rlp.val_at::>(12)?.into(), + mix_hash: rlp.val_at(13)?, + nonce: rlp.val_at(14)?, + base_fee_per_gas: if let Ok(base_fee) = rlp.at(15) { + Some(::decode(&base_fee)?) 
+ } else { + None + }, + }; + Ok(result) + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Encodable for Header { + fn length(&self) -> usize { + // add each of the fields' rlp encoded lengths + let mut length = 0; + length += self.header_payload_length(); + length += open_fastrlp::length_of_length(length); + + length + } + + fn encode(&self, out: &mut dyn open_fastrlp::BufMut) { + let list_header = open_fastrlp::Header { + list: true, + payload_length: self.header_payload_length(), + }; + list_header.encode(out); + self.parent_hash.encode(out); + self.ommers_hash.encode(out); + self.beneficiary.encode(out); + self.state_root.encode(out); + self.transactions_root.encode(out); + self.receipts_root.encode(out); + self.logs_bloom.encode(out); + self.difficulty.encode(out); + self.number.encode(out); + self.gas_limit.encode(out); + self.gas_used.encode(out); + self.timestamp.encode(out); + self.extra_data.encode(out); + self.mix_hash.encode(out); + self.nonce.encode(out); + if let Some(base_fee_per_gas) = self.base_fee_per_gas { + base_fee_per_gas.encode(out); + } + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Decodable for Header { + fn decode(buf: &mut &[u8]) -> Result { + // slice out the rlp list header + let header = open_fastrlp::Header::decode(buf)?; + let start_len = buf.len(); + + Ok(Header { + parent_hash: ::decode(buf)?, + ommers_hash: ::decode(buf)?, + beneficiary:
::decode(buf)?, + state_root: ::decode(buf)?, + transactions_root: ::decode(buf)?, + receipts_root: ::decode(buf)?, + logs_bloom: ::decode(buf)?, + difficulty: ::decode(buf)?, + number: ::decode(buf)?, + gas_limit: ::decode(buf)?, + gas_used: ::decode(buf)?, + timestamp: ::decode(buf)?, + extra_data: ::decode(buf)?, + mix_hash: ::decode(buf)?, + nonce: ::decode(buf)?, + base_fee_per_gas: if start_len - header.payload_length < buf.len() { + // if there is leftover data in the payload, decode the base fee + Some(::decode(buf)?) + } else { + None + }, + }) + } +} + +/// Partial header definition without ommers hash and transactions root +#[derive(Clone, Debug, PartialEq, Eq, Default)] +pub struct PartialHeader { + pub parent_hash: H256, + pub beneficiary: Address, + pub state_root: H256, + pub receipts_root: H256, + pub logs_bloom: Bloom, + pub difficulty: U256, + pub number: U256, + pub gas_limit: U256, + pub gas_used: U256, + pub timestamp: u64, + pub extra_data: Bytes, + pub mix_hash: H256, + pub nonce: B64, + pub base_fee: Option, +} + +impl From
for PartialHeader { + fn from(header: Header) -> PartialHeader { + Self { + parent_hash: header.parent_hash, + beneficiary: header.beneficiary, + state_root: header.state_root, + receipts_root: header.receipts_root, + logs_bloom: header.logs_bloom, + difficulty: header.difficulty, + number: header.number, + gas_limit: header.gas_limit, + gas_used: header.gas_used, + timestamp: header.timestamp, + extra_data: header.extra_data, + mix_hash: header.mix_hash, + nonce: header.nonce, + base_fee: header.base_fee_per_gas, + } + } +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use ruint::uint; + + use super::*; + + #[test] + fn header_rlp_roundtrip() { + let mut header = Header { + parent_hash: Default::default(), + ommers_hash: Default::default(), + beneficiary: Default::default(), + state_root: Default::default(), + transactions_root: Default::default(), + receipts_root: Default::default(), + logs_bloom: Default::default(), + difficulty: Default::default(), + number: U256::from(124), + gas_limit: Default::default(), + gas_used: U256::from(1337), + timestamp: 0, + extra_data: Default::default(), + mix_hash: Default::default(), + nonce: B64::from(uint!(99_U64)), + base_fee_per_gas: None, + }; + + let encoded = rlp::encode(&header); + let decoded: Header = rlp::decode(encoded.as_ref()).unwrap(); + assert_eq!(header, decoded); + + header.base_fee_per_gas = Some(U256::from(12345)); + + let encoded = rlp::encode(&header); + let decoded: Header = rlp::decode(encoded.as_ref()).unwrap(); + assert_eq!(header, decoded); + } + + #[test] + #[cfg(feature = "fastrlp")] + fn header_fastrlp_roundtrip() { + let mut header = Header { + parent_hash: Default::default(), + ommers_hash: Default::default(), + beneficiary: Default::default(), + state_root: Default::default(), + transactions_root: Default::default(), + receipts_root: Default::default(), + logs_bloom: Default::default(), + difficulty: Default::default(), + number: 124u64.into(), + gas_limit: Default::default(), + gas_used: 1337u64.into(), + timestamp: 0, + extra_data: Default::default(), + mix_hash: Default::default(), + nonce: H64::from_low_u64_be(99u64), + base_fee_per_gas: None, + }; + + let mut encoded = vec![]; +
<Header as open_fastrlp::Encodable>::encode(&header, &mut encoded); + let decoded: Header = + <Header as open_fastrlp::Decodable>
::decode(&mut encoded.as_slice()).unwrap(); + assert_eq!(header, decoded); + + header.base_fee_per_gas = Some(12345u64.into()); + + encoded.clear(); +
<Header as open_fastrlp::Encodable>::encode(&header, &mut encoded); + let decoded: Header = + <Header as open_fastrlp::Decodable>
::decode(&mut encoded.as_slice()).unwrap(); + assert_eq!(header, decoded); + } + + #[test] + #[cfg(feature = "fastrlp")] + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + fn test_encode_block_header() { + use open_fastrlp::Encodable; + + let expected = hex::decode("f901f9a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008208ae820d0582115c8215b3821a0a827788a00000000000000000000000000000000000000000000000000000000000000000880000000000000000").unwrap(); + let mut data = vec![]; + let header = Header { + parent_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + ommers_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + beneficiary: H160::from_str("0000000000000000000000000000000000000000").unwrap(), + state_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + transactions_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + receipts_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + logs_bloom: <[u8; 256]>::from_hex("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap().into(), + difficulty: 0x8aeu64.into(), + number: 0xd05u64.into(), + gas_limit: 0x115cu64.into(), + gas_used: 0x15b3u64.into(), + timestamp: 0x1a0au64, + extra_data: hex::decode("7788").unwrap().into(), + mix_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + nonce: U64::from(0x0), + base_fee_per_gas: None, + }; + header.encode(&mut data); + assert_eq!(hex::encode(&data), hex::encode(expected)); + assert_eq!(header.length(), data.len()); + } + + #[test] + // Test vector from: https://github.com/ethereum/tests/blob/f47bbef4da376a49c8fc3166f09ab8a6d182f765/BlockchainTests/ValidBlocks/bcEIP1559/baseFee.json#L15-L36 + fn test_eip1559_block_header_hash() { + use hex::FromHex; + + let expected_hash = + H256::from_str("6a251c7c3c5dca7b42407a3752ff48f3bbca1fab7f9868371d9918daf1988d1f") + .unwrap(); + let header = Header { + parent_hash: H256::from_str( + "e0a94a7a3c9617401586b1a27025d2d9671332d22d540e0af72b069170380f2a", + ) + .unwrap(), + 
ommers_hash: H256::from_str( + "1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + ) + .unwrap(), + beneficiary: Address::from_str("ba5e000000000000000000000000000000000000").unwrap(), + state_root: H256::from_str( + "ec3c94b18b8a1cff7d60f8d258ec723312932928626b4c9355eb4ab3568ec7f7", + ) + .unwrap(), + transactions_root: H256::from_str( + "50f738580ed699f0469702c7ccc63ed2e51bc034be9479b7bff4e68dee84accf", + ) + .unwrap(), + receipts_root: H256::from_str( + "29b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9", + ) + .unwrap(), + logs_bloom: <[u8; 256]>::from_hex("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap().into(), + difficulty: U256::from(0x020000u64), + number: U256::from(0x01u64), + gas_limit: U256::from_str("0x016345785d8a0000").unwrap(), + gas_used: U256::from(0x015534u64), + timestamp: 0x079eu64, + extra_data: hex::decode("42").unwrap().into(), + mix_hash: H256::from_str( + "0000000000000000000000000000000000000000000000000000000000000000", + ) + .unwrap(), + nonce: B64::from(uint!(0x0_U64)), + base_fee_per_gas: Some(U256::from(0x036bu64)), + }; + assert_eq!(header.hash(), expected_hash); + } + + #[test] + #[cfg(feature = "fastrlp")] + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + fn test_decode_block_header() { + let data = hex::decode("f901f9a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008208ae820d0582115c8215b3821a0a827788a00000000000000000000000000000000000000000000000000000000000000000880000000000000000").unwrap(); + let expected = Header { + parent_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + ommers_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + beneficiary: H160::from_str("0000000000000000000000000000000000000000").unwrap(), + state_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + transactions_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + receipts_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + logs_bloom: <[u8; 
256]>::from_hex("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap().into(), + difficulty: 0x8aeu64.into(), + number: 0xd05u64.into(), + gas_limit: 0x115cu64.into(), + gas_used: 0x15b3u64.into(), + timestamp: 0x1a0au64, + extra_data: hex::decode("7788").unwrap().into(), + mix_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + nonce: U64::from(0x0), + base_fee_per_gas: None, + }; + let header =
::decode(&mut data.as_slice()).unwrap(); + assert_eq!(header, expected); + } + + #[test] + #[cfg(feature = "fastrlp")] + // Test vector from network + fn block_network_fastrlp_roundtrip() { + use open_fastrlp::Encodable; + + let data = hex::decode("f9034df90348a0fbdbd8d2d0ac5f14bd5fa90e547fe6f1d15019c724f8e7b60972d381cd5d9cf8a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794c9577e7945db22e38fc060909f2278c7746b0f9ba05017cfa3b0247e35197215ae8d610265ffebc8edca8ea66d6567eb0adecda867a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000018355bb7b871fffffffffffff808462bd0e1ab9014bf90148a00000000000000000000000000000000000000000000000000000000000000000f85494319fa8f1bc4e53410e92d10d918659b16540e60a945a573efb304d04c1224cd012313e827eca5dce5d94a9c831c5a268031176ebf5f3de5051e8cba0dbfe94c9577e7945db22e38fc060909f2278c7746b0f9b808400000000f8c9b841a6946f2d16f68338cbcbd8b117374ab421128ce422467088456bceba9d70c34106128e6d4564659cf6776c08a4186063c0a05f7cffd695c10cf26a6f301b67f800b8412b782100c18c35102dc0a37ece1a152544f04ad7dc1868d18a9570f744ace60870f822f53d35e89a2ea9709ccbf1f4a25ee5003944faa845d02dde0a41d5704601b841d53caebd6c8a82456e85c2806a9e08381f959a31fb94a77e58f00e38ad97b2e0355b8519ab2122662cbe022f2a4ef7ff16adc0b2d5dcd123181ec79705116db300a063746963616c2062797a616e74696e65206661756c7420746f6c6572616e6365880000000000000000c0c0").unwrap(); + + let block = ::decode(&mut data.as_slice()).unwrap(); + + // encode and check that it matches the original data + let mut encoded = Vec::new(); + block.encode(&mut encoded); + assert_eq!(data, encoded); + + // check that length of encoding is the same as the output of `length` + assert_eq!(block.length(), encoded.len()); + } +} diff --git a/crates/rethnet_eth/src/lib.rs b/crates/rethnet_eth/src/lib.rs new file mode 100644 index 0000000000..a7e29156ef --- /dev/null +++ b/crates/rethnet_eth/src/lib.rs @@ -0,0 +1,20 @@ +pub mod access_list; +pub mod account; +pub mod block; +pub mod receipt; +pub mod signature; +pub mod state; +pub mod transaction; +pub mod trie; +pub mod utils; + +pub use bytes::Bytes; +pub use ethbloom::Bloom; +pub use primitive_types::{H256, H512}; +pub use ruint::aliases::{B64, U256, U64}; + +use primitive_types::H160; + +pub type Address = H160; +pub type Secret = H256; +pub type Public = H512; diff --git a/crates/rethnet_eth/src/receipt.rs b/crates/rethnet_eth/src/receipt.rs new file mode 100644 index 0000000000..0854c294d1 --- /dev/null +++ b/crates/rethnet_eth/src/receipt.rs @@ -0,0 +1,371 @@ +// Part of this code was adapted from foundry and is distributed under their licenss: +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-APACHE +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT +// For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/receipt.rs + +use 
crate::{utils::enveloped, Address, Bloom, Bytes, H256, U256}; +use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; + +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct Log { + pub address: Address, + pub topics: Vec, + pub data: Bytes, +} + +impl From for Log { + fn from(log: revm::Log) -> Self { + let revm::Log { + address, + topics, + data, + } = log; + Log { + address, + topics, + data, + } + } +} + +impl From for revm::Log { + fn from(log: Log) -> Self { + let Log { + address, + topics, + data, + } = log; + revm::Log { + address, + topics, + data, + } + } +} + +impl Encodable for Log { + fn rlp_append(&self, stream: &mut rlp::RlpStream) { + stream.begin_list(3); + stream.append(&self.address); + stream.append_list(&self.topics); + stream.append(&self.data.as_ref()); + } +} + +impl Decodable for Log { + fn decode(rlp: &Rlp) -> Result { + let result = Log { + address: rlp.val_at(0)?, + topics: rlp.list_at(1)?, + data: rlp.val_at::>(2)?.into(), + }; + Ok(result) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct EIP658Receipt { + pub status_code: u8, + pub gas_used: U256, + pub logs_bloom: Bloom, + pub logs: Vec, +} + +impl Encodable for EIP658Receipt { + fn rlp_append(&self, stream: &mut RlpStream) { + stream.begin_list(4); + stream.append(&self.status_code); + stream.append(&self.gas_used); + stream.append(&self.logs_bloom); + stream.append_list(&self.logs); + } +} + +impl Decodable for EIP658Receipt { + fn decode(rlp: &Rlp) -> Result { + let result = EIP658Receipt { + status_code: rlp.val_at(0)?, + gas_used: rlp.val_at(1)?, + logs_bloom: rlp.val_at(2)?, + logs: rlp.list_at(3)?, + }; + Ok(result) + } +} + +// same underlying data structure +pub type EIP2930Receipt = EIP658Receipt; +pub type EIP1559Receipt = EIP658Receipt; + +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum TypedReceipt { + /// Legacy receipt + Legacy(EIP658Receipt), + /// EIP-2930 receipt + EIP2930(EIP2930Receipt), + /// EIP-1559 receipt + EIP1559(EIP1559Receipt), +} + +impl TypedReceipt { + /// Returns the gas used by the transactions + pub fn gas_used(&self) -> U256 { + match self { + TypedReceipt::Legacy(r) | TypedReceipt::EIP2930(r) | TypedReceipt::EIP1559(r) => { + r.gas_used + } + } + } + + /// Returns the gas used by the transactions + pub fn logs_bloom(&self) -> &Bloom { + match self { + TypedReceipt::Legacy(r) | TypedReceipt::EIP2930(r) | TypedReceipt::EIP1559(r) => { + &r.logs_bloom + } + } + } +} + +impl Encodable for TypedReceipt { + fn rlp_append(&self, s: &mut RlpStream) { + match self { + TypedReceipt::Legacy(r) => r.rlp_append(s), + TypedReceipt::EIP2930(r) => enveloped(1, r, s), + TypedReceipt::EIP1559(r) => enveloped(2, r, s), + } + } +} + +impl Decodable for TypedReceipt { + fn decode(rlp: &Rlp) -> Result { + let slice = rlp.data()?; + + let first = *slice.first().ok_or(DecoderError::Custom("empty receipt"))?; + + if rlp.is_list() { + return Ok(TypedReceipt::Legacy(Decodable::decode(rlp)?)); + } + + let s = slice + .get(1..) 
+ .ok_or(DecoderError::Custom("no receipt content"))?; + + if first == 0x01 { + return rlp::decode(s).map(TypedReceipt::EIP2930); + } + + if first == 0x02 { + return rlp::decode(s).map(TypedReceipt::EIP1559); + } + + Err(DecoderError::Custom("unknown receipt type")) + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Encodable for TypedReceipt { + fn length(&self) -> usize { + match self { + TypedReceipt::Legacy(r) => r.length(), + receipt => { + let payload_len = match receipt { + TypedReceipt::EIP2930(r) => r.length() + 1, + TypedReceipt::EIP1559(r) => r.length() + 1, + _ => unreachable!("receipt already matched"), + }; + + // we include a string header for typed receipts, so include the length here + payload_len + open_fastrlp::length_of_length(payload_len) + } + } + } + fn encode(&self, out: &mut dyn open_fastrlp::BufMut) { + use open_fastrlp::Header; + + match self { + TypedReceipt::Legacy(r) => r.encode(out), + receipt => { + let payload_len = match receipt { + TypedReceipt::EIP2930(r) => r.length() + 1, + TypedReceipt::EIP1559(r) => r.length() + 1, + _ => unreachable!("receipt already matched"), + }; + + match receipt { + TypedReceipt::EIP2930(r) => { + let receipt_string_header = Header { + list: false, + payload_length: payload_len, + }; + + receipt_string_header.encode(out); + out.put_u8(0x01); + r.encode(out); + } + TypedReceipt::EIP1559(r) => { + let receipt_string_header = Header { + list: false, + payload_length: payload_len, + }; + + receipt_string_header.encode(out); + out.put_u8(0x02); + r.encode(out); + } + _ => unreachable!("receipt already matched"), + } + } + } + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Decodable for TypedReceipt { + fn decode(buf: &mut &[u8]) -> Result { + use bytes::Buf; + use open_fastrlp::Header; + use std::cmp::Ordering; + + // a receipt is either encoded as a string (non legacy) or a list (legacy). + // We should not consume the buffer if we are decoding a legacy receipt, so let's + // check if the first byte is between 0x80 and 0xbf. 
+ let rlp_type = *buf.first().ok_or(open_fastrlp::DecodeError::Custom( + "cannot decode a receipt from empty bytes", + ))?; + + match rlp_type.cmp(&open_fastrlp::EMPTY_LIST_CODE) { + Ordering::Less => { + // strip out the string header + let _header = Header::decode(buf)?; + let receipt_type = *buf.first().ok_or(open_fastrlp::DecodeError::Custom( + "typed receipt cannot be decoded from an empty slice", + ))?; + if receipt_type == 0x01 { + buf.advance(1); + ::decode(buf) + .map(TypedReceipt::EIP2930) + } else if receipt_type == 0x02 { + buf.advance(1); + ::decode(buf) + .map(TypedReceipt::EIP1559) + } else { + Err(open_fastrlp::DecodeError::Custom("invalid receipt type")) + } + } + Ordering::Equal => Err(open_fastrlp::DecodeError::Custom( + "an empty list is not a valid receipt encoding", + )), + Ordering::Greater => { + ::decode(buf).map(TypedReceipt::Legacy) + } + } + } +} + +impl From for EIP658Receipt { + fn from(v3: TypedReceipt) -> Self { + match v3 { + TypedReceipt::Legacy(receipt) => receipt, + TypedReceipt::EIP2930(receipt) => receipt, + TypedReceipt::EIP1559(receipt) => receipt, + } + } +} + +#[cfg(test)] +mod tests { + #[test] + #[cfg(feature = "fastrlp")] + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + fn encode_legacy_receipt() { + use std::str::FromStr; + + use ethers_core::{ + types::{Bytes, H160, H256}, + utils::hex, + }; + use open_fastrlp::Encodable; + + use crate::eth::receipt::{EIP658Receipt, Log, TypedReceipt}; + + let expected = hex::decode("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff").unwrap(); + + let mut data = vec![]; + let receipt = TypedReceipt::Legacy(EIP658Receipt { + logs_bloom: [0; 256].into(), + gas_used: 0x1u64.into(), + logs: vec![Log { + address: H160::from_str("0000000000000000000000000000000000000011").unwrap(), + topics: vec![ + H256::from_str( + "000000000000000000000000000000000000000000000000000000000000dead", + ) + .unwrap(), + H256::from_str( + "000000000000000000000000000000000000000000000000000000000000beef", + ) + .unwrap(), + ], + data: Bytes::from_str("0100ff").unwrap(), + }], + status_code: 0, + }); + receipt.encode(&mut data); + + // check that the rlp length equals the length of the expected rlp + assert_eq!(receipt.length(), expected.len()); + assert_eq!(data, expected); + } + + #[test] + #[cfg(feature = "fastrlp")] + // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + fn decode_legacy_receipt() { + use std::str::FromStr; + + use ethers_core::{ + types::{Bytes, H160, H256}, + utils::hex, + }; + use open_fastrlp::Decodable; + + use crate::eth::receipt::{EIP658Receipt, Log, TypedReceipt}; + + let data = 
hex::decode("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff").unwrap(); + + let expected = TypedReceipt::Legacy(EIP658Receipt { + logs_bloom: [0; 256].into(), + gas_used: 0x1u64.into(), + logs: vec![Log { + address: H160::from_str("0000000000000000000000000000000000000011").unwrap(), + topics: vec![ + H256::from_str( + "000000000000000000000000000000000000000000000000000000000000dead", + ) + .unwrap(), + H256::from_str( + "000000000000000000000000000000000000000000000000000000000000beef", + ) + .unwrap(), + ], + data: Bytes::from_str("0100ff").unwrap(), + }], + status_code: 0, + }); + + let receipt = TypedReceipt::decode(&mut &data[..]).unwrap(); + assert_eq!(receipt, expected); + } +} diff --git a/crates/rethnet_eth/src/signature.rs b/crates/rethnet_eth/src/signature.rs new file mode 100644 index 0000000000..0ff8e03f1c --- /dev/null +++ b/crates/rethnet_eth/src/signature.rs @@ -0,0 +1,332 @@ +// Part of this code was adapted from ethers-rs and is distributed under their licenss: +// - https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/LICENSE-APACHE +// - https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/LICENSE-MIT +// For the original context see: https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/ethers-core/src/types/signature.rs + +use core::fmt; +use std::str::FromStr; + +use secp256k1::{ + ecdsa::{RecoverableSignature, RecoveryId}, + PublicKey, Secp256k1, ThirtyTwoByteHash, +}; +use sha3::{Digest, Keccak256}; +use thiserror::Error; + +use crate::{utils::hash_message, Address, H256, U256}; + +/// Converts a [`PublicKey`] to an [`Address`]. +pub fn public_key_to_address(public_key: PublicKey) -> Address { + let hash = Keccak256::digest(&public_key.serialize_uncompressed()[1..]); + // Only take the lower 160 bits of the hash + Address::from_slice(&hash[12..]) +} + +/// An error involving a signature. +#[derive(Debug, Error)] +pub enum SignatureError { + /// Invalid length, secp256k1 signatures are 65 bytes + #[error("invalid signature length, got {0}, expected 65")] + InvalidLength(usize), + /// When parsing a signature from string to hex + #[error(transparent)] + DecodingError(#[from] hex::FromHexError), + /// Thrown when signature verification failed (i.e. when the address that + /// produced the signature did not match the expected address) + #[error("Signature verification failed. Expected {0}, got {1}")] + VerificationError(Address, Address), + /// Internal error during signature recovery + #[error(transparent)] + K256Error(#[from] secp256k1::Error), + /// Error in recovering public key from signature + #[error("Public key recovery error")] + RecoveryError, +} + +/// Recovery message data. 
+/// +/// The message data can either be a binary message that is first hashed +/// according to EIP-191 and then recovered based on the signature or a +/// precomputed hash. +#[derive(Clone, Debug, PartialEq, Eq)] +pub enum RecoveryMessage { + /// Message bytes + Data(Vec), + /// Message hash + Hash(H256), +} + +#[derive(Debug, Clone, PartialEq, Eq, Copy, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +/// An ECDSA signature +pub struct Signature { + /// R value + pub r: U256, + /// S Value + pub s: U256, + /// V value + pub v: u64, +} + +impl fmt::Display for Signature { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + let sig = <[u8; 65]>::from(self); + write!(f, "{}", hex::encode(&sig[..])) + } +} + +impl Signature { + /// Verifies that signature on `message` was produced by `address` + pub fn verify(&self, message: M, address: A) -> Result<(), SignatureError> + where + M: Into, + A: Into
, + { + let address = address.into(); + let recovered = self.recover(message)?; + if recovered != address { + return Err(SignatureError::VerificationError(address, recovered)); + } + + Ok(()) + } + + /// Recovers the Ethereum address which was used to sign the given message. + /// + /// Recovery signature data uses 'Electrum' notation, this means the `v` + /// value is expected to be either `27` or `28`. + pub fn recover(&self, message: M) -> Result + where + M: Into, + { + let message = message.into(); + let message_hash = match message { + RecoveryMessage::Data(ref message) => hash_message(message), + RecoveryMessage::Hash(hash) => hash, + }; + + struct Hash(H256); + + impl ThirtyTwoByteHash for Hash { + fn into_32(self) -> [u8; 32] { + self.0 .0 + } + } + + let message_hash = Hash(message_hash); + + let (recoverable_sig, _recovery_id) = self.as_signature()?; + + let context = Secp256k1::verification_only(); + let public_key = context.recover_ecdsa(&message_hash.into(), &recoverable_sig)?; + + Ok(public_key_to_address(public_key)) + } + + /// Retrieves the recovery signature. + fn as_signature(&self) -> Result<(RecoverableSignature, RecoveryId), SignatureError> { + let recovery_id = self.recovery_id()?; + let signature = { + let r_bytes = self.r.to_be_bytes::<32>(); + let s_bytes = self.s.to_be_bytes::<32>(); + + let mut bytes = [0u8; 64]; + bytes[..32].copy_from_slice(&r_bytes); + bytes[32..64].copy_from_slice(&s_bytes); + + RecoverableSignature::from_compact(&bytes, recovery_id)? + }; + + Ok((signature, recovery_id)) + } + + /// Retrieve the recovery ID. + pub fn recovery_id(&self) -> Result { + let standard_v = normalize_recovery_id(self.v); + Ok(RecoveryId::from_i32(standard_v)?) + } + + /// Copies and serializes `self` into a new `Vec` with the recovery id included + #[allow(clippy::wrong_self_convention)] + pub fn to_vec(&self) -> Vec { + self.into() + } + + /// Decodes a signature from RLP bytes, assuming no RLP header + #[cfg(feature = "fastrlp")] + pub(crate) fn decode_signature(buf: &mut &[u8]) -> Result { + let v = u64::decode(buf)?; + Ok(Self { + r: U256::decode(buf)?, + s: U256::decode(buf)?, + v, + }) + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Decodable for Signature { + fn decode(buf: &mut &[u8]) -> Result { + Self::decode_signature(buf) + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Encodable for Signature { + fn length(&self) -> usize { + self.r.length() + self.s.length() + self.v.length() + } + fn encode(&self, out: &mut dyn bytes::BufMut) { + self.v.encode(out); + self.r.encode(out); + self.s.encode(out); + } +} + +fn normalize_recovery_id(v: u64) -> i32 { + match v { + 0 => 0, + 1 => 1, + 27 => 0, + 28 => 1, + v if v >= 35 => ((v - 1) % 2) as _, + _ => 4, + } +} + +impl<'a> TryFrom<&'a [u8]> for Signature { + type Error = SignatureError; + + /// Parses a raw signature which is expected to be 65 bytes long where + /// the first 32 bytes is the `r` value, the second 32 bytes the `s` value + /// and the final byte is the `v` value in 'Electrum' notation. 
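+ ///
+ /// For example, a 65-byte slice of zeroes parses to a signature with
+ /// `r = 0`, `s = 0` and `v = 0`, while a slice of any other length fails
+ /// with [`SignatureError::InvalidLength`].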
+ fn try_from(bytes: &'a [u8]) -> Result { + if bytes.len() != 65 { + return Err(SignatureError::InvalidLength(bytes.len())); + } + + let (r_bytes, remainder) = bytes.split_at(32); + let r = U256::from_be_bytes::<32>(r_bytes.try_into().unwrap()); + + let (s_bytes, remainder) = remainder.split_at(32); + let s = U256::from_be_bytes::<32>(s_bytes.try_into().unwrap()); + + let v = remainder[0]; + + Ok(Signature { r, s, v: v.into() }) + } +} + +impl FromStr for Signature { + type Err = SignatureError; + + fn from_str(s: &str) -> Result { + let s = s.strip_prefix("0x").unwrap_or(s); + let bytes = hex::decode(s)?; + Signature::try_from(&bytes[..]) + } +} + +impl From<&Signature> for [u8; 65] { + fn from(src: &Signature) -> [u8; 65] { + let mut sig = [0u8; 65]; + let r_bytes = src.r.to_be_bytes::<32>(); + let s_bytes = src.s.to_be_bytes::<32>(); + sig[..32].copy_from_slice(&r_bytes); + sig[32..64].copy_from_slice(&s_bytes); + // TODO: What if we try to serialize a signature where + // the `v` is not normalized? + + // The u64 to u8 cast is safe because `sig.v` can only ever be 27 or 28 + // here. Regarding EIP-155, the modification to `v` happens during tx + // creation only _after_ the transaction is signed using + // `ethers_signers::to_eip155_v`. + sig[64] = src.v as u8; + sig + } +} + +impl From for [u8; 65] { + fn from(src: Signature) -> [u8; 65] { + <[u8; 65]>::from(&src) + } +} + +impl From<&Signature> for Vec { + fn from(src: &Signature) -> Vec { + <[u8; 65]>::from(src).to_vec() + } +} + +impl From for Vec { + fn from(src: Signature) -> Vec { + <[u8; 65]>::from(&src).to_vec() + } +} + +impl From<&[u8]> for RecoveryMessage { + fn from(s: &[u8]) -> Self { + s.to_owned().into() + } +} + +impl From> for RecoveryMessage { + fn from(s: Vec) -> Self { + RecoveryMessage::Data(s) + } +} + +impl From<&str> for RecoveryMessage { + fn from(s: &str) -> Self { + s.as_bytes().to_owned().into() + } +} + +impl From for RecoveryMessage { + fn from(s: String) -> Self { + RecoveryMessage::Data(s.into_bytes()) + } +} + +impl From<[u8; 32]> for RecoveryMessage { + fn from(hash: [u8; 32]) -> Self { + H256(hash).into() + } +} + +impl From for RecoveryMessage { + fn from(hash: H256) -> Self { + RecoveryMessage::Hash(hash) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn recover_web3_signature() { + // test vector taken from: + // https://web3js.readthedocs.io/en/v1.2.2/web3-eth-accounts.html#sign + let signature = Signature::from_str( + "b91467e570a6466aa9e9876cbcd013baba02900b8979d43fe208a4a4f339f5fd6007e74cd82e037b800186422fc2da167c747ef045e5d18a5f5d4300f8e1a0291c" + ).expect("could not parse signature"); + assert_eq!( + signature.recover("Some data").unwrap(), + Address::from_str("2c7536E3605D9C16a7a3D7b1898e529396a65c23").unwrap() + ); + } + + #[test] + fn signature_from_str() { + let s1 = Signature::from_str( + "0xaa231fbe0ed2b5418e6ba7c19bee2522852955ec50996c02a2fe3e71d30ddaf1645baf4823fea7cb4fcc7150842493847cfb6a6d63ab93e8ee928ee3f61f503500" + ).expect("could not parse 0x-prefixed signature"); + + let s2 = Signature::from_str( + "aa231fbe0ed2b5418e6ba7c19bee2522852955ec50996c02a2fe3e71d30ddaf1645baf4823fea7cb4fcc7150842493847cfb6a6d63ab93e8ee928ee3f61f503500" + ).expect("could not parse non-prefixed signature"); + + assert_eq!(s1, s2); + } +} diff --git a/crates/rethnet_eth/src/state.rs b/crates/rethnet_eth/src/state.rs new file mode 100644 index 0000000000..b45e3282cd --- /dev/null +++ b/crates/rethnet_eth/src/state.rs @@ -0,0 +1,63 @@ +use hashbrown::HashMap; +use 
primitive_types::H256; +use ruint::aliases::U256; + +use crate::{account::BasicAccount, trie::sec_trie_root, Address}; + +/// State mapping of addresses to accounts. +pub type State = HashMap; + +/// Account storage mapping of indices to values. +pub type Storage = HashMap; + +pub fn state_root(state: &State) -> H256 { + sec_trie_root(state.iter().map(|(address, account)| { + let account = rlp::encode(account); + (address, account) + })) +} + +pub fn storage_root(storage: &Storage) -> H256 { + sec_trie_root(storage.iter().map(|(index, value)| { + let index = H256::from(index.to_be_bytes()); + let value = rlp::encode(value); + (index, value) + })) +} + +#[cfg(test)] +mod tests { + use std::str::FromStr; + + use crate::trie::KECCAK_NULL_RLP; + + use super::*; + + #[test] + fn empty_state_root() { + let state = State::default(); + + assert_eq!(state_root(&state), KECCAK_NULL_RLP); + } + + #[test] + fn empty_storage_root() { + let storage = Storage::default(); + + assert_eq!(storage_root(&storage), KECCAK_NULL_RLP); + } + + #[test] + fn precompiles_state_root() { + let mut state = State::default(); + + for idx in 1..=8 { + let mut address = Address::zero(); + address.0[19] = idx; + state.insert(address, BasicAccount::default()); + } + + const EXPECTED: &str = "0x5766c887a7240e4d1c035ccd3830a2f6a0c03d213a9f0b9b27c774916a4abcce"; + assert_eq!(state_root(&state), H256::from_str(EXPECTED).unwrap()) + } +} diff --git a/crates/rethnet_eth/src/transaction.rs b/crates/rethnet_eth/src/transaction.rs new file mode 100644 index 0000000000..a84aa85f61 --- /dev/null +++ b/crates/rethnet_eth/src/transaction.rs @@ -0,0 +1,1237 @@ +// Part of this code was adapted from foundry and is distributed under their licenss: +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-APACHE +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT +// For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/transaction/mod.rs + +//! transaction related data + +use crate::{ + access_list::{AccessList, AccessListItem}, + signature::{Signature, SignatureError}, + utils::enveloped, + Address, Bytes, H256, U256, +}; +use revm::common::keccak256; +use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; + +/// Container type for various Ethereum transaction requests +/// +/// Its variants correspond to specific allowed transactions: +/// 1. Legacy (pre-EIP2718) [`LegacyTransactionRequest`] +/// 2. EIP2930 (state access lists) [`EIP2930TransactionRequest`] +/// 3. EIP1559 [`EIP1559TransactionRequest`] +#[derive(Debug, Clone, Eq, PartialEq)] +pub enum TransactionRequest { + Legacy(LegacyTransactionRequest), + EIP2930(EIP2930TransactionRequest), + EIP1559(EIP1559TransactionRequest), +} + +/// Represents _all_ transaction requests received from RPC +#[derive(Clone, Debug, PartialEq, Eq, Default)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(deny_unknown_fields))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct EthTransactionRequest { + /// from address + pub from: Option
<Address>, + /// to address + pub to: Option<Address>
, + /// legacy, gas Price + #[cfg_attr(feature = "serde", serde(default))] + pub gas_price: Option, + /// max base fee per gas sender is willing to pay + #[cfg_attr(feature = "serde", serde(default))] + pub max_fee_per_gas: Option, + /// miner tip + #[cfg_attr(feature = "serde", serde(default))] + pub max_priority_fee_per_gas: Option, + /// gas + pub gas: Option, + /// value of th tx in wei + pub value: Option, + /// Any additional data sent + pub data: Option, + /// Transaction nonce + pub nonce: Option, + /// warm storage access pre-payment + #[cfg_attr(feature = "serde", serde(default))] + pub access_list: Option>, + /// EIP-2718 type + #[cfg_attr(feature = "serde", serde(rename = "type"))] + pub transaction_type: Option, +} + +impl EthTransactionRequest { + /// Converts the request into a [TypedTransactionRequest] + pub fn into_typed_request(self) -> Option { + let EthTransactionRequest { + to, + gas_price, + max_fee_per_gas, + max_priority_fee_per_gas, + gas, + value, + data, + nonce, + mut access_list, + .. + } = self; + match (gas_price, max_fee_per_gas, access_list.take()) { + // legacy transaction + (Some(_), None, None) => Some(TransactionRequest::Legacy(LegacyTransactionRequest { + nonce: nonce.unwrap_or(0), + gas_price: gas_price.unwrap_or_default(), + gas_limit: gas.unwrap_or_default(), + value: value.unwrap_or(U256::ZERO), + input: data.unwrap_or_default(), + kind: match to { + Some(to) => TransactionKind::Call(to), + None => TransactionKind::Create, + }, + chain_id: None, + })), + // EIP2930 + (_, None, Some(access_list)) => { + Some(TransactionRequest::EIP2930(EIP2930TransactionRequest { + nonce: nonce.unwrap_or(0), + gas_price: gas_price.unwrap_or_default(), + gas_limit: gas.unwrap_or_default(), + value: value.unwrap_or(U256::ZERO), + input: data.unwrap_or_default(), + kind: match to { + Some(to) => TransactionKind::Call(to), + None => TransactionKind::Create, + }, + chain_id: 0, + access_list, + })) + } + // EIP1559 + (None, Some(_), access_list) | (None, None, access_list @ None) => { + // Empty fields fall back to the canonical transaction schema. 
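+ // To summarize the dispatch in this match: an explicit `gas_price` with
+ // neither `max_fee_per_gas` nor an access list selects a legacy request; an
+ // access list without `max_fee_per_gas` selects EIP-2930; a
+ // `max_fee_per_gas` without a legacy `gas_price`, or a request with none of
+ // these fields, is treated as EIP-1559 with unset values defaulting to
+ // zero; any other combination yields `None`.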
+ Some(TransactionRequest::EIP1559(EIP1559TransactionRequest { + nonce: nonce.unwrap_or(0), + max_fee_per_gas: max_fee_per_gas.unwrap_or_default(), + max_priority_fee_per_gas: max_priority_fee_per_gas.unwrap_or(U256::ZERO), + gas_limit: gas.unwrap_or_default(), + value: value.unwrap_or(U256::ZERO), + input: data.unwrap_or_default(), + kind: match to { + Some(to) => TransactionKind::Call(to), + None => TransactionKind::Create, + }, + chain_id: 0, + access_list: access_list.unwrap_or_default(), + })) + } + _ => None, + } + } +} + +#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum TransactionKind { + Call(Address), + Create, +} + +impl TransactionKind { + /// If this transaction is a call this returns the address of the callee + pub fn as_call(&self) -> Option<&Address> { + match self { + TransactionKind::Call(to) => Some(to), + TransactionKind::Create => None, + } + } +} + +impl Encodable for TransactionKind { + fn rlp_append(&self, s: &mut RlpStream) { + match self { + TransactionKind::Call(address) => { + s.encoder().encode_value(&address[..]); + } + TransactionKind::Create => s.encoder().encode_value(&[]), + } + } +} + +impl Decodable for TransactionKind { + fn decode(rlp: &Rlp) -> Result { + if rlp.is_empty() { + if rlp.is_data() { + Ok(TransactionKind::Create) + } else { + Err(DecoderError::RlpExpectedToBeData) + } + } else { + Ok(TransactionKind::Call(rlp.as_val()?)) + } + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Encodable for TransactionKind { + fn length(&self) -> usize { + match self { + TransactionKind::Call(to) => to.length(), + TransactionKind::Create => ([]).length(), + } + } + fn encode(&self, out: &mut dyn open_fastrlp::BufMut) { + match self { + TransactionKind::Call(to) => to.encode(out), + TransactionKind::Create => ([]).encode(out), + } + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Decodable for TransactionKind { + fn decode(buf: &mut &[u8]) -> Result { + use bytes::Buf; + + if let Some(&first) = buf.first() { + if first == 0x80 { + buf.advance(1); + Ok(TransactionKind::Create) + } else { + let addr =
::decode(buf)?; + Ok(TransactionKind::Call(addr)) + } + } else { + Err(open_fastrlp::DecodeError::InputTooShort) + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +pub struct EIP2930TransactionRequest { + pub chain_id: u64, + pub nonce: u64, + pub gas_price: U256, + pub gas_limit: u64, + pub kind: TransactionKind, + pub value: U256, + pub input: Bytes, + pub access_list: Vec, +} + +impl EIP2930TransactionRequest { + pub fn hash(&self) -> H256 { + let encoded = rlp::encode(self); + let mut out = vec![0; 1 + encoded.len()]; + out[0] = 1; + out[1..].copy_from_slice(&encoded); + keccak256(&out) + } +} + +impl From for EIP2930TransactionRequest { + fn from(tx: EIP2930SignedTransaction) -> Self { + Self { + chain_id: tx.chain_id, + nonce: tx.nonce, + gas_price: tx.gas_price, + gas_limit: tx.gas_limit, + kind: tx.kind, + value: tx.value, + input: tx.input, + access_list: tx.access_list.0, + } + } +} + +impl Encodable for EIP2930TransactionRequest { + fn rlp_append(&self, s: &mut RlpStream) { + s.begin_list(8); + s.append(&self.chain_id); + s.append(&self.nonce); + s.append(&self.gas_price); + s.append(&self.gas_limit); + s.append(&self.kind); + s.append(&self.value); + s.append(&self.input.as_ref()); + s.append_list(&self.access_list); + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +pub struct LegacyTransactionRequest { + pub nonce: u64, + pub gas_price: U256, + pub gas_limit: u64, + pub kind: TransactionKind, + pub value: U256, + pub input: Bytes, + pub chain_id: Option, +} + +impl LegacyTransactionRequest { + pub fn hash(&self) -> H256 { + keccak256(&rlp::encode(self)) + } +} + +impl From for LegacyTransactionRequest { + fn from(tx: LegacySignedTransaction) -> Self { + let chain_id = tx.chain_id(); + Self { + nonce: tx.nonce, + gas_price: tx.gas_price, + gas_limit: tx.gas_limit, + kind: tx.kind, + value: tx.value, + input: tx.input, + chain_id, + } + } +} + +impl Encodable for LegacyTransactionRequest { + fn rlp_append(&self, s: &mut RlpStream) { + if let Some(chain_id) = self.chain_id { + s.begin_list(9); + s.append(&self.nonce); + s.append(&self.gas_price); + s.append(&self.gas_limit); + s.append(&self.kind); + s.append(&self.value); + s.append(&self.input.as_ref()); + s.append(&chain_id); + s.append(&0u8); + s.append(&0u8); + } else { + s.begin_list(6); + s.append(&self.nonce); + s.append(&self.gas_price); + s.append(&self.gas_limit); + s.append(&self.kind); + s.append(&self.value); + s.append(&self.input.as_ref()); + } + } +} + +#[derive(Debug, Clone, PartialEq, Eq)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +pub struct EIP1559TransactionRequest { + pub chain_id: u64, + pub nonce: u64, + pub max_priority_fee_per_gas: U256, + pub max_fee_per_gas: U256, + pub gas_limit: u64, + pub kind: TransactionKind, + pub value: U256, + pub input: Bytes, + pub access_list: Vec, +} + +impl EIP1559TransactionRequest { + pub fn hash(&self) -> H256 { + let encoded = rlp::encode(self); + let mut out = vec![0; 1 + encoded.len()]; + out[0] = 2; + out[1..].copy_from_slice(&encoded); + keccak256(&out) + } +} + +impl From for EIP1559TransactionRequest { + fn from(t: EIP1559SignedTransaction) -> Self { + Self { + chain_id: t.chain_id, + nonce: t.nonce, + max_priority_fee_per_gas: t.max_priority_fee_per_gas, + max_fee_per_gas: t.max_fee_per_gas, + gas_limit: t.gas_limit, + kind: t.kind, + value: t.value, + input: t.input, + access_list: 
t.access_list.0, + } + } +} + +impl Encodable for EIP1559TransactionRequest { + fn rlp_append(&self, s: &mut RlpStream) { + s.begin_list(9); + s.append(&self.chain_id); + s.append(&self.nonce); + s.append(&self.max_priority_fee_per_gas); + s.append(&self.max_fee_per_gas); + s.append(&self.gas_limit); + s.append(&self.kind); + s.append(&self.value); + s.append(&self.input.as_ref()); + s.append_list(&self.access_list); + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub enum SignedTransaction { + /// Legacy transaction type + Legacy(LegacySignedTransaction), + /// EIP-2930 transaction + EIP2930(EIP2930SignedTransaction), + /// EIP-1559 transaction + EIP1559(EIP1559SignedTransaction), +} + +impl SignedTransaction { + pub fn gas_price(&self) -> U256 { + match self { + SignedTransaction::Legacy(tx) => tx.gas_price, + SignedTransaction::EIP2930(tx) => tx.gas_price, + SignedTransaction::EIP1559(tx) => tx.max_fee_per_gas, + } + } + + pub fn gas_limit(&self) -> u64 { + match self { + SignedTransaction::Legacy(tx) => tx.gas_limit, + SignedTransaction::EIP2930(tx) => tx.gas_limit, + SignedTransaction::EIP1559(tx) => tx.gas_limit, + } + } + + pub fn value(&self) -> U256 { + match self { + SignedTransaction::Legacy(tx) => tx.value, + SignedTransaction::EIP2930(tx) => tx.value, + SignedTransaction::EIP1559(tx) => tx.value, + } + } + + pub fn data(&self) -> &Bytes { + match self { + SignedTransaction::Legacy(tx) => &tx.input, + SignedTransaction::EIP2930(tx) => &tx.input, + SignedTransaction::EIP1559(tx) => &tx.input, + } + } + + /// Max cost of the transaction + pub fn max_cost(&self) -> U256 { + U256::from(self.gas_limit()).saturating_mul(self.gas_price()) + } + + /// Returns a helper type that contains commonly used values as fields + pub fn essentials(&self) -> TransactionEssentials { + match self { + SignedTransaction::Legacy(t) => TransactionEssentials { + kind: t.kind, + input: t.input.clone(), + nonce: t.nonce, + gas_limit: t.gas_limit, + gas_price: Some(t.gas_price), + max_fee_per_gas: None, + max_priority_fee_per_gas: None, + value: t.value, + chain_id: t.chain_id(), + access_list: Default::default(), + }, + SignedTransaction::EIP2930(t) => TransactionEssentials { + kind: t.kind, + input: t.input.clone(), + nonce: t.nonce, + gas_limit: t.gas_limit, + gas_price: Some(t.gas_price), + max_fee_per_gas: None, + max_priority_fee_per_gas: None, + value: t.value, + chain_id: Some(t.chain_id), + access_list: t.access_list.clone(), + }, + SignedTransaction::EIP1559(t) => TransactionEssentials { + kind: t.kind, + input: t.input.clone(), + nonce: t.nonce, + gas_limit: t.gas_limit, + gas_price: None, + max_fee_per_gas: Some(t.max_fee_per_gas), + max_priority_fee_per_gas: Some(t.max_priority_fee_per_gas), + value: t.value, + chain_id: Some(t.chain_id), + access_list: t.access_list.clone(), + }, + } + } + + pub fn nonce(&self) -> &u64 { + match self { + SignedTransaction::Legacy(t) => t.nonce(), + SignedTransaction::EIP2930(t) => t.nonce(), + SignedTransaction::EIP1559(t) => t.nonce(), + } + } + + pub fn chain_id(&self) -> Option { + match self { + SignedTransaction::Legacy(t) => t.chain_id(), + SignedTransaction::EIP2930(t) => Some(t.chain_id), + SignedTransaction::EIP1559(t) => Some(t.chain_id), + } + } + + pub fn as_legacy(&self) -> Option<&LegacySignedTransaction> { + match self { + SignedTransaction::Legacy(tx) => Some(tx), + _ => None, + } + } + + /// Returns true whether this tx is a legacy transaction + pub fn 
is_legacy(&self) -> bool { + matches!(self, SignedTransaction::Legacy(_)) + } + + /// Returns true whether this tx is a EIP1559 transaction + pub fn is_eip1559(&self) -> bool { + matches!(self, SignedTransaction::EIP1559(_)) + } + + pub fn hash(&self) -> H256 { + match self { + SignedTransaction::Legacy(t) => t.hash(), + SignedTransaction::EIP2930(t) => t.hash(), + SignedTransaction::EIP1559(t) => t.hash(), + } + } + + /// Recovers the Ethereum address which was used to sign the transaction. + pub fn recover(&self) -> Result { + match self { + SignedTransaction::Legacy(tx) => tx.recover(), + SignedTransaction::EIP2930(tx) => tx.recover(), + SignedTransaction::EIP1559(tx) => tx.recover(), + } + } + + /// Returns what kind of transaction this is + pub fn kind(&self) -> &TransactionKind { + match self { + SignedTransaction::Legacy(tx) => &tx.kind, + SignedTransaction::EIP2930(tx) => &tx.kind, + SignedTransaction::EIP1559(tx) => &tx.kind, + } + } + + /// Returns the callee if this transaction is a call + pub fn to(&self) -> Option<&Address> { + self.kind().as_call() + } + + /// Returns the Signature of the transaction + pub fn signature(&self) -> Signature { + match self { + SignedTransaction::Legacy(tx) => tx.signature, + SignedTransaction::EIP2930(tx) => { + let v = tx.odd_y_parity as u8; + let r = U256::from_be_bytes(tx.r.0); + let s = U256::from_be_bytes(tx.s.0); + Signature { r, s, v: v.into() } + } + SignedTransaction::EIP1559(tx) => { + let v = tx.odd_y_parity as u8; + let r = U256::from_be_bytes(tx.r.0); + let s = U256::from_be_bytes(tx.s.0); + Signature { r, s, v: v.into() } + } + } + } +} + +impl Encodable for SignedTransaction { + fn rlp_append(&self, s: &mut RlpStream) { + match self { + SignedTransaction::Legacy(tx) => tx.rlp_append(s), + SignedTransaction::EIP2930(tx) => enveloped(1, tx, s), + SignedTransaction::EIP1559(tx) => enveloped(2, tx, s), + } + } +} + +impl Decodable for SignedTransaction { + fn decode(rlp: &Rlp) -> Result { + let data = rlp.data()?; + let first = *data.first().ok_or(DecoderError::Custom("empty slice"))?; + if rlp.is_list() { + return Ok(SignedTransaction::Legacy(rlp.as_val()?)); + } + let s = data.get(1..).ok_or(DecoderError::Custom("no tx body"))?; + if first == 0x01 { + return rlp::decode(s).map(SignedTransaction::EIP2930); + } + if first == 0x02 { + return rlp::decode(s).map(SignedTransaction::EIP1559); + } + Err(DecoderError::Custom("invalid tx type")) + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Encodable for SignedTransaction { + fn length(&self) -> usize { + match self { + SignedTransaction::Legacy(tx) => tx.length(), + tx => { + let payload_len = match tx { + SignedTransaction::EIP2930(tx) => tx.length() + 1, + SignedTransaction::EIP1559(tx) => tx.length() + 1, + _ => unreachable!("legacy tx length already matched"), + }; + // we include a string header for signed types txs, so include the length here + payload_len + open_fastrlp::length_of_length(payload_len) + } + } + } + fn encode(&self, out: &mut dyn open_fastrlp::BufMut) { + match self { + SignedTransaction::Legacy(tx) => tx.encode(out), + tx => { + let payload_len = match tx { + SignedTransaction::EIP2930(tx) => tx.length() + 1, + SignedTransaction::EIP1559(tx) => tx.length() + 1, + _ => unreachable!("legacy tx length already matched"), + }; + + match tx { + SignedTransaction::EIP2930(tx) => { + let tx_string_header = open_fastrlp::Header { + list: false, + payload_length: payload_len, + }; + + tx_string_header.encode(out); + out.put_u8(0x01); + tx.encode(out); + } + 
SignedTransaction::EIP1559(tx) => { + let tx_string_header = open_fastrlp::Header { + list: false, + payload_length: payload_len, + }; + + tx_string_header.encode(out); + out.put_u8(0x02); + tx.encode(out); + } + _ => unreachable!("legacy tx encode already matched"), + } + } + } + } +} + +#[cfg(feature = "fastrlp")] +impl open_fastrlp::Decodable for SignedTransaction { + fn decode(buf: &mut &[u8]) -> Result { + use bytes::Buf; + use std::cmp::Ordering; + + let first = *buf + .first() + .ok_or(open_fastrlp::DecodeError::Custom("empty slice"))?; + + // a signed transaction is either encoded as a string (non legacy) or a list (legacy). + // We should not consume the buffer if we are decoding a legacy transaction, so let's + // check if the first byte is between 0x80 and 0xbf. + match first.cmp(&open_fastrlp::EMPTY_LIST_CODE) { + Ordering::Less => { + // strip out the string header + // NOTE: typed transaction encodings either contain a "rlp header" which contains + // the type of the payload and its length, or they do not contain a header and + // start with the tx type byte. + // + // This line works for both types of encodings because byte slices starting with + // 0x01 and 0x02 return a Header { list: false, payload_length: 1 } when input to + // Header::decode. + // If the encoding includes a header, the header will be properly decoded and + // consumed. + // Otherwise, header decoding will succeed but nothing is consumed. + let _header = open_fastrlp::Header::decode(buf)?; + let tx_type = *buf.first().ok_or(open_fastrlp::DecodeError::Custom( + "typed tx cannot be decoded from an empty slice", + ))?; + if tx_type == 0x01 { + buf.advance(1); + ::decode(buf) + .map(SignedTransaction::EIP2930) + } else if tx_type == 0x02 { + buf.advance(1); + ::decode(buf) + .map(SignedTransaction::EIP1559) + } else { + Err(open_fastrlp::DecodeError::Custom("invalid tx type")) + } + } + Ordering::Equal => Err(open_fastrlp::DecodeError::Custom( + "an empty list is not a valid transaction encoding", + )), + Ordering::Greater => ::decode(buf) + .map(SignedTransaction::Legacy), + } + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct LegacySignedTransaction { + pub nonce: u64, + pub gas_price: U256, + pub gas_limit: u64, + pub kind: TransactionKind, + pub value: U256, + pub input: Bytes, + pub signature: Signature, +} + +impl LegacySignedTransaction { + pub fn nonce(&self) -> &u64 { + &self.nonce + } + + pub fn hash(&self) -> H256 { + keccak256(&rlp::encode(self)) + } + + /// Recovers the Ethereum address which was used to sign the transaction. + pub fn recover(&self) -> Result { + self.signature + .recover(LegacyTransactionRequest::from(self.clone()).hash()) + } + + pub fn chain_id(&self) -> Option { + if self.signature.v > 36 { + Some((self.signature.v - 35) / 2) + } else { + None + } + } + + /// See + /// > If you do, then the v of the signature MUST be set to {0,1} + CHAIN_ID * 2 + 35 where + /// > {0,1} is the parity of the y value of the curve point for which r is the x-value in the + /// > secp256k1 signing process. 
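+ ///
+ /// For example, with `chain_id = 1` (mainnet) the only `v` values accepted
+ /// by this check are `37` and `38`.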
+ pub fn meets_eip155(&self, chain_id: u64) -> bool { + let double_chain_id = chain_id.saturating_mul(2); + let v = self.signature.v; + v == double_chain_id + 35 || v == double_chain_id + 36 + } +} + +impl Encodable for LegacySignedTransaction { + fn rlp_append(&self, s: &mut RlpStream) { + s.begin_list(9); + s.append(&self.nonce); + s.append(&self.gas_price); + s.append(&self.gas_limit); + s.append(&self.kind); + s.append(&self.value); + s.append(&self.input.as_ref()); + s.append(&self.signature.v); + s.append(&self.signature.r); + s.append(&self.signature.s); + } +} + +impl Decodable for LegacySignedTransaction { + fn decode(rlp: &Rlp) -> Result { + if rlp.item_count()? != 9 { + return Err(DecoderError::RlpIncorrectListLen); + } + + let v = rlp.val_at(6)?; + let r = rlp.val_at::(7)?; + let s = rlp.val_at::(8)?; + + Ok(Self { + nonce: rlp.val_at(0)?, + gas_price: rlp.val_at(1)?, + gas_limit: rlp.val_at(2)?, + kind: rlp.val_at(3)?, + value: rlp.val_at(4)?, + input: rlp.val_at::>(5)?.into(), + signature: Signature { v, r, s }, + }) + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct EIP2930SignedTransaction { + pub chain_id: u64, + pub nonce: u64, + pub gas_price: U256, + pub gas_limit: u64, + pub kind: TransactionKind, + pub value: U256, + pub input: Bytes, + pub access_list: AccessList, + pub odd_y_parity: bool, + pub r: H256, + pub s: H256, +} + +impl EIP2930SignedTransaction { + pub fn nonce(&self) -> &u64 { + &self.nonce + } + + pub fn hash(&self) -> H256 { + let encoded = rlp::encode(self); + let mut out = vec![0; 1 + encoded.len()]; + out[0] = 1; + out[1..].copy_from_slice(&encoded); + keccak256(&out) + } + + /// Recovers the Ethereum address which was used to sign the transaction. + pub fn recover(&self) -> Result { + let mut sig = [0u8; 65]; + sig[0..32].copy_from_slice(&self.r[..]); + sig[32..64].copy_from_slice(&self.s[..]); + sig[64] = self.odd_y_parity as u8; + let signature = Signature::try_from(&sig[..])?; + signature.recover(EIP2930TransactionRequest::from(self.clone()).hash()) + } +} + +impl rlp::Encodable for EIP2930SignedTransaction { + fn rlp_append(&self, s: &mut RlpStream) { + s.begin_list(11); + s.append(&self.chain_id); + s.append(&self.nonce); + s.append(&self.gas_price); + s.append(&self.gas_limit); + s.append(&self.kind); + s.append(&self.value); + s.append(&self.input.as_ref()); + s.append(&self.access_list); + s.append(&self.odd_y_parity); + s.append(&U256::from_be_bytes(self.r.0)); + s.append(&U256::from_be_bytes(self.s.0)); + } +} + +impl rlp::Decodable for EIP2930SignedTransaction { + fn decode(rlp: &Rlp) -> Result { + if rlp.item_count()? 
!= 11 { + return Err(DecoderError::RlpIncorrectListLen); + } + + Ok(Self { + chain_id: rlp.val_at(0)?, + nonce: rlp.val_at(1)?, + gas_price: rlp.val_at(2)?, + gas_limit: rlp.val_at(3)?, + kind: rlp.val_at(4)?, + value: rlp.val_at(5)?, + input: rlp.val_at::>(6)?.into(), + access_list: rlp.val_at(7)?, + odd_y_parity: rlp.val_at(8)?, + r: { + let rarr = rlp.val_at::(9)?.to_be_bytes(); + H256::from(rarr) + }, + s: { + let sarr = rlp.val_at::(10)?.to_be_bytes(); + H256::from(sarr) + }, + }) + } +} + +#[derive(Clone, Debug, PartialEq, Eq, Hash)] +#[cfg_attr( + feature = "fastrlp", + derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) +)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +pub struct EIP1559SignedTransaction { + pub chain_id: u64, + pub nonce: u64, + pub max_priority_fee_per_gas: U256, + pub max_fee_per_gas: U256, + pub gas_limit: u64, + pub kind: TransactionKind, + pub value: U256, + pub input: Bytes, + pub access_list: AccessList, + pub odd_y_parity: bool, + pub r: H256, + pub s: H256, +} + +impl EIP1559SignedTransaction { + pub fn nonce(&self) -> &u64 { + &self.nonce + } + + pub fn hash(&self) -> H256 { + let encoded = rlp::encode(self); + let mut out = vec![0; 1 + encoded.len()]; + out[0] = 2; + out[1..].copy_from_slice(&encoded); + keccak256(&out) + } + + /// Recovers the Ethereum address which was used to sign the transaction. + pub fn recover(&self) -> Result { + let mut sig = [0u8; 65]; + sig[0..32].copy_from_slice(&self.r[..]); + sig[32..64].copy_from_slice(&self.s[..]); + sig[64] = self.odd_y_parity as u8; + let signature = Signature::try_from(&sig[..])?; + signature.recover(EIP1559TransactionRequest::from(self.clone()).hash()) + } +} + +impl Encodable for EIP1559SignedTransaction { + fn rlp_append(&self, s: &mut RlpStream) { + s.begin_list(12); + s.append(&self.chain_id); + s.append(&self.nonce); + s.append(&self.max_priority_fee_per_gas); + s.append(&self.max_fee_per_gas); + s.append(&self.gas_limit); + s.append(&self.kind); + s.append(&self.value); + s.append(&self.input.as_ref()); + s.append(&self.access_list); + s.append(&self.odd_y_parity); + s.append(&U256::from_be_bytes(self.r.0)); + s.append(&U256::from_be_bytes(self.s.0)); + } +} + +impl Decodable for EIP1559SignedTransaction { + fn decode(rlp: &Rlp) -> Result { + if rlp.item_count()? 
!= 12 { + return Err(DecoderError::RlpIncorrectListLen); + } + + Ok(Self { + chain_id: rlp.val_at(0)?, + nonce: rlp.val_at(1)?, + max_priority_fee_per_gas: rlp.val_at(2)?, + max_fee_per_gas: rlp.val_at(3)?, + gas_limit: rlp.val_at(4)?, + kind: rlp.val_at(5)?, + value: rlp.val_at(6)?, + input: rlp.val_at::>(7)?.into(), + access_list: rlp.val_at(8)?, + odd_y_parity: rlp.val_at(9)?, + r: { + let rarr = rlp.val_at::(10)?.to_be_bytes(); + H256::from(rarr) + }, + s: { + let sarr = rlp.val_at::(11)?.to_be_bytes(); + H256::from(sarr) + }, + }) + } +} + +#[derive(Clone, Debug, PartialEq, Eq)] +pub struct TransactionEssentials { + pub kind: TransactionKind, + pub input: Bytes, + pub nonce: u64, + pub gas_limit: u64, + pub gas_price: Option, + pub max_fee_per_gas: Option, + pub max_priority_fee_per_gas: Option, + pub value: U256, + pub chain_id: Option, + pub access_list: AccessList, +} + +#[cfg(test)] +mod tests { + use bytes::Bytes; + + use super::*; + + #[test] + fn can_recover_sender() { + let bytes = hex::decode("f85f800182520894095e7baea6a6c7c4c2dfeb977efac326af552d870a801ba048b55bfa915ac795c431978d8a6a992b628d557da5ff759b307d495a36649353a0efffd310ac743f371de3b9f7f9cb56c0b28ad43601b4ab949f53faa07bd2c804").unwrap(); + + let tx: SignedTransaction = rlp::decode(&bytes).expect("decoding TypedTransaction failed"); + let tx = match tx { + SignedTransaction::Legacy(tx) => tx, + _ => panic!("Invalid typed transaction"), + }; + assert_eq!(tx.input, Bytes::new()); + assert_eq!(tx.gas_price, U256::from(0x01u64)); + assert_eq!(tx.gas_limit, 0x5208u64); + assert_eq!(tx.nonce, 0x00u64); + if let TransactionKind::Call(ref to) = tx.kind { + assert_eq!( + *to, + "095e7baea6a6c7c4c2dfeb977efac326af552d87".parse().unwrap() + ); + } else { + panic!(); + } + assert_eq!(tx.value, U256::from(0x0au64)); + assert_eq!( + tx.recover().unwrap(), + "0f65fe9276bc9a24ae7083ae28e2660ef72df99e".parse().unwrap() + ); + } + + #[test] + #[cfg(feature = "fastrlp")] + fn test_decode_fastrlp_create() { + use bytes::BytesMut; + use open_fastrlp::Encodable; + + // tests that a contract creation tx encodes and decodes properly + + let tx = SignedTransaction::EIP2930(EIP2930SignedTransaction { + chain_id: 1u64, + nonce: 0, + gas_price: U256::from(1), + gas_limit: 2, + kind: TransactionKind::Create, + value: U256::from(3), + input: Bytes::from(vec![1, 2]), + odd_y_parity: true, + r: H256::default(), + s: H256::default(), + access_list: vec![].into(), + }); + + let mut encoded = BytesMut::new(); + tx.encode(&mut encoded); + + let decoded = + ::decode(&mut &*encoded).unwrap(); + assert_eq!(decoded, tx); + } + + #[test] + #[cfg(feature = "fastrlp")] + fn test_decode_fastrlp_create_goerli() { + // test that an example create tx from goerli decodes properly + let tx_bytes = + 
hex::decode("02f901ee05228459682f008459682f11830209bf8080b90195608060405234801561001057600080fd5b50610175806100206000396000f3fe608060405234801561001057600080fd5b506004361061002b5760003560e01c80630c49c36c14610030575b600080fd5b61003861004e565b604051610045919061011d565b60405180910390f35b60606020600052600f6020527f68656c6c6f2073746174656d696e64000000000000000000000000000000000060405260406000f35b600081519050919050565b600082825260208201905092915050565b60005b838110156100be5780820151818401526020810190506100a3565b838111156100cd576000848401525b50505050565b6000601f19601f8301169050919050565b60006100ef82610084565b6100f9818561008f565b93506101098185602086016100a0565b610112816100d3565b840191505092915050565b6000602082019050818103600083015261013781846100e4565b90509291505056fea264697066735822122051449585839a4ea5ac23cae4552ef8a96b64ff59d0668f76bfac3796b2bdbb3664736f6c63430008090033c080a0136ebffaa8fc8b9fda9124de9ccb0b1f64e90fbd44251b4c4ac2501e60b104f9a07eb2999eec6d185ef57e91ed099afb0a926c5b536f0155dd67e537c7476e1471") + .unwrap(); + let _decoded = + ::decode(&mut &tx_bytes[..]).unwrap(); + } + + #[test] + #[cfg(feature = "fastrlp")] + fn test_decode_fastrlp_call() { + use bytes::BytesMut; + use open_fastrlp::Encodable; + + let tx = SignedTransaction::EIP2930(EIP2930SignedTransaction { + chain_id: 1u64, + nonce: 0, + gas_price: U256::from(1), + gas_limit: 2, + kind: TransactionKind::Call(Address::default()), + value: U256::from(3), + input: Bytes::from(vec![1, 2]), + odd_y_parity: true, + r: H256::default(), + s: H256::default(), + access_list: vec![].into(), + }); + + let mut encoded = BytesMut::new(); + tx.encode(&mut encoded); + + let decoded = + ::decode(&mut &*encoded).unwrap(); + assert_eq!(decoded, tx); + } + + #[test] + #[cfg(feature = "fastrlp")] + fn decode_transaction_consumes_buffer() { + let bytes = &mut &hex::decode("b87502f872041a8459682f008459682f0d8252089461815774383099e24810ab832a5b2a5425c154d58829a2241af62c000080c001a059e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafda0016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469").unwrap()[..]; + let _transaction_res = + ::decode(bytes).unwrap(); + assert_eq!( + bytes.len(), + 0, + "did not consume all bytes in the buffer, {:?} remaining", + bytes.len() + ); + } + + #[test] + #[cfg(feature = "fastrlp")] + fn decode_multiple_network_txs() { + use std::str::FromStr; + + let bytes_first = &mut &hex::decode("f86b02843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba00eb96ca19e8a77102767a41fc85a36afd5c61ccb09911cec5d3e86e193d9c5aea03a456401896b1b6055311536bf00a718568c744d8c1f9df59879e8350220ca18").unwrap()[..]; + let expected = SignedTransaction::Legacy(LegacySignedTransaction { + nonce: 2u64, + gas_price: 1000000000u64.into(), + gas_limit: 100000, + kind: TransactionKind::Call(Address::from_slice( + &hex::decode("d3e8763675e4c425df46cc3b5c0f6cbdac396046").unwrap()[..], + )), + value: 1000000000000000u64.into(), + input: Bytes::default(), + signature: Signature { + v: 43, + r: U256::from_str( + "eb96ca19e8a77102767a41fc85a36afd5c61ccb09911cec5d3e86e193d9c5ae", + ) + .unwrap(), + s: U256::from_str( + "3a456401896b1b6055311536bf00a718568c744d8c1f9df59879e8350220ca18", + ) + .unwrap(), + }, + }); + assert_eq!( + expected, + ::decode(bytes_first).unwrap() + ); + + let bytes_second = &mut 
&hex::decode("f86b01843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac3960468702769bb01b2a00802ba0e24d8bd32ad906d6f8b8d7741e08d1959df021698b19ee232feba15361587d0aa05406ad177223213df262cb66ccbb2f46bfdccfdfbbb5ffdda9e2c02d977631da").unwrap()[..]; + let expected = SignedTransaction::Legacy(LegacySignedTransaction { + nonce: 1, + gas_price: 1000000000u64.into(), + gas_limit: 100000, + kind: TransactionKind::Call(Address::from_slice( + &hex::decode("d3e8763675e4c425df46cc3b5c0f6cbdac396046").unwrap()[..], + )), + value: 693361000000000u64.into(), + input: Bytes::default(), + signature: Signature { + v: 43, + r: U256::from_str( + "e24d8bd32ad906d6f8b8d7741e08d1959df021698b19ee232feba15361587d0a", + ) + .unwrap(), + s: U256::from_str( + "5406ad177223213df262cb66ccbb2f46bfdccfdfbbb5ffdda9e2c02d977631da", + ) + .unwrap(), + }, + }); + assert_eq!( + expected, + ::decode(bytes_second).unwrap() + ); + + let bytes_third = &mut &hex::decode("f86b0384773594008398968094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba0ce6834447c0a4193c40382e6c57ae33b241379c5418caac9cdc18d786fd12071a03ca3ae86580e94550d7c071e3a02eadb5a77830947c9225165cf9100901bee88").unwrap()[..]; + let expected = SignedTransaction::Legacy(LegacySignedTransaction { + nonce: 3, + gas_price: 2000000000u64.into(), + gas_limit: 10000000, + kind: TransactionKind::Call(Address::from_slice( + &hex::decode("d3e8763675e4c425df46cc3b5c0f6cbdac396046").unwrap()[..], + )), + value: 1000000000000000u64.into(), + input: Bytes::default(), + signature: Signature { + v: 43, + r: U256::from_str( + "ce6834447c0a4193c40382e6c57ae33b241379c5418caac9cdc18d786fd12071", + ) + .unwrap(), + s: U256::from_str( + "3ca3ae86580e94550d7c071e3a02eadb5a77830947c9225165cf9100901bee88", + ) + .unwrap(), + }, + }); + assert_eq!( + expected, + ::decode(bytes_third).unwrap() + ); + + let bytes_fourth = &mut &hex::decode("b87502f872041a8459682f008459682f0d8252089461815774383099e24810ab832a5b2a5425c154d58829a2241af62c000080c001a059e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafda0016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469").unwrap()[..]; + let expected = SignedTransaction::EIP1559(EIP1559SignedTransaction { + chain_id: 4, + nonce: 26, + max_priority_fee_per_gas: 1500000000u64.into(), + max_fee_per_gas: 1500000013u64.into(), + gas_limit: 21000, + kind: TransactionKind::Call(Address::from_slice( + &hex::decode("61815774383099e24810ab832a5b2a5425c154d5").unwrap()[..], + )), + value: 3000000000000000000u64.into(), + input: Bytes::default(), + access_list: AccessList::default(), + odd_y_parity: true, + r: H256::from_str("59e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafd") + .unwrap(), + s: H256::from_str("016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469") + .unwrap(), + }); + assert_eq!( + expected, + ::decode(bytes_fourth).unwrap() + ); + + let bytes_fifth = &mut &hex::decode("f8650f84832156008287fb94cf7f9e66af820a19257a2108375b180b0ec491678204d2802ca035b7bfeb9ad9ece2cbafaaf8e202e706b4cfaeb233f46198f00b44d4a566a981a0612638fb29427ca33b9a3be2a0a561beecfe0269655be160d35e72d366a6a860").unwrap()[..]; + let expected = SignedTransaction::Legacy(LegacySignedTransaction { + nonce: 15u64, + gas_price: 2200000000u64.into(), + gas_limit: 34811, + kind: TransactionKind::Call(Address::from_slice( + &hex::decode("cf7f9e66af820a19257a2108375b180b0ec49167").unwrap()[..], + )), + value: 1234u64.into(), + input: Bytes::default(), + signature: Signature { + v: 44, + r: U256::from_str( + 
"35b7bfeb9ad9ece2cbafaaf8e202e706b4cfaeb233f46198f00b44d4a566a981", + ) + .unwrap(), + s: U256::from_str( + "612638fb29427ca33b9a3be2a0a561beecfe0269655be160d35e72d366a6a860", + ) + .unwrap(), + }, + }); + assert_eq!( + expected, + ::decode(bytes_fifth).unwrap() + ); + } + + // + #[test] + fn test_recover_legacy_tx() { + let raw_tx = "f9015482078b8505d21dba0083022ef1947a250d5630b4cf539739df2c5dacb4c659f2488d880c46549a521b13d8b8e47ff36ab50000000000000000000000000000000000000000000066ab5a608bd00a23f2fe000000000000000000000000000000000000000000000000000000000000008000000000000000000000000048c04ed5691981c42154c6167398f95e8f38a7ff00000000000000000000000000000000000000000000000000000000632ceac70000000000000000000000000000000000000000000000000000000000000002000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006c6ee5e31d828de241282b9606c8e98ea48526e225a0c9077369501641a92ef7399ff81c21639ed4fd8fc69cb793cfa1dbfab342e10aa0615facb2f1bcf3274a354cfe384a38d0cc008a11c2dd23a69111bc6930ba27a8"; + + let tx: SignedTransaction = rlp::decode(&hex::decode(raw_tx).unwrap()).unwrap(); + let recovered = tx.recover().unwrap(); + let expected: Address = "0xa12e1462d0ced572f396f58b6e2d03894cd7c8a4" + .parse() + .unwrap(); + assert_eq!(expected, recovered); + } +} diff --git a/crates/rethnet_eth/src/trie.rs b/crates/rethnet_eth/src/trie.rs new file mode 100644 index 0000000000..4a9c844d87 --- /dev/null +++ b/crates/rethnet_eth/src/trie.rs @@ -0,0 +1,62 @@ +// Part of this code was adapted from foundry and is distributed under their licenss: +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-APACHE +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT +// For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/trie.rs + +//! Utility functions for Ethereum + +use hash256_std_hasher::Hash256StdHasher; +use primitive_types::H256; +use sha3::{ + digest::generic_array::{typenum::consts::U32, GenericArray}, + Digest, Keccak256, +}; + +/// The KECCAK of the RLP encoding of empty data. +pub const KECCAK_NULL_RLP: H256 = H256([ + 0x56, 0xe8, 0x1f, 0x17, 0x1b, 0xcc, 0x55, 0xa6, 0xff, 0x83, 0x45, 0xe6, 0x92, 0xc0, 0xf8, 0x6e, + 0x5b, 0x48, 0xe0, 0x1b, 0x99, 0x6c, 0xad, 0xc0, 0x01, 0x62, 0x2f, 0xb5, 0xe3, 0x63, 0xb4, 0x21, +]); + +/// Generates a trie root hash for a vector of key-value tuples +pub fn trie_root(input: I) -> H256 +where + I: IntoIterator, + K: AsRef<[u8]> + Ord, + V: AsRef<[u8]>, +{ + H256::from_slice(triehash::trie_root::(input).as_ref()) +} + +/// Generates a key-hashed (secure) trie root hash for a vector of key-value tuples. 
+pub fn sec_trie_root<I, K, V>(input: I) -> H256 +where + I: IntoIterator<Item = (K, V)>, + K: AsRef<[u8]>, + V: AsRef<[u8]>, +{ + H256::from_slice(triehash::sec_trie_root::<KeccakHasher, _, _, _>(input).as_ref()) +} + +/// Generates a trie root hash for a vector of values +pub fn ordered_trie_root<I, V>(input: I) -> H256 +where + I: IntoIterator<Item = V>, + V: AsRef<[u8]>, +{ + H256::from_slice(triehash::ordered_trie_root::<KeccakHasher, _, _>(input).as_ref()) +} + +struct KeccakHasher; + +impl hash_db::Hasher for KeccakHasher { + type Out = GenericArray<u8, U32>; + + type StdHasher = Hash256StdHasher; + + const LENGTH: usize = 32; + + fn hash(x: &[u8]) -> Self::Out { + Keccak256::digest(x) + } +} diff --git a/crates/rethnet_eth/src/utils.rs b/crates/rethnet_eth/src/utils.rs new file mode 100644 index 0000000000..a7b4ba6e73 --- /dev/null +++ b/crates/rethnet_eth/src/utils.rs @@ -0,0 +1,42 @@ +// Part of this code was adapted from foundry and is distributed under their licenses: +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-APACHE +// - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT +// For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/utils.rs +// +// Part of this code was adapted from ethers-rs and is distributed under their licenses: +// - https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/LICENSE-APACHE +// - https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/LICENSE-MIT +// For the original context see: https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/ethers-core/src/utils/hash.rs + +use primitive_types::H256; +use revm::common::keccak256; +use rlp::RlpStream; + +pub fn enveloped(id: u8, v: &T, s: &mut RlpStream) { + use rlp::Encodable; + + let encoded = rlp::encode(v); + let mut out = vec![0; 1 + encoded.len()]; + out[0] = id; + out[1..].copy_from_slice(&encoded); + out.rlp_append(s) +} + +const PREFIX: &str = "\x19Ethereum Signed Message:\n"; + +/// Hash a message according to EIP-191. +/// +/// The data is a UTF-8 encoded string and will be enveloped as follows: +/// `"\x19Ethereum Signed Message:\n" + message.length + message` and hashed +/// using keccak256.
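As a rough, self-contained sketch of the EIP-191 envelope described in the doc comment above (the helper name `eip191_hash` is illustrative; the sketch assumes nothing beyond the `sha3` crate this crate already depends on):

    use sha3::{Digest, Keccak256};

    fn eip191_hash(message: &[u8]) -> Vec<u8> {
        // "\x19Ethereum Signed Message:\n" + decimal byte length + the message itself
        let mut data = format!("\x19Ethereum Signed Message:\n{}", message.len()).into_bytes();
        data.extend_from_slice(message);
        Keccak256::digest(&data).to_vec()
    }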
+pub fn hash_message(message: S) -> H256 +where + S: AsRef<[u8]>, +{ + let message = message.as_ref(); + + let mut eth_message = format!("{}{}", PREFIX, message.len()).into_bytes(); + eth_message.extend_from_slice(message); + + keccak256(ð_message) +} From 80b56eccd0b884e77f2d17ff9ef0c5b3f731f010 Mon Sep 17 00:00:00 2001 From: Wodann Date: Wed, 21 Dec 2022 12:04:34 -0600 Subject: [PATCH 013/406] feat: implement state management in Rethnet (#3358) Co-authored-by: Franco Victorio --- crates/rethnet/src/lib.rs | 5 +- crates/rethnet_eth/Cargo.toml | 5 +- crates/rethnet_eth/src/access_list.rs | 24 +- crates/rethnet_eth/src/account.rs | 26 +- crates/rethnet_eth/src/block.rs | 156 +++-- crates/rethnet_eth/src/lib.rs | 30 +- crates/rethnet_eth/src/receipt.rs | 31 +- crates/rethnet_eth/src/signature.rs | 16 +- crates/rethnet_eth/src/state.rs | 17 +- crates/rethnet_eth/src/transaction.rs | 87 +-- crates/rethnet_eth/src/trie.rs | 17 +- crates/rethnet_eth/src/utils.rs | 5 +- crates/rethnet_evm/Cargo.toml | 14 +- crates/rethnet_evm/src/block.rs | 37 ++ crates/rethnet_evm/src/block/builder.rs | 156 +++++ crates/rethnet_evm/src/blockchain.rs | 4 + crates/rethnet_evm/src/blockchain/request.rs | 49 ++ crates/rethnet_evm/src/blockchain/sync.rs | 129 +++++ crates/rethnet_evm/src/config.rs | 6 + crates/rethnet_evm/src/db.rs | 9 +- crates/rethnet_evm/src/db/layered_db.rs | 546 +++++++++++++----- crates/rethnet_evm/src/db/request.rs | 231 ++++++++ crates/rethnet_evm/src/db/sync.rs | 417 +++++++++++++ crates/rethnet_evm/src/debug.rs | 91 +-- crates/rethnet_evm/src/evm.rs | 33 ++ crates/rethnet_evm/src/inspector.rs | 53 +- crates/rethnet_evm/src/lib.rs | 34 +- crates/rethnet_evm/src/random.rs | 34 ++ crates/rethnet_evm/src/runtime.rs | 109 ++++ crates/rethnet_evm/src/signer.rs | 32 + crates/rethnet_evm/src/sync.rs | 113 ---- crates/rethnet_evm/src/sync/client.rs | 305 ---------- crates/rethnet_evm/src/sync/request.rs | 207 ------- crates/rethnet_evm/src/trace.rs | 47 ++ crates/rethnet_evm/src/transaction.rs | 175 ++++++ crates/rethnet_evm_napi/Cargo.toml | 5 +- crates/rethnet_evm_napi/src/access_list.rs | 34 ++ crates/rethnet_evm_napi/src/block.rs | 138 +++++ crates/rethnet_evm_napi/src/block/builder.rs | 116 ++++ crates/rethnet_evm_napi/src/blockchain.rs | 86 +++ .../src/blockchain/js_blockchain.rs | 36 ++ crates/rethnet_evm_napi/src/cast.rs | 29 +- crates/rethnet_evm_napi/src/db.rs | 27 - crates/rethnet_evm_napi/src/db/debug.rs | 516 ----------------- crates/rethnet_evm_napi/src/db/immutable.rs | 190 ------ crates/rethnet_evm_napi/src/db/mutable.rs | 105 ---- crates/rethnet_evm_napi/src/lib.rs | 482 +++------------- crates/rethnet_evm_napi/src/state.rs | 306 ++++++++++ crates/rethnet_evm_napi/src/sync.rs | 1 + crates/rethnet_evm_napi/src/trace.rs | 39 ++ crates/rethnet_evm_napi/src/transaction.rs | 115 ++++ crates/rethnet_evm_napi/test/evm/RethnetDb.ts | 73 ++- .../rethnet_evm_napi/test/evm/StateManager.ts | 90 +++ .../hardhat-network/provider/RethnetState.ts | 165 ++++++ .../internal/hardhat-network/provider/node.ts | 2 +- .../provider/utils/convertToRethnet.ts | 85 ++- .../hardhat-network/provider/utils/random.ts | 5 +- .../hardhat-network/provider/vm/dual.ts | 195 ++++++- .../hardhat-network/provider/vm/ethereumjs.ts | 8 + .../hardhat-network/provider/vm/rethnet.ts | 182 ++++-- .../hardhat-network/provider/vm/vm-adapter.ts | 5 +- .../stack-traces/vm-debug-tracer.ts | 12 +- .../hardhat-network/stack-traces/vm-tracer.ts | 5 +- .../provider/utils/runFullBlock.ts | 1 + rust-toolchain | 2 +- 65 files changed, 
3883 insertions(+), 2422 deletions(-) create mode 100644 crates/rethnet_evm/src/block.rs create mode 100644 crates/rethnet_evm/src/block/builder.rs create mode 100644 crates/rethnet_evm/src/blockchain.rs create mode 100644 crates/rethnet_evm/src/blockchain/request.rs create mode 100644 crates/rethnet_evm/src/blockchain/sync.rs create mode 100644 crates/rethnet_evm/src/config.rs create mode 100644 crates/rethnet_evm/src/db/request.rs create mode 100644 crates/rethnet_evm/src/db/sync.rs create mode 100644 crates/rethnet_evm/src/evm.rs create mode 100644 crates/rethnet_evm/src/random.rs create mode 100644 crates/rethnet_evm/src/runtime.rs create mode 100644 crates/rethnet_evm/src/signer.rs delete mode 100644 crates/rethnet_evm/src/sync.rs delete mode 100644 crates/rethnet_evm/src/sync/client.rs delete mode 100644 crates/rethnet_evm/src/sync/request.rs create mode 100644 crates/rethnet_evm/src/trace.rs create mode 100644 crates/rethnet_evm/src/transaction.rs create mode 100644 crates/rethnet_evm_napi/src/access_list.rs create mode 100644 crates/rethnet_evm_napi/src/block.rs create mode 100644 crates/rethnet_evm_napi/src/block/builder.rs create mode 100644 crates/rethnet_evm_napi/src/blockchain.rs create mode 100644 crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs delete mode 100644 crates/rethnet_evm_napi/src/db.rs delete mode 100644 crates/rethnet_evm_napi/src/db/debug.rs delete mode 100644 crates/rethnet_evm_napi/src/db/immutable.rs delete mode 100644 crates/rethnet_evm_napi/src/db/mutable.rs create mode 100644 crates/rethnet_evm_napi/src/state.rs create mode 100644 crates/rethnet_evm_napi/src/trace.rs create mode 100644 crates/rethnet_evm_napi/src/transaction.rs create mode 100644 crates/rethnet_evm_napi/test/evm/StateManager.ts create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts diff --git a/crates/rethnet/src/lib.rs b/crates/rethnet/src/lib.rs index d98eb273de..37ee178a57 100644 --- a/crates/rethnet/src/lib.rs +++ b/crates/rethnet/src/lib.rs @@ -38,9 +38,6 @@ where { let args = Args::parse_from(args); match args.command { - Command::Start => { - println!("Hello, world!"); - Ok(ExitStatus::Success) - } + Command::Start => Ok(ExitStatus::Success), } } diff --git a/crates/rethnet_eth/Cargo.toml b/crates/rethnet_eth/Cargo.toml index e23c5d1eb8..f322c96d98 100644 --- a/crates/rethnet_eth/Cargo.toml +++ b/crates/rethnet_eth/Cargo.toml @@ -8,11 +8,12 @@ bytes = { version = "1.2.1", default-features = false } ethbloom = { version = "0.13.0", default-features = false, features = ["rlp"] } hash-db = { version = "0.15.2", default-features = false } hash256-std-hasher = { version = "0.15.2", default-features = false } -hashbrown = { version = "0.12.3", default-features = false } +hashbrown = { version = "0.13", default-features = false, features = ["ahash"] } hex = { version = "0.4.3", default-features = false, features = ["alloc"] } +hex-literal = { version = "0.3", default-features = false } open-fastrlp = { version = "0.1.2", default-features = false, features = ["derive"], optional = true } primitive-types = { version = "0.11.1", default-features = false, features = ["rlp"] } -revm = { git = "https://github.com/wodann/revm", branch = "feat/optional-balance-check", version = "2.1.0", default-features = false } +revm = { git = "https://github.com/wodann/revm", rev = "7c28358", version = "2.3", default-features = false } rlp = { version = "0.5.2", default-features = false, features = ["derive"] } ruint = { version = "1.7.0", default-features = false 
} secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc", "recovery"] } diff --git a/crates/rethnet_eth/src/access_list.rs b/crates/rethnet_eth/src/access_list.rs index ebfb30149b..a83c4e42ae 100644 --- a/crates/rethnet_eth/src/access_list.rs +++ b/crates/rethnet_eth/src/access_list.rs @@ -3,9 +3,9 @@ // - https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/LICENSE-MIT // For the original context see: https://github.com/gakonst/ethers-rs/blob/3d9c3290d42b77c510e5b5d0b6f7a2f72913bfff/ethers-core/src/types/transaction/eip2930.rs -use ruint::aliases::U256; +use ruint::aliases::U160; -use crate::Address; +use crate::{Address, U256}; /// Access list // NB: Need to use `RlpEncodableWrapper` else we get an extra [] in the output @@ -27,7 +27,7 @@ impl From> for AccessList { } /// Access list item -#[derive(Debug, Default, Clone, PartialEq, Eq, Hash, rlp::RlpEncodable, rlp::RlpDecodable)] +#[derive(Debug, Default, Clone, PartialEq, Eq, Hash)] #[cfg_attr( feature = "fastrlp", derive(open_fastrlp::RlpEncodable, open_fastrlp::RlpDecodable) @@ -40,3 +40,21 @@ pub struct AccessListItem { /// Accessed storage keys pub storage_keys: Vec, } + +impl rlp::Encodable for AccessListItem { + fn rlp_append(&self, stream: &mut rlp::RlpStream) { + stream.begin_list(2); + stream.append(&ruint::aliases::B160::from_be_bytes(self.address.0)); + stream.append_list(&self.storage_keys); + } +} + +impl rlp::Decodable for AccessListItem { + fn decode(rlp: &rlp::Rlp) -> Result { + let result = AccessListItem { + address: Address::from(rlp.val_at::(0)?.to_be_bytes()), + storage_keys: rlp.list_at::(1)?, + }; + Ok(result) + } +} diff --git a/crates/rethnet_eth/src/account.rs b/crates/rethnet_eth/src/account.rs index 74842dd89d..248f32fdd7 100644 --- a/crates/rethnet_eth/src/account.rs +++ b/crates/rethnet_eth/src/account.rs @@ -3,12 +3,16 @@ // - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT // For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/proof.rs -//! Return types for `eth_getProof` +//! Ethereum account types -use crate::trie::KECCAK_NULL_RLP; -use primitive_types::H256; -use revm::KECCAK_EMPTY; -use ruint::aliases::U256; +use hex_literal::hex; + +use crate::{trie::KECCAK_NULL_RLP, B256, U256}; + +/// The KECCAK for empty code. +pub const KECCAK_EMPTY: revm::B256 = revm::B256(hex!( + "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" +)); /// Basic account type. #[derive(Debug, Clone, PartialEq, Eq)] @@ -22,9 +26,9 @@ pub struct BasicAccount { /// Balance of the account. pub balance: U256, /// Storage root of the account. - pub storage_root: H256, + pub storage_root: B256, /// Code hash of the account. 
- pub code_hash: H256, + pub code_hash: B256, } impl Default for BasicAccount { @@ -43,8 +47,8 @@ impl rlp::Encodable for BasicAccount { stream.begin_list(4); stream.append(&self.nonce); stream.append(&self.balance); - stream.append(&self.storage_root); - stream.append(&self.code_hash); + stream.append(&ruint::aliases::B256::from_be_bytes(self.storage_root.0)); + stream.append(&ruint::aliases::B256::from_be_bytes(self.code_hash.0)); } } @@ -53,8 +57,8 @@ impl rlp::Decodable for BasicAccount { let result = BasicAccount { nonce: rlp.val_at(0)?, balance: rlp.val_at(1)?, - storage_root: rlp.val_at(2)?, - code_hash: rlp.val_at(3)?, + storage_root: B256::from(rlp.val_at::(2)?.to_be_bytes()), + code_hash: B256::from(rlp.val_at::(3)?.to_be_bytes()), }; Ok(result) } diff --git a/crates/rethnet_eth/src/block.rs b/crates/rethnet_eth/src/block.rs index c1f3648465..30f4d9f2dd 100644 --- a/crates/rethnet_eth/src/block.rs +++ b/crates/rethnet_eth/src/block.rs @@ -3,12 +3,11 @@ // - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT // For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/block.rs -use primitive_types::H256; use revm::common::keccak256; use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; -use ruint::aliases::{B64, U256}; +use ruint::aliases::U160; -use crate::{transaction::SignedTransaction, trie, Address, Bloom, Bytes}; +use crate::{transaction::SignedTransaction, trie, Address, Bloom, Bytes, B256, B64, U256}; /// Ethereum block #[derive(Clone, Debug, PartialEq, Eq)] @@ -18,12 +17,16 @@ use crate::{transaction::SignedTransaction, trie, Address, Bloom, Bytes}; )] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Block { + /// The block's header pub header: Header, + /// The block's transactions pub transactions: Vec, + /// The block's ommers' headers pub ommers: Vec
, } impl Block { + /// Constructs a new block from the provided partial header, transactions, and ommers. pub fn new( partial_header: PartialHeader, transactions: Vec, @@ -65,20 +68,35 @@ impl Decodable for Block { #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] pub struct Header { - pub parent_hash: H256, - pub ommers_hash: H256, + /// The parent block's hash + pub parent_hash: B256, + /// The ommers' root hash + pub ommers_hash: B256, + /// The block's beneficiary address pub beneficiary: Address, - pub state_root: H256, - pub transactions_root: H256, - pub receipts_root: H256, + /// The state's root hash + pub state_root: B256, + /// The transactions' root hash + pub transactions_root: B256, + /// The receipts' root hash + pub receipts_root: B256, + /// The logs' bloom pub logs_bloom: Bloom, + /// The block's difficulty pub difficulty: U256, + /// The block's number pub number: U256, + /// The block's gas limit pub gas_limit: U256, + /// The amount of gas used by the block pub gas_used: U256, + /// The block's timestamp pub timestamp: u64, + /// The block's extra data pub extra_data: Bytes, - pub mix_hash: H256, + /// The block's mix hash + pub mix_hash: B256, + /// The block's nonce #[cfg_attr(feature = "serde", serde(with = "B64Def"))] pub nonce: B64, /// BaseFee was added by EIP-1559 and is ignored in legacy headers. @@ -98,7 +116,8 @@ impl From for B64 { } impl Header { - pub fn new(partial_header: PartialHeader, ommers_hash: H256, transactions_root: H256) -> Self { + /// Constructs a [`Header`] from the provided [`PartialHeader`], ommers' root hash, and transactions' root hash. + pub fn new(partial_header: PartialHeader, ommers_hash: B256, transactions_root: B256) -> Self { Self { parent_hash: partial_header.parent_hash, ommers_hash, @@ -119,7 +138,8 @@ impl Header { } } - pub fn hash(&self) -> H256 { + /// Calculates the block's hash. 
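A hypothetical usage sketch of the header API in this hunk (illustrative only; it relies on `PartialHeader`'s `Default` derive and `Header::new` as shown in this diff, and reuses `KECCAK_NULL_RLP` from `trie.rs` purely as a placeholder value for the two root-hash arguments):

    use crate::trie::KECCAK_NULL_RLP;

    let header = Header::new(PartialHeader::default(), KECCAK_NULL_RLP, KECCAK_NULL_RLP);
    let hash = header.hash(); // keccak-256 of the header's RLP encoding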
+ pub fn hash(&self) -> B256 { keccak256(&rlp::encode(self)) } @@ -161,12 +181,14 @@ impl rlp::Encodable for Header { } else { s.begin_list(16); } - s.append(&self.parent_hash); - s.append(&self.ommers_hash); - s.append(&self.beneficiary); - s.append(&self.state_root); - s.append(&self.transactions_root); - s.append(&self.receipts_root); + s.append(&ruint::aliases::B256::from_be_bytes(self.parent_hash.0)); + s.append(&ruint::aliases::B256::from_be_bytes(self.ommers_hash.0)); + s.append(&ruint::aliases::B160::from_be_bytes(self.beneficiary.0)); + s.append(&ruint::aliases::B256::from_be_bytes(self.state_root.0)); + s.append(&ruint::aliases::B256::from_be_bytes( + self.transactions_root.0, + )); + s.append(&ruint::aliases::B256::from_be_bytes(self.receipts_root.0)); s.append(&self.logs_bloom); s.append(&self.difficulty); s.append(&self.number); @@ -174,7 +196,7 @@ impl rlp::Encodable for Header { s.append(&self.gas_used); s.append(&self.timestamp); s.append(&self.extra_data.as_ref()); - s.append(&self.mix_hash); + s.append(&ruint::aliases::B256::from_be_bytes(self.mix_hash.0)); s.append(&self.nonce); if let Some(ref base_fee) = self.base_fee_per_gas { s.append(base_fee); @@ -185,12 +207,12 @@ impl rlp::Encodable for Header { impl rlp::Decodable for Header { fn decode(rlp: &rlp::Rlp) -> Result { let result = Header { - parent_hash: rlp.val_at(0)?, - ommers_hash: rlp.val_at(1)?, - beneficiary: rlp.val_at(2)?, - state_root: rlp.val_at(3)?, - transactions_root: rlp.val_at(4)?, - receipts_root: rlp.val_at(5)?, + parent_hash: B256::from(rlp.val_at::(0)?.to_be_bytes()), + ommers_hash: B256::from(rlp.val_at::(1)?.to_be_bytes()), + beneficiary: Address::from(rlp.val_at::(2)?.to_be_bytes()), + state_root: B256::from(rlp.val_at::(3)?.to_be_bytes()), + transactions_root: B256::from(rlp.val_at::(4)?.to_be_bytes()), + receipts_root: B256::from(rlp.val_at::(5)?.to_be_bytes()), logs_bloom: rlp.val_at(6)?, difficulty: rlp.val_at(7)?, number: rlp.val_at(8)?, @@ -198,7 +220,7 @@ impl rlp::Decodable for Header { gas_used: rlp.val_at(10)?, timestamp: rlp.val_at(11)?, extra_data: rlp.val_at::>(12)?.into(), - mix_hash: rlp.val_at(13)?, + mix_hash: B256::from(rlp.val_at::(13)?.to_be_bytes()), nonce: rlp.val_at(14)?, base_fee_per_gas: if let Ok(base_fee) = rlp.at(15) { Some(::decode(&base_fee)?) @@ -256,12 +278,12 @@ impl open_fastrlp::Decodable for Header { let start_len = buf.len(); Ok(Header { - parent_hash: ::decode(buf)?, - ommers_hash: ::decode(buf)?, + parent_hash: ::decode(buf)?, + ommers_hash: ::decode(buf)?, beneficiary:
::decode(buf)?, - state_root: ::decode(buf)?, - transactions_root: ::decode(buf)?, - receipts_root: ::decode(buf)?, + state_root: ::decode(buf)?, + transactions_root: ::decode(buf)?, + receipts_root: ::decode(buf)?, logs_bloom: ::decode(buf)?, difficulty: ::decode(buf)?, number: ::decode(buf)?, @@ -269,7 +291,7 @@ impl open_fastrlp::Decodable for Header { gas_used: ::decode(buf)?, timestamp: ::decode(buf)?, extra_data: ::decode(buf)?, - mix_hash: ::decode(buf)?, + mix_hash: ::decode(buf)?, nonce: ::decode(buf)?, base_fee_per_gas: if start_len - header.payload_length < buf.len() { // if there is leftover data in the payload, decode the base fee @@ -284,19 +306,33 @@ impl open_fastrlp::Decodable for Header { /// Partial header definition without ommers hash and transactions root #[derive(Clone, Debug, PartialEq, Eq, Default)] pub struct PartialHeader { - pub parent_hash: H256, + /// The parent block's hash + pub parent_hash: B256, + /// The block's beneficiary address pub beneficiary: Address, - pub state_root: H256, - pub receipts_root: H256, + /// The state's root hash + pub state_root: B256, + /// The receipts' root hash + pub receipts_root: B256, + /// The logs' bloom pub logs_bloom: Bloom, + /// The block's difficulty pub difficulty: U256, + /// The block's number pub number: U256, + /// The block's gas limit pub gas_limit: U256, + /// The amount of gas used by the block pub gas_used: U256, + /// The block's timestamp pub timestamp: u64, + /// The block's extra data pub extra_data: Bytes, - pub mix_hash: H256, + /// The block's mix hash + pub mix_hash: B256, + /// The block's nonce pub nonce: B64, + /// BaseFee was added by EIP-1559 and is ignored in legacy headers. pub base_fee: Option, } @@ -407,12 +443,12 @@ mod tests { let expected = hex::decode("f901f9a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008208ae820d0582115c8215b3821a0a827788a00000000000000000000000000000000000000000000000000000000000000000880000000000000000").unwrap(); let mut data = vec![]; let header = Header { - parent_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), - ommers_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + parent_hash: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + ommers_hash: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), beneficiary: H160::from_str("0000000000000000000000000000000000000000").unwrap(), - state_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), - transactions_root: 
H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), - receipts_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + state_root: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + transactions_root: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + receipts_root: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), logs_bloom: <[u8; 256]>::from_hex("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap().into(), difficulty: 0x8aeu64.into(), number: 0xd05u64.into(), @@ -420,7 +456,7 @@ mod tests { gas_used: 0x15b3u64.into(), timestamp: 0x1a0au64, extra_data: hex::decode("7788").unwrap().into(), - mix_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + mix_hash: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), nonce: U64::from(0x0), base_fee_per_gas: None, }; @@ -435,28 +471,28 @@ mod tests { use hex::FromHex; let expected_hash = - H256::from_str("6a251c7c3c5dca7b42407a3752ff48f3bbca1fab7f9868371d9918daf1988d1f") + B256::from_str("0x6a251c7c3c5dca7b42407a3752ff48f3bbca1fab7f9868371d9918daf1988d1f") .unwrap(); let header = Header { - parent_hash: H256::from_str( - "e0a94a7a3c9617401586b1a27025d2d9671332d22d540e0af72b069170380f2a", + parent_hash: B256::from_str( + "0xe0a94a7a3c9617401586b1a27025d2d9671332d22d540e0af72b069170380f2a", ) .unwrap(), - ommers_hash: H256::from_str( - "1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", + ommers_hash: B256::from_str( + "0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", ) .unwrap(), - beneficiary: Address::from_str("ba5e000000000000000000000000000000000000").unwrap(), - state_root: H256::from_str( - "ec3c94b18b8a1cff7d60f8d258ec723312932928626b4c9355eb4ab3568ec7f7", + beneficiary: Address::from_str("0xba5e000000000000000000000000000000000000").unwrap(), + state_root: B256::from_str( + "0xec3c94b18b8a1cff7d60f8d258ec723312932928626b4c9355eb4ab3568ec7f7", ) .unwrap(), - transactions_root: H256::from_str( - "50f738580ed699f0469702c7ccc63ed2e51bc034be9479b7bff4e68dee84accf", + transactions_root: B256::from_str( + "0x50f738580ed699f0469702c7ccc63ed2e51bc034be9479b7bff4e68dee84accf", ) .unwrap(), - receipts_root: H256::from_str( - "29b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9", + receipts_root: B256::from_str( + "0x29b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9", ) .unwrap(), logs_bloom: <[u8; 
256]>::from_hex("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap().into(), @@ -466,7 +502,7 @@ mod tests { gas_used: U256::from(0x015534u64), timestamp: 0x079eu64, extra_data: hex::decode("42").unwrap().into(), - mix_hash: H256::from_str( + mix_hash: B256::from_str( "0000000000000000000000000000000000000000000000000000000000000000", ) .unwrap(), @@ -482,12 +518,12 @@ mod tests { fn test_decode_block_header() { let data = hex::decode("f901f9a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008208ae820d0582115c8215b3821a0a827788a00000000000000000000000000000000000000000000000000000000000000000880000000000000000").unwrap(); let expected = Header { - parent_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), - ommers_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + parent_hash: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + ommers_hash: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), beneficiary: H160::from_str("0000000000000000000000000000000000000000").unwrap(), - state_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), - transactions_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), - receipts_root: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + state_root: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + transactions_root: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + receipts_root: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), logs_bloom: <[u8; 
256]>::from_hex("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap().into(), difficulty: 0x8aeu64.into(), number: 0xd05u64.into(), @@ -495,7 +531,7 @@ mod tests { gas_used: 0x15b3u64.into(), timestamp: 0x1a0au64, extra_data: hex::decode("7788").unwrap().into(), - mix_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + mix_hash: B256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), nonce: U64::from(0x0), base_fee_per_gas: None, }; diff --git a/crates/rethnet_eth/src/lib.rs b/crates/rethnet_eth/src/lib.rs index a7e29156ef..7a8a6986f1 100644 --- a/crates/rethnet_eth/src/lib.rs +++ b/crates/rethnet_eth/src/lib.rs @@ -1,20 +1,36 @@ +#![warn(missing_docs)] + +//! Ethereum types +//! +//! Ethereum types as needed by Rethnet. In particular, they are based on the same primitive types as `revm`. + +/// Ethereum access list types pub mod access_list; +/// Ethereum account types pub mod account; +/// Ethereum block types pub mod block; +/// Ethereum receipt types pub mod receipt; +/// Ethereum signature types pub mod signature; +/// Ethereum state types and functions pub mod state; +/// Ethereum transaction types pub mod transaction; +/// Ethereum trie functions pub mod trie; +/// Ethereum utility functions pub mod utils; pub use bytes::Bytes; pub use ethbloom::Bloom; -pub use primitive_types::{H256, H512}; -pub use ruint::aliases::{B64, U256, U64}; - -use primitive_types::H160; +pub use revm::{B160, B256}; +pub use ruint::aliases::{B512, B64, U256, U64}; -pub type Address = H160; -pub type Secret = H256; -pub type Public = H512; +/// An Ethereum address +pub type Address = B160; +/// A secret key +pub type Secret = B256; +/// A public key +pub type Public = B512; diff --git a/crates/rethnet_eth/src/receipt.rs b/crates/rethnet_eth/src/receipt.rs index 0854c294d1..ff5dfc801b 100644 --- a/crates/rethnet_eth/src/receipt.rs +++ b/crates/rethnet_eth/src/receipt.rs @@ -3,8 +3,12 @@ // - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT // For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/receipt.rs -use crate::{utils::enveloped, Address, Bloom, Bytes, H256, U256}; +#![allow(missing_docs)] + use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; +use ruint::aliases::U160; + +use crate::{utils::enveloped, Address, Bloom, Bytes, B256, U256}; #[derive(Clone, Debug, PartialEq, Eq)] #[cfg_attr( @@ -14,7 +18,7 @@ use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct Log { pub address: Address, - pub topics: Vec, + pub topics: Vec, pub data: Bytes, } @@ -50,9 +54,15 @@ impl From for revm::Log { impl Encodable for Log { fn rlp_append(&self, stream: &mut rlp::RlpStream) { + let topics = self + .topics + .iter() + .map(|topic| ruint::aliases::B256::from_be_bytes(topic.0)) + .collect::>(); + stream.begin_list(3); - 
stream.append(&self.address); - stream.append_list(&self.topics); + stream.append(&ruint::aliases::B160::from_be_bytes(self.address.0)); + stream.append_list(&topics); stream.append(&self.data.as_ref()); } } @@ -60,8 +70,17 @@ impl Encodable for Log { impl Decodable for Log { fn decode(rlp: &Rlp) -> Result { let result = Log { - address: rlp.val_at(0)?, - topics: rlp.list_at(1)?, + address: { + let address = rlp.val_at::(0)?.to_be_bytes(); + Address::from(address) + }, + topics: { + let topics = rlp.list_at::(1)?; + topics + .into_iter() + .map(|topic| B256::from(topic.to_be_bytes())) + .collect() + }, data: rlp.val_at::>(2)?.into(), }; Ok(result) diff --git a/crates/rethnet_eth/src/signature.rs b/crates/rethnet_eth/src/signature.rs index 0ff8e03f1c..95857c0262 100644 --- a/crates/rethnet_eth/src/signature.rs +++ b/crates/rethnet_eth/src/signature.rs @@ -13,7 +13,7 @@ use secp256k1::{ use sha3::{Digest, Keccak256}; use thiserror::Error; -use crate::{utils::hash_message, Address, H256, U256}; +use crate::{utils::hash_message, Address, B256, U256}; /// Converts a [`PublicKey`] to an [`Address`]. pub fn public_key_to_address(public_key: PublicKey) -> Address { @@ -53,7 +53,7 @@ pub enum RecoveryMessage { /// Message bytes Data(Vec), /// Message hash - Hash(H256), + Hash(B256), } #[derive(Debug, Clone, PartialEq, Eq, Copy, Hash)] @@ -105,7 +105,7 @@ impl Signature { RecoveryMessage::Hash(hash) => hash, }; - struct Hash(H256); + struct Hash(B256); impl ThirtyTwoByteHash for Hash { fn into_32(self) -> [u8; 32] { @@ -290,12 +290,12 @@ impl From for RecoveryMessage { impl From<[u8; 32]> for RecoveryMessage { fn from(hash: [u8; 32]) -> Self { - H256(hash).into() + B256(hash).into() } } -impl From for RecoveryMessage { - fn from(hash: H256) -> Self { +impl From for RecoveryMessage { + fn from(hash: B256) -> Self { RecoveryMessage::Hash(hash) } } @@ -309,11 +309,11 @@ mod tests { // test vector taken from: // https://web3js.readthedocs.io/en/v1.2.2/web3-eth-accounts.html#sign let signature = Signature::from_str( - "b91467e570a6466aa9e9876cbcd013baba02900b8979d43fe208a4a4f339f5fd6007e74cd82e037b800186422fc2da167c747ef045e5d18a5f5d4300f8e1a0291c" + "0xb91467e570a6466aa9e9876cbcd013baba02900b8979d43fe208a4a4f339f5fd6007e74cd82e037b800186422fc2da167c747ef045e5d18a5f5d4300f8e1a0291c" ).expect("could not parse signature"); assert_eq!( signature.recover("Some data").unwrap(), - Address::from_str("2c7536E3605D9C16a7a3D7b1898e529396a65c23").unwrap() + Address::from_str("0x2c7536E3605D9C16a7a3D7b1898e529396a65c23").unwrap() ); } diff --git a/crates/rethnet_eth/src/state.rs b/crates/rethnet_eth/src/state.rs index b45e3282cd..9f264bdaf0 100644 --- a/crates/rethnet_eth/src/state.rs +++ b/crates/rethnet_eth/src/state.rs @@ -1,8 +1,6 @@ use hashbrown::HashMap; -use primitive_types::H256; -use ruint::aliases::U256; -use crate::{account::BasicAccount, trie::sec_trie_root, Address}; +use crate::{account::BasicAccount, trie::sec_trie_root, Address, B256, U256}; /// State mapping of addresses to accounts. pub type State = HashMap; @@ -10,18 +8,19 @@ pub type State = HashMap; /// Account storage mapping of indices to values. pub type Storage = HashMap; -pub fn state_root(state: &State) -> H256 { +/// Calculates the state root hash of the provided state. +pub fn state_root(state: &State) -> B256 { sec_trie_root(state.iter().map(|(address, account)| { let account = rlp::encode(account); (address, account) })) } -pub fn storage_root(storage: &Storage) -> H256 { +/// Calculates the storage root hash of the provided storage. 
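A small illustrative check for the function defined next (editorial sketch; it only relies on `Storage` and `KECCAK_NULL_RLP` as defined in this crate): the storage root of empty storage is the null-RLP trie root, because the underlying secure trie contains no entries.

    let empty = Storage::default();
    assert_eq!(storage_root(&empty), crate::trie::KECCAK_NULL_RLP);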
+pub fn storage_root(storage: &Storage) -> B256 { sec_trie_root(storage.iter().map(|(index, value)| { - let index = H256::from(index.to_be_bytes()); let value = rlp::encode(value); - (index, value) + (index.to_be_bytes::<32>(), value) })) } @@ -51,13 +50,13 @@ mod tests { fn precompiles_state_root() { let mut state = State::default(); - for idx in 1..=8 { + for idx in 1..=8u8 { let mut address = Address::zero(); address.0[19] = idx; state.insert(address, BasicAccount::default()); } const EXPECTED: &str = "0x5766c887a7240e4d1c035ccd3830a2f6a0c03d213a9f0b9b27c774916a4abcce"; - assert_eq!(state_root(&state), H256::from_str(EXPECTED).unwrap()) + assert_eq!(state_root(&state), B256::from_str(EXPECTED).unwrap()) } } diff --git a/crates/rethnet_eth/src/transaction.rs b/crates/rethnet_eth/src/transaction.rs index a84aa85f61..9c2db2dcb1 100644 --- a/crates/rethnet_eth/src/transaction.rs +++ b/crates/rethnet_eth/src/transaction.rs @@ -2,17 +2,20 @@ // - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-APACHE // - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT // For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/transaction/mod.rs +#![allow(missing_docs)] //! transaction related data +use revm::common::keccak256; +use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; +use ruint::aliases::U160; + use crate::{ access_list::{AccessList, AccessListItem}, signature::{Signature, SignatureError}, utils::enveloped, - Address, Bytes, H256, U256, + Address, Bytes, B256, U256, }; -use revm::common::keccak256; -use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; /// Container type for various Ethereum transaction requests /// @@ -22,8 +25,11 @@ use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; /// 3. 
EIP1559 [`EIP1559TransactionRequest`] #[derive(Debug, Clone, Eq, PartialEq)] pub enum TransactionRequest { + /// A legacy transaction request Legacy(LegacyTransactionRequest), + /// An EIP-2930 transaction request EIP2930(EIP2930TransactionRequest), + /// An EIP-1559 transaction request EIP1559(EIP1559TransactionRequest), } @@ -167,7 +173,10 @@ impl Decodable for TransactionKind { Err(DecoderError::RlpExpectedToBeData) } } else { - Ok(TransactionKind::Call(rlp.as_val()?)) + Ok(TransactionKind::Call({ + let address = rlp.as_val::()?.to_be_bytes(); + Address::from(address) + })) } } } @@ -224,7 +233,7 @@ pub struct EIP2930TransactionRequest { } impl EIP2930TransactionRequest { - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { let encoded = rlp::encode(self); let mut out = vec![0; 1 + encoded.len()]; out[0] = 1; @@ -274,7 +283,7 @@ pub struct LegacyTransactionRequest { } impl LegacyTransactionRequest { - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { keccak256(&rlp::encode(self)) } } @@ -337,7 +346,7 @@ pub struct EIP1559TransactionRequest { } impl EIP1559TransactionRequest { - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { let encoded = rlp::encode(self); let mut out = vec![0; 1 + encoded.len()]; out[0] = 2; @@ -501,7 +510,7 @@ impl SignedTransaction { matches!(self, SignedTransaction::EIP1559(_)) } - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { match self { SignedTransaction::Legacy(t) => t.hash(), SignedTransaction::EIP2930(t) => t.hash(), @@ -706,7 +715,7 @@ impl LegacySignedTransaction { &self.nonce } - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { keccak256(&rlp::encode(self)) } @@ -788,8 +797,8 @@ pub struct EIP2930SignedTransaction { pub input: Bytes, pub access_list: AccessList, pub odd_y_parity: bool, - pub r: H256, - pub s: H256, + pub r: B256, + pub s: B256, } impl EIP2930SignedTransaction { @@ -797,7 +806,7 @@ impl EIP2930SignedTransaction { &self.nonce } - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { let encoded = rlp::encode(self); let mut out = vec![0; 1 + encoded.len()]; out[0] = 1; @@ -828,8 +837,8 @@ impl rlp::Encodable for EIP2930SignedTransaction { s.append(&self.input.as_ref()); s.append(&self.access_list); s.append(&self.odd_y_parity); - s.append(&U256::from_be_bytes(self.r.0)); - s.append(&U256::from_be_bytes(self.s.0)); + s.append(&ruint::aliases::B256::from_be_bytes(self.r.0)); + s.append(&ruint::aliases::B256::from_be_bytes(self.s.0)); } } @@ -849,14 +858,8 @@ impl rlp::Decodable for EIP2930SignedTransaction { input: rlp.val_at::>(6)?.into(), access_list: rlp.val_at(7)?, odd_y_parity: rlp.val_at(8)?, - r: { - let rarr = rlp.val_at::(9)?.to_be_bytes(); - H256::from(rarr) - }, - s: { - let sarr = rlp.val_at::(10)?.to_be_bytes(); - H256::from(sarr) - }, + r: B256::from(rlp.val_at::(9)?.to_be_bytes()), + s: B256::from(rlp.val_at::(10)?.to_be_bytes()), }) } } @@ -878,8 +881,8 @@ pub struct EIP1559SignedTransaction { pub input: Bytes, pub access_list: AccessList, pub odd_y_parity: bool, - pub r: H256, - pub s: H256, + pub r: B256, + pub s: B256, } impl EIP1559SignedTransaction { @@ -887,7 +890,7 @@ impl EIP1559SignedTransaction { &self.nonce } - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { let encoded = rlp::encode(self); let mut out = vec![0; 1 + encoded.len()]; out[0] = 2; @@ -919,8 +922,8 @@ impl Encodable for EIP1559SignedTransaction { s.append(&self.input.as_ref()); s.append(&self.access_list); s.append(&self.odd_y_parity); - s.append(&U256::from_be_bytes(self.r.0)); - 
s.append(&U256::from_be_bytes(self.s.0)); + s.append(&ruint::aliases::B256::from_be_bytes(self.r.0)); + s.append(&ruint::aliases::B256::from_be_bytes(self.s.0)); } } @@ -941,14 +944,8 @@ impl Decodable for EIP1559SignedTransaction { input: rlp.val_at::>(7)?.into(), access_list: rlp.val_at(8)?, odd_y_parity: rlp.val_at(9)?, - r: { - let rarr = rlp.val_at::(10)?.to_be_bytes(); - H256::from(rarr) - }, - s: { - let sarr = rlp.val_at::(11)?.to_be_bytes(); - H256::from(sarr) - }, + r: B256::from(rlp.val_at::(10)?.to_be_bytes()), + s: B256::from(rlp.val_at::(11)?.to_be_bytes()), }) } } @@ -989,7 +986,9 @@ mod tests { if let TransactionKind::Call(ref to) = tx.kind { assert_eq!( *to, - "095e7baea6a6c7c4c2dfeb977efac326af552d87".parse().unwrap() + "0x095e7baea6a6c7c4c2dfeb977efac326af552d87" + .parse() + .unwrap() ); } else { panic!(); @@ -997,7 +996,9 @@ mod tests { assert_eq!(tx.value, U256::from(0x0au64)); assert_eq!( tx.recover().unwrap(), - "0f65fe9276bc9a24ae7083ae28e2660ef72df99e".parse().unwrap() + "0x0f65fe9276bc9a24ae7083ae28e2660ef72df99e" + .parse() + .unwrap() ); } @@ -1018,8 +1019,8 @@ mod tests { value: U256::from(3), input: Bytes::from(vec![1, 2]), odd_y_parity: true, - r: H256::default(), - s: H256::default(), + r: B256::default(), + s: B256::default(), access_list: vec![].into(), }); @@ -1057,8 +1058,8 @@ mod tests { value: U256::from(3), input: Bytes::from(vec![1, 2]), odd_y_parity: true, - r: H256::default(), - s: H256::default(), + r: B256::default(), + s: B256::default(), access_list: vec![].into(), }); @@ -1184,9 +1185,9 @@ mod tests { input: Bytes::default(), access_list: AccessList::default(), odd_y_parity: true, - r: H256::from_str("59e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafd") + r: B256::from_str("59e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafd") .unwrap(), - s: H256::from_str("016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469") + s: B256::from_str("016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469") .unwrap(), }); assert_eq!( diff --git a/crates/rethnet_eth/src/trie.rs b/crates/rethnet_eth/src/trie.rs index 4a9c844d87..a131b635a0 100644 --- a/crates/rethnet_eth/src/trie.rs +++ b/crates/rethnet_eth/src/trie.rs @@ -6,45 +6,46 @@ //! Utility functions for Ethereum use hash256_std_hasher::Hash256StdHasher; -use primitive_types::H256; use sha3::{ digest::generic_array::{typenum::consts::U32, GenericArray}, Digest, Keccak256, }; +use crate::B256; + /// The KECCAK of the RLP encoding of empty data. -pub const KECCAK_NULL_RLP: H256 = H256([ +pub const KECCAK_NULL_RLP: B256 = B256([ 0x56, 0xe8, 0x1f, 0x17, 0x1b, 0xcc, 0x55, 0xa6, 0xff, 0x83, 0x45, 0xe6, 0x92, 0xc0, 0xf8, 0x6e, 0x5b, 0x48, 0xe0, 0x1b, 0x99, 0x6c, 0xad, 0xc0, 0x01, 0x62, 0x2f, 0xb5, 0xe3, 0x63, 0xb4, 0x21, ]); /// Generates a trie root hash for a vector of key-value tuples -pub fn trie_root(input: I) -> H256 +pub fn trie_root(input: I) -> B256 where I: IntoIterator, K: AsRef<[u8]> + Ord, V: AsRef<[u8]>, { - H256::from_slice(triehash::trie_root::(input).as_ref()) + B256::from_slice(triehash::trie_root::(input).as_ref()) } /// Generates a key-hashed (secure) trie root hash for a vector of key-value tuples. 
-pub fn sec_trie_root(input: I) -> H256 +pub fn sec_trie_root(input: I) -> B256 where I: IntoIterator, K: AsRef<[u8]>, V: AsRef<[u8]>, { - H256::from_slice(triehash::sec_trie_root::(input).as_ref()) + B256::from_slice(triehash::sec_trie_root::(input).as_ref()) } /// Generates a trie root hash for a vector of values -pub fn ordered_trie_root(input: I) -> H256 +pub fn ordered_trie_root(input: I) -> B256 where I: IntoIterator, V: AsRef<[u8]>, { - H256::from_slice(triehash::ordered_trie_root::(input).as_ref()) + B256::from_slice(triehash::ordered_trie_root::(input).as_ref()) } struct KeccakHasher; diff --git a/crates/rethnet_eth/src/utils.rs b/crates/rethnet_eth/src/utils.rs index a7b4ba6e73..f4b80cd08c 100644 --- a/crates/rethnet_eth/src/utils.rs +++ b/crates/rethnet_eth/src/utils.rs @@ -8,10 +8,11 @@ // - https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/LICENSE-MIT // For the original context see: https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/ethers-core/src/utils/hash.rs -use primitive_types::H256; +use crate::B256; use revm::common::keccak256; use rlp::RlpStream; +/// RLP-encodes the provided value, prepends it with the provided ID, and appends it to the provided [`RlpStream`]. pub fn enveloped(id: u8, v: &T, s: &mut RlpStream) { use rlp::Encodable; @@ -29,7 +30,7 @@ const PREFIX: &str = "\x19Ethereum Signed Message:\n"; /// The data is a UTF-8 encoded string and will enveloped as follows: /// `"\x19Ethereum Signed Message:\n" + message.length + message` and hashed /// using keccak256. -pub fn hash_message(message: S) -> H256 +pub fn hash_message(message: S) -> B256 where S: AsRef<[u8]>, { diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 44572a3df0..2e2c748f49 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -5,10 +5,14 @@ edition = "2021" [dependencies] anyhow = { version = "1.0.64", default-features = false, features = ["std"] } -bytes = { version = "1.2.1", default-features = false } -hashbrown = { version = "0.12.3", default-features = false, features = ["serde"] } +auto_impl = { version = "1.0.1", default-features = false } +ethers-signers = { version = "1.0.0", default-features = false } +hashbrown = { version = "0.13", default-features = false, features = ["ahash", "serde"] } log = { version = "0.4.17", default-features = false } -primitive-types = { version = "0.11.1", default-features = false, features = ["impl-serde"] } -revm = { git = "https://github.com/bluealloy/revm/", rev = "9f8cdbd", default-features = false, features = ["dev", "k256", "with-serde"] } +parking_lot = { version = "0.12.1", default-features = false } +rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth" } +revm = { git = "https://github.com/wodann/revm", rev = "7c28358", version = "2.3", default-features = false, features = ["dev", "k256", "with-serde"] } +secp256k1 = { version = "0.24.1", default-features = false, features = ["alloc"] } sha3 = { version = "0.10.4", default-features = false } -tokio = { version = "1.21.2", default-features = false, features = ["sync"] } +signature = { version = "1.6.4", default-features = false, features = ["std"] } +tokio = { version = "1.21.2", default-features = false, features = ["rt-multi-thread", "sync"] } diff --git a/crates/rethnet_evm/src/block.rs b/crates/rethnet_evm/src/block.rs new file mode 100644 index 0000000000..929dd94c5e --- /dev/null +++ b/crates/rethnet_evm/src/block.rs @@ -0,0 +1,37 @@ +mod builder; + +use 
rethnet_eth::{block::Block, receipt::TypedReceipt, Address, B256, U256}; + +use crate::transaction::TransactionInfo; + +pub use builder::BlockBuilder; + +/// Container type that gathers all block data +#[derive(Debug, Clone)] +pub struct BlockInfo { + pub block: Block, + pub transactions: Vec<TransactionInfo>, + pub receipts: Vec<TypedReceipt>, +} + +/// Data of a block header +pub struct HeaderData { + /// The block number + pub number: Option<U256>, + /// The block's beneficiary + pub coinbase: Option<Address>
, + /// The block's timestamp + pub timestamp: Option, + /// The block's difficulty + pub difficulty: Option, + /// The block's base gas fee + pub basefee: Option, + /// The block's gas limit + pub gas_limit: Option, + /// The parent block's hash + pub parent_hash: Option, + // pub uncle_hash: Option, + // pub state_root: Option, + // pub transactions_trie: Option, + // pub receipt_trie: Option, +} diff --git a/crates/rethnet_evm/src/block/builder.rs b/crates/rethnet_evm/src/block/builder.rs new file mode 100644 index 0000000000..f3bd96a511 --- /dev/null +++ b/crates/rethnet_evm/src/block/builder.rs @@ -0,0 +1,156 @@ +use std::{fmt::Debug, sync::Arc}; + +use anyhow::bail; +use rethnet_eth::{ + block::{Header, PartialHeader}, + Address, U256, +}; +use revm::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}; +use tokio::runtime::Runtime; + +use crate::{ + blockchain::{AsyncBlockchain, SyncBlockchain}, + db::{AsyncDatabase, SyncDatabase}, + evm::build_evm, + inspector::RethnetInspector, + trace::Trace, + HeaderData, +}; + +/// A builder for constructing Ethereum blocks. +pub struct BlockBuilder +where + E: Debug + Send + 'static, +{ + blockchain: Arc>, E>>, + state: Arc>, E>>, + header: PartialHeader, + transactions: Vec, + cfg: CfgEnv, +} + +impl BlockBuilder +where + E: Debug + Send + 'static, +{ + /// Creates an intance of [`BlockBuilder`], creating a checkpoint in the process. + pub async fn new( + blockchain: Arc>, E>>, + db: Arc>, E>>, + cfg: CfgEnv, + parent: Header, + header: HeaderData, + ) -> Result { + // TODO: Proper implementation of a block builder + // db.checkpoint().await?; + + // TODO: Allow user to pass in values + let header = PartialHeader { + parent_hash: header.parent_hash.unwrap_or(parent.parent_hash), + number: header.number.unwrap_or(parent.number + U256::from(1)), + gas_limit: header.gas_limit.unwrap_or(parent.gas_limit), + ..PartialHeader::default() + }; + + Ok(Self { + blockchain, + state: db, + header, + transactions: Vec::new(), + cfg, + }) + } + + /// Retrieves the runtime of the [`BlockBuilder`]. + pub fn runtime(&self) -> &Runtime { + self.state.runtime() + } + + /// Retrieves the amount of gas used in the block, so far. + pub fn gas_used(&self) -> U256 { + self.header.gas_used + } + + /// Retrieves the amount of gas left in the block. + pub fn gas_remaining(&self) -> U256 { + self.header.gas_limit - self.gas_used() + } + + // fn miner_reward(num_ommers: u64) -> U256 { + // // TODO: This is the LONDON block reward. Did it change? 
+ // const BLOCK_REWARD: u64 = 2 * 10u64.pow(18); + // const NIBLING_REWARD: u64 = BLOCK_REWARD / 32; + + // U256::from(BLOCK_REWARD + num_ommers * NIBLING_REWARD) + // } + + /// Adds a pending transaction to + pub async fn add_transaction( + &mut self, + transaction: TxEnv, + ) -> anyhow::Result<(ExecutionResult, Trace)> { + // transaction's gas limit cannot be greater than the remaining gas in the block + if U256::from(transaction.gas_limit) > self.gas_remaining() { + bail!("tx has a higher gas limit than the remaining gas in the block"); + } + + self.transactions.push(transaction.clone()); + let block = BlockEnv { + number: self.header.number, + coinbase: self.header.beneficiary, + timestamp: U256::from(self.header.timestamp), + difficulty: self.header.difficulty, + basefee: self.header.base_fee.unwrap_or(U256::ZERO), + gas_limit: self.header.gas_limit, + prevrandao: if self.cfg.spec_id > SpecId::MERGE { + Some(self.header.mix_hash) + } else { + None + }, + }; + + let blockchain = self.blockchain.clone(); + let db = self.state.clone(); + let cfg = self.cfg.clone(); + + let (result, changes, trace) = self + .state + .runtime() + .spawn(async move { + let mut evm = build_evm(&blockchain, &db, cfg, transaction, block); + + let mut inspector = RethnetInspector::default(); + let (result, state) = evm.inspect(&mut inspector); + (result, state, inspector.into_trace()) + }) + .await + .unwrap(); + + self.state.apply(changes).await; + + self.header.gas_used += U256::from(result.gas_used); + + // TODO: store receipt + Ok((result, trace)) + } + + /// Finalizes the block, returning the state root. + /// TODO: Build a full block + pub async fn finalize(self, rewards: Vec<(Address, U256)>) -> Result<(), E> { + for (address, reward) in rewards { + self.state + .modify_account( + address, + Box::new(move |balance, _nonce, _code| *balance += reward), + ) + .await?; + } + + Ok(()) + } + + /// Aborts building of the block, reverting all transactions in the process. + pub async fn abort(self) -> Result<(), E> { + self.state.revert().await + } +} diff --git a/crates/rethnet_evm/src/blockchain.rs b/crates/rethnet_evm/src/blockchain.rs new file mode 100644 index 0000000000..6c7d91710c --- /dev/null +++ b/crates/rethnet_evm/src/blockchain.rs @@ -0,0 +1,4 @@ +mod request; +mod sync; + +pub use sync::{AsyncBlockchain, SyncBlockchain}; diff --git a/crates/rethnet_evm/src/blockchain/request.rs b/crates/rethnet_evm/src/blockchain/request.rs new file mode 100644 index 0000000000..2114f71fc0 --- /dev/null +++ b/crates/rethnet_evm/src/blockchain/request.rs @@ -0,0 +1,49 @@ +use std::fmt::Debug; + +use rethnet_eth::{B256, U256}; +use revm::blockchain::Blockchain; +use tokio::sync::oneshot; + +/// The request type used internally by a [`SyncDatabase`]. 
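+/// Requests are handled by the task that owns the blockchain, which sends its reply back over the embedded `oneshot::Sender`; `Terminate` stops that task.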
+#[derive(Debug)] +pub enum Request +where + E: Debug, +{ + BlockHashByNumber { + number: U256, + sender: oneshot::Sender>, + }, + // InsertBlock { + // block_number: U256, + // block_hash: B256, + // sender: oneshot::Sender>, + // }, + Terminate, +} + +impl Request +where + E: Debug, +{ + pub fn handle(self, db: &mut D) -> bool + where + D: Blockchain, + { + match self { + Request::BlockHashByNumber { number, sender } => { + sender.send(db.block_hash(number)).unwrap() + } + // Request::InsertBlock { + // block_number, + // block_hash, + // sender, + // } => sender + // .send(db.insert_block(block_number, block_hash)) + // .unwrap(), + Request::Terminate => return false, + } + + true + } +} diff --git a/crates/rethnet_evm/src/blockchain/sync.rs b/crates/rethnet_evm/src/blockchain/sync.rs new file mode 100644 index 0000000000..ba050a6d20 --- /dev/null +++ b/crates/rethnet_evm/src/blockchain/sync.rs @@ -0,0 +1,129 @@ +use std::{fmt::Debug, io, marker::PhantomData}; + +use rethnet_eth::{B256, U256}; +use revm::blockchain::Blockchain; +use tokio::{ + runtime::{Builder, Runtime}, + sync::{ + mpsc::{unbounded_channel, UnboundedSender}, + oneshot, + }, + task::{self, JoinHandle}, +}; + +use super::request::Request; + +/// Trait that meets all requirements for a synchronous database that can be used by [`AsyncBlockchain`]. +pub trait SyncBlockchain: Blockchain + Send + Sync + 'static +where + E: Debug + Send, +{ +} + +impl SyncBlockchain for B +where + B: Blockchain + Send + Sync + 'static, + E: Debug + Send, +{ +} + +/// A helper class for converting a synchronous blockchain into an asynchronous blockchain. +/// +/// Requires the inner blockchain to implement [`Blockchain`]. +pub struct AsyncBlockchain +where + B: SyncBlockchain, + E: Debug + Send, +{ + runtime: Runtime, + request_sender: UnboundedSender>, + blockchain_handle: Option>, + phantom: PhantomData, +} + +impl AsyncBlockchain +where + B: SyncBlockchain, + E: Debug + Send + 'static, +{ + /// Constructs an [`AsyncBlockchain`] instance with the provided database. + pub fn new(mut blockchain: B) -> io::Result { + let runtime = Builder::new_multi_thread().build()?; + + let (sender, mut receiver) = unbounded_channel::>(); + + let blockchain_handle = runtime.spawn(async move { + while let Some(request) = receiver.recv().await { + if !request.handle(&mut blockchain) { + break; + } + } + }); + + Ok(Self { + runtime, + request_sender: sender, + blockchain_handle: Some(blockchain_handle), + phantom: PhantomData, + }) + } + + /// Retrieves the runtime of the [`AsyncBlockchain`]. + pub fn runtime(&self) -> &Runtime { + &self.runtime + } + + /// Retrieves the hash of the block corresponding to the specified number. + pub async fn block_hash_by_number(&self, number: U256) -> Result { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::BlockHashByNumber { number, sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + // /// Inserts the specified block number and hash into the state. 
+ // pub async fn insert_block(&self, block_number: U256, block_hash: B256) -> Result<(), E> { + // let (sender, receiver) = oneshot::channel(); + + // self.request_sender + // .send(Request::InsertBlock { + // block_number, + // block_hash, + // sender, + // }) + // .expect("Failed to send request"); + + // receiver.await.unwrap() + // } +} + +impl Drop for AsyncBlockchain +where + D: SyncBlockchain, + E: Debug + Send, +{ + fn drop(&mut self) { + if let Some(handle) = self.blockchain_handle.take() { + self.request_sender + .send(Request::Terminate) + .expect("Failed to send request"); + + self.runtime.block_on(handle).unwrap(); + } + } +} + +impl<'b, B, E> Blockchain for &'b AsyncBlockchain +where + B: SyncBlockchain, + E: Debug + Send + 'static, +{ + type Error = E; + + fn block_hash(&mut self, number: U256) -> Result { + task::block_in_place(move || self.runtime.block_on(self.block_hash_by_number(number))) + } +} diff --git a/crates/rethnet_evm/src/config.rs b/crates/rethnet_evm/src/config.rs new file mode 100644 index 0000000000..ff0af49e85 --- /dev/null +++ b/crates/rethnet_evm/src/config.rs @@ -0,0 +1,6 @@ +use rethnet_eth::signature::Signature; + +#[derive(Clone, Debug)] +pub struct Config { + signature: Option, +} diff --git a/crates/rethnet_evm/src/db.rs b/crates/rethnet_evm/src/db.rs index 2108b1340a..cc11b88bdc 100644 --- a/crates/rethnet_evm/src/db.rs +++ b/crates/rethnet_evm/src/db.rs @@ -1 +1,8 @@ -pub mod layered_db; +mod layered_db; +mod request; +mod sync; + +pub(super) use sync::AsyncDatabaseWrapper; +pub use sync::{AsyncDatabase, SyncDatabase}; + +pub use layered_db::{LayeredDatabase, RethnetLayer}; diff --git a/crates/rethnet_evm/src/db/layered_db.rs b/crates/rethnet_evm/src/db/layered_db.rs index fb10a3c110..7b7d6cce50 100644 --- a/crates/rethnet_evm/src/db/layered_db.rs +++ b/crates/rethnet_evm/src/db/layered_db.rs @@ -1,20 +1,41 @@ use anyhow::anyhow; -use bytes::Bytes; use hashbrown::HashMap; -use primitive_types::{H160, H256, U256}; +use rethnet_eth::{ + account::BasicAccount, + state::{state_root, storage_root}, + trie::KECCAK_NULL_RLP, + Address, B256, U256, +}; use revm::{Account, AccountInfo, Bytecode, Database, DatabaseCommit, KECCAK_EMPTY}; use crate::DatabaseDebug; +#[derive(Clone, Debug)] +struct RevertedLayers { + /// The parent layer's state root + pub parent_state_root: B256, + /// The reverted layers + pub stack: Vec, +} + /// A database consisting of layers. -pub struct LayeredDatabase { +#[derive(Clone, Debug)] +pub struct LayeredDatabase { stack: Vec, + /// The old parent layer state root and the reverted layers + reverted_layers: Option>, + /// Snapshots + snapshots: HashMap>, // naive implementation } -impl LayeredDatabase { +impl LayeredDatabase { /// Creates a [`LayeredDatabase`] with the provided layer at the bottom. pub fn with_layer(layer: Layer) -> Self { - Self { stack: vec![layer] } + Self { + stack: vec![layer], + reverted_layers: None, + snapshots: HashMap::new(), + } } /// Returns the index of the top layer. @@ -49,7 +70,7 @@ impl LayeredDatabase { } } -impl LayeredDatabase { +impl LayeredDatabase { /// Adds a default layer to the top, returning its index and a /// mutable reference to the layer. 
pub fn add_layer_default(&mut self) -> (usize, &mut Layer) { @@ -57,42 +78,54 @@ impl LayeredDatabase { } } -impl Default for LayeredDatabase { +impl Default for LayeredDatabase { fn default() -> Self { Self { stack: vec![Layer::default()], + reverted_layers: None, + snapshots: HashMap::new(), } } } /// A layer with information needed for [`Rethnet`]. -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default)] pub struct RethnetLayer { /// Address -> AccountInfo - account_infos: HashMap, + account_infos: HashMap>, /// Address -> Storage - storage: HashMap>, + storage: HashMap>>, /// Code hash -> Address - contracts: HashMap, - /// Block number -> Block hash - block_hashes: HashMap, + contracts: HashMap, + /// Cached state root + state_root: Option, } impl RethnetLayer { /// Creates a `RethnetLayer` with the provided genesis accounts. - pub fn with_genesis_accounts(genesis_accounts: HashMap) -> Self { + pub fn with_genesis_accounts(genesis_accounts: HashMap) -> Self { + let genesis_accounts = genesis_accounts + .into_iter() + .map(|(address, account_info)| (address, Some(account_info))) + .collect(); + Self { account_infos: genesis_accounts, ..Default::default() } } - /// Insert the `AccountInfo` with at the specified `address`. - pub fn insert_account(&mut self, address: H160, mut account_info: AccountInfo) { + /// Returns whether the layer has a state root. + pub fn has_state_root(&self) -> bool { + self.state_root.is_some() + } + + /// Insert the provided `AccountInfo` at the specified `address`. + pub fn insert_account(&mut self, address: Address, mut account_info: AccountInfo) { if let Some(code) = account_info.code.take() { if !code.is_empty() { account_info.code_hash = code.hash(); - self.contracts.insert(code.hash(), code.bytes().clone()); + self.contracts.insert(code.hash(), code); } } @@ -100,26 +133,138 @@ impl RethnetLayer { account_info.code_hash = KECCAK_EMPTY; } - self.account_infos.insert(address, account_info); + self.account_infos.insert(address, Some(account_info)); + } +} + +impl LayeredDatabase { + /// Retrieves a reference to the account corresponding to the address, if it exists. + pub fn account(&self, address: &Address) -> Option<&AccountInfo> { + self.iter() + .find_map(|layer| { + layer + .account_infos + .get(address) + .map(|account_info| account_info.as_ref()) + }) + .flatten() + } + + /// Retrieves a mutable reference to the account corresponding to the address, if it exists. + pub fn account_mut(&mut self, address: &Address) -> Option<&mut AccountInfo> { + // WORKAROUND: https://blog.rust-lang.org/2022/08/05/nll-by-default.html + if self.last_layer_mut().account_infos.contains_key(address) { + return self + .last_layer_mut() + .account_infos + .get_mut(address) + .and_then(|account_info| account_info.as_mut()); + } + + self.account(address).cloned().map(|account_info| { + self.last_layer_mut() + .account_infos + .insert_unique_unchecked(*address, Some(account_info)) + .1 + .as_mut() + .unwrap() + }) + } + + /// Retrieves a mutable reference to the account corresponding to the address, if it exists. + /// Otherwise, inserts a new account. 
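+ /// Newly inserted accounts start with a zero balance, a zero nonce, and no code.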
+ pub fn account_or_insert_mut(&mut self, address: &Address) -> &mut AccountInfo { + // WORKAROUND: https://blog.rust-lang.org/2022/08/05/nll-by-default.html + if self.last_layer_mut().account_infos.contains_key(address) { + let was_deleted = self + .last_layer_mut() + .account_infos + .get(address) + .unwrap() + .is_none(); + + if !was_deleted { + return self + .last_layer_mut() + .account_infos + .get_mut(address) + .unwrap() + .as_mut() + .unwrap(); + } + } + + let account_info = self.account(address).cloned().unwrap_or(AccountInfo { + balance: U256::ZERO, + nonce: 0, + code_hash: KECCAK_EMPTY, + code: None, + }); + + self.last_layer_mut() + .account_infos + .insert_unique_unchecked(*address, Some(account_info)) + .1 + .as_mut() + .unwrap() + } + + /// Removes the [`AccountInfo`] corresponding to the specified address. + pub fn remove_account(&mut self, address: &Address) { + let account_info = self + .iter() + .find_map(|layer| layer.account_infos.get(address)); + + if let Some(Some(account_info)) = account_info { + debug_assert!(account_info.code.is_none()); + + let code_hash = account_info.code_hash; + + self.last_layer_mut() + .contracts + .insert(code_hash, Bytecode::new()); + + // Write None to signal that the account was deleted + self.last_layer_mut().account_infos.insert(*address, None); + } + + let storage = self.iter().find_map(|layer| layer.storage.get(address)); + + if let Some(Some(_)) = storage { + // Write None to signal that the account's storage was deleted + self.last_layer_mut().storage.insert(*address, None); + } } } impl Database for LayeredDatabase { type Error = anyhow::Error; - fn basic(&mut self, address: H160) -> anyhow::Result> { - Ok(self + fn basic(&mut self, address: Address) -> anyhow::Result> { + let account = self .iter() - .find_map(|layer| layer.account_infos.get(&address).cloned())) + .find_map(|layer| layer.account_infos.get(&address)) + .cloned() + .flatten(); + + log::debug!("account with address `{}`: {:?}", address, account); + + // TODO: Move this out of LayeredDatabase when forking + Ok(account.or(Some(AccountInfo { + balance: U256::ZERO, + nonce: 0, + code_hash: KECCAK_EMPTY, + code: None, + }))) } - fn code_by_hash(&mut self, code_hash: H256) -> anyhow::Result { + fn code_by_hash(&mut self, code_hash: B256) -> anyhow::Result { + if code_hash == KECCAK_EMPTY { + return Ok(Bytecode::new()); + } + self.iter() - .find_map(|layer| { - layer.contracts.get(&code_hash).map(|bytecode| unsafe { - Bytecode::new_raw_with_hash(bytecode.clone(), code_hash) - }) - }) + .find_map(|layer| layer.contracts.get(&code_hash).cloned()) .ok_or_else(|| { anyhow!( "Layered database does not contain contract with code hash: {}.", @@ -128,68 +273,62 @@ impl Database for LayeredDatabase { }) } - fn storage(&mut self, address: H160, index: U256) -> anyhow::Result { - self.iter() - .find_map(|layer| { - layer - .storage - .get(&address) - .and_then(|storage| storage.get(&index)) - .cloned() - }) - .ok_or_else(|| { - anyhow!( - "Layered database does not contain storage with address: {}; and index: {}.", - address, - index - ) - }) - } - - fn block_hash(&mut self, number: U256) -> anyhow::Result { - self.iter() - .find_map(|layer| layer.block_hashes.get(&number).cloned()) - .ok_or_else(|| { - anyhow!( - "Layered database does not contain block hash with number: {}.", - number - ) - }) + fn storage(&mut self, address: Address, index: U256) -> anyhow::Result { + Ok(self + .iter() + .find_map(|layer| layer.storage.get(&address).map(|storage| storage.as_ref())) + .flatten() + 
.and_then(|storage| storage.get(&index)) + .cloned() + .unwrap_or(U256::ZERO)) } } impl DatabaseCommit for LayeredDatabase { - fn commit(&mut self, changes: HashMap) { - let last_layer = self.last_layer_mut(); - + fn commit(&mut self, changes: HashMap) { changes.into_iter().for_each(|(address, account)| { if account.is_empty() || account.is_destroyed { - last_layer.account_infos.remove(&address); + self.remove_account(&address); } else { - last_layer.insert_account(address, account.info); - - let storage = last_layer - .storage - .entry(address) - .and_modify(|storage| { - if account.storage_cleared { - storage.clear(); - } - }) - .or_default(); + self.last_layer_mut().insert_account(address, account.info); + + let storage = if self.last_layer_mut().storage.contains_key(&address) { + let storage = self.last_layer_mut().storage.get_mut(&address).unwrap(); + + let was_deleted = storage.is_none(); + if was_deleted { + storage.replace(HashMap::new()); + } + + storage.as_mut().unwrap() + } else { + let storage = self + .iter() + .find_map(|layer| layer.storage.get(&address)) + .cloned() + .flatten() + .unwrap_or_default(); + + self.last_layer_mut() + .storage + .insert_unique_unchecked(address, Some(storage)) + .1 + .as_mut() + .unwrap() + }; + + if account.storage_cleared { + storage.clear(); + } account.storage.into_iter().for_each(|(index, value)| { let value = value.present_value(); - if value.is_zero() { + if value == U256::ZERO { storage.remove(&index); } else { storage.insert(index, value); } }); - - if storage.is_empty() { - last_layer.storage.remove(&address); - } } }); } @@ -200,119 +339,236 @@ impl DatabaseDebug for LayeredDatabase { fn insert_account( &mut self, - address: H160, + address: Address, account_info: AccountInfo, ) -> Result<(), Self::Error> { - self.last_layer_mut() - .account_infos - .insert(address, account_info); - - Ok(()) - } - - fn insert_block(&mut self, block_number: U256, block_hash: H256) -> Result<(), Self::Error> { - self.last_layer_mut() - .block_hashes - .insert(block_number, block_hash); + self.last_layer_mut().insert_account(address, account_info); Ok(()) } - fn set_account_balance(&mut self, address: H160, balance: U256) -> Result<(), Self::Error> { - if let Some(account_info) = self.last_layer_mut().account_infos.get_mut(&address) { - account_info.balance = balance; - } else { - let mut account_info = self - .iter() - .find_map(|layer| layer.account_infos.get(&address).cloned()) - .ok_or_else(|| anyhow!("Unknown account with address: {}", address))?; - - account_info.balance = balance; - self.last_layer_mut().insert_account(address, account_info); + fn make_snapshot(&mut self) -> B256 { + let state_root = self.state_root().unwrap(); + let mut snapshot = self.stack.clone(); + if let Some(layer) = snapshot.last_mut() { + layer.state_root.replace(state_root); } - Ok(()) + // Currently overwrites old snapshots + self.snapshots.insert(state_root, snapshot); + + state_root } - fn set_account_code(&mut self, address: H160, code: Bytecode) -> Result<(), Self::Error> { - let code_hash = code.hash(); + fn modify_account( + &mut self, + address: Address, + modifier: Box) + Send>, + ) -> Result<(), Self::Error> { + // TODO: Move account insertion out of LayeredDatabase when forking + let account_info = self.account_or_insert_mut(&address); + let old_code_hash = account_info.code_hash; - let old_code_hash = - if let Some(account_info) = self.last_layer_mut().account_infos.get_mut(&address) { - let old_code_hash = if account_info.code_hash != KECCAK_EMPTY { - 
Some(code_hash) - } else { - None - }; + modifier( + &mut account_info.balance, + &mut account_info.nonce, + &mut account_info.code, + ); - account_info.code_hash = code_hash; + if let Some(code) = account_info.code.take() { + let new_code_hash = code.hash(); - old_code_hash - } else { - let mut account_info = self - .iter() - .find_map(|layer| layer.account_infos.get(&address).cloned()) - .ok_or_else(|| anyhow!("Unknown account with address: {}", address))?; + if old_code_hash != new_code_hash { + account_info.code_hash = new_code_hash; - account_info.code_hash = code_hash; - self.last_layer_mut().insert_account(address, account_info); + let last_layer = self.last_layer_mut(); - None - }; + // The old contract should now return empty bytecode + last_layer.contracts.insert(old_code_hash, Bytecode::new()); - if let Some(code_hash) = old_code_hash { - self.last_layer_mut().contracts.remove(&code_hash); + last_layer.contracts.insert(new_code_hash, code); + } } - self.last_layer_mut() - .contracts - .insert(code_hash, code.bytes().clone()); - Ok(()) } - fn set_account_nonce(&mut self, address: H160, nonce: u64) -> Result<(), Self::Error> { + fn remove_account(&mut self, address: Address) -> Result, Self::Error> { + // Set None to indicate the account was deleted if let Some(account_info) = self.last_layer_mut().account_infos.get_mut(&address) { - account_info.nonce = nonce; - } else { - let mut account_info = self - .iter() - .find_map(|layer| layer.account_infos.get(&address).cloned()) - .ok_or_else(|| anyhow!("Unknown account with address: {}", address))?; + let old_account_info = account_info.clone(); + + *account_info = None; - account_info.nonce = nonce; - self.last_layer_mut().insert_account(address, account_info); + Ok(old_account_info) + } else { + self.last_layer_mut().account_infos.insert(address, None); + Ok(None) } + } - Ok(()) + fn remove_snapshot(&mut self, state_root: &B256) -> bool { + self.snapshots.remove(state_root).is_some() } fn set_account_storage_slot( &mut self, - address: H160, + address: Address, index: U256, value: U256, ) -> Result<(), Self::Error> { - match self.last_layer_mut().storage.entry(address) { - hashbrown::hash_map::Entry::Occupied(mut entry) => { - entry.get_mut().insert(index, value); - } - hashbrown::hash_map::Entry::Vacant(entry) => { + self.last_layer_mut() + .storage + .entry(address) + .and_modify(|entry| { + let was_deleted = entry.is_none(); + if was_deleted { + entry.replace(HashMap::new()); + } + + entry.as_mut().unwrap().insert(index, value); + }) + .or_insert_with(|| { let mut account_storage = HashMap::new(); account_storage.insert(index, value); - entry.insert(account_storage); - } - } + + Some(account_storage) + }); Ok(()) } - fn storage_root(&mut self) -> Result { - todo!() + fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { + // Ensure the last layer has a state root + if !self.last_layer_mut().has_state_root() { + let state_root = self.state_root()?; + self.last_layer_mut().state_root.replace(state_root); + } + + if let Some(snapshot) = self.snapshots.get(state_root) { + // Retain all layers except the first + self.reverted_layers = Some(RevertedLayers { + parent_state_root: self.stack.first().unwrap().state_root.unwrap(), + stack: self.stack.split_off(1), + }); + self.stack = snapshot.clone(); + + return Ok(()); + } + + // Check whether the state root is contained in the previously reverted layers + let reinstated_layers = self.reverted_layers.take().and_then(|mut reverted_layers| { + let layer_id = + 
reverted_layers + .stack + .iter() + .enumerate() + .find_map(|(layer_id, layer)| { + if layer.state_root.unwrap() == *state_root { + Some(layer_id) + } else { + None + } + }); + + if let Some(layer_id) = layer_id { + reverted_layers.stack.truncate(layer_id + 1); + + Some(reverted_layers) + } else { + None + } + }); + + let state_root = reinstated_layers + .as_ref() + .map_or(state_root, |reinstated_layers| { + &reinstated_layers.parent_state_root + }); + + let layer_id = self.stack.iter().enumerate().find_map(|(layer_id, layer)| { + if layer.state_root.unwrap() == *state_root { + Some(layer_id) + } else { + None + } + }); + + if let Some(layer_id) = layer_id { + let reverted_layers = self.stack.split_off(layer_id + 1); + let parent_state_root = self.stack.last().unwrap().state_root.unwrap(); + + if let Some(mut reinstated_layers) = reinstated_layers { + self.stack.append(&mut reinstated_layers.stack); + } + + self.add_layer_default(); + + self.reverted_layers = if reverted_layers.is_empty() { + None + } else { + Some(RevertedLayers { + parent_state_root, + stack: reverted_layers, + }) + }; + + Ok(()) + } else { + Err(anyhow!("Unknown state root: {}", state_root)) + } + } + + fn state_root(&mut self) -> Result { + let mut storage = HashMap::new(); + + self.iter().flat_map(|layer| layer.storage.iter()).for_each( + |(address, account_storage)| { + storage.entry(*address).or_insert(account_storage.clone()); + }, + ); + + let storage_roots: HashMap = storage + .into_iter() + .filter_map(|(address, storage)| { + storage.map(|storage| (address, storage_root(&storage))) + }) + .collect(); + + let mut state = HashMap::new(); + + self.iter() + .flat_map(|layer| layer.account_infos.iter()) + .for_each(|(address, account_info)| { + let storage_root = storage_roots + .get(address) + .cloned() + .unwrap_or(KECCAK_NULL_RLP); + + state + .entry(*address) + .or_insert(account_info.as_ref().map(|account_info| BasicAccount { + nonce: U256::from(account_info.nonce), + balance: account_info.balance, + storage_root, + code_hash: account_info.code_hash, + })); + }); + + let state: HashMap = state + .into_iter() + .filter_map(|(address, account)| account.map(|account| (address, account))) + .collect(); + + Ok(state_root(&state)) } fn checkpoint(&mut self) -> Result<(), Self::Error> { + let state_root = self.state_root()?; + self.last_layer_mut().state_root.replace(state_root); + self.add_layer_default(); + Ok(()) } diff --git a/crates/rethnet_evm/src/db/request.rs b/crates/rethnet_evm/src/db/request.rs new file mode 100644 index 0000000000..815b846750 --- /dev/null +++ b/crates/rethnet_evm/src/db/request.rs @@ -0,0 +1,231 @@ +use std::fmt::Debug; + +use hashbrown::HashMap; +use rethnet_eth::{Address, B256, U256}; +use revm::{Account, AccountInfo, Bytecode, Database, DatabaseCommit}; +use tokio::sync::oneshot; + +use crate::{debug::ModifierFn, DatabaseDebug}; + +/// The request type used internally by a [`SyncDatabase`]. 
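+/// Mirrors the [`Database`], [`DatabaseCommit`], and [`DatabaseDebug`] operations as messages that are answered over `oneshot` channels by [`Request::handle`]; `Terminate` shuts the handling task down.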
+pub enum Request +where + E: Debug, +{ + AccountByAddress { + address: Address, + sender: oneshot::Sender, E>>, + }, + Checkpoint { + sender: oneshot::Sender>, + }, + CodeByHash { + code_hash: B256, + sender: oneshot::Sender>, + }, + Commit { + changes: HashMap, + sender: oneshot::Sender<()>, + }, + InsertAccount { + address: Address, + account_info: AccountInfo, + sender: oneshot::Sender>, + }, + MakeSnapshot { + sender: oneshot::Sender, + }, + ModifyAccount { + address: Address, + modifier: ModifierFn, + sender: oneshot::Sender>, + }, + RemoveAccount { + address: Address, + sender: oneshot::Sender, E>>, + }, + RemoveSnapshot { + state_root: B256, + sender: oneshot::Sender, + }, + Revert { + sender: oneshot::Sender>, + }, + SetStorageSlot { + address: Address, + index: U256, + value: U256, + sender: oneshot::Sender>, + }, + SetStateRoot { + state_root: B256, + sender: oneshot::Sender>, + }, + StateRoot { + sender: oneshot::Sender>, + }, + StorageSlot { + address: Address, + index: U256, + sender: oneshot::Sender>, + }, + Terminate, +} + +impl Request +where + E: Debug, +{ + pub fn handle(self, db: &mut D) -> bool + where + D: Database + DatabaseCommit + DatabaseDebug, + { + match self { + Request::AccountByAddress { address, sender } => { + sender.send(db.basic(address)).unwrap() + } + Request::Checkpoint { sender } => sender.send(db.checkpoint()).unwrap(), + Request::CodeByHash { code_hash, sender } => { + sender.send(db.code_by_hash(code_hash)).unwrap() + } + Request::Commit { changes, sender } => { + db.commit(changes); + sender.send(()).unwrap() + } + Request::InsertAccount { + address, + account_info, + sender, + } => sender + .send(db.insert_account(address, account_info)) + .unwrap(), + Request::MakeSnapshot { sender } => sender.send(db.make_snapshot()).unwrap(), + Request::ModifyAccount { + address, + modifier, + sender, + } => sender.send(db.modify_account(address, modifier)).unwrap(), + Request::RemoveAccount { address, sender } => { + sender.send(db.remove_account(address)).unwrap() + } + Request::RemoveSnapshot { state_root, sender } => { + sender.send(db.remove_snapshot(&state_root)).unwrap() + } + Request::Revert { sender } => sender.send(db.revert()).unwrap(), + Request::SetStorageSlot { + address, + index, + value, + sender, + } => sender + .send(db.set_account_storage_slot(address, index, value)) + .unwrap(), + Request::SetStateRoot { state_root, sender } => { + sender.send(db.set_state_root(&state_root)).unwrap() + } + Request::StateRoot { sender } => sender.send(db.state_root()).unwrap(), + Request::StorageSlot { + address, + index, + sender, + } => sender.send(db.storage(address, index)).unwrap(), + Request::Terminate => return false, + } + + true + } +} + +impl Debug for Request +where + E: Debug, +{ + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + Self::AccountByAddress { address, sender } => f + .debug_struct("AccountByAddress") + .field("address", address) + .field("sender", sender) + .finish(), + Self::Checkpoint { sender } => f + .debug_struct("Checkpoint") + .field("sender", sender) + .finish(), + Self::CodeByHash { code_hash, sender } => f + .debug_struct("CodeByHash") + .field("code_hash", code_hash) + .field("sender", sender) + .finish(), + Self::Commit { changes, sender } => f + .debug_struct("Commit") + .field("changes", changes) + .field("sender", sender) + .finish(), + Self::InsertAccount { + address, + account_info, + sender, + } => f + .debug_struct("InsertAccount") + .field("address", address) + 
.field("account_info", account_info) + .field("sender", sender) + .finish(), + Self::MakeSnapshot { sender } => f + .debug_struct("MakeSnapshot") + .field("sender", sender) + .finish(), + Self::ModifyAccount { + address, + modifier: _modifier, + sender, + } => f + .debug_struct("ModifyAccount") + .field("address", address) + .field("sender", sender) + .finish(), + Self::RemoveAccount { address, sender } => f + .debug_struct("RemoveAccount") + .field("address", address) + .field("sender", sender) + .finish(), + Self::RemoveSnapshot { state_root, sender } => f + .debug_struct("RemoveSnapshot") + .field("state_root", state_root) + .field("sender", sender) + .finish(), + Self::Revert { sender } => f.debug_struct("Revert").field("sender", sender).finish(), + Self::SetStorageSlot { + address, + index, + value, + sender, + } => f + .debug_struct("SetStorageSlot") + .field("address", address) + .field("index", index) + .field("value", value) + .field("sender", sender) + .finish(), + Self::SetStateRoot { state_root, sender } => f + .debug_struct("SetStateRoot") + .field("state_root", state_root) + .field("sender", sender) + .finish(), + Self::StateRoot { sender } => { + f.debug_struct("StateRoot").field("sender", sender).finish() + } + Self::StorageSlot { + address, + index, + sender, + } => f + .debug_struct("StorageSlot") + .field("address", address) + .field("index", index) + .field("sender", sender) + .finish(), + Self::Terminate => write!(f, "Terminate"), + } + } +} diff --git a/crates/rethnet_evm/src/db/sync.rs b/crates/rethnet_evm/src/db/sync.rs new file mode 100644 index 0000000000..1d83496cb3 --- /dev/null +++ b/crates/rethnet_evm/src/db/sync.rs @@ -0,0 +1,417 @@ +use std::{fmt::Debug, io, marker::PhantomData}; + +use hashbrown::HashMap; +use rethnet_eth::{Address, B256, U256}; +use revm::{db::Database, Account, AccountInfo, Bytecode, DatabaseCommit}; +use tokio::{ + runtime::{Builder, Runtime}, + sync::{ + mpsc::{unbounded_channel, UnboundedSender}, + oneshot, + }, + task::{self, JoinHandle}, +}; + +use crate::{debug::ModifierFn, DatabaseDebug}; + +use super::request::Request; + +/// Trait that meets all requirements for a synchronous database that can be used by [`AsyncDatabase`]. +pub trait SyncDatabase: + Database + DatabaseCommit + DatabaseDebug + Send + Sync + 'static +where + E: Debug + Send, +{ +} + +impl SyncDatabase for D +where + D: Database + DatabaseCommit + DatabaseDebug + Send + Sync + 'static, + E: Debug + Send, +{ +} + +/// A helper class for converting a synchronous database into an asynchronous database. +/// +/// Requires the inner database to implement [`Database`], [`DatabaseCommit`], and [`DatabaseDebug`]. +pub struct AsyncDatabase +where + D: SyncDatabase, + E: Debug + Send, +{ + runtime: Runtime, + request_sender: UnboundedSender>, + db_handle: Option>, + phantom: PhantomData, +} + +impl AsyncDatabase +where + D: SyncDatabase, + E: Debug + Send + 'static, +{ + /// Constructs an [`AsyncDatabase`] instance with the provided database. + pub fn new(mut db: D) -> io::Result { + let runtime = Builder::new_multi_thread().build()?; + + let (sender, mut receiver) = unbounded_channel::>(); + + let db_handle = runtime.spawn(async move { + while let Some(request) = receiver.recv().await { + if !request.handle(&mut db) { + break; + } + } + }); + + Ok(Self { + runtime, + request_sender: sender, + db_handle: Some(db_handle), + phantom: PhantomData, + }) + } + + /// Retrieves the runtime of the [`AsyncDatabase`]. 
+ pub fn runtime(&self) -> &Runtime { + &self.runtime + } + + /// Retrieves the account corresponding to the specified address. + pub async fn account_by_address(&self, address: Address) -> Result, E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::AccountByAddress { address, sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Retrieves the storage slot corresponding to the specified address and index. + pub async fn account_storage_slot(&self, address: Address, index: U256) -> Result { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::StorageSlot { + address, + index, + sender, + }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Applies the provided changes to the state. + pub async fn apply(&self, changes: HashMap) { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Commit { changes, sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Creates a state checkpoint that can be reverted to using [`revert`]. + pub async fn checkpoint(&self) -> Result<(), E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Checkpoint { sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Retrieves the code corresponding to the specified hash. + pub async fn code_by_hash(&self, code_hash: B256) -> Result { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::CodeByHash { code_hash, sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Inserts the specified account into the state. + pub async fn insert_account( + &self, + address: Address, + account_info: AccountInfo, + ) -> Result<(), E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::InsertAccount { + address, + account_info, + sender, + }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. + pub async fn make_snapshot(&self) -> B256 { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::MakeSnapshot { sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Modifies the account at the specified address using the provided function. + pub async fn modify_account(&self, address: Address, modifier: ModifierFn) -> Result<(), E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::ModifyAccount { + address, + modifier, + sender, + }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Removes and returns the account at the specified address, if it exists. + pub async fn remove_account(&self, address: Address) -> Result, E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::RemoveAccount { address, sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Removes the snapshot corresponding to the specified id, if it exists. Returns whether a snapshot was removed. 
+ pub async fn remove_snapshot(&self, state_root: B256) -> bool { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::RemoveSnapshot { state_root, sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Reverts to the previous checkpoint, created using [`checkpoint`]. + pub async fn revert(&self) -> Result<(), E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Revert { sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Sets the storage slot at the specified address and index to the provided value. + pub async fn set_account_storage_slot( + &self, + address: Address, + index: U256, + value: U256, + ) -> Result<(), E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::SetStorageSlot { + address, + index, + value, + sender, + }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Reverts the state to match the specified state root. + pub async fn set_state_root(&self, state_root: &B256) -> Result<(), E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::SetStateRoot { + state_root: *state_root, + sender, + }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + + /// Retrieves the state's root. + pub async fn state_root(&self) -> Result { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::StateRoot { sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } +} + +impl Drop for AsyncDatabase +where + D: SyncDatabase, + E: Debug + Send, +{ + fn drop(&mut self) { + if let Some(handle) = self.db_handle.take() { + self.request_sender + .send(Request::Terminate) + .expect("Failed to send request"); + + self.runtime.block_on(handle).unwrap(); + } + } +} + +/// Wrapper around an [`AsyncDatabase`] to allow synchronous function calls. +pub struct AsyncDatabaseWrapper<'d, D, E> +where + D: SyncDatabase, + E: Debug + Send, +{ + db: &'d AsyncDatabase, +} + +impl<'d, D, E> AsyncDatabaseWrapper<'d, D, E> +where + D: SyncDatabase, + E: Debug + Send, +{ + /// Constructs an [`AsyncDatabaseWrapper`] instance. 
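+ /// The wrapper exposes the wrapped [`AsyncDatabase`] through the synchronous [`Database`], [`DatabaseCommit`], and [`DatabaseDebug`] traits by blocking on its runtime via `task::block_in_place`.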
+ pub fn new(db: &'d AsyncDatabase) -> Self { + Self { db } + } +} + +impl<'d, D, E> Database for AsyncDatabaseWrapper<'d, D, E> +where + D: SyncDatabase, + E: Debug + Send + 'static, +{ + type Error = E; + + fn basic(&mut self, address: Address) -> Result, Self::Error> { + task::block_in_place(move || { + self.db + .runtime() + .block_on(self.db.account_by_address(address)) + }) + } + + fn code_by_hash(&mut self, code_hash: B256) -> Result { + task::block_in_place(move || self.db.runtime().block_on(self.db.code_by_hash(code_hash))) + } + + fn storage(&mut self, address: Address, index: U256) -> Result { + task::block_in_place(move || { + self.db + .runtime() + .block_on(self.db.account_storage_slot(address, index)) + }) + } +} + +impl<'d, D, E> DatabaseCommit for AsyncDatabaseWrapper<'d, D, E> +where + D: SyncDatabase, + E: Debug + Send + 'static, +{ + fn commit(&mut self, changes: HashMap) { + task::block_in_place(move || self.db.runtime().block_on(self.db.apply(changes))) + } +} + +impl<'d, D, E> DatabaseDebug for AsyncDatabaseWrapper<'d, D, E> +where + D: SyncDatabase, + E: Debug + Send + 'static, +{ + type Error = E; + + fn insert_account( + &mut self, + address: Address, + account_info: AccountInfo, + ) -> Result<(), Self::Error> { + task::block_in_place(move || { + self.db + .runtime() + .block_on(self.db.insert_account(address, account_info)) + }) + } + + fn modify_account( + &mut self, + address: Address, + modifier: Box) + Send>, + ) -> Result<(), Self::Error> { + task::block_in_place(move || { + self.db + .runtime() + .block_on(self.db.modify_account(address, modifier)) + }) + } + + fn remove_account(&mut self, address: Address) -> Result, Self::Error> { + task::block_in_place(move || self.db.runtime().block_on(self.db.remove_account(address))) + } + + fn set_account_storage_slot( + &mut self, + address: Address, + index: U256, + value: U256, + ) -> Result<(), Self::Error> { + task::block_in_place(move || { + self.db + .runtime() + .block_on(self.db.set_account_storage_slot(address, index, value)) + }) + } + + fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { + task::block_in_place(move || { + self.db + .runtime() + .block_on(self.db.set_state_root(state_root)) + }) + } + + fn state_root(&mut self) -> Result { + task::block_in_place(move || self.db.runtime().block_on(self.db.state_root())) + } + + fn checkpoint(&mut self) -> Result<(), Self::Error> { + task::block_in_place(move || self.db.runtime().block_on(self.db.checkpoint())) + } + + fn revert(&mut self) -> Result<(), Self::Error> { + task::block_in_place(move || self.db.runtime().block_on(self.db.revert())) + } + + fn make_snapshot(&mut self) -> B256 { + task::block_in_place(move || self.db.runtime().block_on(self.db.make_snapshot())) + } + + fn remove_snapshot(&mut self, state_root: &B256) -> bool { + task::block_in_place(move || { + self.db + .runtime() + .block_on(self.db.remove_snapshot(*state_root)) + }) + } +} diff --git a/crates/rethnet_evm/src/debug.rs b/crates/rethnet_evm/src/debug.rs index 50db8edafb..c2fb596096 100644 --- a/crates/rethnet_evm/src/debug.rs +++ b/crates/rethnet_evm/src/debug.rs @@ -1,7 +1,11 @@ -use primitive_types::{H160, H256, U256}; +use auto_impl::auto_impl; +use rethnet_eth::{Address, B256, U256}; use revm::{AccountInfo, Bytecode}; +pub type ModifierFn = Box) + Send>; + /// A trait for debug operation on a database. +#[auto_impl(Box)] pub trait DatabaseDebug { /// The database's error type. 
type Error; @@ -9,95 +13,40 @@ pub trait DatabaseDebug { /// Inserts an account with the specified `address`. fn insert_account( &mut self, - address: H160, + address: Address, account_info: AccountInfo, ) -> Result<(), Self::Error>; - /// Inserts a block with the specified `block_number` and `block_hash`. - fn insert_block(&mut self, block_number: U256, block_hash: H256) -> Result<(), Self::Error>; - - /// Sets the account balance at the specified address to the provided value. - fn set_account_balance(&mut self, address: H160, balance: U256) -> Result<(), Self::Error>; + /// Modifies the account at the specified address using the provided function. + fn modify_account(&mut self, address: Address, modifier: ModifierFn) + -> Result<(), Self::Error>; - /// Sets the account code at the specified address to the provided value. - fn set_account_code(&mut self, address: H160, code: Bytecode) -> Result<(), Self::Error>; - - /// Sets the account nonce at the specified address to the provided value. - fn set_account_nonce(&mut self, address: H160, nonce: u64) -> Result<(), Self::Error>; + /// Removes and returns the account at the specified address, if it exists. + fn remove_account(&mut self, address: Address) -> Result, Self::Error>; /// Sets the storage slot at the specified address and index to the provided value. fn set_account_storage_slot( &mut self, - address: H160, + address: Address, index: U256, value: U256, ) -> Result<(), Self::Error>; + /// Reverts the state to match the specified state root. + fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error>; + /// Retrieves the storage root of the database. - fn storage_root(&mut self) -> Result; + fn state_root(&mut self) -> Result; /// Creates a checkpoint that can be reverted to using [`revert`]. fn checkpoint(&mut self) -> Result<(), Self::Error>; /// Reverts to the previous checkpoint, created using [`checkpoint`]. fn revert(&mut self) -> Result<(), Self::Error>; -} - -/// A trait for objects that support [`DatabaseDebug`]. -pub trait HasDatabaseDebug { - /// The database's error type. - type Error; - - /// Retrieves the owned `DatabaseDebug`. - fn db_debug(&mut self) -> &mut dyn DatabaseDebug; -} - -impl DatabaseDebug for T { - type Error = ::Error; - - fn insert_account( - &mut self, - address: H160, - account_info: AccountInfo, - ) -> Result<(), Self::Error> { - self.db_debug().insert_account(address, account_info) - } - - fn insert_block(&mut self, block_number: U256, block_hash: H256) -> Result<(), Self::Error> { - self.db_debug().insert_block(block_number, block_hash) - } - - fn set_account_balance(&mut self, address: H160, balance: U256) -> Result<(), Self::Error> { - self.db_debug().set_account_balance(address, balance) - } - - fn set_account_code(&mut self, address: H160, code: Bytecode) -> Result<(), Self::Error> { - self.db_debug().set_account_code(address, code) - } - - fn set_account_nonce(&mut self, address: H160, nonce: u64) -> Result<(), Self::Error> { - self.db_debug().set_account_nonce(address, nonce) - } - - fn set_account_storage_slot( - &mut self, - address: H160, - index: U256, - value: U256, - ) -> Result<(), Self::Error> { - self.db_debug() - .set_account_storage_slot(address, index, value) - } - - fn storage_root(&mut self) -> Result { - self.db_debug().storage_root() - } - fn checkpoint(&mut self) -> Result<(), Self::Error> { - self.db_debug().checkpoint() - } + /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. 
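+ /// The returned identifier can later be passed to [`set_state_root`] to restore the snapshot, or to [`remove_snapshot`] to discard it.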
+ fn make_snapshot(&mut self) -> B256; - fn revert(&mut self) -> Result<(), Self::Error> { - self.db_debug().revert() - } + /// Removes the snapshot corresponding to the specified id, if it exists. Returns whether a snapshot was removed. + fn remove_snapshot(&mut self, state_root: &B256) -> bool; } diff --git a/crates/rethnet_evm/src/evm.rs b/crates/rethnet_evm/src/evm.rs new file mode 100644 index 0000000000..ca0ddc2f96 --- /dev/null +++ b/crates/rethnet_evm/src/evm.rs @@ -0,0 +1,33 @@ +use std::fmt::Debug; + +use revm::{BlockEnv, CfgEnv, TxEnv}; + +use crate::{ + blockchain::{AsyncBlockchain, SyncBlockchain}, + db::{AsyncDatabase, AsyncDatabaseWrapper, SyncDatabase}, +}; + +/// Creates an evm from the provided database, config, transaction, and block. +#[allow(clippy::type_complexity)] +pub fn build_evm<'b, 'd, E>( + blockchain: &'b AsyncBlockchain>, E>, + db: &'d AsyncDatabase>, E>, + cfg: CfgEnv, + transaction: TxEnv, + block: BlockEnv, +) -> revm::EVM< + AsyncDatabaseWrapper<'d, Box>, E>, + &'b AsyncBlockchain>, E>, +> +where + E: Debug + Send + 'static, +{ + let mut evm = revm::EVM::new(); + evm.set_blockchain(blockchain); + evm.database(AsyncDatabaseWrapper::new(db)); + evm.env.cfg = cfg; + evm.env.block = block; + evm.env.tx = transaction; + + evm +} diff --git a/crates/rethnet_evm/src/inspector.rs b/crates/rethnet_evm/src/inspector.rs index 306146bb6e..cb3491a354 100644 --- a/crates/rethnet_evm/src/inspector.rs +++ b/crates/rethnet_evm/src/inspector.rs @@ -1,32 +1,53 @@ -use log::trace; -use revm::{opcode, Database, EVMData, Inspector, Interpreter, Return}; +use revm::{blockchain::Blockchain, opcode, Database, EVMData, Inspector, Interpreter, Return}; -pub struct RethnetInspector; +use crate::trace::Trace; -impl Default for RethnetInspector { - fn default() -> Self { - Self +#[derive(Default)] +pub struct RethnetInspector { + trace: Trace, + opcode_stack: Vec, +} + +impl RethnetInspector { + /// Converts the [`RethnetInspector`] into its [`Trace`]. + pub fn into_trace(self) -> Trace { + self.trace } } -impl Inspector for RethnetInspector +impl Inspector for RethnetInspector where D: Database, + BC: Blockchain, { fn step( &mut self, interp: &mut Interpreter, - _data: &mut EVMData<'_, D>, + _data: &mut EVMData<'_, D, BC>, + _is_static: bool, + ) -> Return { + self.opcode_stack.push(interp.current_opcode()); + + Return::Continue + } + + fn step_end( + &mut self, + interp: &mut Interpreter, + _data: &mut EVMData<'_, D, BC>, _is_static: bool, + exit_code: Return, ) -> Return { - let opcode = unsafe { *interp.instruction_pointer }; - trace!( - "opcode: {:?} | fee: {} | gasLeft: {} | gasSpent: {}", - opcode::OPCODE_JUMPMAP[usize::from(opcode)], - opcode::spec_opcode_gas(_data.env.cfg.spec_id)[usize::from(opcode)].get_gas(), - interp.gas().remaining(), - interp.gas().spend() - ); + let opcode = self + .opcode_stack + .pop() + .expect("There must always be an opcode when ending a step"); + + self.trace.add_step(opcode, interp.gas(), exit_code); + + if opcode == opcode::RETURN { + self.trace.return_value = interp.return_value(); + } Return::Continue } diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs index 44366e89e6..b72dd4a1ed 100644 --- a/crates/rethnet_evm/src/lib.rs +++ b/crates/rethnet_evm/src/lib.rs @@ -4,21 +4,39 @@ //! Virtual Machine (or EVM). 
#![warn(missing_docs)] -pub use bytes::Bytes; -pub use db::layered_db::{LayeredDatabase, RethnetLayer}; -pub use debug::{DatabaseDebug, HasDatabaseDebug}; +use rethnet_eth::Address; + pub use hashbrown::HashMap; -pub use primitive_types::{H160, H256, U256}; pub use revm::{ - db::{DatabaseRef, EmptyDB}, + blockchain::{Blockchain, BlockchainRef}, + db::EmptyDB, Account, AccountInfo, BlockEnv, Bytecode, CfgEnv, CreateScheme, Database, DatabaseCommit, ExecutionResult, Log, Return, SpecId, TransactOut, TransactTo, TxEnv, EVM, }; +pub use crate::{ + block::{BlockBuilder, HeaderData}, + debug::DatabaseDebug, + runtime::Rethnet, + transaction::PendingTransaction, +}; + /// State mapping of addresses to accounts. -pub type State = HashMap; +pub type State = HashMap; + +/// Types for managing Ethereum blockchain +pub mod blockchain; + +/// Database types for managing Ethereum state +pub mod db; + +/// Types used for tracing EVM calls +pub mod trace; -mod db; +mod block; mod debug; +pub(crate) mod evm; mod inspector; -pub mod sync; +pub(crate) mod random; +mod runtime; +mod transaction; diff --git a/crates/rethnet_evm/src/random.rs b/crates/rethnet_evm/src/random.rs new file mode 100644 index 0000000000..d0ea9e2941 --- /dev/null +++ b/crates/rethnet_evm/src/random.rs @@ -0,0 +1,34 @@ +#![allow(dead_code)] + +use rethnet_eth::B256; +use revm::common::keccak256; + +/// A pseudorandom hash generator which allows overriding of the next generated hash. +#[derive(Debug)] +pub struct RandomHashGenerator { + /// The next hash that will be returned + next_value: B256, +} + +impl RandomHashGenerator { + /// Constructs a [`RandomHashGenerator`] with the specified seed. + pub fn with_seed(seed: &str) -> Self { + let next_value = keccak256(seed.as_bytes()); + + Self { next_value } + } + + /// Returns the next hash, generated the future next hash, and caches it. + pub fn next(&mut self) -> B256 { + let mut next_value = keccak256(self.next_value.as_bytes()); + + std::mem::swap(&mut self.next_value, &mut next_value); + + next_value + } + + /// Overwrites the next hash output by the generator. + pub fn set_next(&mut self, next_value: B256) { + self.next_value = next_value; + } +} diff --git a/crates/rethnet_evm/src/runtime.rs b/crates/rethnet_evm/src/runtime.rs new file mode 100644 index 0000000000..1ec1bd6861 --- /dev/null +++ b/crates/rethnet_evm/src/runtime.rs @@ -0,0 +1,109 @@ +use std::{fmt::Debug, sync::Arc}; + +use rethnet_eth::B256; +use revm::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}; + +use crate::{ + blockchain::{AsyncBlockchain, SyncBlockchain}, + db::{AsyncDatabase, SyncDatabase}, + evm::build_evm, + inspector::RethnetInspector, + trace::Trace, + State, +}; + +/// The asynchronous Rethnet runtime. +pub struct Rethnet +where + E: Debug + Send + 'static, +{ + blockchain: Arc>, E>>, + db: Arc>, E>>, + cfg: CfgEnv, +} + +impl Rethnet +where + E: Debug + Send + 'static, +{ + /// Constructs a new [`Rethnet`] instance. + pub fn new( + blockchain: Arc>, E>>, + db: Arc>, E>>, + cfg: CfgEnv, + ) -> Self { + Self { + blockchain, + db, + cfg, + } + } + + /// Runs a transaction without committing the state. 
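+ /// Returns the execution result, the state changes the transaction would make (left unapplied), and the recorded [`Trace`].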
+ pub async fn dry_run( + &self, + transaction: TxEnv, + mut block: BlockEnv, + ) -> (ExecutionResult, State, Trace) { + let blockchain = self.blockchain.clone(); + let db = self.db.clone(); + let cfg = self.cfg.clone(); + + if cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { + block.prevrandao = Some(B256::zero()); + } + + self.db + .runtime() + .spawn(async move { + let mut evm = build_evm(&blockchain, &db, cfg, transaction, block); + + let mut inspector = RethnetInspector::default(); + let (result, state) = evm.inspect(&mut inspector); + (result, state, inspector.into_trace()) + }) + .await + .unwrap() + } + + /// Runs a transaction without committing the state, while disabling balance checks and creating accounts for new addresses. + pub async fn guaranteed_dry_run( + &self, + transaction: TxEnv, + mut block: BlockEnv, + ) -> Result<(ExecutionResult, State, Trace), E> { + let blockchain = self.blockchain.clone(); + let db = self.db.clone(); + + let mut cfg = self.cfg.clone(); + cfg.disable_balance_check = true; + + if cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { + block.prevrandao = Some(B256::zero()); + } + + let result = self + .db + .runtime() + .spawn(async move { + let mut evm = build_evm(&blockchain, &db, cfg, transaction, block); + + let mut inspector = RethnetInspector::default(); + let (result, state) = evm.inspect(&mut inspector); + (result, state, inspector.into_trace()) + }) + .await + .unwrap(); + + Ok(result) + } + + /// Runs a transaction, committing the state in the process. + pub async fn run(&self, transaction: TxEnv, block: BlockEnv) -> (ExecutionResult, Trace) { + let (result, changes, trace) = self.dry_run(transaction, block).await; + + self.db.apply(changes).await; + + (result, trace) + } +} diff --git a/crates/rethnet_evm/src/signer.rs b/crates/rethnet_evm/src/signer.rs new file mode 100644 index 0000000000..046abd02c2 --- /dev/null +++ b/crates/rethnet_evm/src/signer.rs @@ -0,0 +1,32 @@ +use anyhow::anyhow; +use hashbrown::HashMap; +use rethnet_eth::{ + transaction::{SignedTransaction, TransactionRequest}, + Address, +}; +use secp256k1::{Secp256k1, SecretKey, VerifyOnly}; + +pub struct Signer { + accounts: HashMap, + context: Secp256k1, +} + +impl Signer { + pub fn new() -> Self { + Self { + accounts: HashMap::new(), + context: Secp256k1::verification_only(), + } + } + + pub fn sign( + &self, + request: TransactionRequest, + caller: &Address, + ) -> anyhow::Result { + let signer = self + .accounts + .get(caller) + .ok_or_else(|| anyhow!("Signer for address `{}` does not exist.", caller))?; + } +} diff --git a/crates/rethnet_evm/src/sync.rs b/crates/rethnet_evm/src/sync.rs deleted file mode 100644 index f04668594e..0000000000 --- a/crates/rethnet_evm/src/sync.rs +++ /dev/null @@ -1,113 +0,0 @@ -//! Synchronisation types for the Rethnet EVM. - -mod client; -pub(self) mod request; - -use anyhow::bail; - -use revm::{CfgEnv, Database, DatabaseCommit, EVM}; -use tokio::sync::mpsc::UnboundedReceiver; - -use crate::DatabaseDebug; - -pub use self::client::Client; -use self::request::Request; - -/// The asynchronous Rethnet runtime. -/// -/// Depending on the traits of the database passed to [`new`], [`Rethnet`] will support -/// running with`Request::Debug` and `Request::DatabaseMut`. -pub struct Rethnet { - evm: EVM, - request_receiver: UnboundedReceiver, -} - -impl Rethnet { - /// Creates a new [`Rethnet`] instance. 
- pub fn new(request_receiver: UnboundedReceiver, cfg: CfgEnv, db: D) -> Self { - let mut evm = EVM::new(); - evm.env.cfg = cfg; - evm.database(db); - - Self { - evm, - request_receiver, - } - } - - /// Runs [`Rethnet`] immutably. - pub async fn run(mut self) -> anyhow::Result<()> - where - D: Database, - { - while let Some(request) = self.request_receiver.recv().await { - match request { - Request::Debug(_) => { - bail!("Rethnet client does not support `DatabaseDebug`.") - } - Request::Database(request) => request.handle_event(&mut self.evm)?, - Request::DatabaseMut(_) => { - bail!("Rethnet client does not support `DatabaseCommit`.") - } - Request::Terminate => return Ok(()), - } - } - - Ok(()) - } - - /// Runs [`Rethnet`] immutably with debug capability. - pub async fn run_debug(mut self) -> anyhow::Result<()> - where - D: Database + DatabaseDebug, - { - while let Some(request) = self.request_receiver.recv().await { - match request { - Request::Debug(request) => request.handle_event(&mut self.evm)?, - Request::Database(request) => request.handle_event(&mut self.evm)?, - Request::DatabaseMut(_) => { - bail!("Rethnet client does not support `DatabaseCommit`.") - } - Request::Terminate => return Ok(()), - } - } - - Ok(()) - } - - /// Runs [`Rethnet`] mutably. - pub async fn run_mut(mut self) -> anyhow::Result<()> - where - D: Database + DatabaseCommit, - { - while let Some(request) = self.request_receiver.recv().await { - match request { - Request::Debug(_) => { - bail!("Rethnet client does not support `DatabaseDebug`.") - } - Request::Database(request) => request.handle_event(&mut self.evm)?, - Request::DatabaseMut(request) => request.handle_event(&mut self.evm)?, - Request::Terminate => return Ok(()), - } - } - - Ok(()) - } - - /// Runs [`Rethnet`] mutably with debug capability. - pub async fn run_mut_debug(mut self) -> anyhow::Result<()> - where - D: Database + DatabaseCommit + DatabaseDebug, - { - while let Some(request) = self.request_receiver.recv().await { - match request { - Request::Debug(request) => request.handle_event(&mut self.evm)?, - Request::Database(request) => request.handle_event(&mut self.evm)?, - Request::DatabaseMut(request) => request.handle_event(&mut self.evm)?, - Request::Terminate => return Ok(()), - } - } - - Ok(()) - } -} diff --git a/crates/rethnet_evm/src/sync/client.rs b/crates/rethnet_evm/src/sync/client.rs deleted file mode 100644 index 305f47dbe5..0000000000 --- a/crates/rethnet_evm/src/sync/client.rs +++ /dev/null @@ -1,305 +0,0 @@ -use std::future::Future; - -use bytes::Bytes; -use primitive_types::{H160, H256, U256}; -use revm::{AccountInfo, BlockEnv, CfgEnv, Database, DatabaseCommit, ExecutionResult, TxEnv}; -use tokio::{ - runtime::{Builder, Runtime}, - sync::{ - mpsc::{unbounded_channel, UnboundedSender}, - oneshot, - }, - task::JoinHandle, -}; - -use crate::{DatabaseDebug, State}; - -use super::{ - request::{DatabaseMutRequest, DatabaseRequest, DebugRequest, Request}, - Rethnet, -}; - -/// The asynchronous client that communicates with a [`Rethnet`] object. -pub struct Client { - request_sender: UnboundedSender, - rethnet_handle: Option>>, - runtime: Runtime, -} - -impl Client { - /// Creates a [`Client`] by spawning an asynchronous task to run [`Rethnet`]. 
- fn new(request_sender: UnboundedSender, future: F) -> anyhow::Result - where - F: Future> + Send + 'static, - { - let runtime = Builder::new_multi_thread().build()?; - let rethnet_handle = Some(runtime.spawn(future)); - - Ok(Self { - request_sender, - rethnet_handle, - runtime, - }) - } - - /// Constructs [`Rethnet`] with the provided database and runs it asynchronously. - pub fn with_db(cfg: CfgEnv, db: D) -> anyhow::Result - where - D: Database + Send + 'static, - { - let (request_sender, request_receiver) = unbounded_channel(); - - Self::new(request_sender, async { - Rethnet::new(request_receiver, cfg, db).run().await - }) - } - - /// Constructs [`Rethnet`] with the provided database and runs it asynchronously. - pub fn with_db_debug(cfg: CfgEnv, db: D) -> anyhow::Result - where - D: Database + DatabaseDebug + Send + 'static, - { - let (request_sender, request_receiver) = unbounded_channel(); - - Self::new(request_sender, async { - Rethnet::new(request_receiver, cfg, db).run_debug().await - }) - } - - /// Constructs [`Rethnet`] with the provided database and runs it asynchronously. - pub fn with_db_mut(cfg: CfgEnv, db: D) -> anyhow::Result - where - D: Database + DatabaseCommit + Send + 'static, - { - let (request_sender, request_receiver) = unbounded_channel(); - - Self::new(request_sender, async { - Rethnet::new(request_receiver, cfg, db).run_mut().await - }) - } - - /// Constructs [`Rethnet`] with the provided database and runs it asynchronously. - pub fn with_db_mut_debug(cfg: CfgEnv, db: D) -> anyhow::Result - where - D: Database - + DatabaseCommit - + DatabaseDebug - + Send - + 'static, - { - let (request_sender, request_receiver) = unbounded_channel(); - - Self::new(request_sender, async { - Rethnet::new(request_receiver, cfg, db) - .run_mut_debug() - .await - }) - } - - /// Runs a transaction with committing the state. - pub async fn dry_run(&self, transaction: TxEnv, block: BlockEnv) -> (ExecutionResult, State) { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Database(DatabaseRequest::DryRun { - transaction, - block, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Runs a transaction, committing the state in the process. - pub async fn run(&self, transaction: TxEnv) -> ExecutionResult { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::DatabaseMut(DatabaseMutRequest::Run { - transaction, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Guarantees that a transaction will succeed. - pub async fn guarantee_transaction(&self, transaction: TxEnv) -> anyhow::Result<()> { - let total_gas = U256::from(transaction.gas_limit) - * (transaction.gas_price + transaction.gas_priority_fee.unwrap_or_else(U256::zero)) - + transaction.value; - - let caller = transaction.caller; - - let account_info = - if let Some(account_info) = self.get_account_by_address(caller).await.unwrap_or(None) { - account_info - } else { - let account_info = AccountInfo::default(); - self.insert_account(caller, account_info.clone()).await?; - account_info - }; - - if account_info.balance < total_gas { - self.set_account_balance(caller, total_gas).await?; - } - - Ok(()) - } - - /// Creates a state checkpoint that can be reverted to using [`revert`]. 
- pub async fn checkpoint(&self) -> anyhow::Result<()> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Debug(DebugRequest::Checkpoint { sender })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Reverts to the previous checkpoint, created using [`checkpoint`]. - pub async fn revert(&self) -> anyhow::Result<()> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Debug(DebugRequest::Revert { sender })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Retrieves the account corresponding to the address, if it exists. - pub async fn get_account_by_address( - &self, - address: H160, - ) -> anyhow::Result> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Database(DatabaseRequest::AccountByAddress { - address, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Inserts the specified account into the state. - pub async fn insert_account( - &self, - address: H160, - account_info: AccountInfo, - ) -> anyhow::Result<()> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Debug(DebugRequest::InsertAccount { - address, - account_info, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Inserts the specified block number and hash into the state. - pub async fn insert_block(&self, block_number: U256, block_hash: H256) -> anyhow::Result<()> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Debug(DebugRequest::InsertBlock { - block_number, - block_hash, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Sets the account balance at the specified address to the provided value. - pub async fn set_account_balance(&self, address: H160, balance: U256) -> anyhow::Result<()> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Debug(DebugRequest::SetAccountBalance { - address, - balance, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Sets the account code at the specified address to the provided value. - pub async fn set_account_code(&self, address: H160, code: Bytes) -> anyhow::Result<()> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Debug(DebugRequest::SetAccountCode { - address, - bytes: code, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Sets the account nonce at the specified address to the provided value. - pub async fn set_account_nonce(&self, address: H160, nonce: u64) -> anyhow::Result<()> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Debug(DebugRequest::SetAccountNonce { - address, - nonce, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } - - /// Sets the storage slot at the specified address and index to the provided value. 
- pub async fn set_account_storage_slot( - &self, - address: H160, - index: U256, - value: U256, - ) -> anyhow::Result<()> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Debug(DebugRequest::SetAccountStorageSlot { - address, - index, - value, - sender, - })) - .expect("Failed to send request"); - - receiver.await.expect("Rethnet unexpectedly crashed") - } -} - -impl Drop for Client { - fn drop(&mut self) { - if let Some(handle) = self.rethnet_handle.take() { - self.request_sender - .send(Request::Terminate) - .expect("Failed to send request"); - - self.runtime - .block_on(handle) - .unwrap() - .expect("Rethnet closed unexpectedly"); - } - } -} diff --git a/crates/rethnet_evm/src/sync/request.rs b/crates/rethnet_evm/src/sync/request.rs deleted file mode 100644 index 839675632c..0000000000 --- a/crates/rethnet_evm/src/sync/request.rs +++ /dev/null @@ -1,207 +0,0 @@ -use anyhow::bail; -use bytes::Bytes; -use primitive_types::{H160, H256, U256}; -use revm::{ - AccountInfo, BlockEnv, Bytecode, Database, DatabaseCommit, ExecutionResult, TxEnv, EVM, -}; -use tokio::sync::oneshot; - -use crate::{inspector::RethnetInspector, DatabaseDebug, State}; - -/// The request type for a [`Client`] to communicate with [`Rethnet`]. -#[allow(clippy::large_enum_variant)] -#[derive(Debug)] -pub enum Request { - Debug(DebugRequest), - Database(DatabaseRequest), - DatabaseMut(DatabaseMutRequest), - Terminate, -} - -#[derive(Debug)] -pub enum DebugRequest { - Checkpoint { - sender: oneshot::Sender>, - }, - InsertAccount { - address: H160, - account_info: AccountInfo, - sender: oneshot::Sender>, - }, - InsertBlock { - block_number: U256, - block_hash: H256, - sender: oneshot::Sender>, - }, - Revert { - sender: oneshot::Sender>, - }, - SetAccountBalance { - address: H160, - balance: U256, - sender: oneshot::Sender>, - }, - SetAccountCode { - address: H160, - bytes: Bytes, - sender: oneshot::Sender>, - }, - SetAccountNonce { - address: H160, - nonce: u64, - sender: oneshot::Sender>, - }, - SetAccountStorageSlot { - address: H160, - index: U256, - value: U256, - sender: oneshot::Sender>, - }, -} - -impl DebugRequest { - pub fn handle_event(self, evm: &mut EVM) -> anyhow::Result<()> - where - D: DatabaseDebug, - { - let sent_response = match self { - DebugRequest::Checkpoint { sender } => { - sender.send(evm.db().unwrap().checkpoint()).is_ok() - } - DebugRequest::InsertAccount { - address, - account_info, - sender, - } => sender - .send(evm.db().unwrap().insert_account(address, account_info)) - .is_ok(), - DebugRequest::InsertBlock { - block_number, - block_hash, - sender, - } => sender - .send(evm.db().unwrap().insert_block(block_number, block_hash)) - .is_ok(), - DebugRequest::Revert { sender } => sender.send(evm.db().unwrap().revert()).is_ok(), - DebugRequest::SetAccountBalance { - address, - balance, - sender, - } => sender - .send(evm.db().unwrap().set_account_balance(address, balance)) - .is_ok(), - DebugRequest::SetAccountCode { - address, - bytes, - sender, - } => sender - .send( - evm.db() - .unwrap() - .set_account_code(address, Bytecode::new_raw(bytes)), - ) - .is_ok(), - DebugRequest::SetAccountNonce { - address, - nonce, - sender, - } => sender - .send(evm.db().unwrap().set_account_nonce(address, nonce)) - .is_ok(), - DebugRequest::SetAccountStorageSlot { - address, - index, - value, - sender, - } => sender - .send( - evm.db() - .unwrap() - .set_account_storage_slot(address, index, value), - ) - .is_ok(), - }; - - if !sent_response { - bail!("Failed to send 
response"); - } - - Ok(()) - } -} - -#[allow(clippy::large_enum_variant)] -#[derive(Debug)] -pub enum DatabaseRequest { - AccountByAddress { - address: H160, - sender: oneshot::Sender>>, - }, - DryRun { - transaction: TxEnv, - block: BlockEnv, - sender: oneshot::Sender<(ExecutionResult, State)>, - }, -} - -impl DatabaseRequest { - pub fn handle_event(self, evm: &mut EVM) -> anyhow::Result<()> - where - D: Database, - { - let sent_response = match self { - DatabaseRequest::AccountByAddress { address, sender } => { - sender.send(evm.db().unwrap().basic(address)).is_ok() - } - DatabaseRequest::DryRun { - transaction, - block, - sender, - } => { - evm.env.tx = transaction; - evm.env.block = block; - - sender - .send(evm.inspect(RethnetInspector::default())) - .is_ok() - } - }; - - if !sent_response { - bail!("Failed to send response"); - } - - Ok(()) - } -} - -#[derive(Debug)] -pub enum DatabaseMutRequest { - Run { - transaction: TxEnv, - sender: oneshot::Sender, - }, -} - -impl DatabaseMutRequest { - pub fn handle_event(self, evm: &mut EVM) -> anyhow::Result<()> - where - D: Database + DatabaseCommit, - { - let sent_response = match self { - DatabaseMutRequest::Run { - transaction, - sender, - } => { - evm.env.tx = transaction; - sender.send(evm.transact_commit()).is_ok() - } - }; - - if !sent_response { - bail!("Failed to send response"); - } - - Ok(()) - } -} diff --git a/crates/rethnet_evm/src/trace.rs b/crates/rethnet_evm/src/trace.rs new file mode 100644 index 0000000000..1f62e3033a --- /dev/null +++ b/crates/rethnet_evm/src/trace.rs @@ -0,0 +1,47 @@ +use rethnet_eth::Bytes; +use revm::{Gas, Return}; + +/// A trace for an EVM call. +#[derive(Default)] +pub struct Trace { + /// The individual steps of the call + pub steps: Vec, + /// The return value of the call + pub return_value: Bytes, + gas: Option, +} + +/// A single EVM step. +pub struct Step { + /// The executed op code + pub opcode: u8, + /// The amount of gas that was used by the step + pub gas_cost: u64, + /// The amount of gas that was refunded by the step + pub gas_refunded: i64, + /// The exit code of the step + pub exit_code: Return, +} + +impl Trace { + /// Adds a VM step to the trace. + pub fn add_step(&mut self, opcode: u8, gas: &Gas, exit_code: Return) { + let step = if let Some(old_gas) = self.gas.replace(*gas) { + Step { + opcode, + gas_cost: gas.spend() - old_gas.spend(), + gas_refunded: gas.refunded() - old_gas.refunded(), + exit_code, + } + } else { + Step { + opcode, + gas_cost: gas.spend(), + gas_refunded: gas.refunded(), + exit_code, + } + }; + + self.steps.push(step); + } +} diff --git a/crates/rethnet_evm/src/transaction.rs b/crates/rethnet_evm/src/transaction.rs new file mode 100644 index 0000000000..1e61c5bc26 --- /dev/null +++ b/crates/rethnet_evm/src/transaction.rs @@ -0,0 +1,175 @@ +use rethnet_eth::{ + receipt::Log, + signature::SignatureError, + transaction::{ + EIP1559SignedTransaction, EIP2930SignedTransaction, LegacySignedTransaction, + SignedTransaction, TransactionKind, + }, + Address, Bloom, Bytes, B256, U256, +}; + +/// Represents all relevant information of an executed transaction +#[derive(Debug, Eq, PartialEq, Clone)] +pub struct TransactionInfo { + pub transaction_hash: B256, + pub transaction_index: u32, + pub from: Address, + pub to: Option
<Address>, + pub contract_address: Option<Address>
, + pub logs: Vec, + pub logs_bloom: Bloom, + // pub traces: todo!(), + pub exit: revm::Return, + pub out: Option, +} + +/// A transaction that's pending inclusion in a block. +pub struct PendingTransaction { + /// A signed transaction + pub transaction: SignedTransaction, + caller: Address, +} + +impl PendingTransaction { + /// Create a [`PendingTransaction`] by attempting to validate and recover the caller address of the provided transaction. + pub fn new(transaction: SignedTransaction) -> Result { + let caller = transaction.recover()?; + Ok(Self::with_caller(transaction, caller)) + } + + /// Creates a [`PendingTransaction`] with the provided transaction and caller address. + pub fn with_caller(transaction: SignedTransaction, caller: Address) -> Self { + Self { + transaction, + caller, + } + } +} + +impl From for revm::TxEnv { + fn from(transaction: PendingTransaction) -> Self { + fn transact_to(kind: TransactionKind) -> revm::TransactTo { + match kind { + TransactionKind::Call(address) => revm::TransactTo::Call(address), + TransactionKind::Create => revm::TransactTo::Create(revm::CreateScheme::Create), + } + } + + fn into_access_list( + access_list: rethnet_eth::access_list::AccessList, + ) -> Vec<(Address, Vec)> { + access_list + .0 + .into_iter() + .map(|item| (item.address, item.storage_keys)) + .collect() + } + + let chain_id = transaction.transaction.chain_id(); + match transaction.transaction { + SignedTransaction::Legacy(LegacySignedTransaction { + nonce, + gas_price, + gas_limit, + kind, + value, + input, + .. + }) => Self { + caller: transaction.caller, + gas_limit, + gas_price, + gas_priority_fee: None, + transact_to: transact_to(kind), + value, + data: input, + chain_id, + nonce: Some(nonce), + access_list: Vec::new(), + }, + SignedTransaction::EIP2930(EIP2930SignedTransaction { + nonce, + gas_price, + gas_limit, + kind, + value, + input, + access_list, + .. + }) => Self { + caller: transaction.caller, + gas_limit, + gas_price, + gas_priority_fee: None, + transact_to: transact_to(kind), + value, + data: input, + chain_id, + nonce: Some(nonce), + access_list: into_access_list(access_list), + }, + SignedTransaction::EIP1559(EIP1559SignedTransaction { + nonce, + max_priority_fee_per_gas, + max_fee_per_gas, + gas_limit, + kind, + value, + input, + access_list, + .. + }) => Self { + caller: transaction.caller, + gas_limit, + gas_price: max_fee_per_gas, + gas_priority_fee: Some(max_priority_fee_per_gas), + transact_to: transact_to(kind), + value, + data: input, + chain_id, + nonce: Some(nonce), + access_list: into_access_list(access_list), + }, + } + } +} + +// /// Queued transaction +// #[derive(Clone, Debug, PartialEq, Eq)] +// pub struct PendingTransaction { +// /// The actual transaction +// pub transaction: TypedTransaction, +// /// the recovered sender of this transaction +// sender: Address, +// /// hash of `transaction`, so it can easily be reused with encoding and hashing agan +// hash: B256, +// } + +// impl PendingTransaction { +// /// Creates a new pending transaction and tries to verify transaction and recover sender. 
+// pub fn new(transaction: TypedTransaction) -> Result { +// let sender = transaction.recover()?; +// Ok(Self::with_sender(transaction, sender)) +// } + +// /// Creates a new transaction with the given sender +// pub fn with_sender(transaction: TypedTransaction, sender: Address) -> Self { +// Self { +// hash: transaction.hash(), +// transaction, +// sender, +// } +// } + +// pub fn nonce(&self) -> &u64 { +// self.transaction.nonce() +// } + +// pub fn hash(&self) -> &B256 { +// &self.hash +// } + +// pub fn sender(&self) -> &Address { +// &self.sender +// } +// } diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index 16891bebfa..a78097be46 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -9,11 +9,12 @@ crate-type = ["cdylib"] [dependencies] anyhow = "1.0.64" crossbeam-channel = { version = "0.5.6", default-features = false } -napi = { version = "2.9.0", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } -napi-derive = "2.9.0" +napi = { version = "2.10.1", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } +napi-derive = "2.9.3" once_cell = "1.15.0" pretty_env_logger = "0.4.0" rethnet_evm = { version = "0.1.0-dev", path = "../rethnet_evm" } +rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth" } secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc"] } serde_json = { version = "1.0.85", default-features = false, features = ["alloc"] } sha3 = { version = "0.10.5", default-features = false } diff --git a/crates/rethnet_evm_napi/src/access_list.rs b/crates/rethnet_evm_napi/src/access_list.rs new file mode 100644 index 0000000000..1449fd4604 --- /dev/null +++ b/crates/rethnet_evm_napi/src/access_list.rs @@ -0,0 +1,34 @@ +use std::str::FromStr; + +use napi::Status; +use napi_derive::napi; +use rethnet_eth::{Address, U256}; + +#[napi(object)] +pub struct AccessListItem { + pub address: String, + pub storage_keys: Vec, +} + +impl TryFrom for rethnet_eth::access_list::AccessListItem { + type Error = napi::Error; + + fn try_from(value: AccessListItem) -> Result { + let address = Address::from_str(&value.address) + .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string()))?; + + let storage_keys = value + .storage_keys + .into_iter() + .map(|key| { + U256::from_str(&key) + .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string())) + }) + .collect::>>()?; + + Ok(rethnet_eth::access_list::AccessListItem { + address, + storage_keys, + }) + } +} diff --git a/crates/rethnet_evm_napi/src/block.rs b/crates/rethnet_evm_napi/src/block.rs new file mode 100644 index 0000000000..61d31a0e13 --- /dev/null +++ b/crates/rethnet_evm_napi/src/block.rs @@ -0,0 +1,138 @@ +mod builder; + +use napi::bindgen_prelude::{BigInt, Buffer}; +use napi_derive::napi; +use rethnet_eth::{Address, Bloom, Bytes, B256, B64, U64}; +use rethnet_evm::BlockEnv; + +use crate::cast::TryCast; + +pub use builder::BlockBuilder; + +#[napi(object)] +pub struct BlockConfig { + pub number: Option, + pub coinbase: Option, + pub timestamp: Option, + pub difficulty: Option, + pub prevrandao: Option, + pub basefee: Option, + pub gas_limit: Option, + pub parent_hash: Option, +} + +impl TryFrom for BlockEnv { + type Error = napi::Error; + + fn try_from(value: BlockConfig) -> std::result::Result { + let default = BlockEnv::default(); + + let number = value.number.map_or(Ok(default.number), BigInt::try_cast)?; + let coinbase = value + .coinbase + 
.map_or(default.coinbase, |coinbase| Address::from_slice(&coinbase)); + let difficulty = value.difficulty.map_or_else( + || Ok(default.difficulty), + |difficulty| difficulty.try_cast(), + )?; + let prevrandao = value + .prevrandao + .map(|prevrandao| B256::from_slice(&prevrandao)); + let timestamp = value + .timestamp + .map_or(Ok(default.timestamp), BigInt::try_cast)?; + let basefee = value + .basefee + .map_or_else(|| Ok(default.basefee), |basefee| basefee.try_cast())?; + let gas_limit = value + .gas_limit + .map_or(Ok(default.gas_limit), |gas_limit| gas_limit.try_cast())?; + + Ok(Self { + number, + coinbase, + timestamp, + difficulty, + prevrandao, + basefee, + gas_limit, + }) + } +} + +impl TryFrom for rethnet_evm::HeaderData { + type Error = napi::Error; + + fn try_from(value: BlockConfig) -> std::result::Result { + Ok(Self { + number: value + .number + .map_or(Ok(None), |number| number.try_cast().map(Some))?, + coinbase: value + .coinbase + .map(|coinbase| Address::from_slice(&coinbase)), + timestamp: value + .timestamp + .map_or(Ok(None), |timestamp| timestamp.try_cast().map(Some))?, + difficulty: value + .difficulty + .map_or(Ok(None), |difficulty| difficulty.try_cast().map(Some))?, + basefee: value + .basefee + .map_or(Ok(None), |basefee| basefee.try_cast().map(Some))?, + gas_limit: value + .gas_limit + .map_or(Ok(None), |gas_limit| gas_limit.try_cast().map(Some))?, + parent_hash: value + .parent_hash + .map_or(Ok(None), |parent_hash| parent_hash.try_cast().map(Some))?, + }) + } +} + +#[napi(object)] +pub struct BlockHeader { + pub parent_hash: Buffer, + pub ommers_hash: Buffer, + pub beneficiary: Buffer, + pub state_root: Buffer, + pub transactions_root: Buffer, + pub receipts_root: Buffer, + pub logs_bloom: Buffer, + pub difficulty: BigInt, + pub number: BigInt, + pub gas_limit: BigInt, + pub gas_used: BigInt, + pub timestamp: BigInt, + pub extra_data: Buffer, + pub mix_hash: Buffer, + pub nonce: BigInt, + pub base_fee_per_gas: Option, +} + +impl TryFrom for rethnet_eth::block::Header { + type Error = napi::Error; + + fn try_from(value: BlockHeader) -> Result { + Ok(Self { + parent_hash: B256::from_slice(&value.parent_hash), + ommers_hash: B256::from_slice(&value.ommers_hash), + beneficiary: Address::from_slice(&value.beneficiary), + state_root: B256::from_slice(&value.state_root), + transactions_root: B256::from_slice(&value.transactions_root), + receipts_root: B256::from_slice(&value.receipts_root), + logs_bloom: Bloom::from_slice(&value.logs_bloom), + difficulty: value.difficulty.try_cast()?, + number: value.number.try_cast()?, + gas_limit: value.gas_limit.try_cast()?, + gas_used: value.gas_used.try_cast()?, + timestamp: value.timestamp.get_u64().1, + extra_data: Bytes::copy_from_slice(&value.extra_data), + mix_hash: B256::from_slice(&value.mix_hash), + nonce: B64::from(U64::from(value.nonce.get_u64().1)), + base_fee_per_gas: value + .base_fee_per_gas + .map_or(Ok(None), |fee| fee.try_cast().map(Some))?, + }) + } +} diff --git a/crates/rethnet_evm_napi/src/block/builder.rs b/crates/rethnet_evm_napi/src/block/builder.rs new file mode 100644 index 0000000000..3c7e87d087 --- /dev/null +++ b/crates/rethnet_evm_napi/src/block/builder.rs @@ -0,0 +1,116 @@ +use std::sync::Arc; + +use napi::{ + bindgen_prelude::{BigInt, Buffer}, + tokio::sync::Mutex, + Status, +}; +use napi_derive::napi; +use rethnet_eth::{Address, U256}; + +use crate::{ + blockchain::Blockchain, cast::TryCast, state::StateManager, transaction::Transaction, Config, + ExecutionResult, +}; + +use super::{BlockConfig, 
BlockHeader}; + +#[napi] +pub struct BlockBuilder { + builder: Arc>>>, +} + +#[napi] +impl BlockBuilder { + #[napi] + pub async fn new( + blockchain: &Blockchain, + state_manager: &StateManager, + config: Config, + parent: BlockHeader, + block: BlockConfig, + ) -> napi::Result { + let config = config.try_into()?; + let parent = parent.try_into()?; + let block = block.try_into()?; + + let builder = rethnet_evm::BlockBuilder::new( + blockchain.as_inner().clone(), + state_manager.db.clone(), + config, + parent, + block, + ) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + Ok(Self { + builder: Arc::new(Mutex::new(Some(builder))), + }) + } + + #[napi] + pub async fn add_transaction(&self, transaction: Transaction) -> napi::Result { + let mut builder = self.builder.lock().await; + if let Some(builder) = builder.as_mut() { + let transaction = transaction.try_into()?; + + let result = builder + .add_transaction(transaction) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + result.try_into() + } else { + Err(napi::Error::new( + Status::InvalidArg, + "`this` has been moved in Rust".to_owned(), + )) + } + } + + #[napi] + /// This call consumes the [`BlockBuilder`] object in Rust. Afterwards, you can no longer call + /// methods on the JS object. + pub async fn finalize(&self, rewards: Vec<(Buffer, BigInt)>) -> napi::Result<()> { + let mut builder = self.builder.lock().await; + if let Some(builder) = builder.take() { + let rewards = rewards + .into_iter() + .map(|(address, reward)| { + reward + .try_cast() + .map(|reward| (Address::from_slice(&address), reward)) + }) + .collect::>>()?; + + builder + .finalize(rewards) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } else { + Err(napi::Error::new( + Status::InvalidArg, + "The BlockBuilder object has been moved in Rust".to_owned(), + )) + } + } + + #[napi] + /// This call consumes the [`BlockBuilder`] object in Rust. Afterwards, you can no longer call + /// methods on the JS object. 
+ pub async fn abort(&self) -> napi::Result<()> { + let mut builder = self.builder.lock().await; + if let Some(builder) = builder.take() { + builder + .abort() + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } else { + Err(napi::Error::new( + Status::InvalidArg, + "The BlockBuilder object has been moved in Rust".to_owned(), + )) + } + } +} diff --git a/crates/rethnet_evm_napi/src/blockchain.rs b/crates/rethnet_evm_napi/src/blockchain.rs new file mode 100644 index 0000000000..0ad9c28f8a --- /dev/null +++ b/crates/rethnet_evm_napi/src/blockchain.rs @@ -0,0 +1,86 @@ +mod js_blockchain; + +use std::sync::Arc; + +use napi::{bindgen_prelude::Buffer, Env, JsFunction, NapiRaw, Status}; +use napi_derive::napi; +use rethnet_eth::B256; +use rethnet_evm::blockchain::{AsyncBlockchain, SyncBlockchain}; + +use crate::{ + sync::{await_promise, handle_error}, + threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction}, +}; + +use self::js_blockchain::{GetBlockHashCall, JsBlockchain}; + +#[napi] +pub struct Blockchain { + inner: Arc>, anyhow::Error>>, +} + +impl Blockchain { + pub fn as_inner( + &self, + ) -> &Arc>, anyhow::Error>> { + &self.inner + } +} + +#[napi] +impl Blockchain { + #[napi(constructor)] + pub fn new( + env: Env, + #[napi(ts_arg_type = "(blockNumber: bigint) => Promise")] + get_block_hash_fn: JsFunction, + ) -> napi::Result { + let get_block_hash_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { get_block_hash_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + + let block_number = ctx + .env + .create_bigint_from_words(false, ctx.value.block_number.as_limbs().to_vec())?; + + let promise = ctx.callback.call(None, &[block_number.into_unknown()?])?; + let result = await_promise::(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + Self::with_blockchain(JsBlockchain { get_block_hash_fn }) + } + + fn with_blockchain(blockchain: B) -> napi::Result + where + B: SyncBlockchain, + { + let blockchain: Box> = Box::new(blockchain); + let blockchain = AsyncBlockchain::new(blockchain) + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + Ok(Self { + inner: Arc::new(blockchain), + }) + } + + // #[napi] + // pub async fn insert_block( + // &mut self, + // block_number: BigInt, + // block_hash: Buffer, + // ) -> napi::Result<()> { + // let block_number = BigInt::try_cast(block_number)?; + // let block_hash = B256::from_slice(&block_hash); + + // self.db + // .insert_block(block_number, block_hash) + // .await + // .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + // } +} diff --git a/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs b/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs new file mode 100644 index 0000000000..74d33af5e1 --- /dev/null +++ b/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs @@ -0,0 +1,36 @@ +use std::sync::mpsc::{channel, Sender}; + +use anyhow::anyhow; +use napi::Status; +use rethnet_eth::{B256, U256}; +use rethnet_evm::Blockchain; + +use crate::threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode}; + +pub struct GetBlockHashCall { + pub block_number: U256, + pub sender: Sender>, +} + +pub struct JsBlockchain { + pub(super) get_block_hash_fn: ThreadsafeFunction, +} + +impl Blockchain for JsBlockchain { + type Error = anyhow::Error; + + fn block_hash(&mut self, block_number: U256) -> Result { + let (sender, receiver) = channel(); + + let status = self.get_block_hash_fn.call( + 
GetBlockHashCall { + block_number, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + } +} diff --git a/crates/rethnet_evm_napi/src/cast.rs b/crates/rethnet_evm_napi/src/cast.rs index 5749abe148..26310c9d23 100644 --- a/crates/rethnet_evm_napi/src/cast.rs +++ b/crates/rethnet_evm_napi/src/cast.rs @@ -2,9 +2,10 @@ use napi::{ bindgen_prelude::{BigInt, Buffer}, Status, }; -use rethnet_evm::{AccountInfo, Bytecode, Bytes, H256, U256}; +use rethnet_eth::{Bytes, B256, U256}; +use rethnet_evm::{AccountInfo, Bytecode}; -use crate::Account; +use crate::{Account, AccountData}; /// An attempted conversion that consumes `self`, which may or may not be /// expensive. It is identical to [`TryInto`], but it allows us to implement @@ -24,7 +25,7 @@ impl TryCast for Account { Ok(AccountInfo { balance: self.balance.try_cast()?, nonce: self.nonce.get_u64().1, - code_hash: H256::from_slice(&self.code_hash), + code_hash: B256::from_slice(&self.code_hash), code: self .code .map(|code| Bytecode::new_raw(Bytes::copy_from_slice(&code))), @@ -32,11 +33,25 @@ impl TryCast for Account { } } -impl TryCast for Buffer { +impl TryCast<(U256, u64, Option)> for AccountData { type Error = napi::Error; - fn try_cast(self) -> std::result::Result { - Ok(H256::from_slice(&self)) + fn try_cast(self) -> Result<(U256, u64, Option), Self::Error> { + let balance = self.balance.try_cast()?; + let nonce = self.nonce.get_u64().1; + let code = self + .code + .map(|code| Bytecode::new_raw(Bytes::copy_from_slice(&code))); + + Ok((balance, nonce, code)) + } +} + +impl TryCast for Buffer { + type Error = napi::Error; + + fn try_cast(self) -> std::result::Result { + Ok(B256::from_slice(&self)) } } @@ -66,6 +81,6 @@ impl TryCast for BigInt { } } - Ok(U256(self.words.try_into().unwrap())) + Ok(U256::from_limbs(self.words.try_into().unwrap())) } } diff --git a/crates/rethnet_evm_napi/src/db.rs b/crates/rethnet_evm_napi/src/db.rs deleted file mode 100644 index 3613a4b261..0000000000 --- a/crates/rethnet_evm_napi/src/db.rs +++ /dev/null @@ -1,27 +0,0 @@ -mod debug; -mod immutable; -mod mutable; - -pub use debug::*; -pub use immutable::*; -pub use mutable::*; -use rethnet_evm::{sync::Client, CfgEnv}; - -pub(super) fn client( - cfg: CfgEnv, - db: JsDatabase, - db_commit: Option, - db_debug: Option, -) -> anyhow::Result { - if let Some(db_commit) = db_commit { - if let Some(db_debug) = db_debug { - Client::with_db_mut_debug(cfg, JsDatabaseCommitDebug::new(db, db_commit, db_debug)) - } else { - Client::with_db_mut(cfg, JsDatabaseCommit::new(db, db_commit)) - } - } else if let Some(db_debug) = db_debug { - Client::with_db_debug(cfg, JsDatabaseDebug::new(db, db_debug)) - } else { - Client::with_db(cfg, db) - } -} diff --git a/crates/rethnet_evm_napi/src/db/debug.rs b/crates/rethnet_evm_napi/src/db/debug.rs deleted file mode 100644 index 70470c7f6c..0000000000 --- a/crates/rethnet_evm_napi/src/db/debug.rs +++ /dev/null @@ -1,516 +0,0 @@ -use std::sync::mpsc::{channel, Sender}; - -use anyhow::anyhow; -use napi::{bindgen_prelude::Buffer, JsUnknown, NapiRaw, Status}; -use rethnet_evm::{ - Account, AccountInfo, Bytecode, Database, DatabaseCommit, DatabaseDebug, HasDatabaseDebug, - HashMap, H160, H256, U256, -}; - -use crate::{ - sync::{await_promise, await_void_promise, handle_error}, - threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, - DatabaseDebugCallbacks, -}; - -use super::{JsDatabase, 
JsDatabaseCommitInner}; - -pub struct CheckpointCall { - pub sender: Sender>, -} - -pub struct RevertCall { - pub sender: Sender>, -} - -pub struct GetStorageRootCall { - pub sender: Sender>, -} - -pub struct InsertAccountCall { - pub address: H160, - pub account_info: AccountInfo, - pub sender: Sender>, -} - -pub struct SetAccountBalanceCall { - pub address: H160, - pub balance: U256, - pub sender: Sender>, -} - -pub struct SetAccountCodeCall { - pub address: H160, - pub code: Bytecode, - pub sender: Sender>, -} - -pub struct SetAccountNonceCall { - pub address: H160, - pub nonce: u64, - pub sender: Sender>, -} - -pub struct SetAccountStorageSlotCall { - pub address: H160, - pub index: U256, - pub value: U256, - pub sender: Sender>, -} - -pub struct JsDatabaseDebug { - db: JsDatabase, - db_debug: JsDatabaseDebugInner, -} - -impl JsDatabaseDebug { - /// Creates a new [`JsDatabaseDebug`]. - pub(super) fn new(db: JsDatabase, db_debug: JsDatabaseDebugInner) -> Self { - Self { db, db_debug } - } -} - -impl HasDatabaseDebug for JsDatabaseDebug { - type Error = anyhow::Error; - - fn db_debug(&mut self) -> &mut dyn DatabaseDebug { - &mut self.db_debug - } -} - -impl Database for JsDatabaseDebug { - type Error = anyhow::Error; - - fn basic(&mut self, address: rethnet_evm::H160) -> Result, Self::Error> { - self.db.basic(address) - } - - fn code_by_hash( - &mut self, - code_hash: rethnet_evm::H256, - ) -> Result { - self.db.code_by_hash(code_hash) - } - - fn storage(&mut self, address: rethnet_evm::H160, index: U256) -> Result { - self.db.storage(address, index) - } - - fn block_hash(&mut self, number: U256) -> Result { - self.db.block_hash(number) - } -} - -pub struct JsDatabaseCommitDebug { - db: JsDatabase, - db_commit: JsDatabaseCommitInner, - db_debug: JsDatabaseDebugInner, -} - -impl JsDatabaseCommitDebug { - /// Creates a new [`JsDatabaseCommitDebug`]. - pub(super) fn new( - db: JsDatabase, - db_commit: JsDatabaseCommitInner, - db_debug: JsDatabaseDebugInner, - ) -> Self { - Self { - db, - db_commit, - db_debug, - } - } -} - -impl HasDatabaseDebug for JsDatabaseCommitDebug { - type Error = anyhow::Error; - - fn db_debug(&mut self) -> &mut dyn rethnet_evm::DatabaseDebug { - &mut self.db_debug - } -} - -impl Database for JsDatabaseCommitDebug { - type Error = anyhow::Error; - - fn basic(&mut self, address: H160) -> Result, Self::Error> { - self.db.basic(address) - } - - fn code_by_hash( - &mut self, - code_hash: rethnet_evm::H256, - ) -> Result { - self.db.code_by_hash(code_hash) - } - - fn storage( - &mut self, - address: H160, - index: rethnet_evm::U256, - ) -> Result { - self.db.storage(address, index) - } - - fn block_hash(&mut self, number: rethnet_evm::U256) -> Result { - self.db.block_hash(number) - } -} - -impl DatabaseCommit for JsDatabaseCommitDebug { - fn commit(&mut self, changes: HashMap) { - self.db_commit.commit(changes) - } -} - -pub(crate) struct JsDatabaseDebugInner { - checkpoint_fn: ThreadsafeFunction, - revert_fn: ThreadsafeFunction, - get_storage_root_fn: ThreadsafeFunction, - insert_account_fn: ThreadsafeFunction, - set_account_balance_fn: ThreadsafeFunction, - set_account_code_fn: ThreadsafeFunction, - set_account_nonce_fn: ThreadsafeFunction, - set_account_storage_slot_fn: ThreadsafeFunction, -} - -impl JsDatabaseDebugInner { - /// Creates a new `JsDatabaseDebug`. 
- pub fn new(env: &napi::Env, callbacks: DatabaseDebugCallbacks) -> napi::Result { - let checkpoint_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.checkpoint_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let promise = ctx.callback.call::(None, &[])?; - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - handle_error(sender, result) - }, - )?; - - let revert_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.revert_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let promise = ctx.callback.call::(None, &[])?; - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - handle_error(sender, result) - }, - )?; - - let get_storage_root_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.get_storage_root_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - - let promise = ctx.callback.call::(None, &[])?; - let result = await_promise::(ctx.env, promise, ctx.value.sender); - - handle_error(sender, result) - }, - )?; - - let insert_account_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.insert_account_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let address = ctx - .env - .create_buffer_copy(ctx.value.address.as_bytes())? - .into_raw(); - - let mut account = ctx.env.create_object()?; - - let balance = ctx - .env - .create_bigint_from_words(false, ctx.value.account_info.balance.0.to_vec())?; - account.set_named_property("balance", balance)?; - - let nonce = ctx - .env - .create_bigint_from_u64(ctx.value.account_info.nonce)?; - account.set_named_property("nonce", nonce)?; - - let code_hash = ctx - .env - .create_buffer_copy(ctx.value.account_info.code_hash.as_bytes())? - .into_raw(); - account.set_named_property("codeHash", code_hash)?; - - if let Some(code) = ctx.value.account_info.code { - let code = ctx - .env - .create_buffer_copy(code.bytes().as_ref())? - .into_raw(); - - account.set_named_property("code", code)?; - } else { - account.set_named_property("code", ctx.env.get_null()?)?; - } - - let promise = ctx - .callback - .call(None, &[address.into_unknown(), account.into_unknown()])?; - - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - handle_error(sender, result) - }, - )?; - - let set_account_balance_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.set_account_balance_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let address = ctx - .env - .create_buffer_copy(ctx.value.address.as_bytes())? - .into_raw(); - - let balance = ctx - .env - .create_bigint_from_words(false, ctx.value.balance.0.to_vec())?; - - let promise = ctx - .callback - .call(None, &[address.into_unknown(), balance.into_unknown()?])?; - - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - handle_error(sender, result) - }, - )?; - - let set_account_code_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.set_account_code_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let address = ctx - .env - .create_buffer_copy(ctx.value.address.as_bytes())? - .into_raw(); - - let code = ctx - .env - .create_buffer_copy(ctx.value.code.bytes().as_ref())? 
- .into_raw(); - - let promise = ctx.callback.call(None, &[address, code])?; - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - handle_error(sender, result) - }, - )?; - - let set_account_nonce_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.set_account_nonce_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let address = ctx - .env - .create_buffer_copy(ctx.value.address.as_bytes())? - .into_raw(); - - let nonce = ctx.env.create_bigint_from_u64(ctx.value.nonce)?; - - let promise = ctx - .callback - .call(None, &[address.into_unknown(), nonce.into_unknown()?])?; - - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - handle_error(sender, result) - }, - )?; - - let set_account_storage_slot_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.set_account_storage_slot_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let address = ctx - .env - .create_buffer_copy(ctx.value.address.as_bytes())? - .into_raw(); - - let index = ctx - .env - .create_bigint_from_words(false, ctx.value.index.0.to_vec())?; - - let value = ctx - .env - .create_bigint_from_words(false, ctx.value.value.0.to_vec())?; - - let promise = ctx.callback.call( - None, - &[ - address.into_unknown(), - index.into_unknown()?, - value.into_unknown()?, - ], - )?; - - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - handle_error(sender, result) - }, - )?; - - Ok(Self { - checkpoint_fn, - revert_fn, - get_storage_root_fn, - insert_account_fn, - set_account_balance_fn, - set_account_code_fn, - set_account_nonce_fn, - set_account_storage_slot_fn, - }) - } -} - -impl DatabaseDebug for JsDatabaseDebugInner { - type Error = anyhow::Error; - - fn insert_account( - &mut self, - address: H160, - account_info: AccountInfo, - ) -> Result<(), Self::Error> { - let (sender, receiver) = channel(); - - let status = self.insert_account_fn.call( - InsertAccountCall { - address, - account_info, - sender, - }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn insert_block(&mut self, _block_number: U256, _block_hash: H256) -> Result<(), Self::Error> { - todo!() - } - - fn set_account_balance(&mut self, address: H160, balance: U256) -> Result<(), Self::Error> { - let (sender, receiver) = channel(); - - let status = self.set_account_balance_fn.call( - SetAccountBalanceCall { - address, - balance, - sender, - }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn set_account_code( - &mut self, - address: H160, - code: rethnet_evm::Bytecode, - ) -> Result<(), Self::Error> { - let (sender, receiver) = channel(); - - let status = self.set_account_code_fn.call( - SetAccountCodeCall { - address, - code, - sender, - }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn set_account_nonce(&mut self, address: H160, nonce: u64) -> Result<(), Self::Error> { - let (sender, receiver) = channel(); - - let status = self.set_account_nonce_fn.call( - SetAccountNonceCall { - address, - nonce, - sender, - }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn set_account_storage_slot( - 
&mut self, - address: H160, - index: U256, - value: U256, - ) -> Result<(), Self::Error> { - let (sender, receiver) = channel(); - - let status = self.set_account_storage_slot_fn.call( - SetAccountStorageSlotCall { - address, - index, - value, - sender, - }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn storage_root(&mut self) -> Result { - let (sender, receiver) = channel(); - - let status = self.get_storage_root_fn.call( - GetStorageRootCall { sender }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn checkpoint(&mut self) -> Result<(), Self::Error> { - let (sender, receiver) = channel(); - - let status = self.checkpoint_fn.call( - CheckpointCall { sender }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn revert(&mut self) -> Result<(), Self::Error> { - let (sender, receiver) = channel(); - - let status = self - .revert_fn - .call(RevertCall { sender }, ThreadsafeFunctionCallMode::Blocking); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } -} diff --git a/crates/rethnet_evm_napi/src/db/immutable.rs b/crates/rethnet_evm_napi/src/db/immutable.rs deleted file mode 100644 index 2a46eb0369..0000000000 --- a/crates/rethnet_evm_napi/src/db/immutable.rs +++ /dev/null @@ -1,190 +0,0 @@ -use std::sync::mpsc::{channel, Sender}; - -use anyhow::anyhow; -use napi::{ - bindgen_prelude::{BigInt, Buffer}, - NapiRaw, Status, -}; -use rethnet_evm::{AccountInfo, Bytecode, Database, H160, H256, U256}; - -use crate::{ - sync::{await_promise, handle_error}, - threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, - Account, DatabaseCallbacks, -}; - -pub struct GetAccountByAddressCall { - pub address: H160, - pub sender: Sender>, -} - -pub struct GetAccountStorageSlotCall { - pub address: H160, - pub index: U256, - pub sender: Sender>, -} - -pub struct GetBlockHashCall { - pub block_number: U256, - pub sender: Sender>, -} - -pub struct GetCodeByHashCall { - pub code_hash: H256, - pub sender: Sender>, -} - -pub struct JsDatabase { - get_account_by_address_fn: ThreadsafeFunction, - get_account_storage_slot_fn: ThreadsafeFunction, - get_block_hash_fn: ThreadsafeFunction, - get_code_by_hash_fn: ThreadsafeFunction, -} - -impl JsDatabase { - /// Creates a new [`JsDatabase`]. - pub fn new(env: &napi::Env, callbacks: DatabaseCallbacks) -> napi::Result { - let get_account_by_address_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.get_account_by_address_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let address = ctx.env.create_buffer_copy(ctx.value.address.as_bytes())?; - - let promise = ctx.callback.call(None, &[address.into_raw()])?; - let result = - await_promise::(ctx.env, promise, ctx.value.sender); - - handle_error(sender, result) - }, - )?; - - let get_account_storage_slot_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.get_account_storage_slot_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let address = ctx - .env - .create_buffer_copy(ctx.value.address.as_bytes())? 
- .into_raw(); - - let index = ctx - .env - .create_bigint_from_words(false, ctx.value.index.0.to_vec())?; - - let promise = ctx - .callback - .call(None, &[address.into_unknown(), index.into_unknown()?])?; - - let result = await_promise::(ctx.env, promise, ctx.value.sender); - - handle_error(sender, result) - }, - )?; - - let get_block_hash_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.get_block_hash_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - - let block_number = ctx - .env - .create_bigint_from_words(false, ctx.value.block_number.0.to_vec())?; - - let promise = ctx.callback.call(None, &[block_number.into_unknown()?])?; - let result = await_promise::(ctx.env, promise, ctx.value.sender); - - handle_error(sender, result) - }, - )?; - - let get_code_by_hash_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.get_code_by_hash_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let code_hash = ctx.env.create_buffer_copy(ctx.value.code_hash.as_bytes())?; - - let promise = ctx.callback.call(None, &[code_hash.into_raw()])?; - let result = await_promise::(ctx.env, promise, ctx.value.sender); - - handle_error(sender, result) - }, - )?; - - Ok(Self { - get_account_by_address_fn, - get_account_storage_slot_fn, - get_block_hash_fn, - get_code_by_hash_fn, - }) - } -} - -impl Database for JsDatabase { - type Error = anyhow::Error; - - fn basic(&mut self, address: H160) -> anyhow::Result> { - let (sender, receiver) = channel(); - - let status = self.get_account_by_address_fn.call( - GetAccountByAddressCall { address, sender }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_or_else( - |e| Err(anyhow!(e.to_string())), - |account_info| Ok(Some(account_info)), - ) - } - - fn code_by_hash(&mut self, code_hash: H256) -> anyhow::Result { - let (sender, receiver) = channel(); - - let status = self.get_code_by_hash_fn.call( - GetCodeByHashCall { code_hash, sender }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn storage(&mut self, address: H160, index: U256) -> anyhow::Result { - let (sender, receiver) = channel(); - - let status = self.get_account_storage_slot_fn.call( - GetAccountStorageSlotCall { - address, - index, - sender, - }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } - - fn block_hash(&mut self, block_number: U256) -> anyhow::Result { - let (sender, receiver) = channel(); - - let status = self.get_block_hash_fn.call( - GetBlockHashCall { - block_number, - sender, - }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) - } -} diff --git a/crates/rethnet_evm_napi/src/db/mutable.rs b/crates/rethnet_evm_napi/src/db/mutable.rs deleted file mode 100644 index deec17b8b6..0000000000 --- a/crates/rethnet_evm_napi/src/db/mutable.rs +++ /dev/null @@ -1,105 +0,0 @@ -use std::sync::mpsc::{channel, Sender}; - -use napi::{JsUnknown, NapiRaw, Status}; -use rethnet_evm::{Account, Database, DatabaseCommit, HashMap, H160}; - -use crate::{ - sync::{await_void_promise, handle_error}, - threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, - DatabaseCommitCallbacks, -}; - -use super::JsDatabase; - 
-pub trait HasDatabaseCommit { - /// The database's error type. - type Error; - - /// Retrieves the owned `DatabaseCommit`. - fn db_commit(&mut self) -> &mut dyn DatabaseCommit; -} - -pub struct CommitCall { - pub sender: Sender>, -} - -pub struct JsDatabaseCommit { - db: JsDatabase, - db_commit: JsDatabaseCommitInner, -} - -impl JsDatabaseCommit { - /// Creates a new [`JsDatabaseCommit`]. - pub(super) fn new(db: JsDatabase, db_commit: JsDatabaseCommitInner) -> Self { - Self { db, db_commit } - } -} - -impl Database for JsDatabaseCommit { - type Error = anyhow::Error; - - fn basic(&mut self, address: H160) -> Result, Self::Error> { - self.db.basic(address) - } - - fn code_by_hash( - &mut self, - code_hash: rethnet_evm::H256, - ) -> Result { - self.db.code_by_hash(code_hash) - } - - fn storage( - &mut self, - address: H160, - index: rethnet_evm::U256, - ) -> Result { - self.db.storage(address, index) - } - - fn block_hash(&mut self, number: rethnet_evm::U256) -> Result { - self.db.block_hash(number) - } -} - -impl DatabaseCommit for JsDatabaseCommit { - fn commit(&mut self, changes: HashMap) { - self.db_commit.commit(changes) - } -} - -pub(crate) struct JsDatabaseCommitInner { - commit_fn: ThreadsafeFunction, -} - -impl JsDatabaseCommitInner { - /// Creates a new [`JsDatabaseCommit`]. - pub fn new(env: &napi::Env, callbacks: DatabaseCommitCallbacks) -> napi::Result { - let commit_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.commit_fn.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - let promise = ctx.callback.call::(None, &[])?; - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - handle_error(sender, result) - }, - )?; - - Ok(Self { commit_fn }) - } -} - -impl DatabaseCommit for JsDatabaseCommitInner { - fn commit(&mut self, _changes: HashMap) { - let (sender, receiver) = channel(); - - let status = self - .commit_fn - .call(CommitCall { sender }, ThreadsafeFunctionCallMode::Blocking); - assert_eq!(status, Status::Ok); - - receiver.recv().unwrap().expect("Failed to commit") - } -} diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index 8824ef1919..150d592ce5 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -1,22 +1,29 @@ +mod access_list; +mod block; +mod blockchain; mod cast; -mod db; +mod state; mod sync; mod threadsafe_function; +mod trace; +mod transaction; use std::{fmt::Debug, str::FromStr}; -use db::{JsDatabaseCommitInner, JsDatabaseDebugInner}; +use block::BlockConfig; +use blockchain::Blockchain; use napi::{bindgen_prelude::*, Status}; use napi_derive::napi; use once_cell::sync::OnceCell; -use rethnet_evm::{ - sync::Client, AccountInfo, BlockEnv, Bytes, CfgEnv, CreateScheme, HashMap, LayeredDatabase, - RethnetLayer, TransactTo, TxEnv, H160, H256, U256, -}; +use rethnet_eth::Address; +use rethnet_evm::{AccountInfo, CfgEnv, TxEnv}; use secp256k1::{PublicKey, Secp256k1, SecretKey, SignOnly}; use sha3::{Digest, Keccak256}; +use state::StateManager; +use trace::Trace; +use transaction::{Transaction, TransactionOutput}; -use crate::{cast::TryCast, db::JsDatabase}; +use crate::cast::TryCast; struct Logger; @@ -40,6 +47,19 @@ pub struct Account { pub code: Option, } +#[napi(object)] +pub struct AccountData { + /// Account balance + #[napi(readonly)] + pub balance: BigInt, + /// Account nonce + #[napi(readonly)] + pub nonce: BigInt, + /// Optionally, byte code + #[napi(readonly)] + pub code: Option, +} + impl Debug for Account { fn 
fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("Account") @@ -55,13 +75,13 @@ impl From for Account { Self { balance: BigInt { sign_bit: false, - words: account_info.balance.0.to_vec(), + words: account_info.balance.as_limbs().to_vec(), }, nonce: BigInt::from(account_info.nonce), code_hash: Buffer::from(account_info.code_hash.as_bytes()), code: account_info .code - .map(|code| Buffer::from(code.bytes().as_ref())), + .map(|code| Buffer::from(&code.bytes()[..code.len()])), } } } @@ -69,7 +89,7 @@ impl From for Account { fn private_key_to_address( context: &Secp256k1, private_key: String, -) -> napi::Result { +) -> napi::Result
{ private_to_public_key(context, private_key).map(public_key_to_address) } @@ -85,10 +105,10 @@ fn private_to_public_key( ) } -fn public_key_to_address(public_key: PublicKey) -> H160 { +fn public_key_to_address(public_key: PublicKey) -> Address { let hash = Keccak256::digest(&public_key.serialize_uncompressed()[1..]); // Only take the lower 160 bits of the hash - H160::from_slice(&hash[12..]) + Address::from_slice(&hash[12..]) } #[napi(object)] @@ -99,173 +119,6 @@ pub struct GenesisAccount { pub balance: BigInt, } -#[napi(object)] -pub struct AccessListItem { - pub address: String, - pub storage_keys: Vec, -} - -impl TryFrom for (H160, Vec) { - type Error = napi::Error; - - fn try_from(value: AccessListItem) -> std::result::Result { - let address = H160::from_str(&value.address) - .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string()))?; - - let storage_keys = value - .storage_keys - .into_iter() - .map(|key| { - U256::from_str(&key) - .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string())) - }) - .collect::, _>>()?; - - Ok((address, storage_keys)) - } -} - -#[napi(object)] -pub struct Transaction { - /// 160-bit address for caller - /// Defaults to `0x00.0` address. - pub from: Option, - /// 160-bit address for receiver - /// Creates a contract if no address is provided. - pub to: Option, - /// Maximum gas allowance for the code execution to avoid infinite loops. - /// Defaults to 2^63. - pub gas_limit: Option, - /// Number of wei to pay for each unit of gas during execution. - /// Defaults to 1 wei. - pub gas_price: Option, - /// Maximum tip per gas that's given directly to the forger. - pub gas_priority_fee: Option, - /// (Up to) 256-bit unsigned value. - pub value: Option, - /// Nonce of sender account. - pub nonce: Option, - /// Input byte data - pub input: Option, - /// A list of addresses and storage keys that the transaction plans to access. - pub access_list: Option>, - /// Transaction is only valid on networks with this chain ID. 
- pub chain_id: Option, -} - -impl TryFrom for TxEnv { - type Error = napi::Error; - - fn try_from(value: Transaction) -> std::result::Result { - let caller = if let Some(from) = value.from.as_ref() { - H160::from_slice(from) - } else { - H160::default() - }; - - let transact_to = if let Some(to) = value.to.as_ref() { - TransactTo::Call(H160::from_slice(to)) - } else { - TransactTo::Create(CreateScheme::Create) - }; - - let data = value - .input - .map_or(Bytes::default(), |input| Bytes::copy_from_slice(&input)); - - let access_list = value.access_list.map_or(Ok(Vec::new()), |access_list| { - access_list - .into_iter() - .map(|item| item.try_into()) - .collect::)>, _>>() - })?; - - Ok(Self { - caller, - gas_limit: value - .gas_limit - .map_or(2u64.pow(63), |limit| limit.get_u64().1), - gas_price: value - .gas_price - .map_or(Ok(U256::from(0)), BigInt::try_cast)?, - gas_priority_fee: value - .gas_priority_fee - .map_or(Ok(None), |fee| BigInt::try_cast(fee).map(Some))?, - transact_to, - value: value.value.map_or(Ok(U256::default()), BigInt::try_cast)?, - data, - chain_id: value.chain_id.map(|chain_id| chain_id.get_u64().1), - nonce: value.nonce.map(|nonce| nonce.get_u64().1), - access_list, - }) - } -} - -#[napi(object)] -pub struct TransactionOutput { - /// Return value from Call or Create transactions - #[napi(readonly)] - pub output: Option, - /// Optionally, a 160-bit address from Create transactions - #[napi(readonly)] - pub address: Option, -} - -impl From for TransactionOutput { - fn from(value: rethnet_evm::TransactOut) -> Self { - let (output, address) = match value { - rethnet_evm::TransactOut::None => (None, None), - rethnet_evm::TransactOut::Call(output) => (Some(Buffer::from(output.as_ref())), None), - rethnet_evm::TransactOut::Create(output, address) => ( - Some(Buffer::from(output.as_ref())), - address.map(|address| Buffer::from(address.as_bytes())), - ), - }; - - Self { output, address } - } -} - -#[napi(object)] -pub struct Block { - pub number: BigInt, - pub coinbase: Option, - pub timestamp: BigInt, - pub difficulty: Option, - pub basefee: Option, - pub gas_limit: Option, -} - -impl TryFrom for BlockEnv { - type Error = napi::Error; - - fn try_from(value: Block) -> std::result::Result { - let default = BlockEnv::default(); - let coinbase = value - .coinbase - .map_or(default.coinbase, |coinbase| H160::from_slice(&coinbase)); - let difficulty = value.difficulty.map_or_else( - || Ok(default.difficulty), - |difficulty| difficulty.try_cast(), - )?; - let basefee = value - .basefee - .map_or_else(|| Ok(default.basefee), |basefee| basefee.try_cast())?; - let gas_limit = value - .gas_limit - .map_or(Ok(default.gas_limit), |gas_limit| gas_limit.try_cast())?; - - Ok(Self { - number: value.number.try_cast()?, - coinbase, - timestamp: value.timestamp.try_cast()?, - difficulty, - basefee, - gas_limit, - }) - } -} - /// If not set, uses defaults from [`CfgEnv`]. 
#[napi(object)] pub struct Config { @@ -372,24 +225,28 @@ pub struct ExecutionResult { pub gas_used: BigInt, pub gas_refunded: BigInt, pub logs: Vec, + pub trace: Trace, } -impl TryFrom for ExecutionResult { +impl TryFrom<(rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)> for ExecutionResult { type Error = napi::Error; - fn try_from(value: rethnet_evm::ExecutionResult) -> std::result::Result { - let logs = value + fn try_from( + (result, trace): (rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace), + ) -> std::result::Result { + let logs = result .logs .into_iter() .map(serde_json::to_value) .collect::>>()?; Ok(Self { - exit_code: value.exit_reason as u8, - output: value.out.into(), - gas_used: BigInt::from(value.gas_used), - gas_refunded: BigInt::from(value.gas_refunded), + exit_code: result.exit_reason as u8, + output: result.out.into(), + gas_used: BigInt::from(result.gas_used), + gas_refunded: BigInt::from(result.gas_refunded), logs, + trace: trace.into(), }) } } @@ -400,57 +257,29 @@ pub struct TransactionResult { pub state: serde_json::Value, } -impl TryFrom<(rethnet_evm::ExecutionResult, rethnet_evm::State)> for TransactionResult { +impl + TryFrom<( + rethnet_evm::ExecutionResult, + rethnet_evm::State, + rethnet_evm::trace::Trace, + )> for TransactionResult +{ type Error = napi::Error; fn try_from( - value: (rethnet_evm::ExecutionResult, rethnet_evm::State), + (result, state, trace): ( + rethnet_evm::ExecutionResult, + rethnet_evm::State, + rethnet_evm::trace::Trace, + ), ) -> std::result::Result { - let exec_result = value.0.try_into()?; - let state = serde_json::to_value(value.1)?; + let exec_result = (result, trace).try_into()?; + let state = serde_json::to_value(state)?; Ok(Self { exec_result, state }) } } -#[napi(object)] -pub struct DatabaseCallbacks { - #[napi(ts_type = "(address: Buffer) => Promise")] - pub get_account_by_address_fn: JsFunction, - #[napi(ts_type = "(address: Buffer, index: bigint) => Promise")] - pub get_account_storage_slot_fn: JsFunction, - #[napi(ts_type = "(blockNumber: bigint) => Promise")] - pub get_block_hash_fn: JsFunction, - #[napi(ts_type = "(codeHash: Buffer) => Promise")] - pub get_code_by_hash_fn: JsFunction, -} - -#[napi(object)] -pub struct DatabaseCommitCallbacks { - #[napi(ts_type = "() => Promise")] - pub commit_fn: JsFunction, -} - -#[napi(object)] -pub struct DatabaseDebugCallbacks { - #[napi(ts_type = "() => Promise")] - pub checkpoint_fn: JsFunction, - #[napi(ts_type = "() => Promise")] - pub revert_fn: JsFunction, - #[napi(ts_type = "() => Promise")] - pub get_storage_root_fn: JsFunction, - #[napi(ts_type = "(address: Buffer, account: Account) => Promise")] - pub insert_account_fn: JsFunction, - #[napi(ts_type = "(address: Buffer, balance: bigint) => Promise")] - pub set_account_balance_fn: JsFunction, - #[napi(ts_type = "(address: Buffer, code: Buffer) => Promise")] - pub set_account_code_fn: JsFunction, - #[napi(ts_type = "(address: Buffer, nonce: bigint) => Promise")] - pub set_account_nonce_fn: JsFunction, - #[napi(ts_type = "(address: Buffer, index: bigint, value: bigint) => Promise")] - pub set_account_storage_slot_fn: JsFunction, -} - #[napi(object)] pub struct TracingMessage { /// Recipient address. None if it is a Create message. 
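With the `trace` field now carried by `ExecutionResult`, a TypeScript consumer can walk the recorded steps after a run. A rough sketch, assuming the camelCase napi names and omitting setup of `rethnet`, `tx` and `blockConfig`:

const result = await rethnet.dryRun(tx, blockConfig);
console.log(result.execResult.exitCode, result.execResult.gasUsed);
for (const step of result.execResult.trace.steps) {
  // each step records the executed opcode and its gas accounting
  console.log(step.opcode, step.gasCost, step.gasRefunded);
}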
@@ -491,207 +320,66 @@ pub struct TracingMessageResult { #[napi] pub struct Rethnet { - client: Client, + runtime: rethnet_evm::Rethnet, } #[napi] impl Rethnet { - #[allow(clippy::new_without_default)] #[napi(constructor)] - pub fn new(cfg: Config) -> napi::Result { - let cfg = cfg.try_into()?; - - Ok(Self::with_logger(Client::with_db_mut_debug( - cfg, - LayeredDatabase::default(), - )?)) - } - - #[napi(factory)] - pub fn with_callbacks( - env: Env, + pub fn new( + blockchain: &Blockchain, + state_manager: &StateManager, cfg: Config, - db_callbacks: DatabaseCallbacks, - db_mut_callbacks: Option, - db_debug_callbacks: Option, ) -> napi::Result { - let cfg = cfg.try_into()?; - - let db = JsDatabase::new(&env, db_callbacks)?; - let db_commit = db_mut_callbacks.map_or(Ok(None), |db| { - JsDatabaseCommitInner::new(&env, db).map(Some) - })?; - let db_debug = db_debug_callbacks - .map_or(Ok(None), |db| JsDatabaseDebugInner::new(&env, db).map(Some))?; - - db::client(cfg, db, db_commit, db_debug).map_or_else( - |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), - |client| Ok(Self::with_logger(client)), - ) - } - - fn with_logger(client: Client) -> Self { let _logger = LOGGER.get_or_init(|| { pretty_env_logger::init(); Logger }); - Self { client } - } - - #[napi(factory)] - pub fn with_genesis_accounts(cfg: Config, accounts: Vec) -> napi::Result { let cfg = cfg.try_into()?; - let context = Secp256k1::signing_only(); - let genesis_accounts = accounts - .into_iter() - .map(|account| { - let address = private_key_to_address(&context, account.private_key)?; - account.balance.try_cast().map(|balance| { - let account_info = AccountInfo { - balance, - ..Default::default() - }; - - (address, account_info) - }) - }) - .collect::>>()?; - - let mut database = - LayeredDatabase::with_layer(RethnetLayer::with_genesis_accounts(genesis_accounts)); - database.add_layer_default(); - - Client::with_db(cfg, database).map_or_else( - |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), - |client| Ok(Self::with_logger(client)), - ) + let runtime = + rethnet_evm::Rethnet::new(blockchain.as_inner().clone(), state_manager.db.clone(), cfg); + + Ok(Self { runtime }) } #[napi] pub async fn dry_run( &self, transaction: Transaction, - block: Block, + block: BlockConfig, ) -> Result { let transaction = transaction.try_into()?; let block = block.try_into()?; - self.client.dry_run(transaction, block).await.try_into() - } - - #[napi] - pub async fn run(&self, transaction: Transaction) -> Result { - let transaction = transaction.try_into()?; - self.client.run(transaction).await.try_into() - } - - #[napi] - pub async fn checkpoint(&self) -> Result<()> { - self.client - .checkpoint() - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) - } - - #[napi] - pub async fn revert(&self) -> Result<()> { - self.client - .revert() - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + self.runtime.dry_run(transaction, block).await.try_into() } #[napi] - pub async fn get_account_by_address(&self, address: Buffer) -> Result> { - let address = H160::from_slice(&address); - self.client - .get_account_by_address(address) - .await - .map_or_else( - |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), - |account_info| Ok(account_info.map(Account::from)), - ) - } - - #[napi] - pub async fn guarantee_transaction(&self, transaction: Transaction) -> Result<()> { + pub async fn guaranteed_dry_run( + &self, + transaction: Transaction, + block: BlockConfig, + ) -> 
Result { let transaction = transaction.try_into()?; + let block = block.try_into()?; - self.client - .guarantee_transaction(transaction) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) - } - - #[napi] - pub async fn insert_account(&self, address: Buffer) -> Result<()> { - let address = H160::from_slice(&address); - self.client - .insert_account(address, AccountInfo::default()) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) - } - - #[napi] - pub async fn insert_block(&self, block_number: BigInt, block_hash: Buffer) -> Result<()> { - let block_number = BigInt::try_cast(block_number)?; - let block_hash = H256::from_slice(&block_hash); - - self.client - .insert_block(block_number, block_hash) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) - } - - #[napi] - pub async fn set_account_balance(&self, address: Buffer, balance: BigInt) -> Result<()> { - let address = H160::from_slice(&address); - let balance = BigInt::try_cast(balance)?; - - self.client - .set_account_balance(address, balance) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) - } - - #[napi] - pub async fn set_account_code(&self, address: Buffer, code: Buffer) -> Result<()> { - let address = H160::from_slice(&address); - let code = Bytes::copy_from_slice(&code); - - self.client - .set_account_code(address, code) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) - } - - #[napi] - pub async fn set_account_nonce(&self, address: Buffer, nonce: BigInt) -> Result<()> { - let address = H160::from_slice(&address); - let nonce = nonce.get_u64().1; - - self.client - .set_account_nonce(address, nonce) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + self.runtime + .guaranteed_dry_run(transaction, block) + .await? 
+ .try_into() } #[napi] - pub async fn set_account_storage_slot( + pub async fn run( &self, - address: Buffer, - index: BigInt, - value: BigInt, - ) -> Result<()> { - let address = H160::from_slice(&address); - let index = BigInt::try_cast(index)?; - let value = BigInt::try_cast(value)?; - - self.client - .set_account_storage_slot(address, index, value) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + transaction: Transaction, + block: BlockConfig, + ) -> Result { + let transaction: TxEnv = transaction.try_into()?; + let block = block.try_into()?; + + self.runtime.run(transaction, block).await.try_into() } } diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs new file mode 100644 index 0000000000..4d08fe6749 --- /dev/null +++ b/crates/rethnet_evm_napi/src/state.rs @@ -0,0 +1,306 @@ +use std::sync::{ + mpsc::{channel, Sender}, + Arc, +}; + +use napi::{bindgen_prelude::*, JsFunction, JsObject, NapiRaw, Status}; +use napi_derive::napi; +use rethnet_eth::{Address, B256, U256}; +use rethnet_evm::{ + db::{AsyncDatabase, LayeredDatabase, RethnetLayer, SyncDatabase}, + AccountInfo, Bytecode, DatabaseDebug, HashMap, +}; +use secp256k1::Secp256k1; + +use crate::{ + private_key_to_address, + sync::{await_promise, handle_error}, + threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, + Account, AccountData, GenesisAccount, TryCast, +}; + +struct ModifyAccountCall { + pub balance: U256, + pub nonce: u64, + pub code: Option, + pub sender: Sender)>>, +} + +#[napi] +pub struct StateManager { + pub(super) db: Arc>, anyhow::Error>>, +} + +#[napi] +impl StateManager { + #[napi(constructor)] + pub fn new() -> napi::Result { + Self::with_accounts(HashMap::default()) + } + + #[napi(factory)] + pub fn with_genesis_accounts(accounts: Vec) -> napi::Result { + let context = Secp256k1::signing_only(); + let genesis_accounts = accounts + .into_iter() + .map(|account| { + let address = private_key_to_address(&context, account.private_key)?; + account.balance.try_cast().map(|balance| { + let account_info = AccountInfo { + balance, + ..Default::default() + }; + + (address, account_info) + }) + }) + .collect::>>()?; + + Self::with_accounts(genesis_accounts) + } + + fn with_accounts(mut accounts: HashMap) -> napi::Result { + // Mimic precompiles activation + for idx in 1..=8 { + let mut address = Address::zero(); + address.0[19] = idx; + accounts.insert(address, AccountInfo::default()); + } + + let mut database = + LayeredDatabase::with_layer(RethnetLayer::with_genesis_accounts(accounts)); + + database.checkpoint().unwrap(); + + Self::with_db(database) + } + + fn with_db(db: D) -> napi::Result + where + D: SyncDatabase, + { + let db: Box> = Box::new(db); + let db = AsyncDatabase::new(db) + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + Ok(Self { db: Arc::new(db) }) + } + + #[napi] + pub async fn checkpoint(&self) -> napi::Result<()> { + self.db + .checkpoint() + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn revert(&self) -> napi::Result<()> { + self.db + .revert() + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn get_account_by_address(&self, address: Buffer) -> napi::Result> { + let address = Address::from_slice(&address); + + self.db.account_by_address(address).await.map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + 
|account_info| Ok(account_info.map(Account::from)), + ) + } + + #[napi] + pub async fn get_account_storage_slot( + &self, + address: Buffer, + index: BigInt, + ) -> napi::Result { + let address = Address::from_slice(&address); + let index = BigInt::try_cast(index)?; + + self.db + .account_storage_slot(address, index) + .await + .map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |value| { + Ok(BigInt { + sign_bit: false, + words: value.into_limbs().to_vec(), + }) + }, + ) + } + + #[napi] + pub async fn get_code_by_hash(&self, code_hash: Buffer) -> napi::Result { + let code_hash = B256::from_slice(&code_hash); + + self.db.code_by_hash(code_hash).await.map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |code| Ok(Buffer::from(&code.bytes()[..code.len()])), + ) + } + + #[napi] + pub async fn get_state_root(&self) -> napi::Result { + self.db.state_root().await.map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |root| Ok(Buffer::from(root.as_ref())), + ) + } + + #[napi] + pub async fn insert_account(&self, address: Buffer, account: Account) -> napi::Result<()> { + let address = Address::from_slice(&address); + let account = account.try_cast()?; + + self.db + .insert_account(address, account) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn make_snapshot(&self) -> Buffer { + >::as_ref(&self.db.make_snapshot().await).into() + } + + /// Modifies the account with the provided address using the specified modifier function. + /// The modifier function receives the current values as individual parameters and will update the account's values + /// to the returned `AccountData` values. + #[napi(ts_return_type = "Promise")] + pub fn modify_account( + &self, + env: Env, + address: Buffer, + #[napi( + ts_arg_type = "(balance: bigint, nonce: bigint, code: Buffer | undefined) => Promise" + )] + modify_account_fn: JsFunction, + ) -> napi::Result { + let address = Address::from_slice(&address); + + let modify_account_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { modify_account_fn.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + + let balance = ctx + .env + .create_bigint_from_words(false, ctx.value.balance.into_limbs().to_vec())? + .into_unknown()?; + + let nonce = ctx + .env + .create_bigint_from_u64(ctx.value.nonce)? + .into_unknown()?; + + let code = if let Some(code) = ctx.value.code { + ctx.env + .create_buffer_copy(&code.bytes()[..code.len()])? 
+ .into_unknown() + } else { + ctx.env.get_null()?.into_unknown() + }; + + let promise = ctx.callback.call(None, &[balance, nonce, code])?; + let result = await_promise::)>( + ctx.env, + promise, + ctx.value.sender, + ); + + handle_error(sender, result) + }, + )?; + + let (deferred, promise) = env.create_deferred()?; + let db = self.db.clone(); + + self.db.runtime().spawn(async move { + let result = db + .modify_account( + address, + Box::new( + move |balance: &mut U256, nonce: &mut u64, code: &mut Option| { + let (sender, receiver) = channel(); + + let status = modify_account_fn.call( + ModifyAccountCall { + sender, + balance: *balance, + nonce: *nonce, + code: code.clone(), + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + let (new_balance, new_nonce, new_code) = + receiver.recv().unwrap().expect("Failed to commit"); + + *balance = new_balance; + *nonce = new_nonce; + *code = new_code; + }, + ), + ) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())); + + deferred.resolve(|_| result); + }); + + Ok(promise) + } + + #[napi] + pub async fn remove_account(&self, address: Buffer) -> napi::Result> { + let address = Address::from_slice(&address); + + self.db.remove_account(address).await.map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |account| Ok(account.map(Account::from)), + ) + } + + #[napi] + pub async fn remove_snapshot(&self, state_root: Buffer) -> bool { + let state_root = B256::from_slice(&state_root); + + self.db.remove_snapshot(state_root).await + } + + #[napi] + pub async fn set_account_storage_slot( + &self, + address: Buffer, + index: BigInt, + value: BigInt, + ) -> napi::Result<()> { + let address = Address::from_slice(&address); + let index = BigInt::try_cast(index)?; + let value = BigInt::try_cast(value)?; + + self.db + .set_account_storage_slot(address, index, value) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } + + #[napi] + pub async fn set_state_root(&self, state_root: Buffer) -> napi::Result<()> { + let state_root = B256::from_slice(&state_root); + + self.db + .set_state_root(&state_root) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + } +} diff --git a/crates/rethnet_evm_napi/src/sync.rs b/crates/rethnet_evm_napi/src/sync.rs index a45b88bcd3..123c5a5c75 100644 --- a/crates/rethnet_evm_napi/src/sync.rs +++ b/crates/rethnet_evm_napi/src/sync.rs @@ -39,6 +39,7 @@ where Ok(()) } +#[allow(dead_code)] pub fn await_void_promise( env: Env, result: JsUnknown, diff --git a/crates/rethnet_evm_napi/src/trace.rs b/crates/rethnet_evm_napi/src/trace.rs new file mode 100644 index 0000000000..b8efe9a618 --- /dev/null +++ b/crates/rethnet_evm_napi/src/trace.rs @@ -0,0 +1,39 @@ +use napi::bindgen_prelude::{BigInt, Buffer}; +use napi_derive::napi; + +#[napi(object)] +pub struct Trace { + pub steps: Vec, + pub return_value: Buffer, +} + +impl From for Trace { + fn from(value: rethnet_evm::trace::Trace) -> Self { + let steps = value.steps.into_iter().map(From::from).collect(); + let return_value = Buffer::from(value.return_value.as_ref()); + + Self { + steps, + return_value, + } + } +} + +#[napi(object)] +pub struct Step { + pub opcode: u8, + pub gas_cost: BigInt, + pub gas_refunded: i64, + pub exit_code: u8, +} + +impl From for Step { + fn from(value: rethnet_evm::trace::Step) -> Self { + Self { + opcode: value.opcode, + gas_cost: BigInt::from(value.gas_cost), + gas_refunded: value.gas_refunded, + exit_code: 
value.exit_code as u8, + } + } +} diff --git a/crates/rethnet_evm_napi/src/transaction.rs b/crates/rethnet_evm_napi/src/transaction.rs new file mode 100644 index 0000000000..2c08fcd950 --- /dev/null +++ b/crates/rethnet_evm_napi/src/transaction.rs @@ -0,0 +1,115 @@ +use napi::bindgen_prelude::{BigInt, Buffer}; +use napi_derive::napi; +use rethnet_eth::{Address, Bytes, U256}; +use rethnet_evm::{CreateScheme, TransactTo}; + +use crate::{access_list::AccessListItem, cast::TryCast}; + +#[napi(object)] +pub struct Transaction { + /// 160-bit address for caller + /// Defaults to `0x00.0` address. + pub from: Option, + /// 160-bit address for receiver + /// Creates a contract if no address is provided. + pub to: Option, + /// Maximum gas allowance for the code execution to avoid infinite loops. + /// Defaults to 2^63. + pub gas_limit: Option, + /// Number of wei to pay for each unit of gas during execution. + /// Defaults to 1 wei. + pub gas_price: Option, + /// Maximum tip per gas that's given directly to the forger. + pub gas_priority_fee: Option, + /// (Up to) 256-bit unsigned value. + pub value: Option, + /// Nonce of sender account. + pub nonce: Option, + /// Input byte data + pub input: Option, + /// A list of addresses and storage keys that the transaction plans to access. + pub access_list: Option>, + /// Transaction is only valid on networks with this chain ID. + pub chain_id: Option, +} + +impl TryFrom for rethnet_evm::TxEnv { + type Error = napi::Error; + + fn try_from(value: Transaction) -> std::result::Result { + let caller = if let Some(from) = value.from.as_ref() { + Address::from_slice(from) + } else { + Address::default() + }; + + let transact_to = if let Some(to) = value.to.as_ref() { + TransactTo::Call(Address::from_slice(to)) + } else { + TransactTo::Create(CreateScheme::Create) + }; + + let data = value + .input + .map_or(Bytes::default(), |input| Bytes::copy_from_slice(&input)); + + let access_list = value.access_list.map_or(Ok(Vec::new()), |access_list| { + access_list + .into_iter() + .map(|item| { + rethnet_eth::access_list::AccessListItem::try_from(item) + .map(|item| (item.address, item.storage_keys)) + }) + .collect::>>() + })?; + + Ok(Self { + caller, + gas_limit: value + .gas_limit + .map_or(2u64.pow(63), |limit| limit.get_u64().1), + gas_price: value + .gas_price + .map_or(Ok(U256::from(0)), BigInt::try_cast)?, + gas_priority_fee: value + .gas_priority_fee + .map_or(Ok(None), |fee| BigInt::try_cast(fee).map(Some))?, + transact_to, + value: value.value.map_or(Ok(U256::default()), BigInt::try_cast)?, + data, + chain_id: value.chain_id.map(|chain_id| chain_id.get_u64().1), + nonce: value.nonce.map(|nonce| nonce.get_u64().1), + access_list, + }) + } +} + +#[napi(object)] +pub struct TransactionConfig { + pub disable_balance_check: Option, +} + +#[napi(object)] +pub struct TransactionOutput { + /// Return value from Call or Create transactions + #[napi(readonly)] + pub output: Option, + /// Optionally, a 160-bit address from Create transactions + #[napi(readonly)] + pub address: Option, +} + +impl From for TransactionOutput { + fn from(value: rethnet_evm::TransactOut) -> Self { + let (output, address) = match value { + rethnet_evm::TransactOut::None => (None, None), + rethnet_evm::TransactOut::Call(output) => (Some(Buffer::from(output.as_ref())), None), + rethnet_evm::TransactOut::Create(output, address) => ( + Some(Buffer::from(output.as_ref())), + address.map(|address| Buffer::from(address.as_bytes())), + ), + }; + + Self { output, address } + } +} diff --git 
a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts index 2ec2cf3316..77fdc19bc4 100644 --- a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts +++ b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts @@ -1,9 +1,17 @@ import { expect } from "chai"; -import { Address } from "@nomicfoundation/ethereumjs-util"; - -import { Block, Config, Rethnet, Transaction } from "../.."; - -describe("Rethnet DB", () => { +import { Address, KECCAK256_NULL } from "@nomicfoundation/ethereumjs-util"; + +import { + AccountData, + Blockchain, + BlockConfig, + Config, + Rethnet, + StateManager, + Transaction, +} from "../.."; + +describe("Rethnet", () => { const caller = Address.fromString( "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" ); @@ -11,47 +19,32 @@ describe("Rethnet DB", () => { "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" ); + let blockchain: Blockchain; + let stateManager: StateManager; let rethnet: Rethnet; - beforeEach(function () { + beforeEach(async function () { + (blockchain = new Blockchain(async function ( + _blockNumber: bigint + ): Promise { + return Buffer.allocUnsafe(0); + })), + (stateManager = new StateManager()); const cfg: Config = { chainId: BigInt(0), limitContractCodeSize: BigInt(2n) ** BigInt(32n), disableEip3607: true, }; - rethnet = new Rethnet(cfg); - }); - - // TODO: insertBlock, setAccountCode, setAccountStorageSlot - it("getAccountByAddress", async () => { - await rethnet.insertAccount(caller.buf); - let account = await rethnet.getAccountByAddress(caller.buf); - - expect(account?.balance).to.equal(0n); - expect(account?.nonce).to.equal(0n); + rethnet = new Rethnet(blockchain, stateManager, cfg); }); - it("setAccountBalance", async () => { - await rethnet.insertAccount(caller.buf); - await rethnet.setAccountBalance(caller.buf, 100n); - let account = await rethnet.getAccountByAddress(caller.buf); - - expect(account?.balance).to.equal(100n); - expect(account?.nonce).to.equal(0n); - }); - it("setAccountNonce", async () => { - await rethnet.insertAccount(caller.buf); - await rethnet.setAccountNonce(caller.buf, 5n); - - let account = await rethnet.getAccountByAddress(caller.buf); - - expect(account?.balance).to.equal(0n); - expect(account?.nonce).to.equal(5n); - }); it("call", async () => { // Add funds to caller - await rethnet.insertAccount(caller.buf); - await rethnet.setAccountBalance(caller.buf, BigInt("0xffffffff")); + await stateManager.insertAccount(caller.buf, { + nonce: 0n, + balance: BigInt("0xffffffff"), + codeHash: KECCAK256_NULL, + }); // send some value const sendValue: Transaction = { @@ -61,7 +54,7 @@ describe("Rethnet DB", () => { value: 100n, }; - const block: Block = { + const block: BlockConfig = { number: BigInt(1), timestamp: BigInt(Math.ceil(new Date().getTime() / 1000)), }; @@ -69,9 +62,11 @@ describe("Rethnet DB", () => { // receiver should have 100 (0x64) wei expect( - sendValueChanges.state["0x70997970c51812dc3a010c7d01b50e0d17dc79c8"].info - .balance - ).to.equal("0x64"); + BigInt( + sendValueChanges.state["0x70997970c51812dc3a010c7d01b50e0d17dc79c8"] + .info.balance + ) + ).to.equal(BigInt("0x64")); // create a contract const createContract: Transaction = { diff --git a/crates/rethnet_evm_napi/test/evm/StateManager.ts b/crates/rethnet_evm_napi/test/evm/StateManager.ts new file mode 100644 index 0000000000..99f1dffc23 --- /dev/null +++ b/crates/rethnet_evm_napi/test/evm/StateManager.ts @@ -0,0 +1,90 @@ +import { expect } from "chai"; +import { Address, KECCAK256_NULL } from "@nomicfoundation/ethereumjs-util"; + +import 
{ Account, AccountData, Config, StateManager, Transaction } from "../.."; + +describe("State Manager", () => { + const caller = Address.fromString( + "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" + ); + const receiver = Address.fromString( + "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" + ); + + let stateManager: StateManager; + + beforeEach(function () { + stateManager = new StateManager(); + }); + + // TODO: insertBlock, setAccountCode, setAccountStorageSlot + it("getAccountByAddress", async () => { + await stateManager.insertAccount(caller.buf, { + nonce: 0n, + balance: 0n, + codeHash: KECCAK256_NULL, + }); + let account = await stateManager.getAccountByAddress(caller.buf); + + expect(account?.balance).to.equal(0n); + expect(account?.nonce).to.equal(0n); + }); + + it("setAccountBalance", async () => { + await stateManager.insertAccount(caller.buf, { + nonce: 0n, + balance: 0n, + codeHash: KECCAK256_NULL, + }); + + await stateManager.modifyAccount( + caller.buf, + async function ( + _balance: bigint, + nonce: bigint, + code: Buffer | undefined + ): Promise { + return { + balance: 100n, + nonce, + code, + }; + } + ); + + let account = await stateManager.getAccountByAddress(caller.buf); + + expect(account?.balance).to.equal(100n); + expect(account?.nonce).to.equal(0n); + expect(account?.codeHash).to.eql(KECCAK256_NULL); + }); + + it("setAccountNonce", async () => { + await stateManager.insertAccount(caller.buf, { + nonce: 0n, + balance: 0n, + codeHash: KECCAK256_NULL, + }); + + await stateManager.modifyAccount( + caller.buf, + async function ( + balance: bigint, + nonce: bigint, + code: Buffer | undefined + ): Promise { + return { + balance, + nonce: 5n, + code, + }; + } + ); + + let account = await stateManager.getAccountByAddress(caller.buf); + + expect(account?.balance).to.equal(0n); + expect(account?.nonce).to.equal(5n); + expect(account?.codeHash).to.eql(KECCAK256_NULL); + }); +}); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts new file mode 100644 index 0000000000..3b2b832d4d --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts @@ -0,0 +1,165 @@ +import { + Account, + Address, + bufferToBigInt, + KECCAK256_NULL, + toBuffer, +} from "@nomicfoundation/ethereumjs-util"; +import { StateManager, AccountData } from "rethnet-evm"; +import { GenesisAccount } from "./node-types"; + +/* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ +/* eslint-disable @typescript-eslint/no-unused-vars */ + +export class RethnetStateManager { + constructor(private _state: StateManager = new StateManager()) {} + + public static withGenesisAccounts( + genesisAccounts: GenesisAccount[] + ): RethnetStateManager { + return new RethnetStateManager( + StateManager.withGenesisAccounts( + genesisAccounts.map((account) => { + return { + privateKey: account.privateKey, + balance: BigInt(account.balance), + }; + }) + ) + ); + } + + public asInner(): StateManager { + return this._state; + } + + public async accountExists(address: Address): Promise { + const account = await this._state.getAccountByAddress(address.buf); + return account !== null; + } + + public async getAccount(address: Address): Promise { + const account = await this._state.getAccountByAddress(address.buf); + return new Account( + account?.nonce, + account?.balance, + undefined, + account?.codeHash + ); + } + + public async putAccount(address: Address, account: Account): 
Promise { + await this._state.insertAccount(address.buf, { + balance: account.balance, + nonce: account.nonce, + codeHash: account.codeHash, + }); + } + + public async accountIsEmpty(address: Address): Promise { + const account = await this._state.getAccountByAddress(address.buf); + return ( + account === null || + (account.balance === 0n && + account.nonce === 0n && + account.codeHash.equals(KECCAK256_NULL)) + ); + } + + public async deleteAccount(address: Address): Promise { + await this._state.removeAccount(address.buf); + } + + public async makeSnapshot(): Promise { + return this._state.makeSnapshot(); + } + + public async modifyAccountFields( + address: Address, + accountFields: Partial> + ): Promise { + await this._state.modifyAccount( + address.buf, + async function ( + balance: bigint, + nonce: bigint, + code: Buffer | undefined + ): Promise { + return { + balance: accountFields.balance ?? balance, + nonce: accountFields.nonce ?? nonce, + code, + }; + } + ); + } + + public async putContractCode(address: Address, value: Buffer): Promise { + await this._state.modifyAccount( + address.buf, + async function ( + balance: bigint, + nonce: bigint, + _code: Buffer | undefined + ): Promise { + return { + balance, + nonce, + code: value, + }; + } + ); + } + + public async getContractCode(address: Address): Promise { + const account = await this._state.getAccountByAddress(address.buf); + if (account === null) { + return Buffer.allocUnsafe(0); + } + + if (account.code !== undefined) { + return account.code; + } + + return this._state.getCodeByHash(account.codeHash); + } + + public async getContractStorage( + address: Address, + key: Buffer + ): Promise { + const index = bufferToBigInt(key); + + const value = await this._state.getAccountStorageSlot(address.buf, index); + return toBuffer(value); + } + + public async putContractStorage( + address: Address, + key: Buffer, + value: Buffer + ): Promise { + const index = bufferToBigInt(key); + const number = bufferToBigInt(value); + + await this._state.setAccountStorageSlot(address.buf, index, number); + } + + public async checkpoint(): Promise { + return this._state.checkpoint(); + } + + public async commit(): Promise {} + + public async revert(): Promise { + return this._state.revert(); + } + + public async getStateRoot(): Promise { + return this._state.getStateRoot(); + } + + public async setStateRoot(stateRoot: Buffer): Promise { + return this._state.setStateRoot(stateRoot); + } +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 08ea4fcb6c..ed89463dfc 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -971,7 +971,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu id, date: new Date(), latestBlock: await this.getLatestBlock(), - stateRoot: await this._vm.getStateRoot(), + stateRoot: await this._vm.makeSnapshot(), txPoolSnapshotId: this._txPool.snapshot(), blockTimeOffsetSeconds: this.getTimeIncrement(), nextBlockTimestamp: this.getNextBlockTimestamp(), diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index 69939350d0..56f167678e 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ 
b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -1,3 +1,7 @@ +import { + BlockHeader as EthereumJSBlockHeader, + HeaderData, +} from "@nomicfoundation/ethereumjs-block"; import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; import { DefaultStateManager, @@ -14,14 +18,16 @@ import { bigIntToBuffer, bufferToBigInt, setLengthLeft, + toBuffer, } from "@nomicfoundation/ethereumjs-util"; import { Account as RethnetAccount, - Config, + BlockConfig, + BlockHeader as RethnetBlockHeader, ExecutionResult, - Rethnet, Transaction, } from "rethnet-evm"; +import { fromBigIntLike } from "../../../util/bigint"; import { Exit } from "../vm/exit"; import { RunTxResult } from "../vm/vm-adapter"; @@ -135,6 +141,51 @@ export class HardhatDB { } } +export function ethereumjsBlockHeaderToRethnet( + blockHeader: EthereumJSBlockHeader +): RethnetBlockHeader { + return { + parentHash: blockHeader.parentHash, + ommersHash: blockHeader.uncleHash, + beneficiary: blockHeader.coinbase.buf, + stateRoot: blockHeader.stateRoot, + transactionsRoot: blockHeader.transactionsTrie, + receiptsRoot: blockHeader.receiptTrie, + logsBloom: blockHeader.logsBloom, + difficulty: blockHeader.difficulty, + number: blockHeader.number, + gasLimit: blockHeader.gasLimit, + gasUsed: blockHeader.gasUsed, + timestamp: blockHeader.timestamp, + extraData: blockHeader.extraData, + mixHash: blockHeader.mixHash, + nonce: BigInt(`0x${blockHeader.nonce.toString("hex")}`), + baseFeePerGas: blockHeader.baseFeePerGas, + }; +} + +export function ethereumjsHeaderDataToRethnet( + headerData?: HeaderData, + difficulty?: bigint, + prevRandao?: Buffer +): BlockConfig { + const coinbase = + headerData?.coinbase === undefined + ? undefined + : toBuffer(headerData.coinbase); + + return { + number: fromBigIntLike(headerData?.number), + coinbase, + timestamp: fromBigIntLike(headerData?.timestamp), + difficulty, + prevrandao: prevRandao, + basefee: fromBigIntLike(headerData?.baseFeePerGas), + gasLimit: fromBigIntLike(headerData?.gasLimit), + parentHash: headerData?.parentHash as Buffer, + }; +} + export function ethereumjsTransactionToRethnet( tx: TypedTransaction ): Transaction { @@ -165,36 +216,6 @@ export function ethereumjsTransactionToRethnet( return rethnetTx; } -export function createRethnetFromHardhatDB( - cfg: Config, - hardhatDB: HardhatDB -): Rethnet { - return Rethnet.withCallbacks( - cfg, - { - getAccountByAddressFn: - HardhatDB.prototype.getAccountByAddress.bind(hardhatDB), - getAccountStorageSlotFn: - HardhatDB.prototype.getAccountStorageSlot.bind(hardhatDB), - getBlockHashFn: HardhatDB.prototype.getBlockHash.bind(hardhatDB), - getCodeByHashFn: HardhatDB.prototype.getCodeByHash.bind(hardhatDB), - }, - null, - { - checkpointFn: HardhatDB.prototype.checkpoint.bind(hardhatDB), - revertFn: HardhatDB.prototype.revert.bind(hardhatDB), - getStorageRootFn: HardhatDB.prototype.getStorageRoot.bind(hardhatDB), - insertAccountFn: HardhatDB.prototype.insertAccount.bind(hardhatDB), - setAccountBalanceFn: - HardhatDB.prototype.setAccountBalance.bind(hardhatDB), - setAccountCodeFn: HardhatDB.prototype.setAccountCode.bind(hardhatDB), - setAccountNonceFn: HardhatDB.prototype.setAccountNonce.bind(hardhatDB), - setAccountStorageSlotFn: - HardhatDB.prototype.setAccountStorageSlot.bind(hardhatDB), - } - ); -} - export function rethnetResultToRunTxResult( rethnetResult: ExecutionResult ): RunTxResult { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/random.ts 
b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/random.ts index 1ea7c7b4bf..f73f00003f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/random.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/random.ts @@ -54,4 +54,7 @@ export const randomAddressString = () => { return bufferToHex(randomAddressBuffer()); }; -export const randomAddressBuffer = () => randomHashBuffer().slice(0, 20); +const addressGenerator = RandomBufferGenerator.create("seed"); +export const randomAddressBuffer = (): Buffer => { + return addressGenerator.next().slice(0, 20); +}; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index f2cd944057..af523537a7 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -1,7 +1,11 @@ import { Block } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; -import { Account, Address } from "@nomicfoundation/ethereumjs-util"; +import { + Account, + Address, + bufferToHex, +} from "@nomicfoundation/ethereumjs-util"; import { assertHardhatInvariant } from "../../../core/errors"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; @@ -20,8 +24,14 @@ function printEthereumJSTrace(trace: any) { console.log(JSON.stringify(trace, null, 2)); } -function printRethnetTrace(_trace: any) { - // not implemented +function printRethnetTrace(trace: any) { + console.log( + JSON.stringify( + trace, + (key, value) => (typeof value === "bigint" ? 
value.toString() : value), + 2 + ) + ); } export class DualModeAdapter implements VMAdapter { @@ -44,8 +54,6 @@ export class DualModeAdapter implements VMAdapter { ); const rethnetAdapter = await RethnetAdapter.create( - // eslint-disable-next-line @typescript-eslint/dot-notation - ethereumJSAdapter["_stateManager"], config, selectHardfork, async (blockNumber) => { @@ -75,7 +83,7 @@ export class DualModeAdapter implements VMAdapter { try { assertEqualRunTxResults(ethereumJSResult, rethnetResult); - return [rethnetResult, null]; + return [rethnetResult, rethnetTrace]; } catch (e) { // if the results didn't match, print the traces console.log("EthereumJS trace"); @@ -89,30 +97,95 @@ export class DualModeAdapter implements VMAdapter { } public async getStateRoot(): Promise { - return this._ethereumJSAdapter.getStateRoot(); + const ethereumJSRoot = await this._ethereumJSAdapter.getStateRoot(); + const rethnetRoot = await this._rethnetAdapter.getStateRoot(); + + if (!ethereumJSRoot.equals(rethnetRoot)) { + console.trace( + `Different state root: ${ethereumJSRoot.toString( + "hex" + )} !== ${rethnetRoot.toString("hex")}` + ); + throw new Error("Different state root"); + } + + return rethnetRoot; } public async getAccount(address: Address): Promise { - return this._ethereumJSAdapter.getAccount(address); + const ethereumJSAccount = await this._ethereumJSAdapter.getAccount(address); + const rethnetAccount = await this._rethnetAdapter.getAccount(address); + + assertEqualAccounts(address, ethereumJSAccount, rethnetAccount); + + return ethereumJSAccount; } public async getContractStorage( address: Address, key: Buffer ): Promise { - return this._ethereumJSAdapter.getContractStorage(address, key); + const ethereumJSStorageSlot = + await this._ethereumJSAdapter.getContractStorage(address, key); + + const rethnetStorageSlot = await this._rethnetAdapter.getContractStorage( + address, + key + ); + + if (!ethereumJSStorageSlot.equals(rethnetStorageSlot)) { + // we only throw if any of the returned values was non-empty, but + // ethereumjs and rethnet return different values when that happens + if ( + ethereumJSStorageSlot.length !== 0 || + !rethnetStorageSlot.equals(Buffer.from([0x00])) + ) { + console.trace( + `Different storage slot: ${bufferToHex( + ethereumJSStorageSlot + )} !== ${bufferToHex(rethnetStorageSlot)}` + ); + throw new Error("Different storage slot"); + } + } + + return rethnetStorageSlot; } - public async getContractCode(address: Address): Promise { - return this._ethereumJSAdapter.getContractCode(address); + public async getContractCode( + address: Address, + ethJsOnly?: boolean + ): Promise { + const ethereumJSCode = await this._ethereumJSAdapter.getContractCode( + address + ); + + if (ethJsOnly === true) { + return ethereumJSCode; + } + + const rethnetCode = await this._rethnetAdapter.getContractCode(address); + + if (!ethereumJSCode.equals(rethnetCode)) { + console.trace( + `Different contract code: ${ethereumJSCode.toString( + "hex" + )} !== ${rethnetCode.toString("hex")}` + ); + throw new Error("Different contract code"); + } + + return rethnetCode; } public async putAccount(address: Address, account: Account): Promise { - return this._ethereumJSAdapter.putAccount(address, account); + await this._ethereumJSAdapter.putAccount(address, account); + await this._rethnetAdapter.putAccount(address, account); } public async putContractCode(address: Address, value: Buffer): Promise { - return this._ethereumJSAdapter.putContractCode(address, value); + await 
this._ethereumJSAdapter.putContractCode(address, value); + await this._rethnetAdapter.putContractCode(address, value); } public async putContractStorage( @@ -120,11 +193,13 @@ export class DualModeAdapter implements VMAdapter { key: Buffer, value: Buffer ): Promise { - return this._ethereumJSAdapter.putContractStorage(address, key, value); + await this._ethereumJSAdapter.putContractStorage(address, key, value); + await this._rethnetAdapter.putContractStorage(address, key, value); } public async restoreContext(stateRoot: Buffer): Promise { - return this._ethereumJSAdapter.restoreContext(stateRoot); + await this._ethereumJSAdapter.restoreContext(stateRoot); + await this._rethnetAdapter.restoreContext(stateRoot); } public async traceTransaction( @@ -147,36 +222,84 @@ export class DualModeAdapter implements VMAdapter { block: Block, irregularStateOrUndefined: Buffer | undefined ): Promise { - return this._ethereumJSAdapter.setBlockContext( + await this._ethereumJSAdapter.setBlockContext( + block, + irregularStateOrUndefined + ); + + await this._rethnetAdapter.setBlockContext( block, irregularStateOrUndefined ); } public async startBlock(): Promise { - return this._ethereumJSAdapter.startBlock(); + await this._rethnetAdapter.startBlock(); + await this._ethereumJSAdapter.startBlock(); } public async runTxInBlock( tx: TypedTransaction, block: Block ): Promise<[RunTxResult, Trace]> { - return this._ethereumJSAdapter.runTxInBlock(tx, block); + const [ethereumJSResult, ethereumJSTrace] = + await this._ethereumJSAdapter.runTxInBlock(tx, block); + + const [rethnetResult, rethnetTrace] = + await this._rethnetAdapter.runTxInBlock(tx, block); + + try { + assertEqualRunTxResults(ethereumJSResult, rethnetResult); + + if (rethnetResult.createdAddress !== undefined) { + const _test = this.getAccount(rethnetResult.createdAddress); + } + + return [ethereumJSResult, ethereumJSTrace]; + } catch (e) { + // if the results didn't match, print the traces + console.log("EthereumJS trace"); + printEthereumJSTrace(ethereumJSTrace); + console.log(); + console.log("Rethnet trace"); + printRethnetTrace(rethnetTrace); + + throw e; + } } public async addBlockRewards( rewards: Array<[Address, bigint]> ): Promise { + await this._rethnetAdapter.addBlockRewards(rewards); return this._ethereumJSAdapter.addBlockRewards(rewards); } public async sealBlock(): Promise { + await this._rethnetAdapter.sealBlock(); return this._ethereumJSAdapter.sealBlock(); } public async revertBlock(): Promise { + await this._rethnetAdapter.revertBlock(); return this._ethereumJSAdapter.revertBlock(); } + + public async makeSnapshot(): Promise { + const ethereumJSRoot = await this._ethereumJSAdapter.makeSnapshot(); + const rethnetRoot = await this._rethnetAdapter.makeSnapshot(); + + if (!ethereumJSRoot.equals(rethnetRoot)) { + console.trace( + `Different snapshot state root: ${ethereumJSRoot.toString( + "hex" + )} !== ${rethnetRoot.toString("hex")}` + ); + throw new Error("Different snapshot state root"); + } + + return rethnetRoot; + } } function assertEqualRunTxResults( @@ -224,3 +347,39 @@ function assertEqualRunTxResults( } } } + +function assertEqualAccounts( + address: Address, + ethereumJSAccount: Account, + rethnetAccount: Account +) { + if (ethereumJSAccount.balance !== rethnetAccount.balance) { + console.trace(`Account: ${address}`); + console.trace( + `Different balance: ${ethereumJSAccount.balance} !== ${rethnetAccount.balance}` + ); + throw new Error("Different balance"); + } + + if 
(!ethereumJSAccount.codeHash.equals(rethnetAccount.codeHash)) { + console.trace( + `Different codeHash: ${ethereumJSAccount.codeHash} !== ${rethnetAccount.codeHash}` + ); + throw new Error("Different codeHash"); + } + + if (ethereumJSAccount.nonce !== rethnetAccount.nonce) { + console.trace( + `Different nonce: ${ethereumJSAccount.nonce} !== ${rethnetAccount.nonce}` + ); + throw new Error("Different nonce"); + } + + // TODO: Add storageRoot to Rethnet + // if (ethereumJSAccount.storageRoot !== rethnetAccount.storageRoot) { + // console.trace( + // `Different storageRoot: ${ethereumJSAccount.storageRoot} !== ${rethnetAccount.storageRoot}` + // ); + // throw new Error("Different storageRoot"); + // } +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index 4410e85156..eb8c67576d 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -456,6 +456,10 @@ export class EthereumJSAdapter implements VMAdapter { this._blockStartStateRoot = undefined; } + public async makeSnapshot(): Promise { + return this.getStateRoot(); + } + private _getCommonForTracing(networkId: number, blockNumber: bigint): Common { try { const common = Common.custom( @@ -539,6 +543,10 @@ export class EthereumJSAdapter implements VMAdapter { gasUsed: result.execResult.executionGasUsed, gasRefunded: result.execResult.gasRefund ?? 0n, logs: result.execResult.logs ?? [], + trace: { + steps: [], + returnValue: result.execResult.returnValue, + }, }, }, next diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 99e0bc0f68..719e0d7440 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -1,22 +1,17 @@ import { Block } from "@nomicfoundation/ethereumjs-block"; -import { StateManager } from "@nomicfoundation/ethereumjs-statemanager"; -import { - Account, - Address, - bufferToBigInt, -} from "@nomicfoundation/ethereumjs-util"; +import { Account, Address } from "@nomicfoundation/ethereumjs-util"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; -import { Rethnet } from "rethnet-evm"; +import { BlockBuilder, Blockchain, Rethnet } from "rethnet-evm"; import { NodeConfig } from "../node-types"; import { - createRethnetFromHardhatDB, + ethereumjsHeaderDataToRethnet, ethereumjsTransactionToRethnet, - HardhatDB, rethnetResultToRunTxResult, } from "../utils/convertToRethnet"; import { hardforkGte, HardforkName } from "../../../util/hardforks"; import { RpcDebugTraceOutput } from "../output"; +import { RethnetStateManager } from "../RethnetState"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; import { RunTxResult, Trace, TracingCallbacks, VMAdapter } from "./vm-adapter"; @@ -26,32 +21,34 @@ import { RunTxResult, Trace, TracingCallbacks, VMAdapter } from "./vm-adapter"; export class RethnetAdapter implements VMAdapter { constructor( + private _blockchain: Blockchain, + private _state: RethnetStateManager, private _rethnet: Rethnet, private readonly _selectHardfork: (blockNumber: bigint) => string ) {} public static async create( - stateManager: StateManager, config: NodeConfig, selectHardfork: (blockNumber: bigint) 
=> string, getBlockHash: (blockNumber: bigint) => Promise ): Promise { - const hardhatDB = new HardhatDB(stateManager, getBlockHash); + const blockchain = new Blockchain(getBlockHash); const limitContractCodeSize = config.allowUnlimitedContractSize === true ? 2n ** 64n - 1n : undefined; - const rethnet = createRethnetFromHardhatDB( - { - chainId: BigInt(config.chainId), - limitContractCodeSize, - disableBlockGasLimit: true, - disableEip3607: true, - }, - hardhatDB + const state = RethnetStateManager.withGenesisAccounts( + config.genesisAccounts ); - return new RethnetAdapter(rethnet, selectHardfork); + const rethnet = new Rethnet(blockchain, state.asInner(), { + chainId: BigInt(config.chainId), + limitContractCodeSize, + disableBlockGasLimit: true, + disableEip3607: true, + }); + + return new RethnetAdapter(blockchain, state, rethnet, selectHardfork); } /** @@ -65,13 +62,15 @@ export class RethnetAdapter implements VMAdapter { const rethnetTx = ethereumjsTransactionToRethnet(tx); const difficulty = this._getBlockEnvDifficulty( + blockContext.header.difficulty + ); + + const prevRandao = this._getBlockPrevRandao( blockContext.header.number, - blockContext.header.difficulty, - bufferToBigInt(blockContext.header.mixHash) + blockContext.header.mixHash ); - await this._rethnet.guaranteeTransaction(rethnetTx); - const rethnetResult = await this._rethnet.dryRun(rethnetTx, { + const rethnetResult = await this._rethnet.guaranteedDryRun(rethnetTx, { number: blockContext.header.number, coinbase: blockContext.header.coinbase.buf, timestamp: blockContext.header.timestamp, @@ -79,18 +78,24 @@ export class RethnetAdapter implements VMAdapter { forceBaseFeeZero === true ? 0n : blockContext.header.baseFeePerGas, gasLimit: blockContext.header.gasLimit, difficulty, + prevrandao: prevRandao, }); - const result = rethnetResultToRunTxResult(rethnetResult.execResult); - - return [result, null]; + try { + const result = rethnetResultToRunTxResult(rethnetResult.execResult); + return [result, rethnetResult.execResult.trace]; + } catch (e) { + console.log("Rethnet trace"); + console.log(rethnetResult.execResult.trace); + throw e; + } } /** * Get the account info for the given address. */ public async getAccount(address: Address): Promise { - throw new Error("not implemented"); + return this._state.getAccount(address); } /** @@ -100,28 +105,37 @@ export class RethnetAdapter implements VMAdapter { address: Address, key: Buffer ): Promise { - throw new Error("not implemented"); + return this._state.getContractStorage(address, key); } /** * Get the contract code at the given address. */ - public async getContractCode(address: Address): Promise { - throw new Error("not implemented"); + public async getContractCode( + address: Address, + ethJsOnly?: boolean + ): Promise { + if (ethJsOnly === true) { + throw new Error( + "Calling RethnetAdapter.getContractCode with ethJsOnly=true, this shouldn't happen" + ); + } + + return this._state.getContractCode(address); } /** * Update the account info for the given address. */ public async putAccount(address: Address, account: Account): Promise { - throw new Error("not implemented"); + return this._state.putAccount(address, account); } /** * Update the contract code for the given address. 
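Outside the adapter, the wiring that `RethnetAdapter.create` performs looks roughly like the following simplified sketch; the block-hash callback and the config values are placeholders, not the adapter's real inputs:

const blockchain = new Blockchain(async (_blockNumber: bigint) => Buffer.alloc(32, 0));
const state = new StateManager();
const rethnet = new Rethnet(blockchain, state, {
  chainId: 31337n,
  disableBlockGasLimit: true,
  disableEip3607: true,
});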
*/ public async putContractCode(address: Address, value: Buffer): Promise { - throw new Error("not implemented"); + return this._state.putContractCode(address, value); } /** @@ -132,14 +146,14 @@ export class RethnetAdapter implements VMAdapter { key: Buffer, value: Buffer ): Promise { - throw new Error("not implemented"); + await this._state.putContractStorage(address, key, value); } /** * Get the root of the current state trie. */ public async getStateRoot(): Promise { - throw new Error("not implemented"); + return this._state.getStateRoot(); } /** @@ -150,7 +164,9 @@ export class RethnetAdapter implements VMAdapter { block: Block, irregularStateOrUndefined: Buffer | undefined ): Promise { - throw new Error("not implemented"); + return this._state.setStateRoot( + irregularStateOrUndefined ?? block.header.stateRoot + ); } /** @@ -159,14 +175,14 @@ export class RethnetAdapter implements VMAdapter { * Throw if it can't. */ public async restoreContext(stateRoot: Buffer): Promise { - throw new Error("not implemented"); + return this._state.setStateRoot(stateRoot); } /** * Start a new block and accept transactions sent with `runTxInBlock`. */ public async startBlock(): Promise { - throw new Error("not implemented"); + await this._state.checkpoint(); } /** @@ -176,7 +192,28 @@ export class RethnetAdapter implements VMAdapter { tx: TypedTransaction, block: Block ): Promise<[RunTxResult, Trace]> { - throw new Error("not implemented"); + const rethnetTx = ethereumjsTransactionToRethnet(tx); + + const difficulty = this._getBlockEnvDifficulty(block.header.difficulty); + + const prevRandao = this._getBlockPrevRandao( + block.header.number, + block.header.mixHash + ); + + const rethnetResult = await this._rethnet.run( + rethnetTx, + ethereumjsHeaderDataToRethnet(block.header, difficulty, prevRandao) + ); + + try { + const result = rethnetResultToRunTxResult(rethnetResult); + return [result, rethnetResult.trace]; + } catch (e) { + console.log("Rethnet trace"); + console.log(rethnetResult.trace); + throw e; + } } /** @@ -185,22 +222,49 @@ export class RethnetAdapter implements VMAdapter { public async addBlockRewards( rewards: Array<[Address, bigint]> ): Promise { - throw new Error("not implemented"); + const blockBuilder = await BlockBuilder.new( + this._blockchain, + this._state.asInner(), + {}, + { + // Dummy values + parentHash: Buffer.alloc(32, 0), + ommersHash: Buffer.alloc(32, 0), + beneficiary: Buffer.alloc(20, 0), + stateRoot: Buffer.alloc(32, 0), + transactionsRoot: Buffer.alloc(32, 0), + receiptsRoot: Buffer.alloc(32, 0), + logsBloom: Buffer.alloc(256, 0), + difficulty: 0n, + number: 0n, + gasLimit: 0n, + gasUsed: 0n, + timestamp: 0n, + extraData: Buffer.allocUnsafe(0), + mixHash: Buffer.alloc(32, 0), + nonce: 0n, + }, + {} + ); + + await blockBuilder.finalize( + rewards.map(([address, reward]) => { + return [address.buf, reward]; + }) + ); } /** * Finish the block successfully. Must be called after `addBlockRewards`. */ - public async sealBlock(): Promise { - throw new Error("not implemented"); - } + public async sealBlock(): Promise {} /** * Revert the block and discard the changes to the state. Can be called * at any point after `startBlock`. 
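Taken together, a caller is expected to drive the adapter's block lifecycle roughly as follows (illustrative only; `adapter`, `tx`, `block` and `minerAddress` are placeholders for the node's real objects):

await adapter.startBlock();                       // checkpoints the Rethnet state
const [txResult] = await adapter.runTxInBlock(tx, block);
await adapter.addBlockRewards([[minerAddress, 2_000_000_000_000_000_000n]]);
await adapter.sealBlock();                        // or adapter.revertBlock() to roll back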
*/ public async revertBlock(): Promise { - throw new Error("not implemented"); + await this._state.revert(); } /** @@ -229,11 +293,29 @@ export class RethnetAdapter implements VMAdapter { throw new Error("not implemented"); } + public async makeSnapshot(): Promise { + return this._state.makeSnapshot(); + } + private _getBlockEnvDifficulty( - blockNumber: bigint, - difficulty: bigint | undefined, - mixHash: bigint | undefined + difficulty: bigint | undefined ): bigint | undefined { + const MAX_DIFFICULTY = 2n ** 32n - 1n; + if (difficulty !== undefined && difficulty > MAX_DIFFICULTY) { + console.debug( + "Difficulty is larger than U256::max:", + difficulty.toString(16) + ); + return MAX_DIFFICULTY; + } + + return difficulty; + } + + private _getBlockPrevRandao( + blockNumber: bigint, + mixHash: Buffer | undefined + ): Buffer | undefined { const hardfork = this._selectHardfork(blockNumber); const isPostMergeHardfork = hardforkGte( hardfork as HardforkName, @@ -241,9 +323,13 @@ export class RethnetAdapter implements VMAdapter { ); if (isPostMergeHardfork) { + if (mixHash === undefined) { + throw new Error("mixHash must be set for post-merge hardfork"); + } + return mixHash; } - return difficulty; + return undefined; } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index 5032bf438d..724e7ca000 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -49,7 +49,7 @@ export interface VMAdapter { // getters getAccount(address: Address): Promise; getContractStorage(address: Address, key: Buffer): Promise; - getContractCode(address: Address): Promise; + getContractCode(address: Address, ethJsOnly?: boolean): Promise; // setters putAccount(address: Address, account: Account): Promise; @@ -86,4 +86,7 @@ export interface VMAdapter { ): Promise; enableTracing(callbacks: TracingCallbacks): void; disableTracing(): void; + + // methods for snapshotting + makeSnapshot(): Promise; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-debug-tracer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-debug-tracer.ts index 96400fb49b..e325ec3adf 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-debug-tracer.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-debug-tracer.ts @@ -446,8 +446,16 @@ export class VMDebugTracer { } else if (step.opcode.name === "INVALID") { const code = await this._getContractCode(step.codeAddress); - const opcodeHex = code[step.pc].toString(16); - op = `opcode 0x${opcodeHex} not defined`; + if (code.length > step.pc) { + const opcodeHex = code[step.pc].toString(16); + op = `opcode 0x${opcodeHex} not defined`; + } else { + // This can happen if there is an invalid opcode in a constructor. + // We don't have an easy way to access the init code from here, so we + // don't show the value of the opcode in this case. 
+ op = "opcode not defined"; + } + error = {}; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts index 2882b7214e..a4cfa4a9ad 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts @@ -143,7 +143,10 @@ export class VMTracer { "codeAddress should be defined" ); - const code = await this._vm.getContractCode(new Address(codeAddress)); + const code = await this._vm.getContractCode( + new Address(codeAddress), + true // ethJsOnly, temporary fix + ); const callTrace: CallMessageTrace = { code, diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts index cbf529c1c1..6236761dfb 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts @@ -85,6 +85,7 @@ export async function runFullBlock( // TODO remove "as any" and make this work with VMAdapter await (forkedNode["_vm"] as any).blockchain.putBlock(modifiedBlock); + await (forkedNode["_vm"] as any).putBlock(modifiedBlock); await forkedNode["_saveBlockAsSuccessfullyRun"]( modifiedBlock, afterBlockEvent diff --git a/rust-toolchain b/rust-toolchain index 58e4eb6b29..5b6cd6b3cd 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -1.63 +1.65 From 68aec905650b751f3795617d47e7af32b218dbe3 Mon Sep 17 00:00:00 2001 From: Wodann Date: Fri, 23 Dec 2022 13:10:35 -0600 Subject: [PATCH 014/406] improvement: return error when prevrandao is missing for post-merge tx (#3443) Co-authored-by: Franco Victorio --- crates/rethnet_evm/Cargo.toml | 1 + crates/rethnet_evm/src/runtime.rs | 47 ++--- crates/rethnet_evm/src/transaction.rs | 8 + crates/rethnet_evm_napi/Cargo.toml | 2 +- crates/rethnet_evm_napi/package.json | 2 +- crates/rethnet_evm_napi/src/lib.rs | 26 ++- crates/rethnet_evm_napi/test/evm/RethnetDb.ts | 10 +- .../provider/utils/convertToRethnet.ts | 167 +++++------------- .../hardhat-network/provider/vm/rethnet.ts | 2 + 9 files changed, 108 insertions(+), 157 deletions(-) diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 2e2c748f49..c431346c0d 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -15,4 +15,5 @@ revm = { git = "https://github.com/wodann/revm", rev = "7c28358", version = "2.3 secp256k1 = { version = "0.24.1", default-features = false, features = ["alloc"] } sha3 = { version = "0.10.4", default-features = false } signature = { version = "1.6.4", default-features = false, features = ["std"] } +thiserror = { version = "1.0.38", default-features = false } tokio = { version = "1.21.2", default-features = false, features = ["rt-multi-thread", "sync"] } diff --git a/crates/rethnet_evm/src/runtime.rs b/crates/rethnet_evm/src/runtime.rs index 1ec1bd6861..201af1793e 100644 --- a/crates/rethnet_evm/src/runtime.rs +++ b/crates/rethnet_evm/src/runtime.rs @@ -1,6 +1,5 @@ use std::{fmt::Debug, sync::Arc}; -use rethnet_eth::B256; use revm::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}; use crate::{ @@ -9,6 +8,7 @@ use crate::{ evm::build_evm, inspector::RethnetInspector, trace::Trace, + transaction::TransactionError, State, }; @@ -43,17 +43,18 @@ where pub async fn dry_run( &self, transaction: TxEnv, - mut 
block: BlockEnv, - ) -> (ExecutionResult, State, Trace) { + block: BlockEnv, + ) -> Result<(ExecutionResult, State, Trace), TransactionError> { + if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { + return Err(TransactionError::MissingPrevrandao); + } + let blockchain = self.blockchain.clone(); let db = self.db.clone(); let cfg = self.cfg.clone(); - if cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { - block.prevrandao = Some(B256::zero()); - } - - self.db + Ok(self + .db .runtime() .spawn(async move { let mut evm = build_evm(&blockchain, &db, cfg, transaction, block); @@ -63,26 +64,26 @@ where (result, state, inspector.into_trace()) }) .await - .unwrap() + .unwrap()) } /// Runs a transaction without committing the state, while disabling balance checks and creating accounts for new addresses. pub async fn guaranteed_dry_run( &self, transaction: TxEnv, - mut block: BlockEnv, - ) -> Result<(ExecutionResult, State, Trace), E> { + block: BlockEnv, + ) -> Result<(ExecutionResult, State, Trace), TransactionError> { + if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { + return Err(TransactionError::MissingPrevrandao); + } + let blockchain = self.blockchain.clone(); let db = self.db.clone(); let mut cfg = self.cfg.clone(); cfg.disable_balance_check = true; - if cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { - block.prevrandao = Some(B256::zero()); - } - - let result = self + Ok(self .db .runtime() .spawn(async move { @@ -93,17 +94,19 @@ where (result, state, inspector.into_trace()) }) .await - .unwrap(); - - Ok(result) + .unwrap()) } /// Runs a transaction, committing the state in the process. - pub async fn run(&self, transaction: TxEnv, block: BlockEnv) -> (ExecutionResult, Trace) { - let (result, changes, trace) = self.dry_run(transaction, block).await; + pub async fn run( + &self, + transaction: TxEnv, + block: BlockEnv, + ) -> Result<(ExecutionResult, Trace), TransactionError> { + let (result, changes, trace) = self.dry_run(transaction, block).await?; self.db.apply(changes).await; - (result, trace) + Ok((result, trace)) } } diff --git a/crates/rethnet_evm/src/transaction.rs b/crates/rethnet_evm/src/transaction.rs index 1e61c5bc26..ff9a047596 100644 --- a/crates/rethnet_evm/src/transaction.rs +++ b/crates/rethnet_evm/src/transaction.rs @@ -8,6 +8,14 @@ use rethnet_eth::{ Address, Bloom, Bytes, B256, U256, }; +/// Invalid transaction error +#[derive(Debug, thiserror::Error)] +pub enum TransactionError { + /// The transaction is expected to have a prevrandao, as the executor's config is on a post-merge hardfork. 
+ #[error("Post-merge transaction is missing prevrandao")] + MissingPrevrandao, +} + /// Represents all relevant information of an executed transaction #[derive(Debug, Eq, PartialEq, Clone)] pub struct TransactionInfo { diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index a78097be46..997b6da8f7 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -9,7 +9,7 @@ crate-type = ["cdylib"] [dependencies] anyhow = "1.0.64" crossbeam-channel = { version = "0.5.6", default-features = false } -napi = { version = "2.10.1", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } +napi = { version = "= 2.10.2", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } napi-derive = "2.9.3" once_cell = "1.15.0" pretty_env_logger = "0.4.0" diff --git a/crates/rethnet_evm_napi/package.json b/crates/rethnet_evm_napi/package.json index de7da05fe5..34de16a557 100644 --- a/crates/rethnet_evm_napi/package.json +++ b/crates/rethnet_evm_napi/package.json @@ -11,7 +11,7 @@ "scripts": { "build": "napi build --release", "build:debug": "napi build", - "test": "mocha --recursive \"test/**/*.ts\"" + "test": "mocha --recursive \"test/**/*.ts\" --exit" }, "devDependencies": { "@napi-rs/cli": "^2.11.4", diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index 150d592ce5..ba9b81dbf0 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -12,7 +12,10 @@ use std::{fmt::Debug, str::FromStr}; use block::BlockConfig; use blockchain::Blockchain; -use napi::{bindgen_prelude::*, Status}; +use napi::{ + bindgen_prelude::{BigInt, Buffer, ToNapiValue}, + Status, +}; use napi_derive::napi; use once_cell::sync::OnceCell; use rethnet_eth::Address; @@ -349,11 +352,15 @@ impl Rethnet { &self, transaction: Transaction, block: BlockConfig, - ) -> Result { + ) -> napi::Result { let transaction = transaction.try_into()?; let block = block.try_into()?; - self.runtime.dry_run(transaction, block).await.try_into() + self.runtime + .dry_run(transaction, block) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? + .try_into() } #[napi] @@ -361,13 +368,14 @@ impl Rethnet { &self, transaction: Transaction, block: BlockConfig, - ) -> Result { + ) -> napi::Result { let transaction = transaction.try_into()?; let block = block.try_into()?; self.runtime .guaranteed_dry_run(transaction, block) - .await? + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? .try_into() } @@ -376,10 +384,14 @@ impl Rethnet { &self, transaction: Transaction, block: BlockConfig, - ) -> Result { + ) -> napi::Result { let transaction: TxEnv = transaction.try_into()?; let block = block.try_into()?; - self.runtime.run(transaction, block).await.try_into() + self.runtime + .run(transaction, block) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? 
+ .try_into() } } diff --git a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts index 77fdc19bc4..083b2d90ef 100644 --- a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts +++ b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts @@ -7,6 +7,7 @@ import { BlockConfig, Config, Rethnet, + SpecId, StateManager, Transaction, } from "../.."; @@ -24,14 +25,17 @@ describe("Rethnet", () => { let rethnet: Rethnet; beforeEach(async function () { - (blockchain = new Blockchain(async function ( + blockchain = new Blockchain(async function ( _blockNumber: bigint ): Promise { return Buffer.allocUnsafe(0); - })), - (stateManager = new StateManager()); + }); + + stateManager = new StateManager(); + const cfg: Config = { chainId: BigInt(0), + specId: SpecId.GrayGlacier, limitContractCodeSize: BigInt(2n) ** BigInt(32n), disableEip3607: true, }; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index 56f167678e..6724df4d58 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -2,144 +2,25 @@ import { BlockHeader as EthereumJSBlockHeader, HeaderData, } from "@nomicfoundation/ethereumjs-block"; -import { BlockchainInterface } from "@nomicfoundation/ethereumjs-blockchain"; -import { - DefaultStateManager, - StateManager, -} from "@nomicfoundation/ethereumjs-statemanager"; import { AccessListEIP2930Transaction, FeeMarketEIP1559Transaction, TypedTransaction, } from "@nomicfoundation/ethereumjs-tx"; +import { Address, toBuffer } from "@nomicfoundation/ethereumjs-util"; import { - Account, - Address, - bigIntToBuffer, - bufferToBigInt, - setLengthLeft, - toBuffer, -} from "@nomicfoundation/ethereumjs-util"; -import { - Account as RethnetAccount, BlockConfig, BlockHeader as RethnetBlockHeader, ExecutionResult, + SpecId, Transaction, } from "rethnet-evm"; import { fromBigIntLike } from "../../../util/bigint"; +import { HardforkName } from "../../../util/hardforks"; import { Exit } from "../vm/exit"; import { RunTxResult } from "../vm/vm-adapter"; -export class HardhatDB { - private _stateManager: StateManager; - private _blockchain: BlockchainInterface | undefined; - - constructor( - stateManager: StateManager, - private _getBlockHash: (blockNumber: bigint) => Promise - ) { - this._stateManager = stateManager; - } - - public async commit() { - return this._stateManager.commit(); - } - - public async checkpoint() { - return this._stateManager.checkpoint(); - } - - public async revert() { - return this._stateManager.revert(); - } - - public async getAccountByAddress(address: Buffer) { - return this._stateManager.getAccount(new Address(address)); - } - - public async getAccountStorageSlot(address: Buffer, index: bigint) { - const key = setLengthLeft(bigIntToBuffer(index), 32); - let data = await this._stateManager.getContractStorage( - new Address(address), - key - ); - - const EXPECTED_DATA_SIZE = 32; - if (data.length < EXPECTED_DATA_SIZE) { - data = Buffer.concat( - [Buffer.alloc(EXPECTED_DATA_SIZE - data.length, 0), data], - EXPECTED_DATA_SIZE - ); - } - - return bufferToBigInt(data); - } - - public async getBlockHash(blockNumber: bigint) { - return this._getBlockHash(blockNumber); - } - - public async getCodeByHash(codeHash: Buffer) { - if (this._stateManager instanceof DefaultStateManager) { - // 
eslint-disable-next-line @typescript-eslint/dot-notation - const db = this._stateManager._trie["_db"]; - const code = await db.get(Buffer.concat([Buffer.from("c"), codeHash])); - - if (code === null) { - // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error - throw new Error("returning null in getCodeByHash is not supported"); - } - - return code; - } - - // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error - throw new Error("getCodeByHash not implemented for ForkStateManager"); - } - - public async getStorageRoot() { - return this._stateManager.getStateRoot(); - } - - public async insertAccount( - address: Buffer, - account: RethnetAccount - ): Promise { - return this._stateManager.putAccount( - new Address(address), - new Account(account.nonce, account.balance, undefined, account.codeHash) - ); - } - - public async setAccountBalance(address: Buffer, balance: bigint) { - return this._stateManager.modifyAccountFields(new Address(address), { - balance, - }); - } - - public async setAccountCode(address: Buffer, code: Buffer) { - return this._stateManager.putContractCode(new Address(address), code); - } - - public async setAccountNonce(address: Buffer, nonce: bigint) { - return this._stateManager.modifyAccountFields(new Address(address), { - nonce, - }); - } - - public async setAccountStorageSlot( - address: Buffer, - index: bigint, - value: bigint - ) { - return this._stateManager.putContractStorage( - new Address(address), - setLengthLeft(bigIntToBuffer(index), 32), - setLengthLeft(bigIntToBuffer(value), 32) - ); - } -} +/* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ export function ethereumjsBlockHeaderToRethnet( blockHeader: EthereumJSBlockHeader @@ -164,6 +45,46 @@ export function ethereumjsBlockHeaderToRethnet( }; } +export function ethereumsjsHardforkToRethnet(hardfork: HardforkName): SpecId { + switch (hardfork) { + case HardforkName.FRONTIER: + return SpecId.Frontier; + case HardforkName.HOMESTEAD: + return SpecId.Homestead; + case HardforkName.DAO: + return SpecId.DaoFork; + case HardforkName.TANGERINE_WHISTLE: + return SpecId.Tangerine; + case HardforkName.SPURIOUS_DRAGON: + return SpecId.SpuriousDragon; + case HardforkName.BYZANTIUM: + return SpecId.Byzantium; + case HardforkName.CONSTANTINOPLE: + return SpecId.Constantinople; + case HardforkName.PETERSBURG: + return SpecId.Petersburg; + case HardforkName.ISTANBUL: + return SpecId.Istanbul; + case HardforkName.MUIR_GLACIER: + return SpecId.MuirGlacier; + case HardforkName.BERLIN: + return SpecId.Berlin; + case HardforkName.LONDON: + return SpecId.London; + case HardforkName.ARROW_GLACIER: + return SpecId.ArrowGlacier; + case HardforkName.GRAY_GLACIER: + return SpecId.GrayGlacier; + case HardforkName.MERGE: + return SpecId.Merge; + default: + const _exhaustiveCheck: never = hardfork; + throw new Error( + `Unknown hardfork name '${hardfork as string}', this shouldn't happen` + ); + } +} + export function ethereumjsHeaderDataToRethnet( headerData?: HeaderData, difficulty?: bigint, diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 719e0d7440..a83b6f207a 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -7,6 +7,7 @@ import { NodeConfig } from "../node-types"; import { ethereumjsHeaderDataToRethnet, 
ethereumjsTransactionToRethnet, + ethereumsjsHardforkToRethnet, rethnetResultToRunTxResult, } from "../utils/convertToRethnet"; import { hardforkGte, HardforkName } from "../../../util/hardforks"; @@ -43,6 +44,7 @@ export class RethnetAdapter implements VMAdapter { const rethnet = new Rethnet(blockchain, state.asInner(), { chainId: BigInt(config.chainId), + specId: ethereumsjsHardforkToRethnet(config.hardfork as HardforkName), limitContractCodeSize, disableBlockGasLimit: true, disableEip3607: true, From 02dc6a814e228ef6817d01b7bb252ad037822ae3 Mon Sep 17 00:00:00 2001 From: Wodann Date: Fri, 23 Dec 2022 23:53:38 -0600 Subject: [PATCH 015/406] feat: account storage root API and validation in dual adapter (#3454) --- crates/rethnet_evm/src/db/layered_db.rs | 7 ++++++ crates/rethnet_evm/src/db/request.rs | 12 ++++++++++ crates/rethnet_evm/src/db/sync.rs | 22 +++++++++++++++++++ crates/rethnet_evm/src/debug.rs | 5 ++++- crates/rethnet_evm_napi/src/state.rs | 11 ++++++++++ .../rethnet_evm_napi/test/evm/StateManager.ts | 2 +- .../hardhat-network/provider/RethnetState.ts | 3 ++- .../hardhat-network/provider/vm/dual.ts | 13 +++++------ 8 files changed, 65 insertions(+), 10 deletions(-) diff --git a/crates/rethnet_evm/src/db/layered_db.rs b/crates/rethnet_evm/src/db/layered_db.rs index 7b7d6cce50..80218f4842 100644 --- a/crates/rethnet_evm/src/db/layered_db.rs +++ b/crates/rethnet_evm/src/db/layered_db.rs @@ -337,6 +337,13 @@ impl DatabaseCommit for LayeredDatabase { impl DatabaseDebug for LayeredDatabase { type Error = anyhow::Error; + fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + Ok(self + .iter() + .find_map(|layer| layer.storage.get(address)) + .map(|storage| storage.as_ref().map_or(KECCAK_NULL_RLP, storage_root))) + } + fn insert_account( &mut self, address: Address, diff --git a/crates/rethnet_evm/src/db/request.rs b/crates/rethnet_evm/src/db/request.rs index 815b846750..36c949c39e 100644 --- a/crates/rethnet_evm/src/db/request.rs +++ b/crates/rethnet_evm/src/db/request.rs @@ -16,6 +16,10 @@ where address: Address, sender: oneshot::Sender, E>>, }, + AccountStorageRoot { + address: Address, + sender: oneshot::Sender, E>>, + }, Checkpoint { sender: oneshot::Sender>, }, @@ -84,6 +88,9 @@ where Request::AccountByAddress { address, sender } => { sender.send(db.basic(address)).unwrap() } + Request::AccountStorageRoot { address, sender } => { + sender.send(db.account_storage_root(&address)).unwrap() + } Request::Checkpoint { sender } => sender.send(db.checkpoint()).unwrap(), Request::CodeByHash { code_hash, sender } => { sender.send(db.code_by_hash(code_hash)).unwrap() @@ -147,6 +154,11 @@ where .field("address", address) .field("sender", sender) .finish(), + Self::AccountStorageRoot { address, sender } => f + .debug_struct("AccountStorageRoot") + .field("address", address) + .field("sender", sender) + .finish(), Self::Checkpoint { sender } => f .debug_struct("Checkpoint") .field("sender", sender) diff --git a/crates/rethnet_evm/src/db/sync.rs b/crates/rethnet_evm/src/db/sync.rs index 1d83496cb3..19076da0ee 100644 --- a/crates/rethnet_evm/src/db/sync.rs +++ b/crates/rethnet_evm/src/db/sync.rs @@ -88,6 +88,20 @@ where receiver.await.unwrap() } + /// Retrieves the storage root of the account at the specified address. 
+ pub async fn account_storage_root(&self, address: &Address) -> Result, E> { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::AccountStorageRoot { + address: *address, + sender, + }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + /// Retrieves the storage slot corresponding to the specified address and index. pub async fn account_storage_slot(&self, address: Address, index: U256) -> Result { let (sender, receiver) = oneshot::channel(); @@ -342,6 +356,14 @@ where { type Error = E; + fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + task::block_in_place(move || { + self.db + .runtime + .block_on(self.db.account_storage_root(address)) + }) + } + fn insert_account( &mut self, address: Address, diff --git a/crates/rethnet_evm/src/debug.rs b/crates/rethnet_evm/src/debug.rs index c2fb596096..be867b1999 100644 --- a/crates/rethnet_evm/src/debug.rs +++ b/crates/rethnet_evm/src/debug.rs @@ -10,7 +10,10 @@ pub trait DatabaseDebug { /// The database's error type. type Error; - /// Inserts an account with the specified `address`. + /// Retrieves the storage root of the account at the specified address. + fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error>; + + /// Inserts an account with the specified address. fn insert_account( &mut self, address: Address, diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 4d08fe6749..52a5fa4178 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -112,6 +112,17 @@ impl StateManager { ) } + /// Retrieves the storage root of the account at the specified address. + #[napi] + pub async fn get_account_storage_root(&self, address: Buffer) -> napi::Result> { + let address = Address::from_slice(&address); + + self.db.account_storage_root(&address).await.map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |root| Ok(root.map(|root| Buffer::from(root.as_ref()))), + ) + } + #[napi] pub async fn get_account_storage_slot( &self, diff --git a/crates/rethnet_evm_napi/test/evm/StateManager.ts b/crates/rethnet_evm_napi/test/evm/StateManager.ts index 99f1dffc23..b771b0877a 100644 --- a/crates/rethnet_evm_napi/test/evm/StateManager.ts +++ b/crates/rethnet_evm_napi/test/evm/StateManager.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; import { Address, KECCAK256_NULL } from "@nomicfoundation/ethereumjs-util"; -import { Account, AccountData, Config, StateManager, Transaction } from "../.."; +import { AccountData, StateManager } from "../.."; describe("State Manager", () => { const caller = Address.fromString( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts index 3b2b832d4d..2c2963caf4 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts @@ -40,10 +40,11 @@ export class RethnetStateManager { public async getAccount(address: Address): Promise { const account = await this._state.getAccountByAddress(address.buf); + const storageRoot = await this._state.getAccountStorageRoot(address.buf); return new Account( account?.nonce, account?.balance, - undefined, + storageRoot ?? 
undefined, account?.codeHash ); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index af523537a7..ffdaa12acc 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -375,11 +375,10 @@ function assertEqualAccounts( throw new Error("Different nonce"); } - // TODO: Add storageRoot to Rethnet - // if (ethereumJSAccount.storageRoot !== rethnetAccount.storageRoot) { - // console.trace( - // `Different storageRoot: ${ethereumJSAccount.storageRoot} !== ${rethnetAccount.storageRoot}` - // ); - // throw new Error("Different storageRoot"); - // } + if (!ethereumJSAccount.storageRoot.equals(rethnetAccount.storageRoot)) { + console.trace( + `Different storageRoot: ${ethereumJSAccount.storageRoot} !== ${rethnetAccount.storageRoot}` + ); + throw new Error("Different storageRoot"); + } } From 728fb2fcd8eef00fc9ed88e48ac819b9f6477372 Mon Sep 17 00:00:00 2001 From: Wodann Date: Sat, 24 Dec 2022 00:12:05 -0600 Subject: [PATCH 016/406] feat: calculate bloom for Rethnet logs and validate against ethereumjs (#3456) --- crates/rethnet_evm_napi/src/block/builder.rs | 2 +- crates/rethnet_evm_napi/src/lib.rs | 50 ++++++++++++------- .../hardhat-network/provider/utils/bloom.ts | 7 +++ .../provider/utils/convertToRethnet.ts | 18 +++++-- .../hardhat-network/provider/vm/dual.ts | 7 +++ .../hardhat-network/provider/vm/ethereumjs.ts | 14 ++++-- 6 files changed, 73 insertions(+), 25 deletions(-) diff --git a/crates/rethnet_evm_napi/src/block/builder.rs b/crates/rethnet_evm_napi/src/block/builder.rs index 3c7e87d087..4e76958865 100644 --- a/crates/rethnet_evm_napi/src/block/builder.rs +++ b/crates/rethnet_evm_napi/src/block/builder.rs @@ -60,7 +60,7 @@ impl BlockBuilder { .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; - result.try_into() + Ok(result.into()) } else { Err(napi::Error::new( Status::InvalidArg, diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index ba9b81dbf0..986dad86e8 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -221,36 +221,51 @@ impl From for rethnet_evm::SpecId { } } +#[napi(object)] +pub struct Log { + pub address: Buffer, + pub topics: Vec, + pub data: Buffer, +} + +impl From for Log { + fn from(log: rethnet_evm::Log) -> Self { + let topics = log + .topics + .into_iter() + .map(|topic| Buffer::from(topic.as_bytes())) + .collect(); + + Self { + address: Buffer::from(log.address.as_bytes()), + topics, + data: Buffer::from(log.data.as_ref()), + } + } +} + #[napi(object)] pub struct ExecutionResult { pub exit_code: u8, pub output: TransactionOutput, pub gas_used: BigInt, pub gas_refunded: BigInt, - pub logs: Vec, + pub logs: Vec, pub trace: Trace, } -impl TryFrom<(rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)> for ExecutionResult { - type Error = napi::Error; - - fn try_from( - (result, trace): (rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace), - ) -> std::result::Result { - let logs = result - .logs - .into_iter() - .map(serde_json::to_value) - .collect::>>()?; +impl From<(rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)> for ExecutionResult { + fn from((result, trace): (rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)) -> Self { + let logs = result.logs.into_iter().map(Log::from).collect(); - Ok(Self { + Self { exit_code: result.exit_reason 
as u8, output: result.out.into(), gas_used: BigInt::from(result.gas_used), gas_refunded: BigInt::from(result.gas_refunded), logs, trace: trace.into(), - }) + } } } @@ -276,7 +291,7 @@ impl rethnet_evm::trace::Trace, ), ) -> std::result::Result { - let exec_result = (result, trace).try_into()?; + let exec_result = (result, trace).into(); let state = serde_json::to_value(state)?; Ok(Self { exec_result, state }) @@ -388,10 +403,11 @@ impl Rethnet { let transaction: TxEnv = transaction.try_into()?; let block = block.try_into()?; - self.runtime + Ok(self + .runtime .run(transaction, block) .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? - .try_into() + .into()) } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/bloom.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/bloom.ts index 532843c45d..c85896fcab 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/bloom.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/bloom.ts @@ -81,4 +81,11 @@ export class Bloom { this.bitvector[i] = this.bitvector[i] | bloom.bitvector[i]; } } + + /** + * Checks equality with another bloom. + */ + public equals(otherBloom: Bloom): boolean { + return this.bitvector.equals(otherBloom.bitvector); + } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index 6724df4d58..df8cd7d1ee 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -12,6 +12,7 @@ import { BlockConfig, BlockHeader as RethnetBlockHeader, ExecutionResult, + Log, SpecId, Transaction, } from "rethnet-evm"; @@ -19,6 +20,7 @@ import { fromBigIntLike } from "../../../util/bigint"; import { HardforkName } from "../../../util/hardforks"; import { Exit } from "../vm/exit"; import { RunTxResult } from "../vm/vm-adapter"; +import { Bloom } from "./bloom"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ @@ -137,6 +139,17 @@ export function ethereumjsTransactionToRethnet( return rethnetTx; } +function rethnetLogsToBloom(logs: Log[]): Bloom { + const bloom = new Bloom(); + for (const log of logs) { + bloom.add(log.address); + for (const topic of log.topics) { + bloom.add(topic); + } + } + return bloom; +} + export function rethnetResultToRunTxResult( rethnetResult: ExecutionResult ): RunTxResult { @@ -153,10 +166,7 @@ export function rethnetResultToRunTxResult( : undefined, exit: vmError, returnValue: rethnetResult.output.output ?? 
Buffer.from([]), - get bloom(): any { - console.trace("bloom not implemented"); - return process.exit(1); - }, + bloom: rethnetLogsToBloom(rethnetResult.logs), get receipt(): any { console.trace("receipt not implemented"); return process.exit(1); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index ffdaa12acc..a59df5af44 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -346,6 +346,13 @@ function assertEqualRunTxResults( throw new Error("Different returnValue"); } } + + if (!ethereumJSResult.bloom.equals(rethnetResult.bloom)) { + console.trace( + `Different bloom: ${ethereumJSResult.bloom} !== ${rethnetResult.bloom}` + ); + throw new Error("Different bloom"); + } } function assertEqualAccounts( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index eb8c67576d..a4f2364f3c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -32,6 +32,7 @@ import { FakeSenderAccessListEIP2930Transaction } from "../transactions/FakeSend import { FakeSenderEIP1559Transaction } from "../transactions/FakeSenderEIP1559Transaction"; import { FakeSenderTransaction } from "../transactions/FakeSenderTransaction"; import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; +import { Bloom } from "../utils/bloom"; import { makeForkClient } from "../utils/makeForkClient"; import { makeStateTrie } from "../utils/makeStateTrie"; import { Exit } from "./exit"; @@ -197,7 +198,7 @@ export class EthereumJSAdapter implements VMAdapter { const ethereumJSError = ethereumJSResult.execResult.exceptionError; const result: RunTxResult = { - bloom: ethereumJSResult.bloom, + bloom: new Bloom(ethereumJSResult.bloom.bitvector), gasUsed: ethereumJSResult.totalGasSpent, receipt: ethereumJSResult.receipt, returnValue: ethereumJSResult.execResult.returnValue, @@ -418,7 +419,7 @@ export class EthereumJSAdapter implements VMAdapter { const ethereumJSError = ethereumJSResult.execResult.exceptionError; const result: RunTxResult = { - bloom: ethereumJSResult.bloom, + bloom: new Bloom(ethereumJSResult.bloom.bitvector), gasUsed: ethereumJSResult.totalGasSpent, receipt: ethereumJSResult.receipt, returnValue: ethereumJSResult.execResult.returnValue, @@ -542,7 +543,14 @@ export class EthereumJSAdapter implements VMAdapter { }, gasUsed: result.execResult.executionGasUsed, gasRefunded: result.execResult.gasRefund ?? 0n, - logs: result.execResult.logs ?? [], + logs: + result.execResult.logs?.map((log) => { + return { + address: log[0], + topics: log[1], + data: log[2], + }; + }) ?? 
[], trace: { steps: [], returnValue: result.execResult.returnValue, From 5aff68364d087591b8ef6bccf0cb250beb15bd5b Mon Sep 17 00:00:00 2001 From: Wodann Date: Fri, 30 Dec 2022 09:35:13 -0600 Subject: [PATCH 017/406] feat: generate receipt for Rethnet transactions and validate (#3483) --- crates/rethnet_evm_napi/src/lib.rs | 1 + crates/rethnet_evm_napi/src/receipt.rs | 12 ++++ .../provider/utils/convertToRethnet.ts | 18 +++-- .../hardhat-network/provider/vm/dual.ts | 65 +++++++++++++++++++ .../hardhat-network/provider/vm/rethnet.ts | 10 ++- 5 files changed, 99 insertions(+), 7 deletions(-) create mode 100644 crates/rethnet_evm_napi/src/receipt.rs diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index 986dad86e8..69159a5947 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -2,6 +2,7 @@ mod access_list; mod block; mod blockchain; mod cast; +mod receipt; mod state; mod sync; mod threadsafe_function; diff --git a/crates/rethnet_evm_napi/src/receipt.rs b/crates/rethnet_evm_napi/src/receipt.rs new file mode 100644 index 0000000000..e01724fdb1 --- /dev/null +++ b/crates/rethnet_evm_napi/src/receipt.rs @@ -0,0 +1,12 @@ +use napi::bindgen_prelude::{BigInt, Buffer}; +use napi_derive::napi; + +use crate::Log; + +#[napi(object)] +pub struct Receipt { + pub cumulative_block_gas_used: BigInt, + pub logs_bloom: Buffer, + pub logs: Vec, + pub status: bool, +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index df8cd7d1ee..9d2b907e4f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -151,13 +151,16 @@ function rethnetLogsToBloom(logs: Log[]): Bloom { } export function rethnetResultToRunTxResult( - rethnetResult: ExecutionResult + rethnetResult: ExecutionResult, + blockGasUsed: bigint ): RunTxResult { const vmError = Exit.fromRethnetExitCode(rethnetResult.exitCode); // We return an object with only the properties that are used by Hardhat. // To be extra sure that the other properties are not used, we add getters // that exit the process if accessed. + const bloom = rethnetLogsToBloom(rethnetResult.logs); + return { gasUsed: rethnetResult.gasUsed, createdAddress: @@ -166,10 +169,15 @@ export function rethnetResultToRunTxResult( : undefined, exit: vmError, returnValue: rethnetResult.output.output ?? Buffer.from([]), - bloom: rethnetLogsToBloom(rethnetResult.logs), - get receipt(): any { - console.trace("receipt not implemented"); - return process.exit(1); + bloom, + receipt: { + // Receipts have a 0 as status on error + status: vmError.isError() ? 
0 : 1, + cumulativeBlockGasUsed: blockGasUsed + rethnetResult.gasUsed, + bitvector: bloom.bitvector, + logs: rethnetResult.logs.map((log) => { + return [log.address, log.topics, log.data]; + }), }, }; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index a59df5af44..b49d5e94f9 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -1,5 +1,6 @@ import { Block } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; +import { Log } from "@nomicfoundation/ethereumjs-evm"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import { Account, @@ -353,6 +354,70 @@ function assertEqualRunTxResults( ); throw new Error("Different bloom"); } + + if ( + !ethereumJSResult.receipt.bitvector.equals(rethnetResult.receipt.bitvector) + ) { + console.trace( + `Different receipt bitvector: ${ethereumJSResult.receipt.bitvector} !== ${rethnetResult.receipt.bitvector}` + ); + throw new Error("Different receipt bitvector"); + } + + if ( + ethereumJSResult.receipt.cumulativeBlockGasUsed !== + rethnetResult.receipt.cumulativeBlockGasUsed + ) { + console.trace( + `Different receipt cumulativeBlockGasUsed: ${ethereumJSResult.receipt.cumulativeBlockGasUsed} !== ${rethnetResult.receipt.cumulativeBlockGasUsed}` + ); + throw new Error("Different receipt cumulativeBlockGasUsed"); + } + + assertEqualLogs(ethereumJSResult.receipt.logs, rethnetResult.receipt.logs); +} + +function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { + if (ethereumJSLogs.length !== rethnetLogs.length) { + console.trace( + `Different logs length: ${ethereumJSLogs.length} !== ${rethnetLogs.length}` + ); + throw new Error("Different logs length"); + } + + for (let logIdx = 0; logIdx < ethereumJSLogs.length; ++logIdx) { + if (!ethereumJSLogs[logIdx][0].equals(rethnetLogs[logIdx][0])) { + console.trace( + `Different log[${logIdx}] address: ${ethereumJSLogs[logIdx][0]} !== ${rethnetLogs[logIdx][0]}` + ); + throw new Error("Different log address"); + } + + const ethereumJSTopics = ethereumJSLogs[logIdx][1]; + const rethnetTopics = rethnetLogs[logIdx][1]; + if (ethereumJSTopics.length !== rethnetTopics.length) { + console.trace( + `Different log[${logIdx}] topics length: ${ethereumJSTopics.length} !== ${rethnetTopics.length}` + ); + throw new Error("Different log topics length"); + } + + for (let topicIdx = 0; topicIdx < ethereumJSTopics.length; ++topicIdx) { + if (!ethereumJSTopics[topicIdx].equals(rethnetTopics[topicIdx])) { + console.trace( + `Different log[${logIdx}] topic[${topicIdx}]: ${ethereumJSTopics[topicIdx]} !== ${rethnetTopics[topicIdx]}` + ); + throw new Error("Different log topic"); + } + } + + if (!ethereumJSLogs[logIdx][2].equals(rethnetLogs[logIdx][2])) { + console.trace( + `Different log[${logIdx}] data: ${ethereumJSLogs[logIdx][2]} !== ${rethnetLogs[logIdx][2]}` + ); + throw new Error("Different log data"); + } + } } function assertEqualAccounts( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index a83b6f207a..084dc661eb 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -84,7 +84,10 @@ export class RethnetAdapter 
implements VMAdapter { }); try { - const result = rethnetResultToRunTxResult(rethnetResult.execResult); + const result = rethnetResultToRunTxResult( + rethnetResult.execResult, + blockContext.header.gasUsed + ); return [result, rethnetResult.execResult.trace]; } catch (e) { console.log("Rethnet trace"); @@ -209,7 +212,10 @@ export class RethnetAdapter implements VMAdapter { ); try { - const result = rethnetResultToRunTxResult(rethnetResult); + const result = rethnetResultToRunTxResult( + rethnetResult, + block.header.gasUsed + ); return [result, rethnetResult.trace]; } catch (e) { console.log("Rethnet trace"); From 7e7d5a1a1e6b2f65c15a40cfc3791798f8a7bcd3 Mon Sep 17 00:00:00 2001 From: Wodann Date: Mon, 2 Jan 2023 12:09:48 -0600 Subject: [PATCH 018/406] misc: cleanup ethereumjs-specific code (#3484) Co-authored-by: Franco Victorio --- .../internal/hardhat-network/provider/node.ts | 45 +++++++++---------- .../provider/utils/putGenesisBlock.ts | 5 +-- 2 files changed, 23 insertions(+), 27 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index ed89463dfc..295d181489 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -103,7 +103,6 @@ import { Bloom } from "./utils/bloom"; import { getCurrentTimestamp } from "./utils/getCurrentTimestamp"; import { makeCommon } from "./utils/makeCommon"; import { makeForkClient } from "./utils/makeForkClient"; -import { makeStateTrie } from "./utils/makeStateTrie"; import { putGenesisBlock } from "./utils/putGenesisBlock"; import { txMapToArray } from "./utils/txMapToArray"; import { RandomBufferGenerator } from "./utils/random"; @@ -194,35 +193,13 @@ export class HardhatNode extends EventEmitter { hardforkActivations = config.chains.get(forkNetworkId)!.hardforkHistory; } } else { - const stateTrie = await makeStateTrie(genesisAccounts); - - const hardhatBlockchain = new HardhatBlockchain(common); - - const genesisBlockBaseFeePerGas = hardforkGte( - hardfork, - HardforkName.LONDON - ) - ? initialBaseFeePerGasConfig ?? - BigInt(HARDHAT_NETWORK_DEFAULT_INITIAL_BASE_FEE_PER_GAS) - : undefined; - - await putGenesisBlock( - hardhatBlockchain, - common, - config, - stateTrie, - hardfork, - mixHashGenerator.next(), - genesisBlockBaseFeePerGas - ); + blockchain = new HardhatBlockchain(common); if (config.initialDate !== undefined) { initialBlockTimeOffset = BigInt( getDifferenceInSeconds(config.initialDate, new Date()) ); } - - blockchain = hardhatBlockchain; } const currentHardfork = common.hardfork(); @@ -239,6 +216,26 @@ export class HardhatNode extends EventEmitter { ) ); + if (!isForkedNodeConfig(config)) { + const genesisBlockBaseFeePerGas = hardforkGte( + hardfork, + HardforkName.LONDON + ) + ? initialBaseFeePerGasConfig ?? 
+ BigInt(HARDHAT_NETWORK_DEFAULT_INITIAL_BASE_FEE_PER_GAS) + : undefined; + + await putGenesisBlock( + blockchain as HardhatBlockchain, + common, + config, + await vm.getStateRoot(), + hardfork, + mixHashGenerator.next(), + genesisBlockBaseFeePerGas + ); + } + const txPool = new TxPool( (address) => vm.getAccount(address), BigInt(blockGasLimit), diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/putGenesisBlock.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/putGenesisBlock.ts index a348c678e6..49fe295ab6 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/putGenesisBlock.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/putGenesisBlock.ts @@ -1,6 +1,5 @@ import { Block, HeaderData } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; -import { Trie } from "@nomicfoundation/ethereumjs-trie"; import { bufferToHex } from "@nomicfoundation/ethereumjs-util"; import { dateToTimestampSeconds } from "../../../util/date"; @@ -13,7 +12,7 @@ export async function putGenesisBlock( blockchain: HardhatBlockchain, common: Common, { initialDate, blockGasLimit }: LocalNodeConfig, - stateTrie: Trie, + stateRoot: Buffer, hardfork: HardforkName, initialMixHash: Buffer, initialBaseFee?: bigint @@ -31,7 +30,7 @@ export async function putGenesisBlock( difficulty: isPostMerge ? 0 : 1, nonce: isPostMerge ? "0x0000000000000000" : "0x0000000000000042", extraData: "0x1234", - stateRoot: bufferToHex(stateTrie.root()), + stateRoot: bufferToHex(stateRoot), }; if (isPostMerge) { From 2e59b77fcaa673f4a175bdc86ce97da5682afff6 Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Thu, 5 Jan 2023 12:22:59 +0100 Subject: [PATCH 019/406] Add an envar to select the vm --- .../internal/hardhat-network/provider/node.ts | 21 +++++----- .../hardhat-network/provider/vm/creation.ts | 39 +++++++++++++++++++ 2 files changed, 48 insertions(+), 12 deletions(-) create mode 100644 packages/hardhat-core/src/internal/hardhat-network/provider/vm/creation.ts diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 295d181489..ebd608513c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -106,10 +106,10 @@ import { makeForkClient } from "./utils/makeForkClient"; import { putGenesisBlock } from "./utils/putGenesisBlock"; import { txMapToArray } from "./utils/txMapToArray"; import { RandomBufferGenerator } from "./utils/random"; -import { DualModeAdapter } from "./vm/dual"; import { RunBlockResult, RunTxResult, VMAdapter } from "./vm/vm-adapter"; import { BlockBuilder } from "./vm/block-builder"; import { ExitCode, Exit } from "./vm/exit"; +import { createVm } from "./vm/creation"; const log = debug("hardhat:core:hardhat-network:node"); @@ -203,17 +203,14 @@ export class HardhatNode extends EventEmitter { } const currentHardfork = common.hardfork(); - const vm = await DualModeAdapter.create( - common, - blockchain, - config, - (blockNumber) => - selectHardfork( - forkBlockNum, - currentHardfork, - hardforkActivations, - blockNumber - ) + + const vm = await createVm(common, blockchain, config, (blockNumber) => + selectHardfork( + forkBlockNum, + currentHardfork, + hardforkActivations, + blockNumber + ) ); if (!isForkedNodeConfig(config)) { diff --git 
a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/creation.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/creation.ts new file mode 100644 index 0000000000..a671610781 --- /dev/null +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/creation.ts @@ -0,0 +1,39 @@ +import { Common } from "@nomicfoundation/ethereumjs-common"; +import { assertHardhatInvariant } from "../../../core/errors"; + +import { NodeConfig } from "../node-types"; +import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; +import { DualModeAdapter } from "./dual"; +import { EthereumJSAdapter } from "./ethereumjs"; +import { RethnetAdapter } from "./rethnet"; +import { VMAdapter } from "./vm-adapter"; + +/** + * Creates an instance of a VMAdapter. Which implementation is used depends on + * the value of the HARDHAT_EXPERIMENTAL_VM_MODE environment variable. + */ +export function createVm( + common: Common, + blockchain: HardhatBlockchainInterface, + config: NodeConfig, + selectHardfork: (blockNumber: bigint) => string +): Promise { + const vmModeEnvVar = process.env.HARDHAT_EXPERIMENTAL_VM_MODE; + + if (vmModeEnvVar === "ethereumjs") { + return EthereumJSAdapter.create(common, blockchain, config, selectHardfork); + } else if (vmModeEnvVar === "rethnet") { + return RethnetAdapter.create( + config, + selectHardfork, + async (blockNumber) => { + const block = await blockchain.getBlock(blockNumber); + assertHardhatInvariant(block !== null, "Should be able to get block"); + + return block.header.hash(); + } + ); + } else { + return DualModeAdapter.create(common, blockchain, config, selectHardfork); + } +} From 1fc35a0669e5c4f5a431fbda93306fa8f863e9b3 Mon Sep 17 00:00:00 2001 From: Wodann Date: Wed, 11 Jan 2023 10:56:44 -0500 Subject: [PATCH 020/406] refactor: simplify async generics (#3561) --- crates/rethnet_evm/src/block/builder.rs | 16 ++-- crates/rethnet_evm/src/blockchain/sync.rs | 18 ++-- crates/rethnet_evm/src/db.rs | 1 - crates/rethnet_evm/src/db/sync.rs | 106 ++++++++-------------- crates/rethnet_evm/src/evm.rs | 16 +--- crates/rethnet_evm/src/runtime.rs | 17 ++-- crates/rethnet_evm_napi/src/blockchain.rs | 6 +- crates/rethnet_evm_napi/src/state.rs | 2 +- 8 files changed, 65 insertions(+), 117 deletions(-) diff --git a/crates/rethnet_evm/src/block/builder.rs b/crates/rethnet_evm/src/block/builder.rs index f3bd96a511..397167def3 100644 --- a/crates/rethnet_evm/src/block/builder.rs +++ b/crates/rethnet_evm/src/block/builder.rs @@ -9,12 +9,8 @@ use revm::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}; use tokio::runtime::Runtime; use crate::{ - blockchain::{AsyncBlockchain, SyncBlockchain}, - db::{AsyncDatabase, SyncDatabase}, - evm::build_evm, - inspector::RethnetInspector, - trace::Trace, - HeaderData, + blockchain::AsyncBlockchain, db::AsyncDatabase, evm::build_evm, inspector::RethnetInspector, + trace::Trace, HeaderData, }; /// A builder for constructing Ethereum blocks. @@ -22,8 +18,8 @@ pub struct BlockBuilder where E: Debug + Send + 'static, { - blockchain: Arc>, E>>, - state: Arc>, E>>, + blockchain: Arc>, + state: Arc>, header: PartialHeader, transactions: Vec, cfg: CfgEnv, @@ -35,8 +31,8 @@ where { /// Creates an intance of [`BlockBuilder`], creating a checkpoint in the process. 
pub async fn new( - blockchain: Arc>, E>>, - db: Arc>, E>>, + blockchain: Arc>, + db: Arc>, cfg: CfgEnv, parent: Header, header: HeaderData, diff --git a/crates/rethnet_evm/src/blockchain/sync.rs b/crates/rethnet_evm/src/blockchain/sync.rs index ba050a6d20..7376bfefdf 100644 --- a/crates/rethnet_evm/src/blockchain/sync.rs +++ b/crates/rethnet_evm/src/blockchain/sync.rs @@ -1,4 +1,4 @@ -use std::{fmt::Debug, io, marker::PhantomData}; +use std::{fmt::Debug, io}; use rethnet_eth::{B256, U256}; use revm::blockchain::Blockchain; @@ -30,24 +30,21 @@ where /// A helper class for converting a synchronous blockchain into an asynchronous blockchain. /// /// Requires the inner blockchain to implement [`Blockchain`]. -pub struct AsyncBlockchain +pub struct AsyncBlockchain where - B: SyncBlockchain, E: Debug + Send, { runtime: Runtime, request_sender: UnboundedSender>, blockchain_handle: Option>, - phantom: PhantomData, } -impl AsyncBlockchain +impl AsyncBlockchain where - B: SyncBlockchain, E: Debug + Send + 'static, { /// Constructs an [`AsyncBlockchain`] instance with the provided database. - pub fn new(mut blockchain: B) -> io::Result { + pub fn new>(mut blockchain: B) -> io::Result { let runtime = Builder::new_multi_thread().build()?; let (sender, mut receiver) = unbounded_channel::>(); @@ -64,7 +61,6 @@ where runtime, request_sender: sender, blockchain_handle: Some(blockchain_handle), - phantom: PhantomData, }) } @@ -100,9 +96,8 @@ where // } } -impl Drop for AsyncBlockchain +impl Drop for AsyncBlockchain where - D: SyncBlockchain, E: Debug + Send, { fn drop(&mut self) { @@ -116,9 +111,8 @@ where } } -impl<'b, B, E> Blockchain for &'b AsyncBlockchain +impl<'b, E> Blockchain for &'b AsyncBlockchain where - B: SyncBlockchain, E: Debug + Send + 'static, { type Error = E; diff --git a/crates/rethnet_evm/src/db.rs b/crates/rethnet_evm/src/db.rs index cc11b88bdc..305c552319 100644 --- a/crates/rethnet_evm/src/db.rs +++ b/crates/rethnet_evm/src/db.rs @@ -2,7 +2,6 @@ mod layered_db; mod request; mod sync; -pub(super) use sync::AsyncDatabaseWrapper; pub use sync::{AsyncDatabase, SyncDatabase}; pub use layered_db::{LayeredDatabase, RethnetLayer}; diff --git a/crates/rethnet_evm/src/db/sync.rs b/crates/rethnet_evm/src/db/sync.rs index 19076da0ee..de6a21b0d8 100644 --- a/crates/rethnet_evm/src/db/sync.rs +++ b/crates/rethnet_evm/src/db/sync.rs @@ -1,4 +1,4 @@ -use std::{fmt::Debug, io, marker::PhantomData}; +use std::{fmt::Debug, io}; use hashbrown::HashMap; use rethnet_eth::{Address, B256, U256}; @@ -34,24 +34,21 @@ where /// A helper class for converting a synchronous database into an asynchronous database. /// /// Requires the inner database to implement [`Database`], [`DatabaseCommit`], and [`DatabaseDebug`]. -pub struct AsyncDatabase +pub struct AsyncDatabase where - D: SyncDatabase, E: Debug + Send, { runtime: Runtime, request_sender: UnboundedSender>, db_handle: Option>, - phantom: PhantomData, } -impl AsyncDatabase +impl AsyncDatabase where - D: SyncDatabase, E: Debug + Send + 'static, { /// Constructs an [`AsyncDatabase`] instance with the provided database. 
- pub fn new(mut db: D) -> io::Result { + pub fn new>(mut db: D) -> io::Result { let runtime = Builder::new_multi_thread().build()?; let (sender, mut receiver) = unbounded_channel::>(); @@ -68,7 +65,6 @@ where runtime, request_sender: sender, db_handle: Some(db_handle), - phantom: PhantomData, }) } @@ -275,9 +271,8 @@ where } } -impl Drop for AsyncDatabase +impl Drop for AsyncDatabase where - D: SyncDatabase, E: Debug + Send, { fn drop(&mut self) { @@ -291,76 +286,53 @@ where } } -/// Wrapper around an [`AsyncDatabase`] to allow synchronous function calls. -pub struct AsyncDatabaseWrapper<'d, D, E> +impl<'d, E> Database for &'d AsyncDatabase where - D: SyncDatabase, - E: Debug + Send, -{ - db: &'d AsyncDatabase, -} - -impl<'d, D, E> AsyncDatabaseWrapper<'d, D, E> -where - D: SyncDatabase, - E: Debug + Send, -{ - /// Constructs an [`AsyncDatabaseWrapper`] instance. - pub fn new(db: &'d AsyncDatabase) -> Self { - Self { db } - } -} - -impl<'d, D, E> Database for AsyncDatabaseWrapper<'d, D, E> -where - D: SyncDatabase, E: Debug + Send + 'static, { type Error = E; fn basic(&mut self, address: Address) -> Result, Self::Error> { task::block_in_place(move || { - self.db - .runtime() - .block_on(self.db.account_by_address(address)) + self.runtime + .block_on(AsyncDatabase::account_by_address(*self, address)) }) } fn code_by_hash(&mut self, code_hash: B256) -> Result { - task::block_in_place(move || self.db.runtime().block_on(self.db.code_by_hash(code_hash))) + task::block_in_place(move || { + self.runtime + .block_on(AsyncDatabase::code_by_hash(*self, code_hash)) + }) } fn storage(&mut self, address: Address, index: U256) -> Result { task::block_in_place(move || { - self.db - .runtime() - .block_on(self.db.account_storage_slot(address, index)) + self.runtime + .block_on(AsyncDatabase::account_storage_slot(*self, address, index)) }) } } -impl<'d, D, E> DatabaseCommit for AsyncDatabaseWrapper<'d, D, E> +impl<'d, E> DatabaseCommit for &'d AsyncDatabase where - D: SyncDatabase, E: Debug + Send + 'static, { fn commit(&mut self, changes: HashMap) { - task::block_in_place(move || self.db.runtime().block_on(self.db.apply(changes))) + task::block_in_place(move || self.runtime.block_on(self.apply(changes))) } } -impl<'d, D, E> DatabaseDebug for AsyncDatabaseWrapper<'d, D, E> +impl<'d, E> DatabaseDebug for &'d AsyncDatabase where - D: SyncDatabase, E: Debug + Send + 'static, { type Error = E; fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { task::block_in_place(move || { - self.db - .runtime - .block_on(self.db.account_storage_root(address)) + self.runtime + .block_on(AsyncDatabase::account_storage_root(*self, address)) }) } @@ -370,9 +342,8 @@ where account_info: AccountInfo, ) -> Result<(), Self::Error> { task::block_in_place(move || { - self.db - .runtime() - .block_on(self.db.insert_account(address, account_info)) + self.runtime + .block_on(AsyncDatabase::insert_account(*self, address, account_info)) }) } @@ -382,14 +353,16 @@ where modifier: Box) + Send>, ) -> Result<(), Self::Error> { task::block_in_place(move || { - self.db - .runtime() - .block_on(self.db.modify_account(address, modifier)) + self.runtime + .block_on(AsyncDatabase::modify_account(*self, address, modifier)) }) } fn remove_account(&mut self, address: Address) -> Result, Self::Error> { - task::block_in_place(move || self.db.runtime().block_on(self.db.remove_account(address))) + task::block_in_place(move || { + self.runtime + .block_on(AsyncDatabase::remove_account(*self, address)) + }) } fn 
set_account_storage_slot( @@ -399,41 +372,40 @@ where value: U256, ) -> Result<(), Self::Error> { task::block_in_place(move || { - self.db - .runtime() - .block_on(self.db.set_account_storage_slot(address, index, value)) + self.runtime + .block_on(AsyncDatabase::set_account_storage_slot( + *self, address, index, value, + )) }) } fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { task::block_in_place(move || { - self.db - .runtime() - .block_on(self.db.set_state_root(state_root)) + self.runtime + .block_on(AsyncDatabase::set_state_root(*self, state_root)) }) } fn state_root(&mut self) -> Result { - task::block_in_place(move || self.db.runtime().block_on(self.db.state_root())) + task::block_in_place(move || self.runtime.block_on(AsyncDatabase::state_root(*self))) } fn checkpoint(&mut self) -> Result<(), Self::Error> { - task::block_in_place(move || self.db.runtime().block_on(self.db.checkpoint())) + task::block_in_place(move || self.runtime.block_on(AsyncDatabase::checkpoint(*self))) } fn revert(&mut self) -> Result<(), Self::Error> { - task::block_in_place(move || self.db.runtime().block_on(self.db.revert())) + task::block_in_place(move || self.runtime.block_on(AsyncDatabase::revert(*self))) } fn make_snapshot(&mut self) -> B256 { - task::block_in_place(move || self.db.runtime().block_on(self.db.make_snapshot())) + task::block_in_place(move || self.runtime.block_on(AsyncDatabase::make_snapshot(*self))) } fn remove_snapshot(&mut self, state_root: &B256) -> bool { task::block_in_place(move || { - self.db - .runtime() - .block_on(self.db.remove_snapshot(*state_root)) + self.runtime + .block_on(AsyncDatabase::remove_snapshot(*self, *state_root)) }) } } diff --git a/crates/rethnet_evm/src/evm.rs b/crates/rethnet_evm/src/evm.rs index ca0ddc2f96..3639cafc78 100644 --- a/crates/rethnet_evm/src/evm.rs +++ b/crates/rethnet_evm/src/evm.rs @@ -2,29 +2,23 @@ use std::fmt::Debug; use revm::{BlockEnv, CfgEnv, TxEnv}; -use crate::{ - blockchain::{AsyncBlockchain, SyncBlockchain}, - db::{AsyncDatabase, AsyncDatabaseWrapper, SyncDatabase}, -}; +use crate::{blockchain::AsyncBlockchain, db::AsyncDatabase}; /// Creates an evm from the provided database, config, transaction, and block. #[allow(clippy::type_complexity)] pub fn build_evm<'b, 'd, E>( - blockchain: &'b AsyncBlockchain>, E>, - db: &'d AsyncDatabase>, E>, + blockchain: &'b AsyncBlockchain, + db: &'d AsyncDatabase, cfg: CfgEnv, transaction: TxEnv, block: BlockEnv, -) -> revm::EVM< - AsyncDatabaseWrapper<'d, Box>, E>, - &'b AsyncBlockchain>, E>, -> +) -> revm::EVM<&'d AsyncDatabase, &'b AsyncBlockchain> where E: Debug + Send + 'static, { let mut evm = revm::EVM::new(); evm.set_blockchain(blockchain); - evm.database(AsyncDatabaseWrapper::new(db)); + evm.database(db); evm.env.cfg = cfg; evm.env.block = block; evm.env.tx = transaction; diff --git a/crates/rethnet_evm/src/runtime.rs b/crates/rethnet_evm/src/runtime.rs index 201af1793e..49a04858cf 100644 --- a/crates/rethnet_evm/src/runtime.rs +++ b/crates/rethnet_evm/src/runtime.rs @@ -3,13 +3,8 @@ use std::{fmt::Debug, sync::Arc}; use revm::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}; use crate::{ - blockchain::{AsyncBlockchain, SyncBlockchain}, - db::{AsyncDatabase, SyncDatabase}, - evm::build_evm, - inspector::RethnetInspector, - trace::Trace, - transaction::TransactionError, - State, + blockchain::AsyncBlockchain, db::AsyncDatabase, evm::build_evm, inspector::RethnetInspector, + trace::Trace, transaction::TransactionError, State, }; /// The asynchronous Rethnet runtime. 
@@ -17,8 +12,8 @@ pub struct Rethnet where E: Debug + Send + 'static, { - blockchain: Arc>, E>>, - db: Arc>, E>>, + blockchain: Arc>, + db: Arc>, cfg: CfgEnv, } @@ -28,8 +23,8 @@ where { /// Constructs a new [`Rethnet`] instance. pub fn new( - blockchain: Arc>, E>>, - db: Arc>, E>>, + blockchain: Arc>, + db: Arc>, cfg: CfgEnv, ) -> Self { Self { diff --git a/crates/rethnet_evm_napi/src/blockchain.rs b/crates/rethnet_evm_napi/src/blockchain.rs index 0ad9c28f8a..7ad9ce415c 100644 --- a/crates/rethnet_evm_napi/src/blockchain.rs +++ b/crates/rethnet_evm_napi/src/blockchain.rs @@ -16,13 +16,11 @@ use self::js_blockchain::{GetBlockHashCall, JsBlockchain}; #[napi] pub struct Blockchain { - inner: Arc>, anyhow::Error>>, + inner: Arc>, } impl Blockchain { - pub fn as_inner( - &self, - ) -> &Arc>, anyhow::Error>> { + pub fn as_inner(&self) -> &Arc> { &self.inner } } diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 52a5fa4178..dc0df360ef 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -28,7 +28,7 @@ struct ModifyAccountCall { #[napi] pub struct StateManager { - pub(super) db: Arc>, anyhow::Error>>, + pub(super) db: Arc>, } #[napi] From 9175fcf9ce73c46a25c23933fd02f3801a02dab9 Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 17 Jan 2023 18:59:48 -0600 Subject: [PATCH 021/406] chore: run clippy & rustfmt in CI (#3591) --- .github/workflows/rethnet-ci.yml | 44 +++++++++++++++++++++++++++----- 1 file changed, 38 insertions(+), 6 deletions(-) diff --git a/.github/workflows/rethnet-ci.yml b/.github/workflows/rethnet-ci.yml index 63c54a8cda..65ae24641a 100644 --- a/.github/workflows/rethnet-ci.yml +++ b/.github/workflows/rethnet-ci.yml @@ -6,6 +6,7 @@ on: - $default-branch - "rethnet/main" paths: + - ".github/workflows/rethnet-ci.yml" - "config/**" - "crates/**" - "Cargo.toml" @@ -13,6 +14,7 @@ on: pull_request: branches: ["**"] paths: + - ".github/workflows/rethnet-ci.yml" - "config/**" - "crates/**" - "Cargo.toml" @@ -38,13 +40,13 @@ jobs: profile: minimal override: true - - uses: Swatinem/rust-cache@v1 + - uses: Swatinem/rust-cache@v2 - name: Cargo check uses: actions-rs/cargo@v1 with: command: check - args: --all-targets + args: --workspace --all-features --all-targets test-js: name: Test Node.js @@ -79,7 +81,7 @@ jobs: override: true components: rustfmt - - uses: Swatinem/rust-cache@v1 + - uses: Swatinem/rust-cache@v2 - name: Build run: yarn build @@ -109,7 +111,7 @@ jobs: override: true components: rustfmt - - uses: Swatinem/rust-cache@v1 + - uses: Swatinem/rust-cache@v2 - name: Doctests uses: actions-rs/cargo@v1 @@ -117,7 +119,7 @@ jobs: RUSTFLAGS: ${{ matrix.RUSTFLAGS }} with: command: test - args: --doc --all-features + args: --doc --workspace --all-features - name: Install latest nextest release uses: taiki-e/install-action@nextest @@ -129,4 +131,34 @@ jobs: CARGO_INCREMENTAL: ${{ matrix.CARGO_INCREMENTAL }} with: command: nextest - args: run --all-features + args: run --workspace --all-features + + style: + name: Check Style + runs-on: ubuntu-latest + needs: check + steps: + - uses: actions/checkout@v3 + with: + submodules: true + + - name: Install stable toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + components: clippy, rustfmt + + - uses: Swatinem/rust-cache@v2 + + - name: Run cargo fmt + uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all --check + + - name: Run cargo clippy + uses: actions-rs/clippy-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN 
}} + args: --workspace --all-features From 60e9ad19959265f48ec762efb5948b1985027a92 Mon Sep 17 00:00:00 2001 From: "F. Eugene Aumson" Date: Tue, 17 Jan 2023 20:29:51 -0500 Subject: [PATCH 022/406] feat: rpc client for ethereum state api (#3383) Co-authored-by: Wodann --- .github/workflows/rethnet-ci.yml | 1 + crates/rethnet_eth/Cargo.toml | 9 +- crates/rethnet_eth/src/lib.rs | 3 + crates/rethnet_eth/src/remote.rs | 785 +++++++++++++++++++++++ crates/rethnet_eth/src/remote/eth.rs | 187 ++++++ crates/rethnet_eth/src/remote/jsonrpc.rs | 86 +++ 6 files changed, 1069 insertions(+), 2 deletions(-) create mode 100644 crates/rethnet_eth/src/remote.rs create mode 100644 crates/rethnet_eth/src/remote/eth.rs create mode 100644 crates/rethnet_eth/src/remote/jsonrpc.rs diff --git a/.github/workflows/rethnet-ci.yml b/.github/workflows/rethnet-ci.yml index 65ae24641a..901bb3444e 100644 --- a/.github/workflows/rethnet-ci.yml +++ b/.github/workflows/rethnet-ci.yml @@ -129,6 +129,7 @@ jobs: env: RUSTFLAGS: ${{ matrix.RUSTFLAGS }} CARGO_INCREMENTAL: ${{ matrix.CARGO_INCREMENTAL }} + ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} with: command: nextest args: run --workspace --all-features diff --git a/crates/rethnet_eth/Cargo.toml b/crates/rethnet_eth/Cargo.toml index f322c96d98..336dd76ccb 100644 --- a/crates/rethnet_eth/Cargo.toml +++ b/crates/rethnet_eth/Cargo.toml @@ -13,17 +13,22 @@ hex = { version = "0.4.3", default-features = false, features = ["alloc"] } hex-literal = { version = "0.3", default-features = false } open-fastrlp = { version = "0.1.2", default-features = false, features = ["derive"], optional = true } primitive-types = { version = "0.11.1", default-features = false, features = ["rlp"] } -revm = { git = "https://github.com/wodann/revm", rev = "7c28358", version = "2.3", default-features = false } +reqwest = { version = "0.11", features = ["blocking", "json"] } +revm = { git = "https://github.com/wodann/revm", rev = "7c28358", version = "2.3", default-features = false, features = ["k256"] } rlp = { version = "0.5.2", default-features = false, features = ["derive"] } ruint = { version = "1.7.0", default-features = false } secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc", "recovery"] } serde = { version = "1.0.147", default-features = false, features = ["derive"], optional = true } +serde_json = { version = "1.0.89", optional = true } sha3 = { version = "0.10.6", default-features = false } thiserror = { version = "1.0.37", default-features = false } triehash = { version = "0.8.4", default-features = false } +[dev-dependencies] +tokio = { version = "1.23.0", features = ["macros"] } + [features] default = ["std"] # fastrlp = ["dep:open-fastrlp", "ruint/fastrlp"] Broken due to lack of support for fastrlp in primitive-types -serde = ["dep:serde", "bytes/serde", "ethbloom/serialize", "hashbrown/serde", "primitive-types/serde", "ruint/serde"] +serde = ["dep:serde", "bytes/serde", "ethbloom/serialize", "hashbrown/serde", "primitive-types/serde", "revm/with-serde", "ruint/serde", "serde_json"] std = ["bytes/std", "ethbloom/std", "hash256-std-hasher/std", "hash-db/std", "hex/std", "open-fastrlp?/std", "primitive-types/std", "rlp/std", "secp256k1/std", "serde?/std", "sha3/std", "triehash/std"] diff --git a/crates/rethnet_eth/src/lib.rs b/crates/rethnet_eth/src/lib.rs index 7a8a6986f1..ef7c23b00f 100644 --- a/crates/rethnet_eth/src/lib.rs +++ b/crates/rethnet_eth/src/lib.rs @@ -12,6 +12,9 @@ pub mod account; pub mod block; /// Ethereum receipt types pub mod receipt; +/// Remote 
node interaction +#[cfg(feature = "serde")] +pub mod remote; /// Ethereum signature types pub mod signature; /// Ethereum state types and functions diff --git a/crates/rethnet_eth/src/remote.rs b/crates/rethnet_eth/src/remote.rs new file mode 100644 index 0000000000..f7941c79d9 --- /dev/null +++ b/crates/rethnet_eth/src/remote.rs @@ -0,0 +1,785 @@ +use std::sync::atomic::{AtomicU64, Ordering}; + +use revm::AccountInfo; + +use crate::{Address, Bytes, B256, U256}; + +mod eth; +mod jsonrpc; + +/// Specialized error types +#[derive(thiserror::Error, Debug)] +pub enum RpcClientError { + /// The remote node's response did not conform to the expected format + #[error("Response was not of the expected type")] + InterpretationError { + /// A more specific message + msg: String, + /// The body of the request that was submitted to elicit the response + request_body: String, + /// The Rust type which was expected to be decoded from the JSON received + expected_type: String, + /// The body of the response given by the remote node + response_text: String, + }, + + /// The message could not be sent to the remote node + #[error("Failed to send request")] + SendError { + /// The error message + msg: String, + /// The body of the request that was submitted + request_body: String, + }, + + /// The remote node failed to reply with the body of the response + #[error("Failed to get response body")] + ResponseError { + /// The specific error message + msg: String, + /// The body of the request that was submitted + request_body: String, + }, + + /// Some other error from an underlying dependency + #[error(transparent)] + OtherError(#[from] std::io::Error), +} + +/// A client for executing RPC methods on a remote Ethereum node +pub struct RpcClient { + url: String, + client: reqwest::Client, + next_id: AtomicU64, +} + +struct U64(u64); +impl serde::Serialize for U64 { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + serializer.serialize_str(&format!("{:#x}", self.0)) + } +} + +impl From for U64 { + fn from(u: u64) -> U64 { + U64(u) + } +} + +fn single_to_sequence(val: &T, s: S) -> Result +where + S: serde::Serializer, + T: serde::Serialize, +{ + use serde::ser::SerializeSeq; + let mut seq = s.serialize_seq(Some(1))?; + seq.serialize_element(val)?; + seq.end() +} + +#[derive(serde::Serialize)] +#[serde(tag = "method", content = "params")] +enum MethodInvocation { + #[serde(rename = "eth_getStorageAt")] + StorageAt( + Address, + /// position + U256, + /// block_number + #[serde(skip_serializing_if = "Option::is_none")] + Option, + ), + #[serde( + rename = "eth_getTransactionByHash", + serialize_with = "single_to_sequence" + )] + TxByHash(B256), + #[serde( + rename = "eth_getTransactionReceipt", + serialize_with = "single_to_sequence" + )] + TxReceipt(B256), + #[serde(rename = "eth_getLogs", serialize_with = "single_to_sequence")] + Logs(GetLogsInput), + #[serde(rename = "eth_getBalance")] + Balance( + Address, + /// block number + #[serde(skip_serializing_if = "Option::is_none")] + Option, + ), + #[serde(rename = "eth_getBlockByHash")] + BlockByHash( + /// hash + B256, + /// include transactions + bool, + ), + #[serde(rename = "eth_getBlockByNumber")] + BlockByNumber( + /// block number + U64, + /// include transactions + bool, + ), + #[serde(rename = "eth_getCode")] + Code( + Address, + /// block number + #[serde(skip_serializing_if = "Option::is_none")] + Option, + ), + #[serde(rename = "eth_getTransactionCount")] + TxCount( + Address, + /// block number + 
#[serde(skip_serializing_if = "Option::is_none")] + Option, + ), +} + +struct Response { + text: String, + request_body: String, + request_id: jsonrpc::Id, +} + +struct BatchResponse { + text: String, + request_body: String, + request_ids: Vec, +} + +#[derive(serde::Serialize)] +#[serde(rename_all = "camelCase")] +struct GetLogsInput { + from_block: U64, + to_block: U64, + address: Address, +} + +#[derive(serde::Serialize)] +struct Request<'a> { + version: jsonrpc::Version, + #[serde(flatten)] + method: &'a MethodInvocation, + id: jsonrpc::Id, +} + +impl RpcClient { + fn verify_success(response: Response) -> Result + where + T: for<'a> serde::Deserialize<'a>, + { + let response_text = response.text.clone(); + let success: jsonrpc::Success = serde_json::from_str(&response.text).map_err(|err| { + RpcClientError::InterpretationError { + msg: err.to_string(), + request_body: response.request_body, + expected_type: format!( + "rethnet_eth::remote::jsonrpc::Success<{}>", + std::any::type_name::() + ), + response_text, + } + })?; + + assert_eq!(success.id, response.request_id); + + Ok(success.result) + } + + /// returns response text + async fn send_request_body(&self, request_body: String) -> Result { + use RpcClientError::{ResponseError, SendError}; + self.client + .post(self.url.to_string()) + .body(request_body.to_string()) + .send() + .await + .map_err(|err| SendError { + msg: err.to_string(), + request_body: request_body.to_string(), + })? + .text() + .await + .map_err(|err| ResponseError { + msg: err.to_string(), + request_body: request_body.to_string(), + }) + } + + async fn call(&self, input: &MethodInvocation) -> Result + where + T: for<'a> serde::Deserialize<'a>, + { + let id = jsonrpc::Id::Num(self.next_id.fetch_add(1, Ordering::Relaxed)); + let json = serde_json::json!(Request { + version: crate::remote::jsonrpc::Version::V2_0, + id: id.clone(), + method: input, + }) + .to_string(); + + Self::verify_success(Response { + request_id: id, + request_body: json.clone(), + text: self.send_request_body(json).await?, + }) + } + + async fn batch_call( + &self, + inputs: &[MethodInvocation], + ) -> Result { + let (request_strings, request_ids): (Vec, Vec) = inputs + .iter() + .map(|i| { + let id = jsonrpc::Id::Num(self.next_id.fetch_add(1, Ordering::Relaxed)); + let json = serde_json::json!(Request { + version: crate::remote::jsonrpc::Version::V2_0, + id: id.clone(), + method: i, + }) + .to_string(); + (json, id) + }) + .unzip(); + + let request_body = format!("[{}]", request_strings.join(",")); + + let response_text = self.send_request_body(request_body.clone()).await?; + + Ok(BatchResponse { + request_body, + request_ids, + text: response_text, + }) + } + + /// Create a new RpcClient instance, given a remote node URL. 
+ pub fn new(url: &str) -> Self { + RpcClient { + url: url.to_string(), + client: reqwest::Client::new(), + next_id: AtomicU64::new(0), + } + } + + /// eth_getTransactionByHash + pub async fn get_tx_by_hash(&self, tx_hash: &B256) -> Result { + self.call(&MethodInvocation::TxByHash(*tx_hash)).await + } + + /// eth_getTransactionReceipt + pub async fn get_tx_receipt( + &self, + tx_hash: &B256, + ) -> Result { + self.call(&MethodInvocation::TxReceipt(*tx_hash)).await + } + + /// eth_getLogs + pub async fn get_logs( + &self, + from_block: u64, + to_block: u64, + address: &Address, + ) -> Result, RpcClientError> { + self.call(&MethodInvocation::Logs(GetLogsInput { + from_block: U64::from(from_block), + to_block: U64::from(to_block), + address: *address, + })) + .await + } + + /// eth_getBlockByHash + pub async fn get_block_by_hash( + &self, + hash: &B256, + include_transactions: bool, + ) -> Result, RpcClientError> { + self.call(&MethodInvocation::BlockByHash(*hash, include_transactions)) + .await + } + + /// eth_getBlockByNumber + pub async fn get_block_by_number( + &self, + number: u64, + include_transactions: bool, + ) -> Result, RpcClientError> { + self.call(&MethodInvocation::BlockByNumber( + U64::from(number), + include_transactions, + )) + .await + } + + /// eth_getTransactionCount + pub async fn get_transaction_count( + &self, + address: &Address, + block_number: Option, + ) -> Result { + self.call(&MethodInvocation::TxCount( + *address, + block_number.map(U64::from), + )) + .await + } + + /// eth_getStorageAt + pub async fn get_storage_at( + &self, + address: &Address, + position: U256, + block_number: Option, + ) -> Result { + self.call(&MethodInvocation::StorageAt( + *address, + position, + block_number.map(U64::from), + )) + .await + } + + /// Submit a consolidated batch of RPC method invocations in order to obtain the set of data + /// contained in AccountInfo. 
+ pub async fn get_account_info( + &self, + address: &Address, + block_number: Option, + ) -> Result { + let inputs = Vec::from([ + MethodInvocation::Balance(*address, block_number.map(U64::from)), + MethodInvocation::Code(*address, block_number.map(U64::from)), + MethodInvocation::TxCount(*address, block_number.map(U64::from)), + ]); + + let response = self.batch_call(&inputs).await?; + + let results: ( + jsonrpc::Success, + jsonrpc::Success, + jsonrpc::Success, + ) = serde_json::from_str(&response.text).map_err(|err| { + RpcClientError::InterpretationError { + msg: err.to_string(), + request_body: response.request_body.clone(), + expected_type: String::from("Array"), + response_text: response.text.clone(), + } + })?; + + assert_eq!(results.0.id, response.request_ids[0]); + assert_eq!(results.1.id, response.request_ids[1]); + assert_eq!(results.2.id, response.request_ids[2]); + + let code = revm::Bytecode::new_raw(results.1.result); + + Ok(AccountInfo { + balance: results.0.result, + code: Some(code.clone()), + code_hash: code.hash(), + nonce: results.2.result.to(), + }) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use std::str::FromStr; + + use crate::{Address, Bytes, U256}; + + fn get_alchemy_url() -> Result { + Ok(std::env::var_os("ALCHEMY_URL") + .expect("ALCHEMY_URL environment variable not defined") + .into_string() + .expect("couldn't convert OsString into a String")) + } + + #[tokio::test] + async fn get_tx_by_hash_success() { + use std::str::FromStr; + + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let hash = + B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") + .expect("failed to parse hash from string"); + + let tx: eth::Transaction = RpcClient::new(&alchemy_url) + .get_tx_by_hash(&hash) + .await + .expect("failed to get transaction by hash"); + + assert_eq!( + tx.block_hash, + Some( + B256::from_str( + "0x88fadbb673928c61b9ede3694ae0589ac77ae38ec90a24a6e12e83f42f18c7e8" + ) + .expect("couldn't parse data") + ) + ); + assert_eq!( + tx.block_number, + Some(u64::from_str_radix("a74fde", 16).expect("couldn't parse data")) + ); + assert_eq!(tx.hash, hash); + assert_eq!( + tx.from, + Address::from_str("0x7d97fcdb98632a91be79d3122b4eb99c0c4223ee") + .expect("couldn't parse data") + ); + assert_eq!( + tx.gas, + U256::from_str_radix("30d40", 16).expect("couldn't parse data") + ); + assert_eq!( + tx.gas_price, + Some(U256::from_str_radix("1e449a99b8", 16).expect("couldn't parse data")) + ); + assert_eq!( + tx.input, + Bytes::from("0xa9059cbb000000000000000000000000e2c1e729e05f34c07d80083982ccd9154045dcc600000000000000000000000000000000000000000000000000000004a817c800") + ); + assert_eq!( + tx.nonce, + U256::from_str_radix("653b", 16).expect("couldn't parse data") + ); + assert_eq!( + tx.r, + U256::from_str_radix( + "eb56df45bd355e182fba854506bc73737df275af5a323d30f98db13fdf44393a", + 16 + ) + .expect("couldn't parse data") + ); + assert_eq!( + tx.s, + U256::from_str_radix( + "2c6efcd210cdc7b3d3191360f796ca84cab25a52ed8f72efff1652adaabc1c83", + 16 + ) + .expect("couldn't parse data") + ); + assert_eq!( + tx.to, + Some( + Address::from_str("dac17f958d2ee523a2206206994597c13d831ec7") + .expect("couldn't parse data") + ) + ); + assert_eq!( + tx.transaction_index, + Some(u64::from_str_radix("88", 16).expect("couldn't parse data")) + ); + assert_eq!( + tx.v, + u64::from_str_radix("1c", 16).expect("couldn't parse data") + ); + assert_eq!( + tx.value, + U256::from_str_radix("0", 16).expect("couldn't parse data") + ); 
+ } + + #[tokio::test] + async fn get_tx_by_hash_dns_error() { + let alchemy_url = "https://xxxeth-mainnet.g.alchemy.com"; + + let hash = + B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") + .expect("failed to parse hash from string"); + + let error_string = format!( + "{:?}", + RpcClient::new(alchemy_url) + .get_tx_by_hash(&hash) + .await + .expect_err("should have failed to connect to a garbage domain name") + ); + + assert!(error_string.contains("SendError")); + assert!(error_string.contains("dns error")); + } + + #[tokio::test] + async fn get_tx_by_hash_bad_api_key() { + let alchemy_url = "https://eth-mainnet.g.alchemy.com/v2/abcdefg"; + + let hash = + B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") + .expect("failed to parse hash from string"); + + let error_string = format!( + "{:?}", + RpcClient::new(alchemy_url) + .get_tx_by_hash(&hash) + .await + .expect_err("should have failed to interpret response as a Transaction") + ); + + assert!(error_string.contains("InterpretationError")); + assert!(error_string.contains("Success")); + assert!(error_string.contains("Must be authenticated!")); + } + + #[tokio::test] + async fn get_tx_receipt_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let hash = + B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") + .expect("failed to parse hash from string"); + + let receipt: eth::TransactionReceipt = RpcClient::new(&alchemy_url) + .get_tx_receipt(&hash) + .await + .expect("failed to get transaction by hash"); + + assert_eq!( + receipt.block_hash, + Some( + B256::from_str( + "0x88fadbb673928c61b9ede3694ae0589ac77ae38ec90a24a6e12e83f42f18c7e8" + ) + .expect("couldn't parse data") + ) + ); + assert_eq!( + receipt.block_number, + Some(u64::from_str_radix("a74fde", 16).expect("couldn't parse data")) + ); + assert_eq!(receipt.contract_address, None); + assert_eq!( + receipt.cumulative_gas_used, + U256::from_str_radix("56c81b", 16).expect("couldn't parse data") + ); + assert_eq!( + receipt.effective_gas_price, + Some(U256::from_str_radix("1e449a99b8", 16).expect("couldn't parse data")) + ); + assert_eq!( + receipt.from, + Address::from_str("0x7d97fcdb98632a91be79d3122b4eb99c0c4223ee") + .expect("couldn't parse data") + ); + assert_eq!( + receipt.gas_used, + Some(U256::from_str_radix("a0f9", 16).expect("couldn't parse data")) + ); + assert_eq!(receipt.logs.len(), 1); + assert_eq!(receipt.root, None); + assert_eq!(receipt.status, Some(1)); + assert_eq!( + receipt.to, + Some( + Address::from_str("dac17f958d2ee523a2206206994597c13d831ec7") + .expect("couldn't parse data") + ) + ); + assert_eq!(receipt.transaction_hash, hash); + assert_eq!(receipt.transaction_index, 136); + assert_eq!(receipt.transaction_type, Some(0)); + } + + #[tokio::test] + async fn get_tx_receipt_dns_error() { + let alchemy_url = "https://xxxeth-mainnet.g.alchemy.com"; + + let hash = + B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") + .expect("failed to parse hash from string"); + + let error_string = format!( + "{:?}", + RpcClient::new(alchemy_url) + .get_tx_receipt(&hash) + .await + .expect_err("should have failed to connect to a garbage domain name") + ); + + assert!(error_string.contains("SendError")); + assert!(error_string.contains("dns error")); + } + + #[tokio::test] + async fn get_tx_receipt_bad_api_key() { + let alchemy_url = "https://eth-mainnet.g.alchemy.com/v2/abcdefg"; + + let hash = + 
B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") + .expect("failed to parse hash from string"); + + let error_string = format!( + "{:?}", + RpcClient::new(alchemy_url) + .get_tx_receipt(&hash) + .await + .expect_err("should have failed to interpret response as a Receipt") + ); + + assert!(error_string.contains("InterpretationError")); + assert!(error_string.contains("Success")); + assert!(error_string.contains("Must be authenticated!")); + } + + #[tokio::test] + async fn get_logs_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let logs = RpcClient::new(&alchemy_url) + .get_logs( + 10496585, + 10496585, + &Address::from_str("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2") + .expect("failed to parse data"), + ) + .await + .expect("failed to get logs"); + assert_eq!(logs.len(), 12); + // TODO: assert more things about the log(s) + // TODO: consider asserting something about the logs bloom + } + + #[tokio::test] + async fn get_block_by_hash_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let hash = + B256::from_str("0x71d5e7c8ff9ea737034c16e333a75575a4a94d29482e0c2b88f0a6a8369c1812") + .expect("failed to parse hash from string"); + + let block = RpcClient::new(&alchemy_url) + .get_block_by_hash(&hash, true) + .await + .expect("should have succeeded"); + + assert_eq!(block.hash, Some(hash)); + assert_eq!(block.transactions.len(), 192); + } + + #[tokio::test] + async fn get_block_by_number_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let block_number = 16222385; + + let block = RpcClient::new(&alchemy_url) + .get_block_by_number(block_number, true) + .await + .expect("should have succeeded"); + + assert_eq!(block.number, Some(block_number)); + assert_eq!(block.transactions.len(), 102); + } + + #[tokio::test] + async fn get_storage_at_with_block_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let total_supply: U256 = RpcClient::new(&alchemy_url) + .get_storage_at( + &dai_address, + U256::from_str_radix( + "0000000000000000000000000000000000000000000000000000000000000001", + 16, + ) + .expect("failed to parse storage location"), + Some(16220843), + ) + .await + .expect("should have succeeded"); + + assert_eq!( + total_supply, + U256::from_str_radix( + "000000000000000000000000000000000000000010a596ae049e066d4991945c", + 16 + ) + .expect("failed to parse storage location") + ); + } + + #[tokio::test] + async fn get_storage_at_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let total_supply: U256 = RpcClient::new(&alchemy_url) + .get_storage_at( + &dai_address, + U256::from_str_radix( + "0000000000000000000000000000000000000000000000000000000000000001", + 16, + ) + .expect("failed to parse storage location"), + None, + ) + .await + .expect("should have succeeded"); + + assert!(total_supply > U256::from(0)); + } + + #[tokio::test] + async fn get_transaction_count_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let transaction_count = 
RpcClient::new(&alchemy_url) + .get_transaction_count(&dai_address, Some(16220843)) + .await + .expect("should have succeeded"); + + assert_eq!(transaction_count, U256::from(1)); + } + + #[tokio::test] + async fn get_account_info_with_block_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let account_info = RpcClient::new(&alchemy_url) + .get_account_info(&dai_address, Some(16220843)) + .await + .expect("should have succeeded"); + + assert_eq!(account_info.balance, U256::from(0)); + assert_eq!(account_info.nonce, 1); + } + + #[tokio::test] + async fn get_account_info_success() { + let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let account_info = RpcClient::new(&alchemy_url) + .get_account_info(&dai_address, None) + .await + .expect("should have succeeded"); + + assert_eq!(account_info.balance, U256::from(0)); + assert_eq!(account_info.nonce, 1); + } +} diff --git a/crates/rethnet_eth/src/remote/eth.rs b/crates/rethnet_eth/src/remote/eth.rs new file mode 100644 index 0000000000..d6a170e8ac --- /dev/null +++ b/crates/rethnet_eth/src/remote/eth.rs @@ -0,0 +1,187 @@ +#![cfg(feature = "serde")] + +// Parts of this code were adapted from github.com/gakonst/ethers-rs and are distributed under its +// licenses: +// - https://github.com/gakonst/ethers-rs/blob/7e6c3ba98363bdf6131e8284f186cc2c70ff48c3/LICENSE-APACHE +// - https://github.com/gakonst/ethers-rs/blob/7e6c3ba98363bdf6131e8284f186cc2c70ff48c3/LICENSE-MIT +// For the original context, see https://github.com/gakonst/ethers-rs/tree/7e6c3ba98363bdf6131e8284f186cc2c70ff48c3 + +use std::fmt::Debug; + +use crate::{Address, Bloom, Bytes, B256, U256}; + +#[derive(Clone, Debug, PartialEq, Eq, Default, serde::Deserialize, serde::Serialize)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct AccessListEntry { + address: Address, + storage_keys: Vec, +} + +#[derive(Clone, Debug, PartialEq, Eq, Default, serde::Deserialize, serde::Serialize)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct Transaction { + /// The transaction's hash + pub hash: B256, + pub nonce: U256, + pub block_hash: Option, + #[serde(deserialize_with = "optional_u64_from_hex")] + pub block_number: Option, + #[serde(deserialize_with = "optional_u64_from_hex")] + pub transaction_index: Option, + pub from: Address, + pub to: Option
, + pub value: U256, + pub gas_price: Option, + pub gas: U256, + pub input: Bytes, + #[serde(deserialize_with = "u64_from_hex")] + pub v: u64, + pub r: U256, + pub s: U256, + #[serde(default, deserialize_with = "optional_u64_from_hex")] + pub chain_id: Option, + #[serde( + rename = "type", + default, + skip_serializing_if = "Option::is_none", + deserialize_with = "optional_u64_from_hex" + )] + pub transaction_type: Option, + #[serde(default)] + pub access_list: Option>, + #[serde(default)] + pub max_fee_per_gas: Option, + #[serde(default)] + pub max_priority_fee_per_gas: Option, +} + +fn optional_u64_from_hex<'de, D>(deserializer: D) -> Result, D::Error> +where + D: serde::Deserializer<'de>, +{ + let s: &str = serde::Deserialize::deserialize(deserializer)?; + Ok(Some( + u64::from_str_radix(&s[2..], 16).expect("failed to parse u64"), + )) +} + +fn u64_from_hex<'de, D>(deserializer: D) -> Result +where + D: serde::Deserializer<'de>, +{ + let s: &str = serde::Deserialize::deserialize(deserializer)?; + Ok(u64::from_str_radix(&s[2..], 16).expect("failed to parse u64")) +} + +#[derive(Debug, Clone, PartialEq, Eq, serde::Deserialize, serde::Serialize)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct Log { + pub address: Address, + pub topics: Vec, + pub data: Bytes, + #[serde(skip_serializing_if = "Option::is_none")] + pub block_hash: Option, + #[serde( + skip_serializing_if = "Option::is_none", + deserialize_with = "optional_u64_from_hex" + )] + pub block_number: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub transaction_hash: Option, + #[serde( + skip_serializing_if = "Option::is_none", + deserialize_with = "optional_u64_from_hex" + )] + pub transaction_index: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub log_index: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub transaction_log_index: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub log_type: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub removed: Option, +} + +#[derive(Clone, Debug, PartialEq, Eq, Default, serde::Deserialize, serde::Serialize)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct TransactionReceipt { + pub block_hash: Option, + #[serde(deserialize_with = "optional_u64_from_hex")] + pub block_number: Option, + pub contract_address: Option
, + pub cumulative_gas_used: U256, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub effective_gas_price: Option, + pub from: Address, + pub gas_used: Option, + pub logs: Vec, + pub logs_bloom: Bloom, + #[serde(default, skip_serializing_if = "Option::is_none")] + pub root: Option, + #[serde(deserialize_with = "optional_u64_from_hex")] + pub status: Option, + pub to: Option
, + pub transaction_hash: B256, + #[serde(deserialize_with = "u64_from_hex")] + pub transaction_index: u64, + #[serde( + rename = "type", + default, + skip_serializing_if = "Option::is_none", + deserialize_with = "optional_u64_from_hex" + )] + pub transaction_type: Option, +} + +#[derive(Debug, Default, Clone, PartialEq, Eq, serde::Deserialize, serde::Serialize)] +#[serde(deny_unknown_fields)] +#[serde(rename_all = "camelCase")] +pub struct Block +where + TX: Debug + Default + Clone + PartialEq + Eq, +{ + pub hash: Option, + pub parent_hash: B256, + pub sha3_uncles: B256, + pub author: Option
, + pub state_root: B256, + pub transactions_root: B256, + pub receipts_root: B256, + #[serde(deserialize_with = "optional_u64_from_hex")] + pub number: Option, + pub gas_used: U256, + pub gas_limit: U256, + pub extra_data: Bytes, + pub logs_bloom: Option, + #[serde(default)] + pub timestamp: U256, + #[serde(default)] + pub difficulty: U256, + pub total_difficulty: Option, + #[serde(default, deserialize_with = "deserialize_null_default")] + pub seal_fields: Vec, + #[serde(default)] + pub uncles: Vec, + #[serde(default)] + pub transactions: Vec, + pub size: Option, + pub mix_hash: Option, + pub nonce: Option, + pub base_fee_per_gas: Option, + pub miner: Address, +} + +fn deserialize_null_default<'de, D, T>(deserializer: D) -> Result +where + T: Default + serde::Deserialize<'de>, + D: serde::Deserializer<'de>, +{ + use serde::Deserialize; + let opt = Option::deserialize(deserializer)?; + Ok(opt.unwrap_or_default()) +} diff --git a/crates/rethnet_eth/src/remote/jsonrpc.rs b/crates/rethnet_eth/src/remote/jsonrpc.rs new file mode 100644 index 0000000000..afb9c7a58d --- /dev/null +++ b/crates/rethnet_eth/src/remote/jsonrpc.rs @@ -0,0 +1,86 @@ +// Parts of this code were adapted from github.com/koushiro/async-jsonrpc and are distributed under its +// licenses: +// - https://github.com/koushiro/async-jsonrpc/blob/9b42602f4faa63dd4b6a1a9fe359bffa97e636d5/LICENSE-APACHE +// - https://github.com/koushiro/async-jsonrpc/blob/9b42602f4faa63dd4b6a1a9fe359bffa97e636d5/LICENSE-MIT +// For the original context, see https://github.com/koushiro/async-jsonrpc/tree/9b42602f4faa63dd4b6a1a9fe359bffa97e636d5 + +use serde::{Deserialize, Serialize}; + +/// Represents JSON-RPC 2.0 success response. +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +pub struct Success { + /// A String specifying the version of the JSON-RPC protocol. + pub jsonrpc: Version, + /// Successful execution result. + pub result: T, + /// Correlation id. + /// + /// It **MUST** be the same as the value of the id member in the Request Object. + pub id: Id, +} + +/// Represents JSON-RPC request/response id. +/// +/// An identifier established by the Client that MUST contain a String, Number, +/// or NULL value if included, If it is not included it is assumed to be a notification. +/// The value SHOULD normally not be Null and Numbers SHOULD NOT contain fractional parts. +/// +/// The Server **MUST** reply with the same value in the Response object if included. +/// This member is used to correlate the context between the two objects. +#[derive(Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash, Serialize, Deserialize)] +#[serde(deny_unknown_fields)] +#[serde(untagged)] +pub enum Id { + /// Numeric id + Num(u64), + /// String id + Str(String), +} +/// Represents JSON-RPC protocol version. +#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)] +pub enum Version { + /// Represents JSON-RPC 2.0 version. 
+ V2_0, +} + +impl Serialize for Version { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + match self { + Version::V2_0 => serializer.serialize_str("2.0"), + } + } +} + +impl<'a> Deserialize<'a> for Version { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'a>, + { + deserializer.deserialize_identifier(VersionVisitor) + } +} + +struct VersionVisitor; +impl<'a> serde::de::Visitor<'a> for VersionVisitor { + type Value = Version; + + fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + formatter.write_str("a string") + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + match value { + "2.0" => Ok(Version::V2_0), + _ => Err(serde::de::Error::custom( + "Invalid JSON-RPC protocol version", + )), + } + } +} From 1d534b12378a66fb1f8bf0aa0c374e7b3823db6f Mon Sep 17 00:00:00 2001 From: "F. Eugene Aumson" Date: Wed, 25 Jan 2023 09:07:22 -0500 Subject: [PATCH 023/406] feat: read-only database for querying the blockchain (#3546) Co-authored-by: Franco Victorio --- .github/workflows/e2e-ci.yml | 2 + crates/rethnet_evm/Cargo.toml | 2 +- crates/rethnet_evm/src/db.rs | 3 + crates/rethnet_evm/src/db/remote.rs | 86 +++++++++++++++++++ .../hardhat-core/src/builtin-tasks/compile.ts | 2 +- packages/hardhat-etherscan/src/ABITypes.ts | 4 +- 6 files changed, 95 insertions(+), 4 deletions(-) create mode 100644 crates/rethnet_evm/src/db/remote.rs diff --git a/.github/workflows/e2e-ci.yml b/.github/workflows/e2e-ci.yml index 7ef9d0fc0c..1409159af5 100644 --- a/.github/workflows/e2e-ci.yml +++ b/.github/workflows/e2e-ci.yml @@ -11,6 +11,8 @@ on: pull_request: branches: - "**" + - "!rethnet*/**" + - "!49-implement-read-only-struct-remotedatabase-for-querying-the-blockchain" paths: - "packages/e2e/**" - "packages/hardhat-core/**" diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index c431346c0d..98d8076dc6 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -10,7 +10,7 @@ ethers-signers = { version = "1.0.0", default-features = false } hashbrown = { version = "0.13", default-features = false, features = ["ahash", "serde"] } log = { version = "0.4.17", default-features = false } parking_lot = { version = "0.12.1", default-features = false } -rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth" } +rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth", features = ["serde"] } revm = { git = "https://github.com/wodann/revm", rev = "7c28358", version = "2.3", default-features = false, features = ["dev", "k256", "with-serde"] } secp256k1 = { version = "0.24.1", default-features = false, features = ["alloc"] } sha3 = { version = "0.10.4", default-features = false } diff --git a/crates/rethnet_evm/src/db.rs b/crates/rethnet_evm/src/db.rs index 305c552319..e2c841b6be 100644 --- a/crates/rethnet_evm/src/db.rs +++ b/crates/rethnet_evm/src/db.rs @@ -1,7 +1,10 @@ mod layered_db; +mod remote; mod request; mod sync; pub use sync::{AsyncDatabase, SyncDatabase}; pub use layered_db::{LayeredDatabase, RethnetLayer}; + +pub use remote::RemoteDatabase; diff --git a/crates/rethnet_evm/src/db/remote.rs b/crates/rethnet_evm/src/db/remote.rs new file mode 100644 index 0000000000..e59785c6d6 --- /dev/null +++ b/crates/rethnet_evm/src/db/remote.rs @@ -0,0 +1,86 @@ +use tokio::runtime::{Builder, Runtime}; + +use rethnet_eth::remote::{RpcClient, RpcClientError}; +use rethnet_eth::{Address, B256, U256}; + +use revm::{db::DatabaseRef, 
AccountInfo, Bytecode}; + +/// An revm database backed by a remote Ethereum node +pub struct RemoteDatabase { + client: RpcClient, + runtime: Runtime, +} + +/// Errors that might be returned from RemoteDatabase +#[derive(thiserror::Error, Debug)] +pub enum RemoteDatabaseError { + #[error(transparent)] + RpcError(#[from] RpcClientError), + + /// Some other error from an underlying dependency + #[error(transparent)] + OtherError(#[from] std::io::Error), +} + +impl RemoteDatabase { + /// Construct a new RemoteDatabse given the URL of a remote Ethereum node. + pub fn new(url: &str) -> Self { + Self { + client: RpcClient::new(url), + runtime: Builder::new_multi_thread() + .enable_io() + .enable_time() + .build() + .expect("failed to construct async runtime"), + } + } +} + +impl DatabaseRef for RemoteDatabase { + type Error = RemoteDatabaseError; + + fn basic(&self, address: Address) -> Result, Self::Error> { + Ok(Some( + self.runtime + .block_on(self.client.get_account_info(&address, None)) + .map_err(RemoteDatabaseError::RpcError)?, + )) + } + + /// unimplemented + fn code_by_hash(&self, _code_hash: B256) -> Result { + unimplemented!(); + } + + fn storage(&self, address: Address, index: U256) -> Result { + self.runtime + .block_on(self.client.get_storage_at(&address, index, None)) + .map_err(RemoteDatabaseError::RpcError) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use std::str::FromStr; + + #[test] + fn basic_success() { + let alchemy_url = std::env::var_os("ALCHEMY_URL") + .expect("ALCHEMY_URL environment variable not defined") + .into_string() + .expect("couldn't convert OsString into a String"); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let account_info: AccountInfo = RemoteDatabase::new(&alchemy_url) + .basic(dai_address) + .expect("should succeed") + .unwrap(); + + assert_eq!(account_info.balance, U256::from(0)); + assert_eq!(account_info.nonce, 1); + } +} diff --git a/packages/hardhat-core/src/builtin-tasks/compile.ts b/packages/hardhat-core/src/builtin-tasks/compile.ts index 44b8005a64..1f2f7ee78e 100644 --- a/packages/hardhat-core/src/builtin-tasks/compile.ts +++ b/packages/hardhat-core/src/builtin-tasks/compile.ts @@ -94,7 +94,7 @@ function isConsoleLogError(error: any): boolean { return ( error.type === "TypeError" && typeof error.message === "string" && - error.message.includes("log") && + error.message.includes("log") === true && error.message.includes("type(library console)") ); } diff --git a/packages/hardhat-etherscan/src/ABITypes.ts b/packages/hardhat-etherscan/src/ABITypes.ts index afb5e034d9..869b7ba487 100644 --- a/packages/hardhat-etherscan/src/ABITypes.ts +++ b/packages/hardhat-etherscan/src/ABITypes.ts @@ -19,10 +19,10 @@ export function isABIArgumentLengthError( ): error is ABIArgumentLengthError { return ( error.code === "INVALID_ARGUMENT" && - error.count && + Boolean(error.count) && typeof error.count.types === "number" && typeof error.count.values === "number" && - error.value && + Boolean(error.value) && typeof error.value.types === "object" && typeof error.value.values === "object" && error instanceof Error From 89025426d970f49a3d1ca9ae682cbdf81d2cb61e Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 26 Jan 2023 15:01:35 -0300 Subject: [PATCH 024/406] feat: dependabot support for Rust (#3614) --- .github/dependabot.yml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/.github/dependabot.yml b/.github/dependabot.yml index 165b1817af..7fbeda9b02 100644 --- 
a/.github/dependabot.yml +++ b/.github/dependabot.yml @@ -3,6 +3,12 @@ version: 2 updates: + - package-ecosystem: "cargo" + directory: "/" + schedule: + interval: "daily" + commit-message: + prefix: "bump" - package-ecosystem: "npm" directory: "/" schedule: From ce44184059faef71e711a2f35610ec11c43fdc15 Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 13:18:51 -0600 Subject: [PATCH 025/406] feat: tracing in Rethnet using JS callbacks (#3593) Co-authored-by: Franco Victorio --- crates/rethnet_eth/Cargo.toml | 8 +- crates/rethnet_eth/src/account.rs | 2 +- crates/rethnet_eth/src/block.rs | 2 +- crates/rethnet_eth/src/lib.rs | 2 +- crates/rethnet_eth/src/receipt.rs | 10 +- crates/rethnet_eth/src/remote.rs | 50 +- crates/rethnet_eth/src/transaction.rs | 2 +- crates/rethnet_eth/src/utils.rs | 2 +- crates/rethnet_evm/Cargo.toml | 7 +- crates/rethnet_evm/src/block/builder.rs | 102 ++- crates/rethnet_evm/src/blockchain/request.rs | 4 +- crates/rethnet_evm/src/blockchain/sync.rs | 10 +- crates/rethnet_evm/src/db.rs | 10 - crates/rethnet_evm/src/debug.rs | 8 +- crates/rethnet_evm/src/evm.rs | 59 +- crates/rethnet_evm/src/inspector.rs | 24 +- crates/rethnet_evm/src/lib.rs | 28 +- crates/rethnet_evm/src/random.rs | 2 +- crates/rethnet_evm/src/runtime.rs | 103 +-- crates/rethnet_evm/src/state.rs | 24 + .../src/{db => state}/layered_db.rs | 60 +- .../rethnet_evm/src/{db => state}/remote.rs | 15 +- .../rethnet_evm/src/{db => state}/request.rs | 44 +- crates/rethnet_evm/src/{db => state}/sync.rs | 71 +- crates/rethnet_evm/src/trace.rs | 6 +- crates/rethnet_evm/src/transaction.rs | 43 +- crates/rethnet_evm_napi/Cargo.toml | 1 - crates/rethnet_evm_napi/src/account.rs | 60 ++ crates/rethnet_evm_napi/src/block/builder.rs | 29 +- crates/rethnet_evm_napi/src/blockchain.rs | 11 +- .../src/blockchain/js_blockchain.rs | 9 +- crates/rethnet_evm_napi/src/cast.rs | 2 +- crates/rethnet_evm_napi/src/config.rs | 136 +++ crates/rethnet_evm_napi/src/lib.rs | 383 +-------- crates/rethnet_evm_napi/src/log.rs | 26 + crates/rethnet_evm_napi/src/receipt.rs | 2 +- crates/rethnet_evm_napi/src/runtime.rs | 125 +++ crates/rethnet_evm_napi/src/state.rs | 83 +- crates/rethnet_evm_napi/src/tracer.rs | 30 + .../rethnet_evm_napi/src/tracer/js_tracer.rs | 786 ++++++++++++++++++ crates/rethnet_evm_napi/src/transaction.rs | 27 +- .../src/transaction/result.rs | 209 +++++ .../internal/hardhat-network/provider/node.ts | 25 +- .../provider/utils/convertToRethnet.ts | 55 +- .../provider/vm/block-builder.ts | 4 + .../hardhat-network/provider/vm/creation.ts | 3 +- .../hardhat-network/provider/vm/dual.ts | 574 +++++++++++-- .../hardhat-network/provider/vm/ethereumjs.ts | 229 ++--- .../hardhat-network/provider/vm/exit.ts | 74 +- .../hardhat-network/provider/vm/rethnet.ts | 146 +++- .../hardhat-network/provider/vm/vm-adapter.ts | 22 +- .../stack-traces/error-inferrer.ts | 8 + .../stack-traces/message-trace.ts | 38 + .../hardhat-network/stack-traces/model.ts | 61 ++ .../hardhat-network/stack-traces/vm-tracer.ts | 118 +-- .../hardhat-network/helpers/contracts.ts | 58 ++ .../provider/modules/eth/hardforks.ts | 2 +- .../modules/eth/methods/sendTransaction.ts | 36 +- .../provider/utils/runFullBlock.ts | 5 +- .../hardhat-network/stack-traces/execution.ts | 19 +- 60 files changed, 2983 insertions(+), 1111 deletions(-) delete mode 100644 crates/rethnet_evm/src/db.rs create mode 100644 crates/rethnet_evm/src/state.rs rename crates/rethnet_evm/src/{db => state}/layered_db.rs (91%) rename crates/rethnet_evm/src/{db => state}/remote.rs (90%) rename 
crates/rethnet_evm/src/{db => state}/request.rs (82%) rename crates/rethnet_evm/src/{db => state}/sync.rs (84%) create mode 100644 crates/rethnet_evm_napi/src/account.rs create mode 100644 crates/rethnet_evm_napi/src/config.rs create mode 100644 crates/rethnet_evm_napi/src/log.rs create mode 100644 crates/rethnet_evm_napi/src/runtime.rs create mode 100644 crates/rethnet_evm_napi/src/tracer.rs create mode 100644 crates/rethnet_evm_napi/src/tracer/js_tracer.rs create mode 100644 crates/rethnet_evm_napi/src/transaction/result.rs diff --git a/crates/rethnet_eth/Cargo.toml b/crates/rethnet_eth/Cargo.toml index 336dd76ccb..45594fb6b8 100644 --- a/crates/rethnet_eth/Cargo.toml +++ b/crates/rethnet_eth/Cargo.toml @@ -14,7 +14,8 @@ hex-literal = { version = "0.3", default-features = false } open-fastrlp = { version = "0.1.2", default-features = false, features = ["derive"], optional = true } primitive-types = { version = "0.11.1", default-features = false, features = ["rlp"] } reqwest = { version = "0.11", features = ["blocking", "json"] } -revm = { git = "https://github.com/wodann/revm", rev = "7c28358", version = "2.3", default-features = false, features = ["k256"] } +revm-primitives = { git = "https://github.com/bluealloy/revm", rev = "8e6f4f2", version = "1.0", default-features = false } +# revm-primitives = { path = "../../../revm/crates/primitives", version = "1.0", default-features = false } rlp = { version = "0.5.2", default-features = false, features = ["derive"] } ruint = { version = "1.7.0", default-features = false } secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc", "recovery"] } @@ -25,10 +26,11 @@ thiserror = { version = "1.0.37", default-features = false } triehash = { version = "0.8.4", default-features = false } [dev-dependencies] +test-with = { version = "0.9.1", default-features = false } tokio = { version = "1.23.0", features = ["macros"] } [features] default = ["std"] # fastrlp = ["dep:open-fastrlp", "ruint/fastrlp"] Broken due to lack of support for fastrlp in primitive-types -serde = ["dep:serde", "bytes/serde", "ethbloom/serialize", "hashbrown/serde", "primitive-types/serde", "revm/with-serde", "ruint/serde", "serde_json"] -std = ["bytes/std", "ethbloom/std", "hash256-std-hasher/std", "hash-db/std", "hex/std", "open-fastrlp?/std", "primitive-types/std", "rlp/std", "secp256k1/std", "serde?/std", "sha3/std", "triehash/std"] +serde = ["dep:serde", "bytes/serde", "ethbloom/serialize", "hashbrown/serde", "primitive-types/serde", "revm-primitives/serde", "ruint/serde", "serde_json"] +std = ["bytes/std", "ethbloom/std", "hash256-std-hasher/std", "hash-db/std", "hex/std", "open-fastrlp?/std", "primitive-types/std", "revm-primitives/std", "rlp/std", "secp256k1/std", "serde?/std", "sha3/std", "triehash/std"] diff --git a/crates/rethnet_eth/src/account.rs b/crates/rethnet_eth/src/account.rs index 248f32fdd7..a24252e941 100644 --- a/crates/rethnet_eth/src/account.rs +++ b/crates/rethnet_eth/src/account.rs @@ -10,7 +10,7 @@ use hex_literal::hex; use crate::{trie::KECCAK_NULL_RLP, B256, U256}; /// The KECCAK for empty code. 
-pub const KECCAK_EMPTY: revm::B256 = revm::B256(hex!( +pub const KECCAK_EMPTY: revm_primitives::B256 = revm_primitives::B256(hex!( "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" )); diff --git a/crates/rethnet_eth/src/block.rs b/crates/rethnet_eth/src/block.rs index 30f4d9f2dd..fb9391402f 100644 --- a/crates/rethnet_eth/src/block.rs +++ b/crates/rethnet_eth/src/block.rs @@ -3,7 +3,7 @@ // - https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/LICENSE-MIT // For the original context see: https://github.com/foundry-rs/foundry/blob/01b16238ff87dc7ca8ee3f5f13e389888c2a2ee4/anvil/core/src/eth/block.rs -use revm::common::keccak256; +use revm_primitives::keccak256; use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; use ruint::aliases::U160; diff --git a/crates/rethnet_eth/src/lib.rs b/crates/rethnet_eth/src/lib.rs index ef7c23b00f..19e984169e 100644 --- a/crates/rethnet_eth/src/lib.rs +++ b/crates/rethnet_eth/src/lib.rs @@ -28,7 +28,7 @@ pub mod utils; pub use bytes::Bytes; pub use ethbloom::Bloom; -pub use revm::{B160, B256}; +pub use revm_primitives::{B160, B256}; pub use ruint::aliases::{B512, B64, U256, U64}; /// An Ethereum address diff --git a/crates/rethnet_eth/src/receipt.rs b/crates/rethnet_eth/src/receipt.rs index ff5dfc801b..b6a78abb39 100644 --- a/crates/rethnet_eth/src/receipt.rs +++ b/crates/rethnet_eth/src/receipt.rs @@ -22,9 +22,9 @@ pub struct Log { pub data: Bytes, } -impl From for Log { - fn from(log: revm::Log) -> Self { - let revm::Log { +impl From for Log { + fn from(log: revm_primitives::Log) -> Self { + let revm_primitives::Log { address, topics, data, @@ -37,14 +37,14 @@ impl From for Log { } } -impl From for revm::Log { +impl From for revm_primitives::Log { fn from(log: Log) -> Self { let Log { address, topics, data, } = log; - revm::Log { + revm_primitives::Log { address, topics, data, diff --git a/crates/rethnet_eth/src/remote.rs b/crates/rethnet_eth/src/remote.rs index f7941c79d9..ac5a638367 100644 --- a/crates/rethnet_eth/src/remote.rs +++ b/crates/rethnet_eth/src/remote.rs @@ -1,6 +1,6 @@ use std::sync::atomic::{AtomicU64, Ordering}; -use revm::AccountInfo; +use revm_primitives::AccountInfo; use crate::{Address, Bytes, B256, U256}; @@ -380,7 +380,7 @@ impl RpcClient { assert_eq!(results.1.id, response.request_ids[1]); assert_eq!(results.2.id, response.request_ids[2]); - let code = revm::Bytecode::new_raw(results.1.result); + let code = revm_primitives::Bytecode::new_raw(results.1.result); Ok(AccountInfo { balance: results.0.result, @@ -393,24 +393,23 @@ impl RpcClient { #[cfg(test)] mod tests { - use super::*; - use std::str::FromStr; use crate::{Address, Bytes, U256}; - fn get_alchemy_url() -> Result { - Ok(std::env::var_os("ALCHEMY_URL") + use super::*; + + fn get_alchemy_url() -> String { + std::env::var_os("ALCHEMY_URL") .expect("ALCHEMY_URL environment variable not defined") .into_string() - .expect("couldn't convert OsString into a String")) + .expect("Couldn't convert OsString into a String") } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_tx_by_hash_success() { - use std::str::FromStr; - - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let hash = B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") @@ -493,6 +492,7 @@ mod tests { ); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_tx_by_hash_dns_error() { let alchemy_url = "https://xxxeth-mainnet.g.alchemy.com"; @@ -513,6 
+513,7 @@ mod tests { assert!(error_string.contains("dns error")); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_tx_by_hash_bad_api_key() { let alchemy_url = "https://eth-mainnet.g.alchemy.com/v2/abcdefg"; @@ -534,9 +535,10 @@ mod tests { assert!(error_string.contains("Must be authenticated!")); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_tx_receipt_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let hash = B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") @@ -593,6 +595,7 @@ mod tests { assert_eq!(receipt.transaction_type, Some(0)); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_tx_receipt_dns_error() { let alchemy_url = "https://xxxeth-mainnet.g.alchemy.com"; @@ -613,6 +616,7 @@ mod tests { assert!(error_string.contains("dns error")); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_tx_receipt_bad_api_key() { let alchemy_url = "https://eth-mainnet.g.alchemy.com/v2/abcdefg"; @@ -634,9 +638,10 @@ mod tests { assert!(error_string.contains("Must be authenticated!")); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_logs_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let logs = RpcClient::new(&alchemy_url) .get_logs( 10496585, @@ -651,9 +656,10 @@ mod tests { // TODO: consider asserting something about the logs bloom } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_block_by_hash_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let hash = B256::from_str("0x71d5e7c8ff9ea737034c16e333a75575a4a94d29482e0c2b88f0a6a8369c1812") @@ -668,9 +674,10 @@ mod tests { assert_eq!(block.transactions.len(), 192); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_block_by_number_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let block_number = 16222385; @@ -683,9 +690,10 @@ mod tests { assert_eq!(block.transactions.len(), 102); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_storage_at_with_block_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") .expect("failed to parse address"); @@ -713,9 +721,10 @@ mod tests { ); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_storage_at_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") .expect("failed to parse address"); @@ -736,9 +745,10 @@ mod tests { assert!(total_supply > U256::from(0)); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_transaction_count_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") .expect("failed to parse address"); @@ -751,9 +761,10 @@ mod tests { assert_eq!(transaction_count, U256::from(1)); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_account_info_with_block_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = 
get_alchemy_url(); let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") .expect("failed to parse address"); @@ -767,9 +778,10 @@ mod tests { assert_eq!(account_info.nonce, 1); } + #[test_with::env(ALCHEMY_URL)] #[tokio::test] async fn get_account_info_success() { - let alchemy_url = get_alchemy_url().expect("failed to get Alchemy URL"); + let alchemy_url = get_alchemy_url(); let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") .expect("failed to parse address"); diff --git a/crates/rethnet_eth/src/transaction.rs b/crates/rethnet_eth/src/transaction.rs index 9c2db2dcb1..7213fb3da3 100644 --- a/crates/rethnet_eth/src/transaction.rs +++ b/crates/rethnet_eth/src/transaction.rs @@ -6,7 +6,7 @@ //! transaction related data -use revm::common::keccak256; +use revm_primitives::keccak256; use rlp::{Decodable, DecoderError, Encodable, Rlp, RlpStream}; use ruint::aliases::U160; diff --git a/crates/rethnet_eth/src/utils.rs b/crates/rethnet_eth/src/utils.rs index f4b80cd08c..e5b4802ac5 100644 --- a/crates/rethnet_eth/src/utils.rs +++ b/crates/rethnet_eth/src/utils.rs @@ -9,7 +9,7 @@ // For the original context see: https://github.com/gakonst/ethers-rs/blob/cba6f071aedafb766e82e4c2f469ed5e4638337d/ethers-core/src/utils/hash.rs use crate::B256; -use revm::common::keccak256; +use revm_primitives::keccak256; use rlp::RlpStream; /// RLP-encodes the provided value, prepends it with the provided ID, and appends it to the provided [`RlpStream`]. diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 98d8076dc6..74a83a5274 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -4,16 +4,19 @@ version = "0.1.0-dev" edition = "2021" [dependencies] -anyhow = { version = "1.0.64", default-features = false, features = ["std"] } auto_impl = { version = "1.0.1", default-features = false } ethers-signers = { version = "1.0.0", default-features = false } hashbrown = { version = "0.13", default-features = false, features = ["ahash", "serde"] } log = { version = "0.4.17", default-features = false } parking_lot = { version = "0.12.1", default-features = false } rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth", features = ["serde"] } -revm = { git = "https://github.com/wodann/revm", rev = "7c28358", version = "2.3", default-features = false, features = ["dev", "k256", "with-serde"] } +revm = { git = "https://github.com/bluealloy/revm", rev = "8e6f4f2", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } +# revm = { path = "../../../revm/crates/revm", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } secp256k1 = { version = "0.24.1", default-features = false, features = ["alloc"] } sha3 = { version = "0.10.4", default-features = false } signature = { version = "1.6.4", default-features = false, features = ["std"] } thiserror = { version = "1.0.38", default-features = false } tokio = { version = "1.21.2", default-features = false, features = ["rt-multi-thread", "sync"] } + +[dev-dependencies] +test-with = { version = "0.9.1", default-features = false } diff --git a/crates/rethnet_evm/src/block/builder.rs b/crates/rethnet_evm/src/block/builder.rs index 397167def3..2afed9532e 100644 --- a/crates/rethnet_evm/src/block/builder.rs +++ b/crates/rethnet_evm/src/block/builder.rs @@ -1,42 +1,74 @@ use std::{fmt::Debug, sync::Arc}; -use anyhow::bail; use rethnet_eth::{ block::{Header, PartialHeader}, Address, U256, }; -use revm::{BlockEnv, CfgEnv, 
ExecutionResult, SpecId, TxEnv}; +use revm::{ + db::DatabaseComponentError, + primitives::{BlockEnv, CfgEnv, EVMError, ExecutionResult, InvalidTransaction, SpecId, TxEnv}, + Inspector, +}; use tokio::runtime::Runtime; use crate::{ - blockchain::AsyncBlockchain, db::AsyncDatabase, evm::build_evm, inspector::RethnetInspector, + blockchain::AsyncBlockchain, evm::run_transaction, runtime::AsyncDatabase, state::AsyncState, trace::Trace, HeaderData, }; +#[derive(Debug, thiserror::Error)] +pub enum BlockTransactionError { + #[error(transparent)] + BlockHash(BE), + #[error("Transaction has a higher gas limit than the remaining gas in the block")] + ExceedsBlockGasLimit, + #[error("Invalid transaction")] + InvalidTransaction(InvalidTransaction), + #[error(transparent)] + State(SE), +} + +impl From>> for BlockTransactionError +where + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, +{ + fn from(error: EVMError>) -> Self { + match error { + EVMError::Transaction(e) => Self::InvalidTransaction(e), + EVMError::PrevrandaoNotSet => unreachable!(), + EVMError::Database(DatabaseComponentError::State(e)) => Self::State(e), + EVMError::Database(DatabaseComponentError::BlockHash(e)) => Self::BlockHash(e), + } + } +} + /// A builder for constructing Ethereum blocks. -pub struct BlockBuilder +pub struct BlockBuilder where - E: Debug + Send + 'static, + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, { - blockchain: Arc>, - state: Arc>, + blockchain: Arc>, + state: Arc>, header: PartialHeader, transactions: Vec, cfg: CfgEnv, } -impl BlockBuilder +impl BlockBuilder where - E: Debug + Send + 'static, + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, { /// Creates an instance of [`BlockBuilder`], creating a checkpoint in the process. - pub async fn new( - blockchain: Arc>, - db: Arc>, + pub fn new( + blockchain: Arc>, + state: Arc>, cfg: CfgEnv, parent: Header, header: HeaderData, - ) -> Result { + ) -> Self { // TODO: Proper implementation of a block builder // db.checkpoint().await?; @@ -48,13 +80,13 @@ where ..PartialHeader::default() }; - Ok(Self { + Self { blockchain, - state: db, + state, header, transactions: Vec::new(), cfg, - }) + } } /// Retrieves the runtime of the [`BlockBuilder`]. 
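To make the reworked builder API concrete, here is a minimal, hedged usage sketch written against the signatures above, as if inside the rethnet_evm crate. The helper name `seal_single_transaction`, the exact import paths, and the `BE`/`SE` parameter order on `BlockTransactionError` are illustrative assumptions inferred from the surrounding hunks.

use std::{fmt::Debug, sync::Arc};

use rethnet_eth::block::Header;
use revm::primitives::{CfgEnv, ExecutionResult, TxEnv};

use crate::{
    block::{BlockBuilder, BlockTransactionError},
    blockchain::AsyncBlockchain,
    state::AsyncState,
    trace::Trace,
    HeaderData,
};

// Sketch: build a block containing a single transaction with the new API.
async fn seal_single_transaction<BE, SE>(
    blockchain: Arc<AsyncBlockchain<BE>>,
    state: Arc<AsyncState<SE>>,
    cfg: CfgEnv,
    parent: Header,
    header: HeaderData,
    transaction: TxEnv,
) -> Result<(ExecutionResult, Trace), BlockTransactionError<BE, SE>>
where
    BE: Debug + Send + 'static,
    SE: Debug + Send + 'static,
{
    // `new` is now synchronous and infallible (the checkpoint is still a TODO above).
    let mut builder = BlockBuilder::new(blockchain, state, cfg, parent, header);

    // `add_transaction` now takes an optional inspector; passing `None` falls back
    // to the built-in `RethnetInspector`, so a `Trace` is still recorded.
    let (result, trace) = builder.add_transaction(transaction, None).await?;

    Ok((result, trace))
}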
@@ -84,10 +116,11 @@ where pub async fn add_transaction( &mut self, transaction: TxEnv, - ) -> anyhow::Result<(ExecutionResult, Trace)> { + inspector: Option> + Send>>, + ) -> Result<(ExecutionResult, Trace), BlockTransactionError> { // transaction's gas limit cannot be greater than the remaining gas in the block if U256::from(transaction.gas_limit) > self.gas_remaining() { - bail!("tx has a higher gas limit than the remaining gas in the block"); + return Err(BlockTransactionError::ExceedsBlockGasLimit); } self.transactions.push(transaction.clone()); @@ -105,26 +138,21 @@ where }, }; - let blockchain = self.blockchain.clone(); - let db = self.state.clone(); - let cfg = self.cfg.clone(); - - let (result, changes, trace) = self - .state - .runtime() - .spawn(async move { - let mut evm = build_evm(&blockchain, &db, cfg, transaction, block); - - let mut inspector = RethnetInspector::default(); - let (result, state) = evm.inspect(&mut inspector); - (result, state, inspector.into_trace()) - }) - .await - .unwrap(); + let (result, changes, trace) = run_transaction( + self.state.runtime(), + self.blockchain.clone(), + self.state.clone(), + self.cfg.clone(), + transaction, + block, + inspector, + ) + .await + .unwrap()?; self.state.apply(changes).await; - self.header.gas_used += U256::from(result.gas_used); + self.header.gas_used += U256::from(result.gas_used()); // TODO: store receipt Ok((result, trace)) @@ -132,7 +160,7 @@ where /// Finalizes the block, returning the state root. /// TODO: Build a full block - pub async fn finalize(self, rewards: Vec<(Address, U256)>) -> Result<(), E> { + pub async fn finalize(self, rewards: Vec<(Address, U256)>) -> Result<(), SE> { for (address, reward) in rewards { self.state .modify_account( @@ -146,7 +174,7 @@ where } /// Aborts building of the block, reverting all transactions in the process. - pub async fn abort(self) -> Result<(), E> { + pub async fn abort(self) -> Result<(), SE> { self.state.revert().await } } diff --git a/crates/rethnet_evm/src/blockchain/request.rs b/crates/rethnet_evm/src/blockchain/request.rs index 2114f71fc0..a1e0cb18bf 100644 --- a/crates/rethnet_evm/src/blockchain/request.rs +++ b/crates/rethnet_evm/src/blockchain/request.rs @@ -1,7 +1,7 @@ use std::fmt::Debug; use rethnet_eth::{B256, U256}; -use revm::blockchain::Blockchain; +use revm::db::BlockHash; use tokio::sync::oneshot; /// The request type used internally by a [`SyncDatabase`]. @@ -28,7 +28,7 @@ where { pub fn handle(self, db: &mut D) -> bool where - D: Blockchain, + D: BlockHash, { match self { Request::BlockHashByNumber { number, sender } => { diff --git a/crates/rethnet_evm/src/blockchain/sync.rs b/crates/rethnet_evm/src/blockchain/sync.rs index 7376bfefdf..dd7c8a8c35 100644 --- a/crates/rethnet_evm/src/blockchain/sync.rs +++ b/crates/rethnet_evm/src/blockchain/sync.rs @@ -1,7 +1,7 @@ use std::{fmt::Debug, io}; use rethnet_eth::{B256, U256}; -use revm::blockchain::Blockchain; +use revm::db::{BlockHash, BlockHashRef}; use tokio::{ runtime::{Builder, Runtime}, sync::{ @@ -14,7 +14,7 @@ use tokio::{ use super::request::Request; /// Trait that meets all requirements for a synchronous database that can be used by [`AsyncBlockchain`]. 
-pub trait SyncBlockchain: Blockchain + Send + Sync + 'static +pub trait SyncBlockchain: BlockHash + Send + Sync + 'static where E: Debug + Send, { @@ -22,7 +22,7 @@ where impl SyncBlockchain for B where - B: Blockchain + Send + Sync + 'static, + B: BlockHash + Send + Sync + 'static, E: Debug + Send, { } @@ -111,13 +111,13 @@ where } } -impl<'b, E> Blockchain for &'b AsyncBlockchain +impl BlockHashRef for AsyncBlockchain where E: Debug + Send + 'static, { type Error = E; - fn block_hash(&mut self, number: U256) -> Result { + fn block_hash(&self, number: U256) -> Result { task::block_in_place(move || self.runtime.block_on(self.block_hash_by_number(number))) } } diff --git a/crates/rethnet_evm/src/db.rs b/crates/rethnet_evm/src/db.rs deleted file mode 100644 index e2c841b6be..0000000000 --- a/crates/rethnet_evm/src/db.rs +++ /dev/null @@ -1,10 +0,0 @@ -mod layered_db; -mod remote; -mod request; -mod sync; - -pub use sync::{AsyncDatabase, SyncDatabase}; - -pub use layered_db::{LayeredDatabase, RethnetLayer}; - -pub use remote::RemoteDatabase; diff --git a/crates/rethnet_evm/src/debug.rs b/crates/rethnet_evm/src/debug.rs index be867b1999..cf10ca7afa 100644 --- a/crates/rethnet_evm/src/debug.rs +++ b/crates/rethnet_evm/src/debug.rs @@ -1,19 +1,19 @@ use auto_impl::auto_impl; use rethnet_eth::{Address, B256, U256}; -use revm::{AccountInfo, Bytecode}; +use revm::primitives::{AccountInfo, Bytecode}; pub type ModifierFn = Box) + Send>; /// A trait for debug operation on a database. #[auto_impl(Box)] -pub trait DatabaseDebug { +pub trait StateDebug { /// The database's error type. type Error; /// Retrieves the storage root of the account at the specified address. fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error>; - /// Inserts an account with the specified address. + /// Inserts the provided account at the specified address. fn insert_account( &mut self, address: Address, @@ -50,6 +50,6 @@ pub trait DatabaseDebug { /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. fn make_snapshot(&mut self) -> B256; - /// Removes the snapshot corresponding to the specified id, if it exists. Returns whether a snapshot was removed. + /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. fn remove_snapshot(&mut self, state_root: &B256) -> bool; } diff --git a/crates/rethnet_evm/src/evm.rs b/crates/rethnet_evm/src/evm.rs index 3639cafc78..d1af33f84c 100644 --- a/crates/rethnet_evm/src/evm.rs +++ b/crates/rethnet_evm/src/evm.rs @@ -1,27 +1,66 @@ -use std::fmt::Debug; +use std::{fmt::Debug, sync::Arc}; -use revm::{BlockEnv, CfgEnv, TxEnv}; +use revm::{ + db::{DatabaseComponentError, DatabaseComponents}, + primitives::{BlockEnv, CfgEnv, EVMError, ExecutionResult, ResultAndState, State, TxEnv}, + Inspector, +}; +use tokio::{runtime::Runtime, task::JoinHandle}; -use crate::{blockchain::AsyncBlockchain, db::AsyncDatabase}; +use crate::{ + blockchain::AsyncBlockchain, inspector::RethnetInspector, runtime::AsyncDatabase, + state::AsyncState, trace::Trace, +}; /// Creates an evm from the provided database, config, transaction, and block. 
#[allow(clippy::type_complexity)] -pub fn build_evm<'b, 'd, E>( - blockchain: &'b AsyncBlockchain, - db: &'d AsyncDatabase, +fn build_evm( + blockchain: Arc>, + state: Arc>, cfg: CfgEnv, transaction: TxEnv, block: BlockEnv, -) -> revm::EVM<&'d AsyncDatabase, &'b AsyncBlockchain> +) -> revm::EVM> where - E: Debug + Send + 'static, + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, { let mut evm = revm::EVM::new(); - evm.set_blockchain(blockchain); - evm.database(db); + evm.database(DatabaseComponents { + state, + block_hash: blockchain, + }); evm.env.cfg = cfg; evm.env.block = block; evm.env.tx = transaction; evm } + +#[allow(clippy::type_complexity)] +pub fn run_transaction( + runtime: &Runtime, + blockchain: Arc>, + state: Arc>, + cfg: CfgEnv, + transaction: TxEnv, + block: BlockEnv, + inspector: Option> + Send>>, +) -> JoinHandle>>> +where + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, +{ + runtime.spawn(async move { + let mut evm = build_evm(blockchain, state, cfg, transaction, block); + if let Some(mut inspector) = inspector { + let ResultAndState { result, state } = evm.inspect(&mut inspector)?; + Ok((result, state, Trace::default())) + } else { + let mut inspector = RethnetInspector::default(); + let ResultAndState { result, state } = evm.inspect(&mut inspector)?; + + Ok((result, state, inspector.into_trace())) + } + }) +} diff --git a/crates/rethnet_evm/src/inspector.rs b/crates/rethnet_evm/src/inspector.rs index cb3491a354..df91ed8829 100644 --- a/crates/rethnet_evm/src/inspector.rs +++ b/crates/rethnet_evm/src/inspector.rs @@ -1,4 +1,7 @@ -use revm::{blockchain::Blockchain, opcode, Database, EVMData, Inspector, Interpreter, Return}; +use revm::{ + interpreter::{opcode, InstructionResult, Interpreter}, + Database, EVMData, Inspector, +}; use crate::trace::Trace; @@ -15,29 +18,28 @@ impl RethnetInspector { } } -impl Inspector for RethnetInspector +impl Inspector for RethnetInspector where D: Database, - BC: Blockchain, { fn step( &mut self, interp: &mut Interpreter, - _data: &mut EVMData<'_, D, BC>, + _data: &mut EVMData<'_, D>, _is_static: bool, - ) -> Return { + ) -> InstructionResult { self.opcode_stack.push(interp.current_opcode()); - Return::Continue + InstructionResult::Continue } fn step_end( &mut self, interp: &mut Interpreter, - _data: &mut EVMData<'_, D, BC>, + _data: &mut EVMData<'_, D>, _is_static: bool, - exit_code: Return, - ) -> Return { + exit_code: InstructionResult, + ) -> InstructionResult { let opcode = self .opcode_stack .pop() @@ -45,10 +47,10 @@ where self.trace.add_step(opcode, interp.gas(), exit_code); - if opcode == opcode::RETURN { + if opcode == opcode::RETURN || opcode == opcode::REVERT { self.trace.return_value = interp.return_value(); } - Return::Continue + InstructionResult::Continue } } diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs index b72dd4a1ed..0f73af3fde 100644 --- a/crates/rethnet_evm/src/lib.rs +++ b/crates/rethnet_evm/src/lib.rs @@ -4,31 +4,33 @@ //! Virtual Machine (or EVM). 
#![warn(missing_docs)] -use rethnet_eth::Address; - pub use hashbrown::HashMap; + pub use revm::{ - blockchain::{Blockchain, BlockchainRef}, - db::EmptyDB, - Account, AccountInfo, BlockEnv, Bytecode, CfgEnv, CreateScheme, Database, DatabaseCommit, - ExecutionResult, Log, Return, SpecId, TransactOut, TransactTo, TxEnv, EVM, + db::{ + BlockHash, BlockHashRef, Database, DatabaseCommit, DatabaseComponentError, + DatabaseComponents, State as StateMut, StateRef, + }, + interpreter::{ + instruction_result::SuccessOrHalt, opcode, return_revert, CallInputs, CreateInputs, Gas, + InstructionResult, Interpreter, OPCODE_JUMPMAP, + }, + primitives::*, + EVMData, Inspector, }; pub use crate::{ block::{BlockBuilder, HeaderData}, - debug::DatabaseDebug, - runtime::Rethnet, - transaction::PendingTransaction, + debug::StateDebug, + runtime::{AsyncDatabase, Rethnet}, + transaction::{PendingTransaction, TransactionError}, }; -/// State mapping of addresses to accounts. -pub type State = HashMap; - /// Types for managing Ethereum blockchain pub mod blockchain; /// Database types for managing Ethereum state -pub mod db; +pub mod state; /// Types used for tracing EVM calls pub mod trace; diff --git a/crates/rethnet_evm/src/random.rs b/crates/rethnet_evm/src/random.rs index d0ea9e2941..f63290a330 100644 --- a/crates/rethnet_evm/src/random.rs +++ b/crates/rethnet_evm/src/random.rs @@ -1,7 +1,7 @@ #![allow(dead_code)] use rethnet_eth::B256; -use revm::common::keccak256; +use revm::primitives::keccak256; /// A pseudorandom hash generator which allows overriding of the next generated hash. #[derive(Debug)] diff --git a/crates/rethnet_evm/src/runtime.rs b/crates/rethnet_evm/src/runtime.rs index 49a04858cf..41d5752f0d 100644 --- a/crates/rethnet_evm/src/runtime.rs +++ b/crates/rethnet_evm/src/runtime.rs @@ -1,35 +1,40 @@ use std::{fmt::Debug, sync::Arc}; -use revm::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}; +use revm::{ + db::DatabaseComponents, + primitives::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}, + Inspector, +}; use crate::{ - blockchain::AsyncBlockchain, db::AsyncDatabase, evm::build_evm, inspector::RethnetInspector, - trace::Trace, transaction::TransactionError, State, + blockchain::AsyncBlockchain, evm::run_transaction, state::AsyncState, trace::Trace, + transaction::TransactionError, State, }; +/// Asynchronous implementation of the Database super-trait +pub type AsyncDatabase = DatabaseComponents>, Arc>>; + /// The asynchronous Rethnet runtime. -pub struct Rethnet +pub struct Rethnet where - E: Debug + Send + 'static, + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, { - blockchain: Arc>, - db: Arc>, + blockchain: Arc>, + state: Arc>, cfg: CfgEnv, } -impl Rethnet +impl Rethnet where - E: Debug + Send + 'static, + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, { /// Constructs a new [`Rethnet`] instance. 
- pub fn new( - blockchain: Arc>, - db: Arc>, - cfg: CfgEnv, - ) -> Self { + pub fn new(blockchain: Arc>, db: Arc>, cfg: CfgEnv) -> Self { Self { blockchain, - db, + state: db, cfg, } } @@ -39,27 +44,24 @@ where &self, transaction: TxEnv, block: BlockEnv, - ) -> Result<(ExecutionResult, State, Trace), TransactionError> { + inspector: Option> + Send>>, + ) -> Result<(ExecutionResult, State, Trace), TransactionError> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); } - let blockchain = self.blockchain.clone(); - let db = self.db.clone(); - let cfg = self.cfg.clone(); - - Ok(self - .db - .runtime() - .spawn(async move { - let mut evm = build_evm(&blockchain, &db, cfg, transaction, block); - - let mut inspector = RethnetInspector::default(); - let (result, state) = evm.inspect(&mut inspector); - (result, state, inspector.into_trace()) - }) - .await - .unwrap()) + run_transaction( + self.state.runtime(), + self.blockchain.clone(), + self.state.clone(), + self.cfg.clone(), + transaction, + block, + inspector, + ) + .await + .unwrap() + .map_err(TransactionError::from) } /// Runs a transaction without committing the state, while disabling balance checks and creating accounts for new addresses. @@ -67,29 +69,27 @@ where &self, transaction: TxEnv, block: BlockEnv, - ) -> Result<(ExecutionResult, State, Trace), TransactionError> { + inspector: Option> + Send>>, + ) -> Result<(ExecutionResult, State, Trace), TransactionError> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); } - let blockchain = self.blockchain.clone(); - let db = self.db.clone(); - let mut cfg = self.cfg.clone(); cfg.disable_balance_check = true; - Ok(self - .db - .runtime() - .spawn(async move { - let mut evm = build_evm(&blockchain, &db, cfg, transaction, block); - - let mut inspector = RethnetInspector::default(); - let (result, state) = evm.inspect(&mut inspector); - (result, state, inspector.into_trace()) - }) - .await - .unwrap()) + run_transaction( + self.state.runtime(), + self.blockchain.clone(), + self.state.clone(), + cfg, + transaction, + block, + inspector, + ) + .await + .unwrap() + .map_err(TransactionError::from) } /// Runs a transaction, committing the state in the process. 
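The runtime side follows the same pattern. Below is a hedged caller sketch, again written as if inside rethnet_evm; the function name `simulate` and the exact re-export paths are illustrative assumptions, while the `dry_run` signature mirrors the hunk above.

use std::fmt::Debug;

use revm::primitives::{BlockEnv, ExecutionResult, TxEnv};

use crate::{runtime::Rethnet, trace::Trace, transaction::TransactionError, State};

// Sketch: execute a transaction against the runtime without committing state.
async fn simulate<BE, SE>(
    runtime: &Rethnet<BE, SE>,
    transaction: TxEnv,
    block: BlockEnv,
) -> Result<(ExecutionResult, State, Trace), TransactionError<BE, SE>>
where
    BE: Debug + Send + 'static,
    SE: Debug + Send + 'static,
{
    // Passing `None` keeps the default `RethnetInspector` tracing; passing
    // `Some(inspector)` lets a caller-provided `Inspector` observe execution
    // instead, in which case the returned trace is left empty.
    runtime.dry_run(transaction, block, None).await
}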
@@ -97,10 +97,11 @@ where &self, transaction: TxEnv, block: BlockEnv, - ) -> Result<(ExecutionResult, Trace), TransactionError> { - let (result, changes, trace) = self.dry_run(transaction, block).await?; + inspector: Option> + Send>>, + ) -> Result<(ExecutionResult, Trace), TransactionError> { + let (result, changes, trace) = self.dry_run(transaction, block, inspector).await?; - self.db.apply(changes).await; + self.state.apply(changes).await; Ok((result, trace)) } diff --git a/crates/rethnet_evm/src/state.rs b/crates/rethnet_evm/src/state.rs new file mode 100644 index 0000000000..ef1954c132 --- /dev/null +++ b/crates/rethnet_evm/src/state.rs @@ -0,0 +1,24 @@ +mod layered_db; +mod remote; +mod request; +mod sync; + +use rethnet_eth::B256; + +pub use self::layered_db::{LayeredState, RethnetLayer}; +pub use self::remote::RemoteDatabase; +pub use self::sync::{AsyncState, SyncState}; + +/// Combinatorial error for the database API +#[derive(Debug, thiserror::Error)] +pub enum StateError { + /// No checkpoints to revert + #[error("No checkpoints to revert.")] + CannotRevert, + /// Contract with specified code hash does not exist + #[error("Contract with code hash `{0}` does not exist.")] + InvalidCodeHash(B256), + /// Specified state root does not exist + #[error("State root `{0}` does not exist.")] + InvalidStateRoot(B256), +} diff --git a/crates/rethnet_evm/src/db/layered_db.rs b/crates/rethnet_evm/src/state/layered_db.rs similarity index 91% rename from crates/rethnet_evm/src/db/layered_db.rs rename to crates/rethnet_evm/src/state/layered_db.rs index 80218f4842..bc71f4ad62 100644 --- a/crates/rethnet_evm/src/db/layered_db.rs +++ b/crates/rethnet_evm/src/state/layered_db.rs @@ -1,4 +1,3 @@ -use anyhow::anyhow; use hashbrown::HashMap; use rethnet_eth::{ account::BasicAccount, @@ -6,9 +5,15 @@ use rethnet_eth::{ trie::KECCAK_NULL_RLP, Address, B256, U256, }; -use revm::{Account, AccountInfo, Bytecode, Database, DatabaseCommit, KECCAK_EMPTY}; +use revm::{ + db::State, + primitives::{Account, AccountInfo, Bytecode, KECCAK_EMPTY}, + DatabaseCommit, +}; + +use crate::StateDebug; -use crate::DatabaseDebug; +use super::StateError; #[derive(Clone, Debug)] struct RevertedLayers { @@ -18,9 +23,9 @@ struct RevertedLayers { pub stack: Vec, } -/// A database consisting of layers. +/// A state consisting of layers. #[derive(Clone, Debug)] -pub struct LayeredDatabase { +pub struct LayeredState { stack: Vec, /// The old parent layer state root and the reverted layers reverted_layers: Option>, @@ -28,8 +33,8 @@ pub struct LayeredDatabase { snapshots: HashMap>, // naive implementation } -impl LayeredDatabase { - /// Creates a [`LayeredDatabase`] with the provided layer at the bottom. +impl LayeredState { + /// Creates a [`LayeredState`] with the provided layer at the bottom. pub fn with_layer(layer: Layer) -> Self { Self { stack: vec![layer], @@ -45,7 +50,7 @@ impl LayeredDatabase { /// Returns a mutable reference to the top layer. pub fn last_layer_mut(&mut self) -> &mut Layer { - // The `LayeredDatabase` always has at least one layer + // The `LayeredState` always has at least one layer self.stack.last_mut().unwrap() } @@ -70,7 +75,7 @@ impl LayeredDatabase { } } -impl LayeredDatabase { +impl LayeredState { /// Adds a default layer to the top, returning its index and a /// mutable reference to the layer. 
pub fn add_layer_default(&mut self) -> (usize, &mut Layer) { @@ -78,7 +83,7 @@ impl LayeredDatabase { } } -impl Default for LayeredDatabase { +impl Default for LayeredState { fn default() -> Self { Self { stack: vec![Layer::default()], @@ -137,7 +142,7 @@ impl RethnetLayer { } } -impl LayeredDatabase { +impl LayeredState { /// Retrieves a reference to the account corresponding to the address, if it exists. pub fn account(&self, address: &Address) -> Option<&AccountInfo> { self.iter() @@ -237,19 +242,17 @@ impl LayeredDatabase { } } -impl Database for LayeredDatabase { - type Error = anyhow::Error; +impl State for LayeredState { + type Error = StateError; - fn basic(&mut self, address: Address) -> anyhow::Result> { + fn basic(&mut self, address: Address) -> Result, Self::Error> { let account = self .iter() .find_map(|layer| layer.account_infos.get(&address)) .cloned() .flatten(); - log::debug!("account with address `{}`: {:?}", address, account); - - // TODO: Move this out of LayeredDatabase when forking + // TODO: Move this out of LayeredState when forking Ok(account.or(Some(AccountInfo { balance: U256::ZERO, nonce: 0, @@ -258,22 +261,17 @@ impl Database for LayeredDatabase { }))) } - fn code_by_hash(&mut self, code_hash: B256) -> anyhow::Result { + fn code_by_hash(&mut self, code_hash: B256) -> Result { if code_hash == KECCAK_EMPTY { return Ok(Bytecode::new()); } self.iter() .find_map(|layer| layer.contracts.get(&code_hash).cloned()) - .ok_or_else(|| { - anyhow!( - "Layered database does not contain contract with code hash: {}.", - code_hash, - ) - }) + .ok_or(StateError::InvalidCodeHash(code_hash)) } - fn storage(&mut self, address: Address, index: U256) -> anyhow::Result { + fn storage(&mut self, address: Address, index: U256) -> Result { Ok(self .iter() .find_map(|layer| layer.storage.get(&address).map(|storage| storage.as_ref())) @@ -284,7 +282,7 @@ impl Database for LayeredDatabase { } } -impl DatabaseCommit for LayeredDatabase { +impl DatabaseCommit for LayeredState { fn commit(&mut self, changes: HashMap) { changes.into_iter().for_each(|(address, account)| { if account.is_empty() || account.is_destroyed { @@ -334,8 +332,8 @@ impl DatabaseCommit for LayeredDatabase { } } -impl DatabaseDebug for LayeredDatabase { - type Error = anyhow::Error; +impl StateDebug for LayeredState { + type Error = StateError; fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { Ok(self @@ -372,7 +370,7 @@ impl DatabaseDebug for LayeredDatabase { address: Address, modifier: Box) + Send>, ) -> Result<(), Self::Error> { - // TODO: Move account insertion out of LayeredDatabase when forking + // TODO: Move account insertion out of LayeredState when forking let account_info = self.account_or_insert_mut(&address); let old_code_hash = account_info.code_hash; @@ -522,7 +520,7 @@ impl DatabaseDebug for LayeredDatabase { Ok(()) } else { - Err(anyhow!("Unknown state root: {}", state_root)) + Err(StateError::InvalidStateRoot(*state_root)) } } @@ -585,7 +583,7 @@ impl DatabaseDebug for LayeredDatabase { self.revert_to_layer(last_layer_id - 1); Ok(()) } else { - Err(anyhow!("No checkpoints to revert.")) + Err(StateError::CannotRevert) } } } diff --git a/crates/rethnet_evm/src/db/remote.rs b/crates/rethnet_evm/src/state/remote.rs similarity index 90% rename from crates/rethnet_evm/src/db/remote.rs rename to crates/rethnet_evm/src/state/remote.rs index e59785c6d6..370ebad7ee 100644 --- a/crates/rethnet_evm/src/db/remote.rs +++ b/crates/rethnet_evm/src/state/remote.rs @@ -1,9 +1,13 @@ +use 
revm::{ + db::StateRef, + primitives::{AccountInfo, Bytecode}, +}; use tokio::runtime::{Builder, Runtime}; -use rethnet_eth::remote::{RpcClient, RpcClientError}; -use rethnet_eth::{Address, B256, U256}; - -use revm::{db::DatabaseRef, AccountInfo, Bytecode}; +use rethnet_eth::{ + remote::{RpcClient, RpcClientError}, + Address, B256, U256, +}; /// An revm database backed by a remote Ethereum node pub struct RemoteDatabase { @@ -36,7 +40,7 @@ impl RemoteDatabase { } } -impl DatabaseRef for RemoteDatabase { +impl StateRef for RemoteDatabase { type Error = RemoteDatabaseError; fn basic(&self, address: Address) -> Result, Self::Error> { @@ -65,6 +69,7 @@ mod tests { use std::str::FromStr; + #[test_with::env(ALCHEMY_URL)] #[test] fn basic_success() { let alchemy_url = std::env::var_os("ALCHEMY_URL") diff --git a/crates/rethnet_evm/src/db/request.rs b/crates/rethnet_evm/src/state/request.rs similarity index 82% rename from crates/rethnet_evm/src/db/request.rs rename to crates/rethnet_evm/src/state/request.rs index 36c949c39e..44c97a98ea 100644 --- a/crates/rethnet_evm/src/db/request.rs +++ b/crates/rethnet_evm/src/state/request.rs @@ -2,10 +2,14 @@ use std::fmt::Debug; use hashbrown::HashMap; use rethnet_eth::{Address, B256, U256}; -use revm::{Account, AccountInfo, Bytecode, Database, DatabaseCommit}; +use revm::{ + db::State, + primitives::{Account, AccountInfo, Bytecode}, + DatabaseCommit, +}; use tokio::sync::oneshot; -use crate::{debug::ModifierFn, DatabaseDebug}; +use crate::{debug::ModifierFn, StateDebug}; /// The request type used internally by a [`SyncDatabase`]. pub enum Request @@ -80,23 +84,23 @@ impl Request where E: Debug, { - pub fn handle(self, db: &mut D) -> bool + pub fn handle(self, state: &mut S) -> bool where - D: Database + DatabaseCommit + DatabaseDebug, + S: State + DatabaseCommit + StateDebug, { match self { Request::AccountByAddress { address, sender } => { - sender.send(db.basic(address)).unwrap() + sender.send(state.basic(address)).unwrap() } Request::AccountStorageRoot { address, sender } => { - sender.send(db.account_storage_root(&address)).unwrap() + sender.send(state.account_storage_root(&address)).unwrap() } - Request::Checkpoint { sender } => sender.send(db.checkpoint()).unwrap(), + Request::Checkpoint { sender } => sender.send(state.checkpoint()).unwrap(), Request::CodeByHash { code_hash, sender } => { - sender.send(db.code_by_hash(code_hash)).unwrap() + sender.send(state.code_by_hash(code_hash)).unwrap() } Request::Commit { changes, sender } => { - db.commit(changes); + state.commit(changes); sender.send(()).unwrap() } Request::InsertAccount { @@ -104,38 +108,40 @@ where account_info, sender, } => sender - .send(db.insert_account(address, account_info)) + .send(state.insert_account(address, account_info)) .unwrap(), - Request::MakeSnapshot { sender } => sender.send(db.make_snapshot()).unwrap(), + Request::MakeSnapshot { sender } => sender.send(state.make_snapshot()).unwrap(), Request::ModifyAccount { address, modifier, sender, - } => sender.send(db.modify_account(address, modifier)).unwrap(), + } => sender + .send(state.modify_account(address, modifier)) + .unwrap(), Request::RemoveAccount { address, sender } => { - sender.send(db.remove_account(address)).unwrap() + sender.send(state.remove_account(address)).unwrap() } Request::RemoveSnapshot { state_root, sender } => { - sender.send(db.remove_snapshot(&state_root)).unwrap() + sender.send(state.remove_snapshot(&state_root)).unwrap() } - Request::Revert { sender } => sender.send(db.revert()).unwrap(), + 
Request::Revert { sender } => sender.send(state.revert()).unwrap(), Request::SetStorageSlot { address, index, value, sender, } => sender - .send(db.set_account_storage_slot(address, index, value)) + .send(state.set_account_storage_slot(address, index, value)) .unwrap(), Request::SetStateRoot { state_root, sender } => { - sender.send(db.set_state_root(&state_root)).unwrap() + sender.send(state.set_state_root(&state_root)).unwrap() } - Request::StateRoot { sender } => sender.send(db.state_root()).unwrap(), + Request::StateRoot { sender } => sender.send(state.state_root()).unwrap(), Request::StorageSlot { address, index, sender, - } => sender.send(db.storage(address, index)).unwrap(), + } => sender.send(state.storage(address, index)).unwrap(), Request::Terminate => return false, } diff --git a/crates/rethnet_evm/src/db/sync.rs b/crates/rethnet_evm/src/state/sync.rs similarity index 84% rename from crates/rethnet_evm/src/db/sync.rs rename to crates/rethnet_evm/src/state/sync.rs index de6a21b0d8..b8d970cc5f 100644 --- a/crates/rethnet_evm/src/db/sync.rs +++ b/crates/rethnet_evm/src/state/sync.rs @@ -2,7 +2,11 @@ use std::{fmt::Debug, io}; use hashbrown::HashMap; use rethnet_eth::{Address, B256, U256}; -use revm::{db::Database, Account, AccountInfo, Bytecode, DatabaseCommit}; +use revm::{ + db::{State, StateRef}, + primitives::{Account, AccountInfo, Bytecode}, + DatabaseCommit, +}; use tokio::{ runtime::{Builder, Runtime}, sync::{ @@ -12,21 +16,21 @@ use tokio::{ task::{self, JoinHandle}, }; -use crate::{debug::ModifierFn, DatabaseDebug}; +use crate::{debug::ModifierFn, StateDebug}; use super::request::Request; /// Trait that meets all requirements for a synchronous database that can be used by [`AsyncDatabase`]. -pub trait SyncDatabase: - Database + DatabaseCommit + DatabaseDebug + Send + Sync + 'static +pub trait SyncState: + State + DatabaseCommit + StateDebug + Send + Sync + 'static where E: Debug + Send, { } -impl SyncDatabase for D +impl SyncState for S where - D: Database + DatabaseCommit + DatabaseDebug + Send + Sync + 'static, + S: State + DatabaseCommit + StateDebug + Send + Sync + 'static, E: Debug + Send, { } @@ -34,7 +38,7 @@ where /// A helper class for converting a synchronous database into an asynchronous database. /// /// Requires the inner database to implement [`Database`], [`DatabaseCommit`], and [`DatabaseDebug`]. -pub struct AsyncDatabase +pub struct AsyncState where E: Debug + Send, { @@ -43,19 +47,19 @@ where db_handle: Option>, } -impl AsyncDatabase +impl AsyncState where E: Debug + Send + 'static, { /// Constructs an [`AsyncDatabase`] instance with the provided database. 
- pub fn new>(mut db: D) -> io::Result { + pub fn new>(mut state: S) -> io::Result { let runtime = Builder::new_multi_thread().build()?; let (sender, mut receiver) = unbounded_channel::>(); let db_handle = runtime.spawn(async move { while let Some(request) = receiver.recv().await { - if !request.handle(&mut db) { + if !request.handle(&mut state) { break; } } @@ -271,7 +275,7 @@ where } } -impl Drop for AsyncDatabase +impl Drop for AsyncState where E: Debug + Send, { @@ -286,35 +290,35 @@ where } } -impl<'d, E> Database for &'d AsyncDatabase +impl StateRef for AsyncState where E: Debug + Send + 'static, { type Error = E; - fn basic(&mut self, address: Address) -> Result, Self::Error> { + fn basic(&self, address: Address) -> Result, Self::Error> { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::account_by_address(*self, address)) + .block_on(AsyncState::account_by_address(self, address)) }) } - fn code_by_hash(&mut self, code_hash: B256) -> Result { + fn code_by_hash(&self, code_hash: B256) -> Result { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::code_by_hash(*self, code_hash)) + .block_on(AsyncState::code_by_hash(self, code_hash)) }) } - fn storage(&mut self, address: Address, index: U256) -> Result { + fn storage(&self, address: Address, index: U256) -> Result { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::account_storage_slot(*self, address, index)) + .block_on(AsyncState::account_storage_slot(self, address, index)) }) } } -impl<'d, E> DatabaseCommit for &'d AsyncDatabase +impl<'d, E> DatabaseCommit for &'d AsyncState where E: Debug + Send + 'static, { @@ -323,7 +327,7 @@ where } } -impl<'d, E> DatabaseDebug for &'d AsyncDatabase +impl<'d, E> StateDebug for &'d AsyncState where E: Debug + Send + 'static, { @@ -332,7 +336,7 @@ where fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::account_storage_root(*self, address)) + .block_on(AsyncState::account_storage_root(*self, address)) }) } @@ -343,7 +347,7 @@ where ) -> Result<(), Self::Error> { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::insert_account(*self, address, account_info)) + .block_on(AsyncState::insert_account(*self, address, account_info)) }) } @@ -354,14 +358,14 @@ where ) -> Result<(), Self::Error> { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::modify_account(*self, address, modifier)) + .block_on(AsyncState::modify_account(*self, address, modifier)) }) } fn remove_account(&mut self, address: Address) -> Result, Self::Error> { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::remove_account(*self, address)) + .block_on(AsyncState::remove_account(*self, address)) }) } @@ -372,40 +376,39 @@ where value: U256, ) -> Result<(), Self::Error> { task::block_in_place(move || { - self.runtime - .block_on(AsyncDatabase::set_account_storage_slot( - *self, address, index, value, - )) + self.runtime.block_on(AsyncState::set_account_storage_slot( + *self, address, index, value, + )) }) } fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::set_state_root(*self, state_root)) + .block_on(AsyncState::set_state_root(*self, state_root)) }) } fn state_root(&mut self) -> Result { - task::block_in_place(move || self.runtime.block_on(AsyncDatabase::state_root(*self))) + task::block_in_place(move || 
self.runtime.block_on(AsyncState::state_root(*self))) } fn checkpoint(&mut self) -> Result<(), Self::Error> { - task::block_in_place(move || self.runtime.block_on(AsyncDatabase::checkpoint(*self))) + task::block_in_place(move || self.runtime.block_on(AsyncState::checkpoint(*self))) } fn revert(&mut self) -> Result<(), Self::Error> { - task::block_in_place(move || self.runtime.block_on(AsyncDatabase::revert(*self))) + task::block_in_place(move || self.runtime.block_on(AsyncState::revert(*self))) } fn make_snapshot(&mut self) -> B256 { - task::block_in_place(move || self.runtime.block_on(AsyncDatabase::make_snapshot(*self))) + task::block_in_place(move || self.runtime.block_on(AsyncState::make_snapshot(*self))) } fn remove_snapshot(&mut self, state_root: &B256) -> bool { task::block_in_place(move || { self.runtime - .block_on(AsyncDatabase::remove_snapshot(*self, *state_root)) + .block_on(AsyncState::remove_snapshot(*self, *state_root)) }) } } diff --git a/crates/rethnet_evm/src/trace.rs b/crates/rethnet_evm/src/trace.rs index 1f62e3033a..e2adf69c28 100644 --- a/crates/rethnet_evm/src/trace.rs +++ b/crates/rethnet_evm/src/trace.rs @@ -1,5 +1,5 @@ use rethnet_eth::Bytes; -use revm::{Gas, Return}; +use revm::interpreter::{Gas, InstructionResult}; /// A trace for an EVM call. #[derive(Default)] @@ -20,12 +20,12 @@ pub struct Step { /// The amount of gas that was refunded by the step pub gas_refunded: i64, /// The exit code of the step - pub exit_code: Return, + pub exit_code: InstructionResult, } impl Trace { /// Adds a VM step to the trace. - pub fn add_step(&mut self, opcode: u8, gas: &Gas, exit_code: Return) { + pub fn add_step(&mut self, opcode: u8, gas: &Gas, exit_code: InstructionResult) { let step = if let Some(old_gas) = self.gas.replace(*gas) { Step { opcode, diff --git a/crates/rethnet_evm/src/transaction.rs b/crates/rethnet_evm/src/transaction.rs index ff9a047596..f0a936fc15 100644 --- a/crates/rethnet_evm/src/transaction.rs +++ b/crates/rethnet_evm/src/transaction.rs @@ -1,3 +1,5 @@ +use std::fmt::Debug; + use rethnet_eth::{ receipt::Log, signature::SignatureError, @@ -7,13 +9,42 @@ use rethnet_eth::{ }, Address, Bloom, Bytes, B256, U256, }; +use revm::{ + db::DatabaseComponentError, + interpreter::InstructionResult, + primitives::{CreateScheme, EVMError, InvalidTransaction, TransactTo, TxEnv}, +}; /// Invalid transaction error #[derive(Debug, thiserror::Error)] -pub enum TransactionError { +pub enum TransactionError { + /// Blockchain errors + #[error(transparent)] + BlockHash(BE), + /// Corrupt transaction data + #[error("Invalid transaction")] + InvalidTransaction(InvalidTransaction), /// The transaction is expected to have a prevrandao, as the executor's config is on a post-merge hardfork. 
#[error("Post-merge transaction is missing prevrandao")] MissingPrevrandao, + /// State errors + #[error(transparent)] + State(SE), +} + +impl From>> for TransactionError +where + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, +{ + fn from(error: EVMError>) -> Self { + match error { + EVMError::Transaction(e) => Self::InvalidTransaction(e), + EVMError::PrevrandaoNotSet => unreachable!(), + EVMError::Database(DatabaseComponentError::State(e)) => Self::State(e), + EVMError::Database(DatabaseComponentError::BlockHash(e)) => Self::BlockHash(e), + } + } } /// Represents all relevant information of an executed transaction @@ -27,7 +58,7 @@ pub struct TransactionInfo { pub logs: Vec, pub logs_bloom: Bloom, // pub traces: todo!(), - pub exit: revm::Return, + pub exit: InstructionResult, pub out: Option, } @@ -54,12 +85,12 @@ impl PendingTransaction { } } -impl From for revm::TxEnv { +impl From for TxEnv { fn from(transaction: PendingTransaction) -> Self { - fn transact_to(kind: TransactionKind) -> revm::TransactTo { + fn transact_to(kind: TransactionKind) -> TransactTo { match kind { - TransactionKind::Call(address) => revm::TransactTo::Call(address), - TransactionKind::Create => revm::TransactTo::Create(revm::CreateScheme::Create), + TransactionKind::Call(address) => TransactTo::Call(address), + TransactionKind::Create => TransactTo::Create(CreateScheme::Create), } } diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index 997b6da8f7..87c3decf4b 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -7,7 +7,6 @@ edition = "2021" crate-type = ["cdylib"] [dependencies] -anyhow = "1.0.64" crossbeam-channel = { version = "0.5.6", default-features = false } napi = { version = "= 2.10.2", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } napi-derive = "2.9.3" diff --git a/crates/rethnet_evm_napi/src/account.rs b/crates/rethnet_evm_napi/src/account.rs new file mode 100644 index 0000000000..dded796189 --- /dev/null +++ b/crates/rethnet_evm_napi/src/account.rs @@ -0,0 +1,60 @@ +use std::fmt::Debug; + +use napi::bindgen_prelude::{BigInt, Buffer}; +use napi_derive::napi; +use rethnet_evm::AccountInfo; + +#[napi(object)] +pub struct Account { + /// Account balance + #[napi(readonly)] + pub balance: BigInt, + /// Account nonce + #[napi(readonly)] + pub nonce: BigInt, + /// 256-bit code hash + #[napi(readonly)] + pub code_hash: Buffer, + /// Optionally, byte code + #[napi(readonly)] + pub code: Option, +} + +#[napi(object)] +pub struct AccountData { + /// Account balance + #[napi(readonly)] + pub balance: BigInt, + /// Account nonce + #[napi(readonly)] + pub nonce: BigInt, + /// Optionally, byte code + #[napi(readonly)] + pub code: Option, +} + +impl Debug for Account { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("Account") + .field("balance", &self.balance) + .field("nonce", &self.nonce) + .field("code_hash", &self.code_hash.as_ref()) + .finish() + } +} + +impl From for Account { + fn from(account_info: AccountInfo) -> Self { + Self { + balance: BigInt { + sign_bit: false, + words: account_info.balance.as_limbs().to_vec(), + }, + nonce: BigInt::from(account_info.nonce), + code_hash: Buffer::from(account_info.code_hash.as_bytes()), + code: account_info + .code + .map(|code| Buffer::from(&code.bytes()[..code.len()])), + } + } +} diff --git a/crates/rethnet_evm_napi/src/block/builder.rs b/crates/rethnet_evm_napi/src/block/builder.rs index 
4e76958865..4628bf68ef 100644 --- a/crates/rethnet_evm_napi/src/block/builder.rs +++ b/crates/rethnet_evm_napi/src/block/builder.rs @@ -7,23 +7,28 @@ use napi::{ }; use napi_derive::napi; use rethnet_eth::{Address, U256}; +use rethnet_evm::state::StateError; use crate::{ - blockchain::Blockchain, cast::TryCast, state::StateManager, transaction::Transaction, Config, - ExecutionResult, + blockchain::Blockchain, + cast::TryCast, + config::Config, + state::StateManager, + tracer::Tracer, + transaction::{result::ExecutionResult, Transaction}, }; use super::{BlockConfig, BlockHeader}; #[napi] pub struct BlockBuilder { - builder: Arc>>>, + builder: Arc>>>, } #[napi] impl BlockBuilder { #[napi] - pub async fn new( + pub fn new( blockchain: &Blockchain, state_manager: &StateManager, config: Config, @@ -36,13 +41,11 @@ impl BlockBuilder { let builder = rethnet_evm::BlockBuilder::new( blockchain.as_inner().clone(), - state_manager.db.clone(), + state_manager.state.clone(), config, parent, block, - ) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + ); Ok(Self { builder: Arc::new(Mutex::new(Some(builder))), @@ -50,13 +53,19 @@ impl BlockBuilder { } #[napi] - pub async fn add_transaction(&self, transaction: Transaction) -> napi::Result { + pub async fn add_transaction( + &self, + transaction: Transaction, + tracer: Option<&Tracer>, + ) -> napi::Result { let mut builder = self.builder.lock().await; if let Some(builder) = builder.as_mut() { let transaction = transaction.try_into()?; + let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); + let result = builder - .add_transaction(transaction) + .add_transaction(transaction, inspector) .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; diff --git a/crates/rethnet_evm_napi/src/blockchain.rs b/crates/rethnet_evm_napi/src/blockchain.rs index 7ad9ce415c..4ddea105aa 100644 --- a/crates/rethnet_evm_napi/src/blockchain.rs +++ b/crates/rethnet_evm_napi/src/blockchain.rs @@ -14,19 +14,22 @@ use crate::{ use self::js_blockchain::{GetBlockHashCall, JsBlockchain}; +/// The Rethnet blockchain #[napi] pub struct Blockchain { - inner: Arc>, + inner: Arc>, } impl Blockchain { - pub fn as_inner(&self) -> &Arc> { + /// Provides immutable access to the inner implementation. + pub fn as_inner(&self) -> &Arc> { &self.inner } } #[napi] impl Blockchain { + /// Constructs a new blockchain that queries the blockhash using a callback. 
#[napi(constructor)] pub fn new( env: Env, @@ -56,9 +59,9 @@ impl Blockchain { fn with_blockchain(blockchain: B) -> napi::Result where - B: SyncBlockchain, + B: SyncBlockchain, { - let blockchain: Box> = Box::new(blockchain); + let blockchain: Box> = Box::new(blockchain); let blockchain = AsyncBlockchain::new(blockchain) .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; diff --git a/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs b/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs index 74d33af5e1..44837a8abe 100644 --- a/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs +++ b/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs @@ -1,9 +1,8 @@ use std::sync::mpsc::{channel, Sender}; -use anyhow::anyhow; use napi::Status; use rethnet_eth::{B256, U256}; -use rethnet_evm::Blockchain; +use rethnet_evm::BlockHash; use crate::threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode}; @@ -16,8 +15,8 @@ pub struct JsBlockchain { pub(super) get_block_hash_fn: ThreadsafeFunction, } -impl Blockchain for JsBlockchain { - type Error = anyhow::Error; +impl BlockHash for JsBlockchain { + type Error = napi::Error; fn block_hash(&mut self, block_number: U256) -> Result { let (sender, receiver) = channel(); @@ -31,6 +30,6 @@ impl Blockchain for JsBlockchain { ); assert_eq!(status, Status::Ok); - receiver.recv().unwrap().map_err(|e| anyhow!(e.to_string())) + receiver.recv().unwrap() } } diff --git a/crates/rethnet_evm_napi/src/cast.rs b/crates/rethnet_evm_napi/src/cast.rs index 26310c9d23..b17f7a0a64 100644 --- a/crates/rethnet_evm_napi/src/cast.rs +++ b/crates/rethnet_evm_napi/src/cast.rs @@ -5,7 +5,7 @@ use napi::{ use rethnet_eth::{Bytes, B256, U256}; use rethnet_evm::{AccountInfo, Bytecode}; -use crate::{Account, AccountData}; +use crate::account::{Account, AccountData}; /// An attempted conversion that consumes `self`, which may or may not be /// expensive. It is identical to [`TryInto`], but it allows us to implement diff --git a/crates/rethnet_evm_napi/src/config.rs b/crates/rethnet_evm_napi/src/config.rs new file mode 100644 index 0000000000..760b144ad6 --- /dev/null +++ b/crates/rethnet_evm_napi/src/config.rs @@ -0,0 +1,136 @@ +use napi::{ + bindgen_prelude::{BigInt, ToNapiValue}, + Status, +}; +use napi_derive::napi; +use rethnet_evm::CfgEnv; + +use crate::cast::TryCast; + +/// Identifier for the Ethereum spec. 
+#[napi] +pub enum SpecId { + /// Frontier + Frontier = 0, + /// Frontier Thawing + FrontierThawing = 1, + /// Homestead + Homestead = 2, + /// DAO Fork + DaoFork = 3, + /// Tangerine + Tangerine = 4, + /// Spurious Dragon + SpuriousDragon = 5, + /// Byzantium + Byzantium = 6, + /// Constantinople + Constantinople = 7, + /// Petersburg + Petersburg = 8, + /// Istanbul + Istanbul = 9, + /// Muir Glacier + MuirGlacier = 10, + /// Berlin + Berlin = 11, + /// London + London = 12, + /// Arrow Glacier + ArrowGlacier = 13, + /// Gray Glacier + GrayGlacier = 14, + /// Merge + Merge = 15, + /// Shanghai + Shanghai = 16, + /// Cancun + Cancun = 17, + /// Latest + Latest = 18, +} + +impl From for rethnet_evm::SpecId { + fn from(value: SpecId) -> Self { + match value { + SpecId::Frontier => rethnet_evm::SpecId::FRONTIER, + SpecId::FrontierThawing => rethnet_evm::SpecId::FRONTIER_THAWING, + SpecId::Homestead => rethnet_evm::SpecId::HOMESTEAD, + SpecId::DaoFork => rethnet_evm::SpecId::DAO_FORK, + SpecId::Tangerine => rethnet_evm::SpecId::TANGERINE, + SpecId::SpuriousDragon => rethnet_evm::SpecId::SPURIOUS_DRAGON, + SpecId::Byzantium => rethnet_evm::SpecId::BYZANTIUM, + SpecId::Constantinople => rethnet_evm::SpecId::CONSTANTINOPLE, + SpecId::Petersburg => rethnet_evm::SpecId::PETERSBURG, + SpecId::Istanbul => rethnet_evm::SpecId::ISTANBUL, + SpecId::MuirGlacier => rethnet_evm::SpecId::MUIR_GLACIER, + SpecId::Berlin => rethnet_evm::SpecId::BERLIN, + SpecId::London => rethnet_evm::SpecId::LONDON, + SpecId::ArrowGlacier => rethnet_evm::SpecId::ARROW_GLACIER, + SpecId::GrayGlacier => rethnet_evm::SpecId::GRAY_GLACIER, + SpecId::Merge => rethnet_evm::SpecId::MERGE, + SpecId::Shanghai => rethnet_evm::SpecId::SHANGHAI, + SpecId::Cancun => rethnet_evm::SpecId::CANCUN, + SpecId::Latest => rethnet_evm::SpecId::LATEST, + } + } +} + +/// If not set, uses defaults from [`CfgEnv`]. 
+#[napi(object)] +pub struct Config { + /// The blockchain's ID + pub chain_id: Option, + /// Identifier for the Ethereum spec + pub spec_id: Option, + /// The contract code size limit for EIP-170 + pub limit_contract_code_size: Option, + /// Disables block limit validation + pub disable_block_gas_limit: Option, + /// Disables EIP-3607, which rejects transactions from sender with deployed code + pub disable_eip3607: Option, +} + +impl TryFrom for CfgEnv { + type Error = napi::Error; + + fn try_from(value: Config) -> std::result::Result { + let default = CfgEnv::default(); + let chain_id = value + .chain_id + .map_or(Ok(default.chain_id), |chain_id| chain_id.try_cast())?; + + let spec_id = value + .spec_id + .map_or(default.spec_id, |spec_id| spec_id.into()); + + let limit_contract_code_size = value.limit_contract_code_size.map_or(Ok(None), |size| { + // TODO: the lossless check in get_u64 is broken: https://github.com/napi-rs/napi-rs/pull/1348 + if let (false, size, _lossless) = size.get_u64() { + usize::try_from(size).map_or_else( + |e| Err(napi::Error::new(Status::InvalidArg, e.to_string())), + |size| Ok(Some(size)), + ) + } else { + Err(napi::Error::new( + Status::InvalidArg, + "BigInt cannot be larger than usize::MAX".to_owned(), + )) + } + })?; + + let disable_block_gas_limit = value + .disable_block_gas_limit + .unwrap_or(default.disable_block_gas_limit); + let disable_eip3607 = value.disable_eip3607.unwrap_or(default.disable_eip3607); + + Ok(Self { + chain_id, + spec_id, + limit_contract_code_size, + disable_block_gas_limit, + disable_eip3607, + ..default + }) + } +} diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index 69159a5947..68a7db8099 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -1,95 +1,32 @@ +//! 
NAPI bindings for the Rethnet EVM +#![warn(missing_docs)] + mod access_list; +mod account; mod block; mod blockchain; mod cast; +mod config; +mod log; mod receipt; +/// Rethnet runtime for executing individual transactions +mod runtime; mod state; mod sync; mod threadsafe_function; mod trace; +mod tracer; mod transaction; -use std::{fmt::Debug, str::FromStr}; +use std::str::FromStr; -use block::BlockConfig; -use blockchain::Blockchain; -use napi::{ - bindgen_prelude::{BigInt, Buffer, ToNapiValue}, - Status, -}; -use napi_derive::napi; -use once_cell::sync::OnceCell; +use napi::Status; use rethnet_eth::Address; -use rethnet_evm::{AccountInfo, CfgEnv, TxEnv}; use secp256k1::{PublicKey, Secp256k1, SecretKey, SignOnly}; use sha3::{Digest, Keccak256}; -use state::StateManager; -use trace::Trace; -use transaction::{Transaction, TransactionOutput}; use crate::cast::TryCast; -struct Logger; - -unsafe impl Sync for Logger {} - -static LOGGER: OnceCell = OnceCell::new(); - -#[napi(object)] -pub struct Account { - /// Account balance - #[napi(readonly)] - pub balance: BigInt, - /// Account nonce - #[napi(readonly)] - pub nonce: BigInt, - /// 256-bit code hash - #[napi(readonly)] - pub code_hash: Buffer, - /// Optionally, byte code - #[napi(readonly)] - pub code: Option, -} - -#[napi(object)] -pub struct AccountData { - /// Account balance - #[napi(readonly)] - pub balance: BigInt, - /// Account nonce - #[napi(readonly)] - pub nonce: BigInt, - /// Optionally, byte code - #[napi(readonly)] - pub code: Option, -} - -impl Debug for Account { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Account") - .field("balance", &self.balance) - .field("nonce", &self.nonce) - .field("code_hash", &self.code_hash.as_ref()) - .finish() - } -} - -impl From for Account { - fn from(account_info: AccountInfo) -> Self { - Self { - balance: BigInt { - sign_bit: false, - words: account_info.balance.as_limbs().to_vec(), - }, - nonce: BigInt::from(account_info.nonce), - code_hash: Buffer::from(account_info.code_hash.as_bytes()), - code: account_info - .code - .map(|code| Buffer::from(&code.bytes()[..code.len()])), - } - } -} - fn private_key_to_address( context: &Secp256k1, private_key: String, @@ -114,301 +51,3 @@ fn public_key_to_address(public_key: PublicKey) -> Address { // Only take the lower 160 bits of the hash Address::from_slice(&hash[12..]) } - -#[napi(object)] -pub struct GenesisAccount { - /// Account private key - pub private_key: String, - /// Account balance - pub balance: BigInt, -} - -/// If not set, uses defaults from [`CfgEnv`]. 
-#[napi(object)] -pub struct Config { - pub chain_id: Option, - pub spec_id: Option, - pub limit_contract_code_size: Option, - pub disable_block_gas_limit: Option, - pub disable_eip3607: Option, -} - -impl TryFrom for CfgEnv { - type Error = napi::Error; - - fn try_from(value: Config) -> std::result::Result { - let default = CfgEnv::default(); - let chain_id = value - .chain_id - .map_or(Ok(default.chain_id), |chain_id| chain_id.try_cast())?; - - let spec_id = value - .spec_id - .map_or(default.spec_id, |spec_id| spec_id.into()); - - let limit_contract_code_size = value.limit_contract_code_size.map_or(Ok(None), |size| { - // TODO: the lossless check in get_u64 is broken: https://github.com/napi-rs/napi-rs/pull/1348 - if let (false, size, _lossless) = size.get_u64() { - usize::try_from(size).map_or_else( - |e| Err(napi::Error::new(Status::InvalidArg, e.to_string())), - |size| Ok(Some(size)), - ) - } else { - Err(napi::Error::new( - Status::InvalidArg, - "BigInt cannot be larger than usize::MAX".to_owned(), - )) - } - })?; - - let disable_block_gas_limit = value - .disable_block_gas_limit - .unwrap_or(default.disable_block_gas_limit); - let disable_eip3607 = value.disable_eip3607.unwrap_or(default.disable_eip3607); - - Ok(Self { - chain_id, - spec_id, - limit_contract_code_size, - disable_block_gas_limit, - disable_eip3607, - ..default - }) - } -} - -#[napi] -pub enum SpecId { - Frontier = 0, - FrontierThawing = 1, - Homestead = 2, - DaoFork = 3, - Tangerine = 4, - SpuriousDragon = 5, - Byzantium = 6, - Constantinople = 7, - Petersburg = 8, - Istanbul = 9, - MuirGlacier = 10, - Berlin = 11, - London = 12, - ArrowGlacier = 13, - GrayGlacier = 14, - Merge = 15, - Latest = 16, -} - -impl From for rethnet_evm::SpecId { - fn from(value: SpecId) -> Self { - match value { - SpecId::Frontier => rethnet_evm::SpecId::FRONTIER, - SpecId::FrontierThawing => rethnet_evm::SpecId::FRONTIER_THAWING, - SpecId::Homestead => rethnet_evm::SpecId::HOMESTEAD, - SpecId::DaoFork => rethnet_evm::SpecId::DAO_FORK, - SpecId::Tangerine => rethnet_evm::SpecId::TANGERINE, - SpecId::SpuriousDragon => rethnet_evm::SpecId::SPURIOUS_DRAGON, - SpecId::Byzantium => rethnet_evm::SpecId::BYZANTIUM, - SpecId::Constantinople => rethnet_evm::SpecId::CONSTANTINOPLE, - SpecId::Petersburg => rethnet_evm::SpecId::PETERSBURG, - SpecId::Istanbul => rethnet_evm::SpecId::ISTANBUL, - SpecId::MuirGlacier => rethnet_evm::SpecId::MUIR_GLACIER, - SpecId::Berlin => rethnet_evm::SpecId::BERLIN, - SpecId::London => rethnet_evm::SpecId::LONDON, - SpecId::ArrowGlacier => rethnet_evm::SpecId::ARROW_GLACIER, - SpecId::GrayGlacier => rethnet_evm::SpecId::GRAY_GLACIER, - SpecId::Merge => rethnet_evm::SpecId::MERGE, - SpecId::Latest => rethnet_evm::SpecId::LATEST, - } - } -} - -#[napi(object)] -pub struct Log { - pub address: Buffer, - pub topics: Vec, - pub data: Buffer, -} - -impl From for Log { - fn from(log: rethnet_evm::Log) -> Self { - let topics = log - .topics - .into_iter() - .map(|topic| Buffer::from(topic.as_bytes())) - .collect(); - - Self { - address: Buffer::from(log.address.as_bytes()), - topics, - data: Buffer::from(log.data.as_ref()), - } - } -} - -#[napi(object)] -pub struct ExecutionResult { - pub exit_code: u8, - pub output: TransactionOutput, - pub gas_used: BigInt, - pub gas_refunded: BigInt, - pub logs: Vec, - pub trace: Trace, -} - -impl From<(rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)> for ExecutionResult { - fn from((result, trace): (rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)) -> Self { - let logs = 
result.logs.into_iter().map(Log::from).collect(); - - Self { - exit_code: result.exit_reason as u8, - output: result.out.into(), - gas_used: BigInt::from(result.gas_used), - gas_refunded: BigInt::from(result.gas_refunded), - logs, - trace: trace.into(), - } - } -} - -#[napi(object)] -pub struct TransactionResult { - pub exec_result: ExecutionResult, - pub state: serde_json::Value, -} - -impl - TryFrom<( - rethnet_evm::ExecutionResult, - rethnet_evm::State, - rethnet_evm::trace::Trace, - )> for TransactionResult -{ - type Error = napi::Error; - - fn try_from( - (result, state, trace): ( - rethnet_evm::ExecutionResult, - rethnet_evm::State, - rethnet_evm::trace::Trace, - ), - ) -> std::result::Result { - let exec_result = (result, trace).into(); - let state = serde_json::to_value(state)?; - - Ok(Self { exec_result, state }) - } -} - -#[napi(object)] -pub struct TracingMessage { - /// Recipient address. None if it is a Create message. - #[napi(readonly)] - pub to: Option, - - /// Depth of the message - #[napi(readonly)] - pub depth: u8, - - /// Input data of the message - #[napi(readonly)] - pub data: Buffer, - - /// Value sent in the message - #[napi(readonly)] - pub value: BigInt, - - /// Address of the code that is being executed. Can be different from `to` if a delegate call - /// is being done. - #[napi(readonly)] - pub code_address: Option, -} - -#[napi(object)] -pub struct TracingStep { - /// Program counter - #[napi(readonly)] - pub pc: BigInt, -} - -#[napi(object)] -pub struct TracingMessageResult { - /// Execution result - #[napi(readonly)] - pub execution_result: ExecutionResult, -} - -#[napi] -pub struct Rethnet { - runtime: rethnet_evm::Rethnet, -} - -#[napi] -impl Rethnet { - #[napi(constructor)] - pub fn new( - blockchain: &Blockchain, - state_manager: &StateManager, - cfg: Config, - ) -> napi::Result { - let _logger = LOGGER.get_or_init(|| { - pretty_env_logger::init(); - Logger - }); - - let cfg = cfg.try_into()?; - - let runtime = - rethnet_evm::Rethnet::new(blockchain.as_inner().clone(), state_manager.db.clone(), cfg); - - Ok(Self { runtime }) - } - - #[napi] - pub async fn dry_run( - &self, - transaction: Transaction, - block: BlockConfig, - ) -> napi::Result { - let transaction = transaction.try_into()?; - let block = block.try_into()?; - - self.runtime - .dry_run(transaction, block) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? - .try_into() - } - - #[napi] - pub async fn guaranteed_dry_run( - &self, - transaction: Transaction, - block: BlockConfig, - ) -> napi::Result { - let transaction = transaction.try_into()?; - let block = block.try_into()?; - - self.runtime - .guaranteed_dry_run(transaction, block) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? - .try_into() - } - - #[napi] - pub async fn run( - &self, - transaction: Transaction, - block: BlockConfig, - ) -> napi::Result { - let transaction: TxEnv = transaction.try_into()?; - let block = block.try_into()?; - - Ok(self - .runtime - .run(transaction, block) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? - .into()) - } -} diff --git a/crates/rethnet_evm_napi/src/log.rs b/crates/rethnet_evm_napi/src/log.rs new file mode 100644 index 0000000000..321c2743b1 --- /dev/null +++ b/crates/rethnet_evm_napi/src/log.rs @@ -0,0 +1,26 @@ +use napi::bindgen_prelude::Buffer; +use napi_derive::napi; + +/// Ethereum log. 
+#[napi(object)] +pub struct Log { + pub address: Buffer, + pub topics: Vec, + pub data: Buffer, +} + +impl From for Log { + fn from(log: rethnet_evm::Log) -> Self { + let topics = log + .topics + .into_iter() + .map(|topic| Buffer::from(topic.as_bytes())) + .collect(); + + Self { + address: Buffer::from(log.address.as_bytes()), + topics, + data: Buffer::from(log.data.as_ref()), + } + } +} diff --git a/crates/rethnet_evm_napi/src/receipt.rs b/crates/rethnet_evm_napi/src/receipt.rs index e01724fdb1..fe1fcec363 100644 --- a/crates/rethnet_evm_napi/src/receipt.rs +++ b/crates/rethnet_evm_napi/src/receipt.rs @@ -1,7 +1,7 @@ use napi::bindgen_prelude::{BigInt, Buffer}; use napi_derive::napi; -use crate::Log; +use crate::log::Log; #[napi(object)] pub struct Receipt { diff --git a/crates/rethnet_evm_napi/src/runtime.rs b/crates/rethnet_evm_napi/src/runtime.rs new file mode 100644 index 0000000000..43cff7941c --- /dev/null +++ b/crates/rethnet_evm_napi/src/runtime.rs @@ -0,0 +1,125 @@ +use napi::Status; +use napi_derive::napi; +use once_cell::sync::OnceCell; +use rethnet_evm::{state::StateError, InvalidTransaction, TransactionError, TxEnv}; + +use crate::{ + block::BlockConfig, + blockchain::Blockchain, + config::Config, + state::StateManager, + tracer::Tracer, + transaction::{ + result::{ExecutionResult, TransactionResult}, + Transaction, + }, +}; + +struct Logger; + +unsafe impl Sync for Logger {} + +static LOGGER: OnceCell = OnceCell::new(); + +/// The Rethnet runtime, which can execute individual transactions. +#[napi] +pub struct Rethnet { + runtime: rethnet_evm::Rethnet, +} + +#[napi] +impl Rethnet { + /// Constructs a `Rethnet` runtime. + #[napi(constructor)] + pub fn new( + blockchain: &Blockchain, + state_manager: &StateManager, + cfg: Config, + ) -> napi::Result { + let _logger = LOGGER.get_or_init(|| { + pretty_env_logger::init(); + Logger + }); + + let cfg = cfg.try_into()?; + + let runtime = rethnet_evm::Rethnet::new( + blockchain.as_inner().clone(), + state_manager.state.clone(), + cfg, + ); + + Ok(Self { runtime }) + } + + /// Executes the provided transaction without changing state. + #[napi] + pub async fn dry_run( + &self, + transaction: Transaction, + block: BlockConfig, + tracer: Option<&Tracer>, + ) -> napi::Result { + let transaction = transaction.try_into()?; + let block = block.try_into()?; + + let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); + + self.runtime + .dry_run(transaction, block, inspector) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? + .try_into() + } + + /// Executes the provided transaction without changing state, ignoring validation checks in the process. + #[napi] + pub async fn guaranteed_dry_run( + &self, + transaction: Transaction, + block: BlockConfig, + tracer: Option<&Tracer>, + ) -> napi::Result { + let transaction = transaction.try_into()?; + let block = block.try_into()?; + + let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); + + self.runtime + .guaranteed_dry_run(transaction, block, inspector) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? + .try_into() + } + + /// Executes the provided transaction, changing state in the process. 
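+    ///
+    /// A rough sketch of driving the runtime from TypeScript; the class and method names
+    /// assume napi-rs's default camelCase bindings, and `blockchain`, `tx` and `blockConfig`
+    /// stand in for values constructed elsewhere:
+    ///
+    /// ```ts
+    /// const stateManager = new StateManager();
+    /// const rethnet = new Rethnet(blockchain, stateManager, {});
+    /// // Simulate without committing state changes...
+    /// const simulated = await rethnet.dryRun(tx, blockConfig);
+    /// // ...or execute and commit them.
+    /// const executed = await rethnet.run(tx, blockConfig);
+    /// ```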
+ #[napi] + pub async fn run( + &self, + transaction: Transaction, + block: BlockConfig, + tracer: Option<&Tracer>, + ) -> napi::Result { + let transaction: TxEnv = transaction.try_into()?; + let block = block.try_into()?; + + let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); + + Ok(self + .runtime + .run(transaction, block, inspector) + .await + .map_err(|e| { + napi::Error::new( + Status::GenericFailure, + match e { + TransactionError::InvalidTransaction( + InvalidTransaction::LackOfFundForGasLimit { gas_limit, balance }, + ) => format!("sender doesn't have enough funds to send tx. The max upfront cost is: {} and the sender's account only has: {}", gas_limit, balance), + e => e.to_string(), + }, + ) + })? + .into()) + } +} diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index dc0df360ef..3dabc526d6 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -7,16 +7,17 @@ use napi::{bindgen_prelude::*, JsFunction, JsObject, NapiRaw, Status}; use napi_derive::napi; use rethnet_eth::{Address, B256, U256}; use rethnet_evm::{ - db::{AsyncDatabase, LayeredDatabase, RethnetLayer, SyncDatabase}, - AccountInfo, Bytecode, DatabaseDebug, HashMap, + state::{AsyncState, LayeredState, RethnetLayer, StateError, SyncState}, + AccountInfo, Bytecode, HashMap, StateDebug, }; use secp256k1::Secp256k1; use crate::{ + account::{Account, AccountData}, private_key_to_address, sync::{await_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, - Account, AccountData, GenesisAccount, TryCast, + TryCast, }; struct ModifyAccountCall { @@ -26,18 +27,30 @@ struct ModifyAccountCall { pub sender: Sender)>>, } +/// An account that needs to be created during the genesis block. +#[napi(object)] +pub struct GenesisAccount { + /// Account private key + pub private_key: String, + /// Account balance + pub balance: BigInt, +} + +/// The Rethnet state #[napi] pub struct StateManager { - pub(super) db: Arc>, + pub(super) state: Arc>, } #[napi] impl StateManager { + /// Constructs a [`StateManager`] with an empty state. #[napi(constructor)] pub fn new() -> napi::Result { Self::with_accounts(HashMap::default()) } + /// Constructs a [`StateManager`] with the provided accounts present in the genesis state. #[napi(factory)] pub fn with_genesis_accounts(accounts: Vec) -> napi::Result { let context = Secp256k1::signing_only(); @@ -67,46 +80,50 @@ impl StateManager { accounts.insert(address, AccountInfo::default()); } - let mut database = - LayeredDatabase::with_layer(RethnetLayer::with_genesis_accounts(accounts)); + let mut state = LayeredState::with_layer(RethnetLayer::with_genesis_accounts(accounts)); - database.checkpoint().unwrap(); + state.checkpoint().unwrap(); - Self::with_db(database) + Self::with_state(state) } - fn with_db(db: D) -> napi::Result + fn with_state(state: S) -> napi::Result where - D: SyncDatabase, + S: SyncState, { - let db: Box> = Box::new(db); - let db = AsyncDatabase::new(db) + let state: Box> = Box::new(state); + let state = AsyncState::new(state) .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; - Ok(Self { db: Arc::new(db) }) + Ok(Self { + state: Arc::new(state), + }) } + /// Creates a state checkpoint that can be reverted to using [`revert`]. 
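+    ///
+    /// For example, from TypeScript (method names assume the default napi-rs camelCase
+    /// conversion):
+    ///
+    /// ```ts
+    /// const state = new StateManager();
+    /// await state.checkpoint();
+    /// // ...mutate the state, e.g. via insertAccount or setAccountStorageSlot...
+    /// await state.revert(); // discards everything since the checkpoint
+    /// ```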
#[napi] pub async fn checkpoint(&self) -> napi::Result<()> { - self.db + self.state .checkpoint() .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } + /// Reverts to the previous checkpoint, created using [`checkpoint`]. #[napi] pub async fn revert(&self) -> napi::Result<()> { - self.db + self.state .revert() .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } + /// Retrieves the account corresponding to the specified address. #[napi] pub async fn get_account_by_address(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); - self.db.account_by_address(address).await.map_or_else( + self.state.account_by_address(address).await.map_or_else( |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), |account_info| Ok(account_info.map(Account::from)), ) @@ -117,12 +134,13 @@ impl StateManager { pub async fn get_account_storage_root(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); - self.db.account_storage_root(&address).await.map_or_else( + self.state.account_storage_root(&address).await.map_or_else( |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), |root| Ok(root.map(|root| Buffer::from(root.as_ref()))), ) } + /// Retrieves the storage slot at the specified address and index. #[napi] pub async fn get_account_storage_slot( &self, @@ -132,7 +150,7 @@ impl StateManager { let address = Address::from_slice(&address); let index = BigInt::try_cast(index)?; - self.db + self.state .account_storage_slot(address, index) .await .map_or_else( @@ -150,34 +168,37 @@ impl StateManager { pub async fn get_code_by_hash(&self, code_hash: Buffer) -> napi::Result { let code_hash = B256::from_slice(&code_hash); - self.db.code_by_hash(code_hash).await.map_or_else( + self.state.code_by_hash(code_hash).await.map_or_else( |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), |code| Ok(Buffer::from(&code.bytes()[..code.len()])), ) } + /// Retrieves the storage root of the database. #[napi] pub async fn get_state_root(&self) -> napi::Result { - self.db.state_root().await.map_or_else( + self.state.state_root().await.map_or_else( |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), |root| Ok(Buffer::from(root.as_ref())), ) } + /// Inserts the provided account at the specified address. #[napi] pub async fn insert_account(&self, address: Buffer, account: Account) -> napi::Result<()> { let address = Address::from_slice(&address); let account = account.try_cast()?; - self.db + self.state .insert_account(address, account) .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } + /// Makes a snapshot of the database that's retained until [`removeSnapshot`] is called. Returns the snapshot's identifier. #[napi] pub async fn make_snapshot(&self) -> Buffer { - >::as_ref(&self.db.make_snapshot().await).into() + >::as_ref(&self.state.make_snapshot().await).into() } /// Modifies the account with the provided address using the specified modifier function. @@ -217,7 +238,7 @@ impl StateManager { .create_buffer_copy(&code.bytes()[..code.len()])? 
.into_unknown() } else { - ctx.env.get_null()?.into_unknown() + ctx.env.get_undefined()?.into_unknown() }; let promise = ctx.callback.call(None, &[balance, nonce, code])?; @@ -232,9 +253,9 @@ impl StateManager { )?; let (deferred, promise) = env.create_deferred()?; - let db = self.db.clone(); + let db = self.state.clone(); - self.db.runtime().spawn(async move { + self.state.runtime().spawn(async move { let result = db .modify_account( address, @@ -271,23 +292,26 @@ impl StateManager { Ok(promise) } + /// Removes and returns the account at the specified address, if it exists. #[napi] pub async fn remove_account(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); - self.db.remove_account(address).await.map_or_else( + self.state.remove_account(address).await.map_or_else( |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), |account| Ok(account.map(Account::from)), ) } + /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. #[napi] pub async fn remove_snapshot(&self, state_root: Buffer) -> bool { let state_root = B256::from_slice(&state_root); - self.db.remove_snapshot(state_root).await + self.state.remove_snapshot(state_root).await } + /// Sets the storage slot at the specified address and index to the provided value. #[napi] pub async fn set_account_storage_slot( &self, @@ -299,17 +323,18 @@ impl StateManager { let index = BigInt::try_cast(index)?; let value = BigInt::try_cast(value)?; - self.db + self.state .set_account_storage_slot(address, index, value) .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } + /// Reverts the state to match the specified state root. #[napi] pub async fn set_state_root(&self, state_root: Buffer) -> napi::Result<()> { let state_root = B256::from_slice(&state_root); - self.db + self.state .set_state_root(&state_root) .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) diff --git a/crates/rethnet_evm_napi/src/tracer.rs b/crates/rethnet_evm_napi/src/tracer.rs new file mode 100644 index 0000000000..32be8cf5c8 --- /dev/null +++ b/crates/rethnet_evm_napi/src/tracer.rs @@ -0,0 +1,30 @@ +mod js_tracer; + +use napi::Env; +use napi_derive::napi; +use rethnet_evm::{state::StateError, AsyncDatabase, Inspector}; + +use self::js_tracer::{JsTracer, TracingCallbacks}; + +#[napi] +pub struct Tracer { + inner: Box, +} + +impl Tracer { + pub fn as_dyn_inspector( + &self, + ) -> Box<(dyn Inspector> + Send + 'static)> { + self.inner.clone() + } +} + +#[napi] +impl Tracer { + #[napi(constructor)] + pub fn new(env: Env, callbacks: TracingCallbacks) -> napi::Result { + JsTracer::new(&env, callbacks).map(|inner| Self { + inner: Box::new(inner), + }) + } +} diff --git a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs new file mode 100644 index 0000000000..ae95886d7d --- /dev/null +++ b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs @@ -0,0 +1,786 @@ +use std::{ + fmt::Debug, + sync::mpsc::{channel, Sender}, +}; + +use napi::{ + bindgen_prelude::{BigInt, Buffer}, + Env, JsBufferValue, JsFunction, JsNumber, JsUndefined, NapiRaw, Status, +}; +use napi_derive::napi; +use rethnet_eth::{Address, Bytes, U256}; +use rethnet_evm::{ + opcode, return_revert, Bytecode, Gas, InstructionResult, SuccessOrHalt, OPCODE_JUMPMAP, +}; + +use crate::{ + account::Account, + sync::{await_void_promise, handle_error}, + threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, 
ThreadsafeFunctionCallMode}, + transaction::result::{ExceptionalHalt, ExecutionResult}, +}; + +#[napi(object)] +pub struct TracingMessage { + /// Recipient address. None if it is a Create message. + #[napi(readonly)] + pub to: Option, + + /// Depth of the message + #[napi(readonly)] + pub depth: u8, + + /// Input data of the message + #[napi(readonly)] + pub data: Buffer, + + /// Value sent in the message + #[napi(readonly)] + pub value: BigInt, + + /// Address of the code that is being executed. Can be different from `to` if a delegate call + /// is being done. + #[napi(readonly)] + pub code_address: Option, + + /// Code of the contract that is being executed. + #[napi(readonly)] + pub code: Option, +} + +#[napi(object)] +pub struct TracingStep { + /// Call depth + #[napi(readonly)] + pub depth: JsNumber, + /// The program counter + #[napi(readonly)] + pub pc: BigInt, + /// The executed op code + #[napi(readonly)] + pub opcode: String, + // /// The return value of the step + // #[napi(readonly)] + // pub return_value: u8, + /// The amount of gas that was used by the step + #[napi(readonly)] + pub gas_cost: BigInt, + /// The amount of gas that was refunded by the step + #[napi(readonly)] + pub gas_refunded: BigInt, + /// The amount of gas left + #[napi(readonly)] + pub gas_left: BigInt, + /// The stack + #[napi(readonly)] + pub stack: Vec, + /// The memory + #[napi(readonly)] + pub memory: Buffer, + /// The contract being executed + #[napi(readonly)] + pub contract: Account, + /// The address of the contract + #[napi(readonly)] + pub contract_address: Buffer, + // /// The address of the code being executed + // #[napi(readonly)] + // pub code_address: Buffer, +} + +#[napi(object)] +pub struct TracingMessageResult { + /// Execution result + #[napi(readonly)] + pub execution_result: ExecutionResult, +} + +#[napi(object)] +pub struct TracingCallbacks { + #[napi(ts_type = "(message: TracingMessage, next: any) => Promise")] + pub before_message: JsFunction, + #[napi(ts_type = "(step: TracingStep, next: any) => Promise")] + pub step: JsFunction, + #[napi(ts_type = "(result: TracingMessageResult, next: any) => Promise")] + pub after_message: JsFunction, +} + +#[derive(Clone)] +struct BeforeMessage { + pub depth: usize, + pub to: Option
<Address>,
+    pub data: Bytes,
+    pub value: U256,
+    pub code_address: Option<Address>
, + pub code: Option, +} + +struct BeforeMessageHandlerCall { + message: BeforeMessage, + sender: Sender>, +} + +pub struct StepHandlerCall { + /// Call depth + pub depth: usize, + /// The program counter + pub pc: u64, + /// The executed op code + pub opcode: u8, + // /// The return value of the step + // pub return_value: InstructionResult, + // /// The amount of gas that was used by the step + // pub gas_cost: u64, + // /// The amount of gas that was refunded by the step + // pub gas_refunded: i64, + // /// The amount of gas left + // pub gas_left: u64, + // /// The stack + // pub stack: Vec, + // /// The memory + // pub memory: Bytes, + /// The contract being executed + pub contract: rethnet_evm::AccountInfo, + /// The address of the contract + pub contract_address: Address, + // /// The address of the code being executed + // pub code_address: Address, + pub sender: Sender>, +} + +pub struct AfterMessageHandlerCall { + pub result: rethnet_evm::ExecutionResult, + pub sender: Sender>, +} + +#[derive(Clone)] +pub struct JsTracer { + before_message_fn: ThreadsafeFunction, + step_fn: ThreadsafeFunction, + after_message_fn: ThreadsafeFunction, + pending_before: Option, +} + +impl JsTracer { + /// Constructs a `JsTracer` from `TracingCallbacks`. + pub fn new(env: &Env, callbacks: TracingCallbacks) -> napi::Result { + let before_message_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.before_message.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + + let mut tracing_message = ctx.env.create_object()?; + + ctx.env + .create_int64(ctx.value.message.depth as i64) + .and_then(|depth| tracing_message.set_named_property("depth", depth))?; + + ctx.value + .message + .to + .as_ref() + .map_or_else( + || ctx.env.get_undefined().map(JsUndefined::into_unknown), + |to| { + ctx.env + .create_buffer_copy(to) + .map(JsBufferValue::into_unknown) + }, + ) + .and_then(|to| tracing_message.set_named_property("to", to))?; + + ctx.env + .create_buffer_copy(&ctx.value.message.data) + .and_then(|data| tracing_message.set_named_property("data", data.into_raw()))?; + + ctx.env + .create_bigint_from_words(false, ctx.value.message.value.as_limbs().to_vec()) + .and_then(|value| tracing_message.set_named_property("value", value))?; + + ctx.value + .message + .code_address + .as_ref() + .map_or_else( + || ctx.env.get_undefined().map(JsUndefined::into_unknown), + |address| { + ctx.env + .create_buffer_copy(address) + .map(JsBufferValue::into_unknown) + }, + ) + .and_then(|code_address| { + tracing_message.set_named_property("codeAddress", code_address) + })?; + + ctx.value + .message + .code + .as_ref() + .map_or_else( + || ctx.env.get_undefined().map(JsUndefined::into_unknown), + |code| { + ctx.env + .create_buffer_copy(&code.bytes()[..code.len()]) + .map(JsBufferValue::into_unknown) + }, + ) + .and_then(|code_address| { + tracing_message.set_named_property("code", code_address) + })?; + + let next = ctx.env.create_object()?; + + let promise = ctx.callback.call(None, &[tracing_message, next])?; + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + let step_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.step.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + + let mut tracing_step = ctx.env.create_object()?; + + ctx.env + .create_int64(ctx.value.depth as i64) + .and_then(|depth| tracing_step.set_named_property("depth", depth))?; + + ctx.env + 
.create_bigint_from_u64(ctx.value.pc) + .and_then(|pc| tracing_step.set_named_property("pc", pc))?; + + ctx.env + .create_string(OPCODE_JUMPMAP[usize::from(ctx.value.opcode)].unwrap_or("")) + .and_then(|opcode| tracing_step.set_named_property("opcode", opcode))?; + + // ctx.env + // .create_uint32((ctx.value.return_value as u8).into()) + // .and_then(|return_value| { + // tracing_step.set_named_property("returnValue", return_value) + // })?; + + // ctx.env + // .create_bigint_from_u64(ctx.value.gas_cost) + // .and_then(|gas_cost| tracing_step.set_named_property("gasCost", gas_cost))?; + + // ctx.env + // .create_bigint_from_i64(ctx.value.gas_refunded) + // .and_then(|gas_refunded| { + // tracing_step.set_named_property("gasRefunded", gas_refunded) + // })?; + + // ctx.env + // .create_bigint_from_u64(ctx.value.gas_left) + // .and_then(|gas_left| tracing_step.set_named_property("gasLeft", gas_left))?; + + // let mut stack = + // ctx.env + // .create_array(u32::try_from(ctx.value.stack.len()).map_err(|e| { + // napi::Error::new(Status::GenericFailure, e.to_string()) + // })?)?; + + // for value in ctx.value.stack { + // ctx.env + // .create_bigint_from_words(false, value.as_limbs().to_vec()) + // .and_then(|value| stack.insert(value))?; + // } + + // stack + // .coerce_to_object() + // .and_then(|stack| tracing_step.set_named_property("stack", stack))?; + + // ctx.env + // .create_buffer_copy(&ctx.value.memory) + // .and_then(|memory| { + // tracing_step.set_named_property("memory", memory.into_raw()) + // })?; + + let mut contract = ctx.env.create_object()?; + + ctx.env + .create_bigint_from_words(false, ctx.value.contract.balance.as_limbs().to_vec()) + .and_then(|balance| contract.set_named_property("balance", balance))?; + + let nonce = ctx.env.create_bigint_from_u64(ctx.value.contract.nonce)?; + contract.set_named_property("nonce", nonce)?; + + ctx.env + .create_buffer_copy(ctx.value.contract.code_hash) + .and_then(|code_hash| { + contract.set_named_property("codeHash", code_hash.into_unknown()) + })?; + + ctx.value + .contract + .code + .as_ref() + .map_or_else( + || ctx.env.get_undefined().map(JsUndefined::into_unknown), + |code| { + ctx.env + .create_buffer_copy(&code.bytes()[..code.len()]) + .map(|code| code.into_unknown()) + }, + ) + .and_then(|code| contract.set_named_property("code", code))?; + + tracing_step.set_named_property("contract", contract)?; + + let contract_address = &ctx.value.contract_address; + ctx.env + .create_buffer_copy(contract_address) + .and_then(|contract_address| { + tracing_step + .set_named_property("contractAddress", contract_address.into_unknown()) + })?; + + let next = ctx.env.create_object()?; + + let promise = ctx.callback.call(None, &[tracing_step, next])?; + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + let after_message_fn = ThreadsafeFunction::create( + env.raw(), + unsafe { callbacks.after_message.raw() }, + 0, + |ctx: ThreadSafeCallContext| { + let sender = ctx.value.sender.clone(); + + let mut tracing_message_result = ctx.env.create_object()?; + + let mut result = ctx.env.create_object()?; + + let gas_used = match ctx.value.result { + rethnet_evm::ExecutionResult::Success { + reason, + gas_used, + gas_refunded, + logs, + output, + } => { + ctx.env + .create_uint32(reason as u32) + .and_then(|reason| result.set_named_property("reason", reason))?; + + ctx.env + .create_bigint_from_u64(gas_refunded) + .and_then(|gas_refunded| { + result.set_named_property("gasRefunded", 
gas_refunded) + })?; + + u32::try_from(logs.len()) + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + .and_then(|num_logs| ctx.env.create_array(num_logs)) + .and_then(|mut logs_object| { + for log in logs { + let mut log_object = ctx.env.create_object()?; + + ctx.env.create_buffer_copy(log.address).and_then( + |address| { + log_object + .set_named_property("address", address.into_raw()) + }, + )?; + + u32::try_from(log.topics.len()) + .map_err(|e| { + napi::Error::new(Status::GenericFailure, e.to_string()) + }) + .and_then(|num_topics| ctx.env.create_array(num_topics)) + .and_then(|mut topics| { + for topic in log.topics { + ctx.env.create_buffer_copy(topic).and_then( + |topic| topics.insert(topic.into_raw()), + )? + } + + topics.coerce_to_object() + }) + .and_then(|topics| { + log_object.set_named_property("topics", topics) + })?; + + ctx.env.create_buffer_copy(&log.data).and_then(|data| { + log_object.set_named_property("data", data.into_raw()) + })?; + + logs_object.insert(log_object)?; + } + + logs_object.coerce_to_object() + }) + .and_then(|logs| result.set_named_property("logs", logs))?; + + let (output, address) = match output { + rethnet_evm::Output::Call(output) => (output, None), + rethnet_evm::Output::Create(output, address) => (output, address), + }; + + let mut transaction_output = ctx.env.create_object()?; + + ctx.env + .create_buffer_copy(output) + .map(JsBufferValue::into_unknown) + .and_then(|output| { + transaction_output.set_named_property("returnValue", output) + })?; + + address + .map_or_else( + || ctx.env.get_undefined().map(JsUndefined::into_unknown), + |address| { + ctx.env + .create_buffer_copy(address) + .map(JsBufferValue::into_unknown) + }, + ) + .and_then(|address| { + transaction_output.set_named_property("address", address) + })?; + + result.set_named_property("output", transaction_output)?; + + gas_used + } + rethnet_evm::ExecutionResult::Revert { gas_used, output } => { + ctx.env + .create_buffer_copy(output) + .map(JsBufferValue::into_unknown) + .and_then(|output| result.set_named_property("output", output))?; + + gas_used + } + rethnet_evm::ExecutionResult::Halt { reason, gas_used } => { + let halt = ExceptionalHalt::from(reason); + ctx.env + .create_uint32(halt as u32) + .and_then(|reason| result.set_named_property("reason", reason))?; + + gas_used + } + }; + + ctx.env + .create_bigint_from_u64(gas_used) + .and_then(|gas_used| result.set_named_property("gasUsed", gas_used))?; + + let mut execution_result = ctx.env.create_object()?; + + execution_result.set_named_property("result", result)?; + + ctx.env + .create_object() + .and_then(|trace| execution_result.set_named_property("trace", trace))?; + + tracing_message_result.set_named_property("executionResult", execution_result)?; + + let next = ctx.env.create_object()?; + + let promise = ctx.callback.call(None, &[tracing_message_result, next])?; + let result = await_void_promise(ctx.env, promise, ctx.value.sender); + + handle_error(sender, result) + }, + )?; + + Ok(Self { + before_message_fn, + step_fn, + after_message_fn, + pending_before: None, + }) + } + + fn validate_before_message(&mut self) { + if let Some(message) = self.pending_before.take() { + let (sender, receiver) = channel(); + + let status = self.before_message_fn.call( + BeforeMessageHandlerCall { message, sender }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver + .recv() + .unwrap() + .expect("Failed call to BeforeMessageHandler"); + } + } +} + +impl rethnet_evm::Inspector 
for JsTracer +where + D: rethnet_evm::Database, + D::Error: Debug, +{ + fn call( + &mut self, + data: &mut rethnet_evm::EVMData<'_, D>, + inputs: &mut rethnet_evm::CallInputs, + _is_static: bool, + ) -> (InstructionResult, Gas, rethnet_eth::Bytes) { + self.validate_before_message(); + + let code = data + .journaled_state + .state + .get(&inputs.context.code_address) + .map(|account| { + if let Some(code) = &account.info.code { + code.clone() + } else { + data.db.code_by_hash(account.info.code_hash).unwrap() + } + }) + .unwrap_or_else(|| { + let account = data.db.basic(inputs.context.code_address).unwrap().unwrap(); + account + .code + .unwrap_or_else(|| data.db.code_by_hash(account.code_hash).unwrap()) + }); + + self.pending_before = Some(BeforeMessage { + depth: data.journaled_state.depth, + to: Some(inputs.context.address), + data: inputs.input.clone(), + value: inputs.transfer.value, + code_address: Some(inputs.context.code_address), + code: Some(code), + }); + + (InstructionResult::Continue, Gas::new(0), Bytes::default()) + } + + fn call_end( + &mut self, + data: &mut rethnet_evm::EVMData<'_, D>, + _inputs: &rethnet_evm::CallInputs, + remaining_gas: Gas, + ret: InstructionResult, + out: Bytes, + _is_static: bool, + ) -> (InstructionResult, Gas, Bytes) { + match ret { + return_revert!() if self.pending_before.is_some() => { + self.pending_before = None; + return (ret, remaining_gas, out); + } + _ => (), + } + + self.validate_before_message(); + + let safe_ret = if ret == InstructionResult::CallTooDeep + || ret == InstructionResult::OutOfFund + || ret == InstructionResult::StateChangeDuringStaticCall + { + InstructionResult::Revert + } else { + ret + }; + + let result = match safe_ret.into() { + SuccessOrHalt::Success(reason) => rethnet_evm::ExecutionResult::Success { + reason, + gas_used: remaining_gas.spend(), + gas_refunded: remaining_gas.refunded() as u64, + logs: data.journaled_state.logs.clone(), + output: rethnet_evm::Output::Call(out.clone()), + }, + SuccessOrHalt::Revert => rethnet_evm::ExecutionResult::Revert { + gas_used: remaining_gas.spend(), + output: out.clone(), + }, + SuccessOrHalt::Halt(reason) => rethnet_evm::ExecutionResult::Halt { + reason, + gas_used: remaining_gas.limit(), + }, + SuccessOrHalt::Internal => panic!("Internal error: {:?}", safe_ret), + SuccessOrHalt::FatalExternalError => panic!("Fatal external error"), + }; + + let (sender, receiver) = channel(); + + let status = self.after_message_fn.call( + AfterMessageHandlerCall { result, sender }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver + .recv() + .unwrap() + .expect("Failed call to BeforeMessageHandler"); + + (ret, remaining_gas, out) + } + + fn create( + &mut self, + data: &mut rethnet_evm::EVMData<'_, D>, + inputs: &mut rethnet_evm::CreateInputs, + ) -> (InstructionResult, Option, Gas, Bytes) { + self.validate_before_message(); + + self.pending_before = Some(BeforeMessage { + depth: data.journaled_state.depth, + to: None, + data: inputs.init_code.clone(), + value: inputs.value, + code_address: None, + code: None, + }); + + ( + InstructionResult::Continue, + None, + Gas::new(0), + Bytes::default(), + ) + } + + fn create_end( + &mut self, + data: &mut rethnet_evm::EVMData<'_, D>, + _inputs: &rethnet_evm::CreateInputs, + ret: InstructionResult, + address: Option, + remaining_gas: Gas, + out: Bytes, + ) -> (InstructionResult, Option, Gas, Bytes) { + self.validate_before_message(); + + let safe_ret = + if ret == InstructionResult::CallTooDeep || ret == 
InstructionResult::OutOfFund { + InstructionResult::Revert + } else { + ret + }; + + let result = match safe_ret.into() { + SuccessOrHalt::Success(reason) => rethnet_evm::ExecutionResult::Success { + reason, + gas_used: remaining_gas.spend(), + gas_refunded: remaining_gas.refunded() as u64, + logs: data.journaled_state.logs.clone(), + output: rethnet_evm::Output::Create(out.clone(), address), + }, + SuccessOrHalt::Revert => rethnet_evm::ExecutionResult::Revert { + gas_used: remaining_gas.spend(), + output: out.clone(), + }, + SuccessOrHalt::Halt(reason) => rethnet_evm::ExecutionResult::Halt { + reason, + gas_used: remaining_gas.limit(), + }, + SuccessOrHalt::Internal => panic!("Internal error: {:?}", safe_ret), + SuccessOrHalt::FatalExternalError => panic!("Fatal external error"), + }; + + let (sender, receiver) = channel(); + + let status = self.after_message_fn.call( + AfterMessageHandlerCall { result, sender }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver + .recv() + .unwrap() + .expect("Failed call to BeforeMessageHandler"); + + (ret, address, remaining_gas, out) + } + + fn step( + &mut self, + interp: &mut rethnet_evm::Interpreter, + data: &mut rethnet_evm::EVMData<'_, D>, + _is_static: bool, + ) -> InstructionResult { + // Skip the step + let skip_step = self.pending_before.as_ref().map_or(false, |message| { + message.code.is_some() && interp.current_opcode() == opcode::STOP + }); + + self.validate_before_message(); + + if !skip_step { + // self.pre_steps.push(StepData { + // depth: data.journaled_state.depth, + // pc: interp.program_counter() as u64, + // opcode: interp.current_opcode(), + // gas: *interp.gas(), + // }); + + let (sender, receiver) = channel(); + + let status = self.step_fn.call( + StepHandlerCall { + depth: data.journaled_state.depth, + pc: interp.program_counter() as u64, + opcode: interp.current_opcode(), + // return_value: interp.instruction_result, + // gas_cost: post_step_gas.spend() - pre_step_gas.spend(), + // gas_refunded: post_step_gas.refunded() - pre_step_gas.refunded(), + // gas_left: interp.gas().remaining(), + // stack: interp.stack().data().clone(), + // memory: Bytes::copy_from_slice(interp.memory.data().as_slice()), + contract: data + .journaled_state + .account(interp.contract.address) + .info + .clone(), + contract_address: interp.contract().address, + sender, + }, + ThreadsafeFunctionCallMode::Blocking, + ); + assert_eq!(status, Status::Ok); + + receiver + .recv() + .unwrap() + .expect("Failed call to BeforeMessageHandler"); + } + + InstructionResult::Continue + } + + // fn step_end( + // &mut self, + // interp: &mut rethnet_evm::Interpreter, + // _data: &mut rethnet_evm::EVMData<'_, D>, + // _is_static: bool, + // _eval: InstructionResult, + // ) -> InstructionResult { + // // TODO: temporary fix + // let StepData { + // depth, + // pc, + // opcode, + // gas: pre_step_gas, + // } = self + // .pre_steps + // .pop() + // .expect("At least one pre-step should exist"); + + // let post_step_gas = interp.gas(); + + // InstructionResult::Continue + // } +} diff --git a/crates/rethnet_evm_napi/src/transaction.rs b/crates/rethnet_evm_napi/src/transaction.rs index 2c08fcd950..7ae5005e90 100644 --- a/crates/rethnet_evm_napi/src/transaction.rs +++ b/crates/rethnet_evm_napi/src/transaction.rs @@ -1,3 +1,5 @@ +pub mod result; + use napi::bindgen_prelude::{BigInt, Buffer}; use napi_derive::napi; use rethnet_eth::{Address, Bytes, U256}; @@ -88,28 +90,3 @@ impl TryFrom for rethnet_evm::TxEnv { pub struct 
TransactionConfig { pub disable_balance_check: Option, } - -#[napi(object)] -pub struct TransactionOutput { - /// Return value from Call or Create transactions - #[napi(readonly)] - pub output: Option, - /// Optionally, a 160-bit address from Create transactions - #[napi(readonly)] - pub address: Option, -} - -impl From for TransactionOutput { - fn from(value: rethnet_evm::TransactOut) -> Self { - let (output, address) = match value { - rethnet_evm::TransactOut::None => (None, None), - rethnet_evm::TransactOut::Call(output) => (Some(Buffer::from(output.as_ref())), None), - rethnet_evm::TransactOut::Create(output, address) => ( - Some(Buffer::from(output.as_ref())), - address.map(|address| Buffer::from(address.as_bytes())), - ), - }; - - Self { output, address } - } -} diff --git a/crates/rethnet_evm_napi/src/transaction/result.rs b/crates/rethnet_evm_napi/src/transaction/result.rs new file mode 100644 index 0000000000..3013593e78 --- /dev/null +++ b/crates/rethnet_evm_napi/src/transaction/result.rs @@ -0,0 +1,209 @@ +use napi::{ + bindgen_prelude::{BigInt, Buffer, Either3, ToNapiValue}, + Either, +}; +use napi_derive::napi; + +use crate::{log::Log, trace::Trace}; + +/// The possible reasons for successful termination of the EVM. +#[napi] +pub enum SuccessReason { + /// The opcode `STOP` was called + Stop, + /// The opcode `RETURN` was called + Return, + /// The opcode `SELFDESTRUCT` was called + SelfDestruct, +} + +impl From for SuccessReason { + fn from(eval: rethnet_evm::Eval) -> Self { + match eval { + rethnet_evm::Eval::Stop => Self::Stop, + rethnet_evm::Eval::Return => Self::Return, + rethnet_evm::Eval::SelfDestruct => Self::SelfDestruct, + } + } +} + +#[napi(object)] +pub struct CallOutput { + /// Return value + pub return_value: Buffer, +} + +#[napi(object)] +pub struct CreateOutput { + /// Return value + pub return_value: Buffer, + /// Optionally, a 160-bit address + pub address: Option, +} + +/// The result when the EVM terminates successfully. +#[napi(object)] +pub struct SuccessResult { + /// The reason for termination + pub reason: SuccessReason, + /// The amount of gas used + pub gas_used: BigInt, + /// The amount of gas refunded + pub gas_refunded: BigInt, + /// The logs + pub logs: Vec, + /// The transaction output + pub output: Either, +} + +/// The result when the EVM terminates due to a revert. +#[napi(object)] +pub struct RevertResult { + /// The amount of gas used + pub gas_used: BigInt, + /// The transaction output + pub output: Buffer, +} + +/// Indicates that the EVM has experienced an exceptional halt. This causes execution to +/// immediately end with all gas being consumed. +#[napi] +pub enum ExceptionalHalt { + OutOfGas, + OpcodeNotFound, + // CallNotAllowedInsideStatic, + InvalidFEOpcode, + InvalidJump, + NotActivated, + StackUnderflow, + StackOverflow, + OutOfOffset, + CreateCollision, + OverflowPayment, + PrecompileError, + NonceOverflow, + /// Create init code size exceeds limit (runtime). + CreateContractSizeLimit, + /// Error on created contract that begins with EF + CreateContractStartingWithEF, +} + +impl From for ExceptionalHalt { + fn from(halt: rethnet_evm::Halt) -> Self { + match halt { + rethnet_evm::Halt::OutOfGas(..) 
=> ExceptionalHalt::OutOfGas, + rethnet_evm::Halt::OpcodeNotFound => ExceptionalHalt::OpcodeNotFound, + // rethnet_evm::Halt::CallNotAllowedInsideStatic => { + // ExceptionalHalt::CallNotAllowedInsideStatic + // } + rethnet_evm::Halt::InvalidFEOpcode => ExceptionalHalt::InvalidFEOpcode, + rethnet_evm::Halt::InvalidJump => ExceptionalHalt::InvalidJump, + rethnet_evm::Halt::NotActivated => ExceptionalHalt::NotActivated, + rethnet_evm::Halt::StackUnderflow => ExceptionalHalt::StackUnderflow, + rethnet_evm::Halt::StackOverflow => ExceptionalHalt::StackOverflow, + rethnet_evm::Halt::OutOfOffset => ExceptionalHalt::OutOfOffset, + rethnet_evm::Halt::CreateCollision => ExceptionalHalt::CreateCollision, + rethnet_evm::Halt::OverflowPayment => ExceptionalHalt::OverflowPayment, + rethnet_evm::Halt::PrecompileError => ExceptionalHalt::PrecompileError, + rethnet_evm::Halt::NonceOverflow => ExceptionalHalt::NonceOverflow, + rethnet_evm::Halt::CreateContractSizeLimit => ExceptionalHalt::CreateContractSizeLimit, + rethnet_evm::Halt::CreateContractStartingWithEF => { + ExceptionalHalt::CreateContractStartingWithEF + } + } + } +} + +/// The result when the EVM terminates due to an exceptional halt. +#[napi(object)] +pub struct HaltResult { + /// The exceptional halt that occurred + pub reason: ExceptionalHalt, + /// Halting will spend all the gas and will thus be equal to the specified gas limit + pub gas_used: BigInt, +} + +/// The result of executing a transaction. +#[napi(object)] +pub struct ExecutionResult { + /// The transaction result + pub result: Either3, + /// The transaction trace + pub trace: Trace, +} + +impl From<(rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)> for ExecutionResult { + fn from((result, trace): (rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)) -> Self { + let result = match result { + rethnet_evm::ExecutionResult::Success { + reason, + gas_used, + gas_refunded, + logs, + output, + } => { + let logs = logs.into_iter().map(Log::from).collect(); + + Either3::A(SuccessResult { + reason: reason.into(), + gas_used: BigInt::from(gas_used), + gas_refunded: BigInt::from(gas_refunded), + logs, + output: match output { + rethnet_evm::Output::Call(return_value) => Either::A(CallOutput { + return_value: Buffer::from(return_value.as_ref()), + }), + rethnet_evm::Output::Create(return_value, address) => { + Either::B(CreateOutput { + return_value: Buffer::from(return_value.as_ref()), + address: address.map(|address| Buffer::from(address.as_bytes())), + }) + } + }, + }) + } + rethnet_evm::ExecutionResult::Revert { gas_used, output } => Either3::B(RevertResult { + gas_used: BigInt::from(gas_used), + output: Buffer::from(output.as_ref()), + }), + rethnet_evm::ExecutionResult::Halt { reason, gas_used } => Either3::C(HaltResult { + reason: reason.into(), + gas_used: BigInt::from(gas_used), + }), + }; + + Self { + result, + trace: trace.into(), + } + } +} + +#[napi(object)] +pub struct TransactionResult { + pub exec_result: ExecutionResult, + pub state: serde_json::Value, +} + +impl + TryFrom<( + rethnet_evm::ExecutionResult, + rethnet_evm::State, + rethnet_evm::trace::Trace, + )> for TransactionResult +{ + type Error = napi::Error; + + fn try_from( + (result, state, trace): ( + rethnet_evm::ExecutionResult, + rethnet_evm::State, + rethnet_evm::trace::Trace, + ), + ) -> std::result::Result { + let exec_result = (result, trace).into(); + let state = serde_json::to_value(state)?; + + Ok(Self { exec_result, state }) + } +} diff --git 
a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index b7d570d02d..de821c12a1 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -64,7 +64,6 @@ import { import { SolidityStackTrace } from "../stack-traces/solidity-stack-trace"; import { SolidityTracer } from "../stack-traces/solidityTracer"; import { VmTraceDecoder } from "../stack-traces/vm-trace-decoder"; -import { VMTracer } from "../stack-traces/vm-tracer"; import "./ethereumjs-workarounds"; import { rpcQuantityToBigInt } from "../../core/jsonrpc/types/base-types"; @@ -324,7 +323,6 @@ Hardhat Network's forking functionality only works with blocks from at least spu private _nextSnapshotId = 1; // We start in 1 to mimic Ganache private readonly _snapshots: Snapshot[] = []; - private readonly _vmTracer: VMTracer; private readonly _vmTraceDecoder: VmTraceDecoder; private readonly _solidityTracer: SolidityTracer; private readonly _consoleLogger: ConsoleLogger = new ConsoleLogger(); @@ -365,9 +363,6 @@ Hardhat Network's forking functionality only works with blocks from at least spu this.setUserProvidedNextBlockBaseFeePerGas(nextBlockBaseFee); } - this._vmTracer = new VMTracer(this._vm, this._common, false); - this._vmTracer.enableTracing(); - const contractsIdentifier = new ContractsIdentifier(); this._vmTraceDecoder = new VmTraceDecoder(contractsIdentifier); this._solidityTracer = new SolidityTracer(); @@ -756,9 +751,10 @@ Hardhat Network's forking functionality only works with blocks from at least spu this._runTxAndRevertMutations(tx, blockNumberOrPending) ); - let vmTrace = this._vmTracer.getLastTopLevelMessageTrace(); - const vmTracerError = this._vmTracer.getLastError(); - this._vmTracer.clearLastError(); + const traceResult = this._vm.getLastTrace(); + let vmTrace = traceResult.trace; + const vmTracerError = traceResult.error; + this._vm.clearLastError(); if (vmTrace !== undefined) { vmTrace = this._vmTraceDecoder.tryToDecodeMessageTrace(vmTrace); @@ -1564,9 +1560,10 @@ Hardhat Network's forking functionality only works with blocks from at least spu private async _gatherTraces( result: RunTxResult ): Promise { - let vmTrace = this._vmTracer.getLastTopLevelMessageTrace(); - const vmTracerError = this._vmTracer.getLastError(); - this._vmTracer.clearLastError(); + const traceResult = this._vm.getLastTrace(); + let vmTrace = traceResult.trace; + const vmTracerError = traceResult.error; + this._vm.clearLastError(); if (vmTrace !== undefined) { vmTrace = this._vmTraceDecoder.tryToDecodeMessageTrace(vmTrace); @@ -1849,6 +1846,12 @@ Hardhat Network's forking functionality only works with blocks from at least spu throw exitCode; } + if (exitCode.kind !== vmTrace?.exit.kind) { + console.trace("execution:", exitCode); + console.log("trace:", vmTrace?.exit); + throw Error("Execution error does not match trace error"); + } + if (exitCode.kind === ExitCode.CODESIZE_EXCEEDS_MAXIMUM) { if (stackTrace !== undefined) { return encodeSolidityStackTrace( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts index 9d2b907e4f..788ffb6765 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/convertToRethnet.ts @@ -18,7 
+18,13 @@ import { } from "rethnet-evm"; import { fromBigIntLike } from "../../../util/bigint"; import { HardforkName } from "../../../util/hardforks"; -import { Exit } from "../vm/exit"; +import { + isCreateOutput, + isHaltResult, + isRevertResult, + isSuccessResult, +} from "../../stack-traces/message-trace"; +import { Exit, ExitCode } from "../vm/exit"; import { RunTxResult } from "../vm/vm-adapter"; import { Bloom } from "./bloom"; @@ -154,30 +160,45 @@ export function rethnetResultToRunTxResult( rethnetResult: ExecutionResult, blockGasUsed: bigint ): RunTxResult { - const vmError = Exit.fromRethnetExitCode(rethnetResult.exitCode); - // We return an object with only the properties that are used by Hardhat. - // To be extra sure that the other properties are not used, we add getters - // that exit the process if accessed. + const createdAddress = + isSuccessResult(rethnetResult.result) && + isCreateOutput(rethnetResult.result.output) + ? rethnetResult.result.output.address + : undefined; + + const exit = isSuccessResult(rethnetResult.result) + ? Exit.fromRethnetSuccessReason(rethnetResult.result.reason) + : isHaltResult(rethnetResult.result) + ? Exit.fromRethnetExceptionalHalt(rethnetResult.result.reason) + : new Exit(ExitCode.REVERT); + + const returnValue = isRevertResult(rethnetResult.result) + ? rethnetResult.result.output + : isSuccessResult(rethnetResult.result) + ? rethnetResult.result.output.returnValue + : Buffer.from([]); - const bloom = rethnetLogsToBloom(rethnetResult.logs); + const bloom = isSuccessResult(rethnetResult.result) + ? rethnetLogsToBloom(rethnetResult.result.logs) + : new Bloom(undefined); return { - gasUsed: rethnetResult.gasUsed, + gasUsed: rethnetResult.result.gasUsed, createdAddress: - rethnetResult.output.address !== undefined - ? new Address(rethnetResult.output.address) - : undefined, - exit: vmError, - returnValue: rethnetResult.output.output ?? Buffer.from([]), + createdAddress !== undefined ? new Address(createdAddress) : undefined, + exit, + returnValue, bloom, receipt: { // Receipts have a 0 as status on error - status: vmError.isError() ? 0 : 1, - cumulativeBlockGasUsed: blockGasUsed + rethnetResult.gasUsed, + status: exit.isError() ? 0 : 1, + cumulativeBlockGasUsed: blockGasUsed + rethnetResult.result.gasUsed, bitvector: bloom.bitvector, - logs: rethnetResult.logs.map((log) => { - return [log.address, log.topics, log.data]; - }), + logs: isSuccessResult(rethnetResult.result) + ? 
rethnetResult.result.logs.map((log) => { + return [log.address, log.topics, log.data]; + }) + : [], }, }; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts index 0af88fee5a..dee765b51c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts @@ -82,6 +82,10 @@ export class BlockBuilder { gasUsed: this._gasUsed, }; + if (header.number === undefined) { + header.number = this._opts.parentBlock.header.number + 1n; + } + const blockData = { header, transactions: this._transactions }; const block = Block.fromBlockData(blockData, { common: this._common, diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/creation.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/creation.ts index a671610781..3861c3c6f1 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/creation.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/creation.ts @@ -31,7 +31,8 @@ export function createVm( assertHardhatInvariant(block !== null, "Should be able to get block"); return block.header.hash(); - } + }, + common ); } else { return DualModeAdapter.create(common, blockchain, config, selectHardfork); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index b49d5e94f9..80ea092e0f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -10,22 +10,29 @@ import { import { assertHardhatInvariant } from "../../../core/errors"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; +import { + isEvmStep, + isPrecompileTrace, + MessageTrace, +} from "../../stack-traces/message-trace"; +import { VMTracer } from "../../stack-traces/vm-tracer"; import { NodeConfig } from "../node-types"; import { RpcDebugTraceOutput } from "../output"; import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; import { EthereumJSAdapter } from "./ethereumjs"; +import { ExitCode } from "./exit"; import { RethnetAdapter } from "./rethnet"; -import { RunTxResult, Trace, TracingCallbacks, VMAdapter } from "./vm-adapter"; +import { RunTxResult, Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/restrict-template-expressions */ -function printEthereumJSTrace(trace: any) { +function _printEthereumJSTrace(trace: any) { console.log(JSON.stringify(trace, null, 2)); } -function printRethnetTrace(trace: any) { +function _printRethnetTrace(trace: any) { console.log( JSON.stringify( trace, @@ -38,7 +45,9 @@ function printRethnetTrace(trace: any) { export class DualModeAdapter implements VMAdapter { constructor( private _ethereumJSAdapter: VMAdapter, - private _rethnetAdapter: VMAdapter + private _rethnetAdapter: VMAdapter, + private _ethereumJSVMTracer: VMTracer, + private _rethnetVMTracer: VMTracer ) {} public static async create( @@ -62,10 +71,19 @@ export class DualModeAdapter implements VMAdapter { assertHardhatInvariant(block !== null, "Should be able to get block"); return block.header.hash(); - } + }, + common ); - return new DualModeAdapter(ethereumJSAdapter, 
rethnetAdapter); + const ethereumJSVMTracer = new VMTracer(common, false); + const rethnetVMTracer = new VMTracer(common, false); + + return new DualModeAdapter( + ethereumJSAdapter, + rethnetAdapter, + ethereumJSVMTracer, + rethnetVMTracer + ); } public async dryRun( @@ -73,25 +91,33 @@ export class DualModeAdapter implements VMAdapter { blockContext: Block, forceBaseFeeZero?: boolean ): Promise<[RunTxResult, Trace]> { - const [ethereumJSResult, ethereumJSTrace] = - await this._ethereumJSAdapter.dryRun(tx, blockContext, forceBaseFeeZero); + const ethereumJSResultPromise = this._ethereumJSAdapter.dryRun( + tx, + blockContext, + forceBaseFeeZero + ); - const [rethnetResult, rethnetTrace] = await this._rethnetAdapter.dryRun( + const rethnetResultPromise = this._rethnetAdapter.dryRun( tx, blockContext, forceBaseFeeZero ); + const [ + [ethereumJSResult, _ethereumJSTrace], + [rethnetResult, rethnetTrace], + ] = await Promise.all([ethereumJSResultPromise, rethnetResultPromise]); + try { assertEqualRunTxResults(ethereumJSResult, rethnetResult); return [rethnetResult, rethnetTrace]; } catch (e) { // if the results didn't match, print the traces - console.log("EthereumJS trace"); - printEthereumJSTrace(ethereumJSTrace); - console.log(); - console.log("Rethnet trace"); - printRethnetTrace(rethnetTrace); + // console.log("EthereumJS trace"); + // printEthereumJSTrace(ethereumJSTrace); + // console.log(); + // console.log("Rethnet trace"); + // printRethnetTrace(rethnetTrace); throw e; } @@ -153,18 +179,11 @@ export class DualModeAdapter implements VMAdapter { return rethnetStorageSlot; } - public async getContractCode( - address: Address, - ethJsOnly?: boolean - ): Promise { + public async getContractCode(address: Address): Promise { const ethereumJSCode = await this._ethereumJSAdapter.getContractCode( address ); - if (ethJsOnly === true) { - return ethereumJSCode; - } - const rethnetCode = await this._rethnetAdapter.getContractCode(address); if (!ethereumJSCode.equals(rethnetCode)) { @@ -211,14 +230,6 @@ export class DualModeAdapter implements VMAdapter { return this._ethereumJSAdapter.traceTransaction(hash, block, config); } - public enableTracing(callbacks: TracingCallbacks): void { - return this._ethereumJSAdapter.enableTracing(callbacks); - } - - public disableTracing(): void { - return this._ethereumJSAdapter.disableTracing(); - } - public async setBlockContext( block: Block, irregularStateOrUndefined: Buffer | undefined @@ -243,27 +254,29 @@ export class DualModeAdapter implements VMAdapter { tx: TypedTransaction, block: Block ): Promise<[RunTxResult, Trace]> { - const [ethereumJSResult, ethereumJSTrace] = - await this._ethereumJSAdapter.runTxInBlock(tx, block); + const ethereumJSResultPromise = this._ethereumJSAdapter.runTxInBlock( + tx, + block + ); + + const rethnetResultPromise = this._rethnetAdapter.runTxInBlock(tx, block); - const [rethnetResult, rethnetTrace] = - await this._rethnetAdapter.runTxInBlock(tx, block); + const [ + [ethereumJSResult, ethereumJSTrace], + [rethnetResult, _rethnetTrace], + ] = await Promise.all([ethereumJSResultPromise, rethnetResultPromise]); try { assertEqualRunTxResults(ethereumJSResult, rethnetResult); - if (rethnetResult.createdAddress !== undefined) { - const _test = this.getAccount(rethnetResult.createdAddress); - } - return [ethereumJSResult, ethereumJSTrace]; } catch (e) { // if the results didn't match, print the traces - console.log("EthereumJS trace"); - printEthereumJSTrace(ethereumJSTrace); - console.log(); - console.log("Rethnet trace"); - 
printRethnetTrace(rethnetTrace); + // console.log("EthereumJS trace"); + // printEthereumJSTrace(ethereumJSTrace); + // console.log(); + // console.log("Rethnet trace"); + // printRethnetTrace(rethnetTrace); throw e; } @@ -301,40 +314,256 @@ export class DualModeAdapter implements VMAdapter { return rethnetRoot; } + + public getLastTrace(): { + trace: MessageTrace | undefined; + error: Error | undefined; + } { + const { trace: ethereumJSTrace, error: ethereumJSError } = + this._ethereumJSAdapter.getLastTrace(); + const { trace: rethnetTrace, error: rethnetError } = + this._rethnetAdapter.getLastTrace(); + + if (ethereumJSTrace === undefined) { + if (rethnetTrace !== undefined) { + throw new Error( + "ethereumJSTrace is undefined but rethnetTrace is defined" + ); + } + } else { + if (rethnetTrace === undefined) { + throw new Error( + "ethereumJSTrace is defined but rethnetTrace is undefined" + ); + } + + assertEqualTraces(ethereumJSTrace, rethnetTrace); + } + + if (ethereumJSError === undefined) { + if (rethnetError !== undefined) { + throw new Error( + "ethereumJSError is undefined but rethnetError is defined" + ); + } + } else { + if (rethnetError === undefined) { + throw new Error( + "ethereumJSError is defined but rethnetError is undefined" + ); + } + + // both errors are defined + if (ethereumJSError.name !== rethnetError.name) { + throw new Error( + `Different error name: ${ethereumJSError.name} !== ${rethnetError.name}` + ); + } + + if (ethereumJSError.message !== rethnetError.message) { + throw new Error( + `Different error message: ${ethereumJSError.message} !== ${rethnetError.message}` + ); + } + + if (ethereumJSError.stack === undefined) { + if (rethnetError.stack !== undefined) { + throw new Error( + "ethereumJSError.stack is undefined but rethnetError.stack is defined" + ); + } + } else { + if (rethnetError.stack === undefined) { + throw new Error( + "ethereumJSError.stack is defined but rethnetError.stack is undefined" + ); + } + + // both error stacks are defined + if (ethereumJSError.stack !== rethnetError.stack) { + throw new Error( + `Different error stack: ${ethereumJSError.stack} !== ${rethnetError.stack}` + ); + } + } + } + + const ethereumJSSteps = this._ethereumJSVMTracer.tracingSteps; + const rethnetSteps = this._rethnetVMTracer.tracingSteps; + if (ethereumJSSteps.length !== rethnetSteps.length) { + throw new Error( + `Different number of steps in tracers: ${this._ethereumJSVMTracer.tracingSteps.length} !== ${this._rethnetVMTracer.tracingSteps.length}` + ); + } + + for (let stepIdx = 0; stepIdx < ethereumJSSteps.length; ++stepIdx) { + const ethereumJSStep = ethereumJSSteps[stepIdx]; + const rethnetStep = rethnetSteps[stepIdx]; + + if (ethereumJSStep.depth !== rethnetStep.depth) { + console.trace( + `Different steps[${stepIdx}] depth: ${ethereumJSStep.depth} !== ${rethnetStep.depth}` + ); + throw new Error("Different step depth"); + } + + if (ethereumJSStep.pc !== rethnetStep.pc) { + console.trace( + `Different steps[${stepIdx}] pc: ${ethereumJSStep.pc} !== ${rethnetStep.pc}` + ); + throw new Error("Different step pc"); + } + + if (ethereumJSStep.opcode !== rethnetStep.opcode) { + console.trace( + `Different steps[${stepIdx}] opcode: ${ethereumJSStep.opcode} !== ${rethnetStep.opcode}` + ); + throw new Error("Different step opcode"); + } + + if (ethereumJSStep.gasCost !== rethnetStep.gasCost) { + console.trace( + `Different steps[${stepIdx}] gasCost: ${ethereumJSStep.gasCost} !== ${rethnetStep.gasCost}` + ); + throw new Error("Different step gasCost"); + } + + if 
(ethereumJSStep.gasLeft !== rethnetStep.gasLeft) { + console.trace( + `Different steps[${stepIdx}] gasLeft: ${ethereumJSStep.gasLeft} !== ${rethnetStep.gasLeft}` + ); + throw new Error("Different step gasLeft"); + } + + const ethereumJSStack = ethereumJSStep.stack; + const rethnetStack = rethnetStep.stack; + if (ethereumJSStack.length !== rethnetStack.length) { + throw new Error( + `Different number of stack elements in tracers: ${ethereumJSStack.length} !== ${rethnetStack.length}` + ); + } + + for (let stackIdx = 0; stackIdx < ethereumJSSteps.length; ++stackIdx) { + const ethereumJSStackElement = ethereumJSStack[stackIdx]; + const rethnetStackElement = rethnetStack[stackIdx]; + + if (ethereumJSStackElement !== rethnetStackElement) { + console.trace( + `Different steps[${stepIdx}] stack[${stackIdx}]: ${ethereumJSStackElement} !== ${rethnetStackElement}` + ); + throw new Error("Different step stack element"); + } + } + + if (!ethereumJSStep.memory.equals(rethnetStep.memory)) { + console.trace( + `Different steps[${stepIdx}] memory: ${ethereumJSStep.memory} !== ${rethnetStep.memory}` + ); + throw new Error("Different step memory"); + } + + if (ethereumJSStep.contract.balance !== rethnetStep.contract.balance) { + console.trace( + `Different steps[${stepIdx}] contract balance: ${ethereumJSStep.contract.balance} !== ${rethnetStep.contract.balance}` + ); + throw new Error("Different step contract balance"); + } + + if (ethereumJSStep.contract.nonce !== rethnetStep.contract.nonce) { + console.trace( + `Different steps[${stepIdx}] contract nonce: ${ethereumJSStep.contract.nonce} !== ${rethnetStep.contract.nonce}` + ); + throw new Error("Different step contract nonce"); + } + + if ( + !ethereumJSStep.contract.codeHash.equals(rethnetStep.contract.codeHash) + ) { + console.trace( + `Different steps[${stepIdx}] contract codeHash: ${ethereumJSStep.contract.codeHash} !== ${rethnetStep.contract.codeHash}` + ); + throw new Error("Different step contract codeHash"); + } + + // Code can be stored separately from the account in Rethnet + // const ethereumJSCode = ethereumJSStep.contract.code; + // const rethnetCode = rethnetStep.contract.code; + // if (ethereumJSCode === undefined) { + // if (rethnetCode !== undefined) { + // console.trace( + // `Different steps[${stepIdx}] contract code: ${ethereumJSCode} !== ${rethnetCode}` + // ); + + // throw new Error( + // "ethereumJSCode is undefined but rethnetCode is defined" + // ); + // } + // } else { + // if (rethnetCode === undefined) { + // console.trace( + // `Different steps[${stepIdx}] contract code: ${ethereumJSCode} !== ${rethnetCode}` + // ); + + // throw new Error( + // "ethereumJSCode is defined but rethnetCode is undefined" + // ); + // } + + // if (!ethereumJSCode.equals(rethnetCode)) { + // console.trace( + // `Different steps[${stepIdx}] contract code: ${ethereumJSCode} !== ${rethnetCode}` + // ); + // throw new Error("Different step contract code"); + // } + // } + + if (!ethereumJSStep.contractAddress.equals(rethnetStep.contractAddress)) { + console.trace( + `Different steps[${stepIdx}] contract address: ${ethereumJSStep.contractAddress} !== ${rethnetStep.contractAddress}` + ); + throw new Error("Different step contract address"); + } + } + + // TODO: compare each step + // TODO: compare tracers tracingMessages and tracingMessageResults + + return { + trace: rethnetTrace, + error: rethnetError, + }; + } + + public clearLastError() { + this._ethereumJSVMTracer.clearLastError(); + this._rethnetVMTracer.clearLastError(); + } } function 
assertEqualRunTxResults( ethereumJSResult: RunTxResult, rethnetResult: RunTxResult ) { - if (ethereumJSResult.gasUsed !== rethnetResult.gasUsed) { - console.trace( - `Different totalGasSpent: ${ethereumJSResult.gasUsed} !== ${rethnetResult.gasUsed}` - ); - throw new Error("Different totalGasSpent"); - } - - if ( - ethereumJSResult.createdAddress?.toString() !== - rethnetResult.createdAddress?.toString() - ) { + if (ethereumJSResult.exit.kind !== rethnetResult.exit.kind) { console.trace( - `Different createdAddress: ${ethereumJSResult.createdAddress?.toString()} !== ${rethnetResult.createdAddress?.toString()}` + `Different exceptionError.error: ${ethereumJSResult.exit.kind} !== ${rethnetResult.exit.kind}` ); - throw new Error("Different createdAddress"); + throw new Error("Different exceptionError.error"); } - if (ethereumJSResult.exit.kind !== rethnetResult.exit.kind) { + if (ethereumJSResult.gasUsed !== rethnetResult.gasUsed) { console.trace( - `Different exceptionError.error: ${ethereumJSResult.exit.kind} !== ${rethnetResult.exit.kind}` + `Different totalGasSpent: ${ethereumJSResult.gasUsed} !== ${rethnetResult.gasUsed}` ); - throw new Error("Different exceptionError.error"); + throw new Error("Different totalGasSpent"); } - // TODO: we only compare the return values when a contract was *not* created, - // because sometimes ethereumjs has the created bytecode in the return value - // and rethnet doesn't - if (ethereumJSResult.createdAddress === undefined) { + const exitCode = ethereumJSResult.exit.kind; + if (exitCode === ExitCode.SUCCESS || exitCode === ExitCode.REVERT) { + // TODO: we only compare the return values when a contract was *not* created, + // because sometimes ethereumjs has the created bytecode in the return value + // and rethnet doesn't + // if (ethereumJSResult.createdAddress === undefined) { if ( ethereumJSResult.returnValue.toString("hex") !== rethnetResult.returnValue.toString("hex") @@ -346,35 +575,55 @@ function assertEqualRunTxResults( ); throw new Error("Different returnValue"); } - } + // } - if (!ethereumJSResult.bloom.equals(rethnetResult.bloom)) { - console.trace( - `Different bloom: ${ethereumJSResult.bloom} !== ${rethnetResult.bloom}` - ); - throw new Error("Different bloom"); - } + if (!ethereumJSResult.bloom.equals(rethnetResult.bloom)) { + console.trace( + `Different bloom: ${ethereumJSResult.bloom} !== ${rethnetResult.bloom}` + ); + throw new Error("Different bloom"); + } - if ( - !ethereumJSResult.receipt.bitvector.equals(rethnetResult.receipt.bitvector) - ) { - console.trace( - `Different receipt bitvector: ${ethereumJSResult.receipt.bitvector} !== ${rethnetResult.receipt.bitvector}` - ); - throw new Error("Different receipt bitvector"); - } + if ( + !ethereumJSResult.receipt.bitvector.equals( + rethnetResult.receipt.bitvector + ) + ) { + console.trace( + `Different receipt bitvector: ${ethereumJSResult.receipt.bitvector} !== ${rethnetResult.receipt.bitvector}` + ); + throw new Error("Different receipt bitvector"); + } - if ( - ethereumJSResult.receipt.cumulativeBlockGasUsed !== - rethnetResult.receipt.cumulativeBlockGasUsed - ) { - console.trace( - `Different receipt cumulativeBlockGasUsed: ${ethereumJSResult.receipt.cumulativeBlockGasUsed} !== ${rethnetResult.receipt.cumulativeBlockGasUsed}` - ); - throw new Error("Different receipt cumulativeBlockGasUsed"); + if ( + ethereumJSResult.receipt.cumulativeBlockGasUsed !== + rethnetResult.receipt.cumulativeBlockGasUsed + ) { + console.trace( + `Different receipt cumulativeBlockGasUsed: 
${ethereumJSResult.receipt.cumulativeBlockGasUsed} !== ${rethnetResult.receipt.cumulativeBlockGasUsed}` + ); + throw new Error("Different receipt cumulativeBlockGasUsed"); + } + + assertEqualLogs(ethereumJSResult.receipt.logs, rethnetResult.receipt.logs); } - assertEqualLogs(ethereumJSResult.receipt.logs, rethnetResult.receipt.logs); + if (exitCode === ExitCode.SUCCESS) { + if ( + ethereumJSResult.createdAddress?.toString() !== + rethnetResult.createdAddress?.toString() && + // ethereumjs returns a createdAddress, even when reverting + !( + rethnetResult.createdAddress === undefined && + ethereumJSResult.exit.kind !== ExitCode.SUCCESS + ) + ) { + console.trace( + `Different createdAddress: ${ethereumJSResult.createdAddress?.toString()} !== ${rethnetResult.createdAddress?.toString()}` + ); + throw new Error("Different createdAddress"); + } + } } function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { @@ -448,9 +697,154 @@ function assertEqualAccounts( } if (!ethereumJSAccount.storageRoot.equals(rethnetAccount.storageRoot)) { - console.trace( - `Different storageRoot: ${ethereumJSAccount.storageRoot} !== ${rethnetAccount.storageRoot}` + // TODO re-enable + // console.trace( + // `Different storageRoot: ${ethereumJSAccount.storageRoot.toString( + // "hex" + // )} !== ${rethnetAccount.storageRoot.toString("hex")}` + // ); + // throw new Error("Different storageRoot"); + } +} + +function assertEqualTraces( + ethereumJSTrace: MessageTrace, + rethnetTrace: MessageTrace +) { + // both traces are defined + if (ethereumJSTrace.depth !== rethnetTrace.depth) { + throw new Error( + `Different depth: ${ethereumJSTrace.depth} !== ${rethnetTrace.depth}` + ); + } + + if (ethereumJSTrace.exit.kind !== rethnetTrace.exit.kind) { + throw new Error( + `Different exit: ${ethereumJSTrace.exit.kind} !== ${rethnetTrace.exit.kind}` + ); + } + + if (ethereumJSTrace.gasUsed !== rethnetTrace.gasUsed) { + throw new Error( + `Different gasUsed: ${ethereumJSTrace.gasUsed} !== ${rethnetTrace.gasUsed}` + ); + } + + if (!ethereumJSTrace.returnData.equals(rethnetTrace.returnData)) { + throw new Error( + `Different returnData: ${ethereumJSTrace.returnData} !== ${rethnetTrace.returnData}` ); - throw new Error("Different storageRoot"); + } + + if (ethereumJSTrace.value !== rethnetTrace.value) { + throw new Error( + `Different value: ${ethereumJSTrace.value} !== ${rethnetTrace.value}` + ); + } + + if (isPrecompileTrace(ethereumJSTrace)) { + if (!isPrecompileTrace(rethnetTrace)) { + throw new Error( + `ethereumJSTrace is a precompiled trace but rethnetTrace is not` + ); + } + + // Both traces are precompile traces + if (ethereumJSTrace.precompile !== rethnetTrace.precompile) { + throw new Error( + `Different precompile: ${ethereumJSTrace.precompile} !== ${rethnetTrace.precompile}` + ); + } + + if (!ethereumJSTrace.calldata.equals(rethnetTrace.calldata)) { + throw new Error( + `Different calldata: ${ethereumJSTrace.calldata} !== ${rethnetTrace.calldata}` + ); + } + } else { + if (isPrecompileTrace(rethnetTrace)) { + throw new Error( + `ethereumJSTrace is a precompiled trace but ethereumJSTrace is not` + ); + } + + // Both traces are NOT precompile traces + if (!ethereumJSTrace.code.equals(rethnetTrace.code)) { + console.log("ethereumjs:", ethereumJSTrace); + console.log("rethnet:", rethnetTrace); + throw new Error( + `Different code: ${ethereumJSTrace.code.toString( + "hex" + )} !== ${rethnetTrace.code.toString("hex")}` + ); + } + + if (ethereumJSTrace.steps.length !== rethnetTrace.steps.length) { + throw new Error( + 
`Different steps length: ${ethereumJSTrace.steps.length} !== ${rethnetTrace.steps.length}` + ); + } + + for (let stepIdx = 0; stepIdx < ethereumJSTrace.steps.length; stepIdx++) { + const ethereumJSStep = ethereumJSTrace.steps[stepIdx]; + const rethnetStep = rethnetTrace.steps[stepIdx]; + + if (isEvmStep(ethereumJSStep)) { + // if (stepIdx >= rethnetTrace.steps.length) { + // console.log("code:", ethereumJSTrace.code); + // console.log(stepIdx); + // console.log(ethereumJSStep); + // console.log("opcode:", ethereumJSTrace.code[ethereumJSStep.pc]); + // continue; + // } + + if (!isEvmStep(rethnetStep)) { + throw new Error( + `ethereumJSStep '${stepIdx}' is an EVM step but rethnetStep '${stepIdx}' is not` + ); + } + + if (ethereumJSStep.pc !== rethnetStep.pc) { + throw new Error( + `Different step[${stepIdx}]: ${ethereumJSStep.pc} !== ${rethnetStep.pc}` + ); + } + } else { + if (isEvmStep(rethnetStep)) { + throw new Error( + `rethnetStep '${stepIdx}' is an EVM step but ethereumJSStep '${stepIdx}' is not` + ); + } + + assertEqualTraces(ethereumJSStep, rethnetStep); + } + } + + if (ethereumJSTrace.bytecode === undefined) { + if (rethnetTrace.bytecode !== undefined) { + throw new Error( + "ethereumJSTrace.bytecode is undefined but rethnetTrace.bytecode is defined" + ); + } + } else { + if (rethnetTrace.bytecode === undefined) { + throw new Error( + "ethereumJSTrace.bytecode is defined but rethnetTrace.bytecode is undefined" + ); + } + + // Both traces contain bytecode + if (!ethereumJSTrace.bytecode.equals(rethnetTrace.bytecode)) { + throw new Error( + `Different bytecode: ${ethereumJSTrace.bytecode} !== ${rethnetTrace.bytecode}` + ); + } + } + + if (ethereumJSTrace.numberOfSubtraces !== rethnetTrace.numberOfSubtraces) { + throw new Error( + `Different numberOfSubtraces: ${ethereumJSTrace.numberOfSubtraces} !== ${rethnetTrace.numberOfSubtraces}` + ); + } } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index a4f2364f3c..ad4610f1fd 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -6,6 +6,7 @@ import { InterpreterStep, Message, } from "@nomicfoundation/ethereumjs-evm"; +import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; import { DefaultStateManager, StateManager, @@ -17,6 +18,7 @@ import { RunTxResult as EthereumJSRunTxResult, VM, } from "@nomicfoundation/ethereumjs-vm"; +import { SuccessReason } from "rethnet-evm"; import { assertHardhatInvariant } from "../../../core/errors"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; import { @@ -24,7 +26,9 @@ import { InvalidInputError, TransactionExecutionError, } from "../../../core/providers/errors"; +import { MessageTrace } from "../../stack-traces/message-trace"; import { VMDebugTracer } from "../../stack-traces/vm-debug-tracer"; +import { VMTracer } from "../../stack-traces/vm-tracer"; import { ForkStateManager } from "../fork/ForkStateManager"; import { isForkedNodeConfig, NodeConfig } from "../node-types"; import { RpcDebugTraceOutput } from "../output"; @@ -36,15 +40,15 @@ import { Bloom } from "../utils/bloom"; import { makeForkClient } from "../utils/makeForkClient"; import { makeStateTrie } from "../utils/makeStateTrie"; import { Exit } from "./exit"; -import { RunTxResult, Trace, TracingCallbacks, VMAdapter } from 
"./vm-adapter"; +import { RunTxResult, Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ export class EthereumJSAdapter implements VMAdapter { - private _tracingCallbacks: TracingCallbacks | undefined; - private _blockStartStateRoot: Buffer | undefined; + private _vmTracer: VMTracer; + constructor( private readonly _vm: VM, private readonly _stateManager: StateManager, @@ -55,7 +59,18 @@ export class EthereumJSAdapter implements VMAdapter { private readonly _selectHardfork: (blockNumber: bigint) => string, private readonly _forkNetworkId?: number, private readonly _forkBlockNumber?: bigint - ) {} + ) { + this._vmTracer = new VMTracer(_common, false); + + assertHardhatInvariant( + this._vm.evm.events !== undefined, + "EVM should have an 'events' property" + ); + + this._vm.evm.events.on("beforeMessage", this._beforeMessageHandler); + this._vm.evm.events.on("step", this._stepHandler); + this._vm.evm.events.on("afterMessage", this._afterMessageHandler); + } public static async create( common: Common, @@ -257,40 +272,6 @@ export class EthereumJSAdapter implements VMAdapter { return this._stateManager.setStateRoot(stateRoot); } - public enableTracing(callbacks: TracingCallbacks): void { - assertHardhatInvariant( - this._vm.evm.events !== undefined, - "EVM should have an 'events' property" - ); - - this._tracingCallbacks = callbacks; - - this._vm.evm.events.on("beforeMessage", this._beforeMessageHandler); - this._vm.evm.events.on("step", this._stepHandler); - this._vm.evm.events.on("afterMessage", this._afterMessageHandler); - } - - public disableTracing(): void { - assertHardhatInvariant( - this._vm.evm.events !== undefined, - "EVM should have an 'events' property" - ); - - if (this._tracingCallbacks !== undefined) { - this._vm.evm.events.removeListener( - "beforeMessage", - this._beforeMessageHandler - ); - this._vm.evm.events.removeListener("step", this._stepHandler); - this._vm.evm.events.removeListener( - "afterMessage", - this._afterMessageHandler - ); - - this._tracingCallbacks = undefined; - } - } - public async setBlockContext( block: Block, irregularStateOrUndefined: Buffer | undefined @@ -461,6 +442,20 @@ export class EthereumJSAdapter implements VMAdapter { return this.getStateRoot(); } + public getLastTrace(): { + trace: MessageTrace | undefined; + error: Error | undefined; + } { + const trace = this._vmTracer.getLastTopLevelMessageTrace(); + const error = this._vmTracer.getLastError(); + + return { trace, error }; + } + + public clearLastError() { + this._vmTracer.clearLastError(); + } + private _getCommonForTracing(networkId: number, blockNumber: bigint): Common { try { const common = Common.custom( @@ -494,73 +489,119 @@ export class EthereumJSAdapter implements VMAdapter { return this._common.gteHardfork("london"); } - private _beforeMessageHandler = (message: Message, next: any) => { - if (this._tracingCallbacks !== undefined) { - return this._tracingCallbacks.beforeMessage( - { - ...message, - to: message.to?.toBuffer(), - codeAddress: - message.to !== undefined - ? message.codeAddress.toBuffer() - : undefined, - }, - next - ); - } + private _beforeMessageHandler = async (message: Message, next: any) => { + try { + const code = + message.to !== undefined + ? await this.getContractCode(message.codeAddress) + : undefined; + await this._vmTracer.addBeforeMessage({ + ...message, + to: message.to?.toBuffer(), + codeAddress: + message.to !== undefined ? 
message.codeAddress.toBuffer() : undefined, + code, + }); - next(); + return next(); + } catch (e) { + return next(e); + } }; - private _stepHandler = (step: InterpreterStep, next: any) => { - if (this._tracingCallbacks !== undefined) { - return this._tracingCallbacks.step( - { - pc: BigInt(step.pc), + private _stepHandler = async (step: InterpreterStep, next: any) => { + try { + await this._vmTracer.addStep({ + depth: step.depth, + pc: BigInt(step.pc), + opcode: step.opcode.name, + // returnValue: 0, // Do we have error values in ethereumjs? + gasCost: BigInt(step.opcode.fee) + (step.opcode.dynamicFee ?? 0n), + gasRefunded: step.gasRefund, + gasLeft: step.gasLeft, + stack: step.stack, + memory: step.memory, + contract: { + balance: step.account.balance, + nonce: step.account.nonce, + codeHash: step.account.codeHash, }, - next - ); - } + contractAddress: step.address.buf, + }); - next(); + return next(); + } catch (e) { + return next(e); + } }; - private _afterMessageHandler = (result: EVMResult, next: any) => { - if (this._tracingCallbacks !== undefined) { - const vmError = Exit.fromEthereumJSEvmError( - result.execResult.exceptionError - ); + private _afterMessageHandler = async (result: EVMResult, next: any) => { + try { + const gasUsed = result.execResult.executionGasUsed; + + let executionResult; + + if (result.execResult.exceptionError === undefined) { + const reason = + result.execResult.selfdestruct !== undefined && + Object.keys(result.execResult.selfdestruct).length > 0 + ? SuccessReason.SelfDestruct + : result.createdAddress !== undefined || + result.execResult.returnValue.length > 0 + ? SuccessReason.Return + : SuccessReason.Stop; + + executionResult = { + reason, + gasUsed, + gasRefunded: result.execResult.gasRefund ?? 0n, + logs: + result.execResult.logs?.map((log) => { + return { + address: log[0], + topics: log[1], + data: log[2], + }; + }) ?? [], + output: + result.createdAddress === undefined + ? { + returnValue: result.execResult.returnValue, + } + : { + address: result.createdAddress.toBuffer(), + returnValue: result.execResult.returnValue, + }, + }; + } else if (result.execResult.exceptionError.error === ERROR.REVERT) { + executionResult = { + gasUsed, + output: result.execResult.returnValue, + }; + } else { + const vmError = Exit.fromEthereumJSEvmError( + result.execResult.exceptionError + ); - const rethnetExitCode = vmError.getRethnetExitCode(); + executionResult = { + reason: vmError.getRethnetExceptionalHalt(), + gasUsed, + }; + } - return this._tracingCallbacks.afterMessage( - { - executionResult: { - exitCode: rethnetExitCode, - output: { - address: result.createdAddress?.toBuffer(), - output: result.execResult.returnValue, - }, - gasUsed: result.execResult.executionGasUsed, - gasRefunded: result.execResult.gasRefund ?? 0n, - logs: - result.execResult.logs?.map((log) => { - return { - address: log[0], - topics: log[1], - data: log[2], - }; - }) ?? 
[], - trace: { - steps: [], - returnValue: result.execResult.returnValue, - }, + await this._vmTracer.addAfterMessage({ + executionResult: { + result: executionResult, + trace: { + steps: [], + returnValue: result.execResult.returnValue, }, }, - next - ); - } + }); - next(); + return next(); + } catch (e) { + return next(e); + } }; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts index feff8d1309..01a03b204a 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts @@ -1,5 +1,6 @@ import { EvmError } from "@nomicfoundation/ethereumjs-evm"; import { ERROR } from "@nomicfoundation/ethereumjs-evm/dist/exceptions"; +import { ExceptionalHalt, SuccessReason } from "rethnet-evm"; export enum ExitCode { SUCCESS, @@ -8,38 +9,39 @@ export enum ExitCode { INTERNAL_ERROR, INVALID_OPCODE, CODESIZE_EXCEEDS_MAXIMUM, + CREATE_COLLISION, } -const exitCodeToRethnetExitCode: Record = { - [ExitCode.SUCCESS]: 0x00, - [ExitCode.REVERT]: 0x20, - [ExitCode.OUT_OF_GAS]: 0x50, - [ExitCode.INTERNAL_ERROR]: 0x20, - [ExitCode.INVALID_OPCODE]: 0x53, - [ExitCode.CODESIZE_EXCEEDS_MAXIMUM]: 0x65, -}; - export class Exit { - public static fromRethnetExitCode(rethnetExitCode: number): Exit { - switch (rethnetExitCode) { - case 0x00: - case 0x01: - case 0x02: - case 0x03: + public static fromRethnetSuccessReason(reason: SuccessReason): Exit { + switch (reason) { + case SuccessReason.Stop: + case SuccessReason.Return: + case SuccessReason.SelfDestruct: return new Exit(ExitCode.SUCCESS); - case 0x20: - return new Exit(ExitCode.REVERT); - case 0x50: + // TODO: Should we throw an error if default is hit? 
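The switch above maps rethnet's structured results onto Hardhat's ExitCode. As an illustration only (not part of the diff), the same mapping can be driven from the result type guards introduced in message-trace.ts later in this patch; import paths are assumed relative to provider/vm/:

    import { HaltResult, RevertResult, SuccessResult } from "rethnet-evm";
    import {
      isHaltResult,
      isSuccessResult,
    } from "../../stack-traces/message-trace";
    import { Exit, ExitCode } from "./exit";

    // Sketch: derive an Exit from a rethnet execution result, mirroring the
    // branching that VMTracer.addAfterMessage uses later in this patch.
    function exitFromRethnetResult(
      result: SuccessResult | RevertResult | HaltResult
    ): Exit {
      if (isSuccessResult(result)) {
        return Exit.fromRethnetSuccessReason(result.reason);
      }
      if (isHaltResult(result)) {
        return Exit.fromRethnetExceptionalHalt(result.reason);
      }
      // Revert results carry only the revert output, no success or halt reason.
      return new Exit(ExitCode.REVERT);
    }
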
+ } + } + + public static fromRethnetExceptionalHalt(halt: ExceptionalHalt): Exit { + switch (halt) { + case ExceptionalHalt.OutOfGas: return new Exit(ExitCode.OUT_OF_GAS); - case 0x51: - case 0x53: + + case ExceptionalHalt.OpcodeNotFound: + case ExceptionalHalt.InvalidFEOpcode: return new Exit(ExitCode.INVALID_OPCODE); - case 0x65: + + case ExceptionalHalt.CreateCollision: + return new Exit(ExitCode.CREATE_COLLISION); + + case ExceptionalHalt.CreateContractSizeLimit: return new Exit(ExitCode.CODESIZE_EXCEEDS_MAXIMUM); + default: { // TODO temporary, should be removed in production // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error - throw new Error(`Unmatched rethnet exit code: ${rethnetExitCode}`); + throw new Error(`Unmatched rethnet exceptional halt: ${halt}`); } } } @@ -69,9 +71,13 @@ export class Exit { return new Exit(ExitCode.CODESIZE_EXCEEDS_MAXIMUM); } + if (evmError.error === ERROR.CREATE_COLLISION) { + return new Exit(ExitCode.CREATE_COLLISION); + } + // TODO temporary, should be removed in production // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error - throw new Error(`Unmatched rethnet exit code: ${evmError.error}`); + throw new Error(`Unmatched evm error: ${evmError.error}`); } constructor(public kind: ExitCode) {} @@ -94,6 +100,8 @@ export class Exit { return "Invalid opcode"; case ExitCode.CODESIZE_EXCEEDS_MAXIMUM: return "Codesize exceeds maximum"; + case ExitCode.CREATE_COLLISION: + return "Create collision"; } const _exhaustiveCheck: never = this.kind; @@ -102,7 +110,7 @@ export class Exit { public getEthereumJSError(): EvmError | undefined { switch (this.kind) { case ExitCode.SUCCESS: - return; + return undefined; case ExitCode.REVERT: return new EvmError(ERROR.REVERT); case ExitCode.OUT_OF_GAS: @@ -113,12 +121,26 @@ export class Exit { return new EvmError(ERROR.INVALID_OPCODE); case ExitCode.CODESIZE_EXCEEDS_MAXIMUM: return new EvmError(ERROR.CODESIZE_EXCEEDS_MAXIMUM); + case ExitCode.CREATE_COLLISION: + return new EvmError(ERROR.CREATE_COLLISION); } const _exhaustiveCheck: never = this.kind; } - public getRethnetExitCode(): number { - return exitCodeToRethnetExitCode[this.kind]; + public getRethnetExceptionalHalt(): ExceptionalHalt { + switch (this.kind) { + case ExitCode.OUT_OF_GAS: + return ExceptionalHalt.OutOfGas; + case ExitCode.INVALID_OPCODE: + return ExceptionalHalt.OpcodeNotFound; + case ExitCode.CODESIZE_EXCEEDS_MAXIMUM: + return ExceptionalHalt.CreateContractSizeLimit; + + default: + // TODO temporary, should be removed in production + // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error + throw new Error(`Unmatched rethnet exceptional halt: ${this.kind}`); + } } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 084dc661eb..e9221cd6b1 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -1,9 +1,18 @@ import { Block } from "@nomicfoundation/ethereumjs-block"; +import { Common } from "@nomicfoundation/ethereumjs-common"; import { Account, Address } from "@nomicfoundation/ethereumjs-util"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; -import { BlockBuilder, Blockchain, Rethnet } from "rethnet-evm"; - -import { NodeConfig } from "../node-types"; +import { + BlockBuilder, + Blockchain, + Rethnet, + Tracer, + 
TracingMessage, + TracingMessageResult, + TracingStep, +} from "rethnet-evm"; + +import { isForkedNodeConfig, NodeConfig } from "../node-types"; import { ethereumjsHeaderDataToRethnet, ethereumjsTransactionToRethnet, @@ -14,25 +23,38 @@ import { hardforkGte, HardforkName } from "../../../util/hardforks"; import { RpcDebugTraceOutput } from "../output"; import { RethnetStateManager } from "../RethnetState"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; +import { MessageTrace } from "../../stack-traces/message-trace"; +import { VMTracer } from "../../stack-traces/vm-tracer"; -import { RunTxResult, Trace, TracingCallbacks, VMAdapter } from "./vm-adapter"; +import { RunTxResult, Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/no-unused-vars */ export class RethnetAdapter implements VMAdapter { + private _vmTracer: VMTracer; + constructor( private _blockchain: Blockchain, private _state: RethnetStateManager, private _rethnet: Rethnet, - private readonly _selectHardfork: (blockNumber: bigint) => string - ) {} + private readonly _selectHardfork: (blockNumber: bigint) => string, + common: Common + ) { + this._vmTracer = new VMTracer(common, false); + } public static async create( config: NodeConfig, selectHardfork: (blockNumber: bigint) => string, - getBlockHash: (blockNumber: bigint) => Promise + getBlockHash: (blockNumber: bigint) => Promise, + common: Common ): Promise { + if (isForkedNodeConfig(config)) { + // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error + throw new Error("Forking is not supported for Rethnet yet"); + } + const blockchain = new Blockchain(getBlockHash); const limitContractCodeSize = @@ -50,7 +72,13 @@ export class RethnetAdapter implements VMAdapter { disableEip3607: true, }); - return new RethnetAdapter(blockchain, state, rethnet, selectHardfork); + return new RethnetAdapter( + blockchain, + state, + rethnet, + selectHardfork, + common + ); } /** @@ -72,17 +100,27 @@ export class RethnetAdapter implements VMAdapter { blockContext.header.mixHash ); - const rethnetResult = await this._rethnet.guaranteedDryRun(rethnetTx, { - number: blockContext.header.number, - coinbase: blockContext.header.coinbase.buf, - timestamp: blockContext.header.timestamp, - basefee: - forceBaseFeeZero === true ? 0n : blockContext.header.baseFeePerGas, - gasLimit: blockContext.header.gasLimit, - difficulty, - prevrandao: prevRandao, + const tracer = new Tracer({ + beforeMessage: this._beforeMessageHandler, + step: this._stepHandler, + afterMessage: this._afterMessageHandler, }); + const rethnetResult = await this._rethnet.guaranteedDryRun( + rethnetTx, + { + number: blockContext.header.number, + coinbase: blockContext.header.coinbase.buf, + timestamp: blockContext.header.timestamp, + basefee: + forceBaseFeeZero === true ? 
0n : blockContext.header.baseFeePerGas, + gasLimit: blockContext.header.gasLimit, + difficulty, + prevrandao: prevRandao, + }, + tracer + ); + try { const result = rethnetResultToRunTxResult( rethnetResult.execResult, @@ -90,8 +128,8 @@ export class RethnetAdapter implements VMAdapter { ); return [result, rethnetResult.execResult.trace]; } catch (e) { - console.log("Rethnet trace"); - console.log(rethnetResult.execResult.trace); + // console.log("Rethnet trace"); + // console.log(rethnetResult.execResult.trace); throw e; } } @@ -116,16 +154,7 @@ export class RethnetAdapter implements VMAdapter { /** * Get the contract code at the given address. */ - public async getContractCode( - address: Address, - ethJsOnly?: boolean - ): Promise { - if (ethJsOnly === true) { - throw new Error( - "Calling RethnetAdapter.getContractCode with ethJsOnly=true, this shouldn't happen" - ); - } - + public async getContractCode(address: Address): Promise { return this._state.getContractCode(address); } @@ -206,9 +235,16 @@ export class RethnetAdapter implements VMAdapter { block.header.mixHash ); + const tracer = new Tracer({ + beforeMessage: this._beforeMessageHandler, + step: this._stepHandler, + afterMessage: this._afterMessageHandler, + }); + const rethnetResult = await this._rethnet.run( rethnetTx, - ethereumjsHeaderDataToRethnet(block.header, difficulty, prevRandao) + ethereumjsHeaderDataToRethnet(block.header, difficulty, prevRandao), + tracer ); try { @@ -218,8 +254,8 @@ export class RethnetAdapter implements VMAdapter { ); return [result, rethnetResult.trace]; } catch (e) { - console.log("Rethnet trace"); - console.log(rethnetResult.trace); + // console.log("Rethnet trace"); + // console.log(rethnetResult.trace); throw e; } } @@ -230,7 +266,7 @@ export class RethnetAdapter implements VMAdapter { public async addBlockRewards( rewards: Array<[Address, bigint]> ): Promise { - const blockBuilder = await BlockBuilder.new( + const blockBuilder = BlockBuilder.new( this._blockchain, this._state.asInner(), {}, @@ -284,25 +320,25 @@ export class RethnetAdapter implements VMAdapter { block: Block, config: RpcDebugTracingConfig ): Promise { - throw new Error("not implemented"); + throw new Error("traceTransaction not implemented for Rethnet"); } - /** - * Start tracing the VM execution with the given callbacks. - */ - public enableTracing(callbacks: TracingCallbacks): void { - throw new Error("not implemented"); + public async makeSnapshot(): Promise { + return this._state.makeSnapshot(); } - /** - * Stop tracing the execution. 
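With enableTracing/disableTracing removed, tracing is always on and callers read the result after the fact. A minimal consumer sketch (illustrative only; it assumes `vm: VMAdapter`, a signed `tx`, and a `blockContext` are already set up, mirroring the pattern used in the stack-traces tests later in this patch):

    // Run the transaction, then pull the trace collected by the adapter.
    await vm.dryRun(tx, blockContext);

    const { trace, error } = vm.getLastTrace();
    if (trace === undefined) {
      throw error ?? new Error("Cannot get last top level message trace");
    }
    // ... inspect `trace` ...
    vm.clearLastError();
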
- */ - public disableTracing(): void { - throw new Error("not implemented"); + public getLastTrace(): { + trace: MessageTrace | undefined; + error: Error | undefined; + } { + const trace = this._vmTracer.getLastTopLevelMessageTrace(); + const error = this._vmTracer.getLastError(); + + return { trace, error }; } - public async makeSnapshot(): Promise { - return this._state.makeSnapshot(); + public clearLastError() { + this._vmTracer.clearLastError(); } private _getBlockEnvDifficulty( @@ -340,4 +376,22 @@ export class RethnetAdapter implements VMAdapter { return undefined; } + + private _beforeMessageHandler = async ( + message: TracingMessage, + next: any + ) => { + await this._vmTracer.addBeforeMessage(message); + }; + + private _stepHandler = async (step: TracingStep, _next: any) => { + await this._vmTracer.addStep(step); + }; + + private _afterMessageHandler = async ( + result: TracingMessageResult, + _next: any + ) => { + await this._vmTracer.addAfterMessage(result); + }; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index 724e7ca000..30a166d797 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -2,13 +2,10 @@ import type { Block } from "@nomicfoundation/ethereumjs-block"; import type { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import type { Account, Address } from "@nomicfoundation/ethereumjs-util"; import type { TxReceipt } from "@nomicfoundation/ethereumjs-vm"; -import type { - TracingMessage, - TracingMessageResult, - TracingStep, -} from "rethnet-evm"; import type { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; import type { RpcDebugTraceOutput } from "../output"; + +import { MessageTrace } from "../../stack-traces/message-trace"; import { Bloom } from "../utils/bloom"; import { Exit } from "./exit"; @@ -33,12 +30,6 @@ export interface RunBlockResult { gasUsed: bigint; } -export interface TracingCallbacks { - beforeMessage: (message: TracingMessage, next: any) => Promise; - step: (step: TracingStep, next: any) => Promise; - afterMessage: (result: TracingMessageResult, next: any) => Promise; -} - export interface VMAdapter { dryRun( tx: TypedTransaction, @@ -49,7 +40,7 @@ export interface VMAdapter { // getters getAccount(address: Address): Promise; getContractStorage(address: Address, key: Buffer): Promise; - getContractCode(address: Address, ethJsOnly?: boolean): Promise; + getContractCode(address: Address): Promise; // setters putAccount(address: Address, account: Account): Promise; @@ -79,13 +70,16 @@ export interface VMAdapter { revertBlock(): Promise; // methods for tracing + getLastTrace(): { + trace: MessageTrace | undefined; + error: Error | undefined; + }; + clearLastError(): void; traceTransaction( hash: Buffer, block: Block, config: RpcDebugTracingConfig ): Promise; - enableTracing(callbacks: TracingCallbacks): void; - disableTracing(): void; // methods for snapshotting makeSnapshot(): Promise; diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/error-inferrer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/error-inferrer.ts index 2a64ea920f..573f9b1ed2 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/error-inferrer.ts +++ 
b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/error-inferrer.ts @@ -557,6 +557,10 @@ export class ErrorInferrer { functionJumpdests: Instruction[], jumpedIntoFunction: boolean ): SolidityStackTrace | undefined { + if (trace.steps.length === 0) { + return; + } + const lastStep = trace.steps[trace.steps.length - 1]; if (!isEvmStep(lastStep)) { @@ -1144,6 +1148,10 @@ export class ErrorInferrer { } private _solidity063MaybeUnmappedRevert(trace: DecodedEvmMessageTrace) { + if (trace.steps.length === 0) { + return false; + } + const lastStep = trace.steps[trace.steps.length - 1]; if (!isEvmStep(lastStep)) { return false; diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/message-trace.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/message-trace.ts index 1faad79f83..a04e2a135b 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/message-trace.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/message-trace.ts @@ -1,5 +1,12 @@ import type { Bytecode } from "./model"; import type { Exit } from "../provider/vm/exit"; +import { + CallOutput, + CreateOutput, + HaltResult, + RevertResult, + SuccessResult, +} from "rethnet-evm"; export type MessageTrace = | CreateMessageTrace @@ -91,3 +98,34 @@ export type MessageTraceStep = MessageTrace | EvmStep; export interface EvmStep { pc: number; } + +export function isCallOutput( + output: CallOutput | CreateOutput +): output is CallOutput { + return !isCreateOutput(output); +} + +export function isCreateOutput( + output: CallOutput | CreateOutput +): output is CreateOutput { + return "address" in output; +} + +export function isSuccessResult( + result: SuccessResult | RevertResult | HaltResult +): result is SuccessResult { + // Only need to check for one unique field + return "gasRefunded" in result; +} + +export function isRevertResult( + result: SuccessResult | RevertResult | HaltResult +): result is RevertResult { + return !("reason" in result); +} + +export function isHaltResult( + result: SuccessResult | RevertResult | HaltResult +): result is HaltResult { + return !("output" in result); +} diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/model.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/model.ts index c82a4788e6..0be42c1807 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/model.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/model.ts @@ -298,6 +298,49 @@ export class Instruction { public readonly pushData?: Buffer, public readonly location?: SourceLocation ) {} + + /** + * Checks equality with another Instruction. 
+ */ + public equals(other: Instruction): boolean { + if (this.pc !== other.pc) { + return false; + } + + if (this.opcode !== other.opcode) { + return false; + } + + if (this.jumpType !== other.jumpType) { + return false; + } + + if (this.pushData !== undefined) { + if (other.pushData === undefined) { + return false; + } + + if (!this.pushData.equals(other.pushData)) { + return false; + } + } else if (other.pushData !== undefined) { + return false; + } + + if (this.location !== undefined) { + if (other.location === undefined) { + return false; + } + + if (!this.location.equals(other.location)) { + return false; + } + } else if (other.location !== undefined) { + return false; + } + + return true; + } } interface ImmutableReference { @@ -335,4 +378,22 @@ export class Bytecode { public hasInstruction(pc: number): boolean { return this._pcToInstruction.has(pc); } + + /** + * Checks equality with another Bytecode. + */ + public equals(other: Bytecode): boolean { + if (this._pcToInstruction.size !== other._pcToInstruction.size) { + return false; + } + + for (const [key, val] of this._pcToInstruction) { + const otherVal = other._pcToInstruction.get(key); + if (otherVal === undefined || !val.equals(otherVal)) { + return false; + } + } + + return true; + } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts index a4cfa4a9ad..03145a65fa 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/vm-tracer.ts @@ -1,22 +1,24 @@ import type { Common } from "@nomicfoundation/ethereumjs-common"; -import type { +import { + CreateOutput, TracingMessage, TracingMessageResult, TracingStep, } from "rethnet-evm"; import { getActivePrecompiles } from "@nomicfoundation/ethereumjs-evm"; -import { Address, bufferToBigInt } from "@nomicfoundation/ethereumjs-util"; +import { bufferToBigInt } from "@nomicfoundation/ethereumjs-util"; import { assertHardhatInvariant } from "../../core/errors"; import { Exit, ExitCode } from "../provider/vm/exit"; -import { VMAdapter } from "../provider/vm/vm-adapter"; import { CallMessageTrace, CreateMessageTrace, isCreateTrace, + isHaltResult, isPrecompileTrace, + isSuccessResult, MessageTrace, PrecompileMessageTrace, } from "./message-trace"; @@ -27,55 +29,19 @@ const DUMMY_RETURN_DATA = Buffer.from([]); const DUMMY_GAS_USED = 0n; export class VMTracer { + public tracingMessages: TracingMessage[] = []; + public tracingSteps: TracingStep[] = []; + public tracingMessageResults: TracingMessageResult[] = []; + private _messageTraces: MessageTrace[] = []; - private _enabled = false; private _lastError: Error | undefined; private _maxPrecompileNumber; - constructor( - private readonly _vm: VMAdapter, - common: Common, - private readonly _throwErrors = true - ) { - this._beforeMessageHandler = this._beforeMessageHandler.bind(this); - this._stepHandler = this._stepHandler.bind(this); - this._afterMessageHandler = this._afterMessageHandler.bind(this); + constructor(common: Common, private readonly _throwErrors = true) { this._maxPrecompileNumber = getActivePrecompiles(common).size; } - public enableTracing() { - if (this._enabled) { - return; - } - - this._vm.enableTracing({ - beforeMessage: this._beforeMessageHandler, - step: this._stepHandler, - afterMessage: this._afterMessageHandler, - }); - - this._enabled = true; - } - - public disableTracing() { - if (!this._enabled) { - 
return; - } - - this._vm.disableTracing(); - - this._enabled = false; - } - - public get enabled(): boolean { - return this._enabled; - } - public getLastTopLevelMessageTrace(): MessageTrace | undefined { - if (!this._enabled) { - throw new Error("You can't get a vm trace if the VMTracer is disabled"); - } - return this._messageTraces[0]; } @@ -91,9 +57,8 @@ export class VMTracer { return this._throwErrors || this._lastError === undefined; } - private async _beforeMessageHandler(message: TracingMessage, next: any) { + public async addBeforeMessage(message: TracingMessage) { if (!this._shouldKeepTracing()) { - next(); return; } @@ -102,8 +67,13 @@ export class VMTracer { if (message.depth === 0) { this._messageTraces = []; + this.tracingMessages = []; + this.tracingSteps = []; + this.tracingMessageResults = []; } + this.tracingMessages.push(message); + if (message.to === undefined) { const createTrace: CreateMessageTrace = { code: message.data, @@ -137,19 +107,18 @@ export class VMTracer { const codeAddress = message.codeAddress; // if we enter here, then `to` is not undefined, therefore - // `codeAddress` should be defined + // `codeAddress` and `code` should be defined assertHardhatInvariant( codeAddress !== undefined, "codeAddress should be defined" ); - - const code = await this._vm.getContractCode( - new Address(codeAddress), - true // ethJsOnly, temporary fix + assertHardhatInvariant( + message.code !== undefined, + "code should be defined" ); const callTrace: CallMessageTrace = { - code, + code: message.code, calldata: message.data, steps: [], value: message.value, @@ -180,23 +149,22 @@ export class VMTracer { } this._messageTraces.push(trace); - next(); } catch (error) { if (this._throwErrors) { - next(error); + throw error; } else { this._lastError = error as Error; - next(); } } } - private async _stepHandler(step: TracingStep, next: any) { + public async addStep(step: TracingStep) { if (!this._shouldKeepTracing()) { - next(); return; } + this.tracingSteps.push(step); + try { const trace = this._messageTraces[this._messageTraces.length - 1]; @@ -207,46 +175,52 @@ export class VMTracer { } trace.steps.push({ pc: Number(step.pc) }); - next(); } catch (error) { if (this._throwErrors) { - next(error); + throw error; } else { this._lastError = error as Error; - next(); } } } - private async _afterMessageHandler(result: TracingMessageResult, next: any) { + public async addAfterMessage(result: TracingMessageResult) { if (!this._shouldKeepTracing()) { - next(); return; } + this.tracingMessageResults.push(result); + try { const trace = this._messageTraces[this._messageTraces.length - 1]; + trace.gasUsed = result.executionResult.result.gasUsed; - trace.exit = Exit.fromRethnetExitCode(result.executionResult.exitCode); - trace.returnData = - result.executionResult.output.output ?? 
Buffer.from([]); - trace.gasUsed = result.executionResult.gasUsed; + const executionResult = result.executionResult.result; + if (isSuccessResult(executionResult)) { + trace.exit = Exit.fromRethnetSuccessReason(executionResult.reason); + trace.returnData = executionResult.output.returnValue; - if (isCreateTrace(trace)) { - trace.deployedContract = result.executionResult.output.address; + if (isCreateTrace(trace)) { + trace.deployedContract = ( + executionResult.output as CreateOutput + ).address; + } + } else if (isHaltResult(executionResult)) { + trace.exit = Exit.fromRethnetExceptionalHalt(executionResult.reason); + trace.returnData = Buffer.from([]); + } else { + trace.exit = new Exit(ExitCode.REVERT); + trace.returnData = executionResult.output; } if (this._messageTraces.length > 1) { this._messageTraces.pop(); } - - next(); } catch (error) { if (this._throwErrors) { - next(error); + throw error; } else { this._lastError = error as Error; - next(); } } } diff --git a/packages/hardhat-core/test/internal/hardhat-network/helpers/contracts.ts b/packages/hardhat-core/test/internal/hardhat-network/helpers/contracts.ts index edeb21430b..8b87863d45 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/helpers/contracts.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/helpers/contracts.ts @@ -420,6 +420,64 @@ contract Foo { topics: {}, }; +export const EXAMPLE_BLOCK_NUMBER_CONTRACT = { + sourceCode: `pragma solidity 0.8.17; + +contract Example { + uint public blockNumber; + + constructor() { + blockNumber = block.number; + } + + function setBlockNumber() public { + blockNumber = block.number; + } + + function getBlockNumber() public view returns (uint) { + return block.number; + } +}`, + bytecode: { + linkReferences: {}, + object: + "608060405234801561001057600080fd5b504360008190555060fa806100266000396000f3fe6080604052348015600f57600080fd5b5060043610603c5760003560e01c806342cbb15c14604157806357e871e714605b578063fbf6eaa5146075575b600080fd5b6047607d565b6040516052919060ab565b60405180910390f35b60616085565b604051606c919060ab565b60405180910390f35b607b608b565b005b600043905090565b60005481565b43600081905550565b6000819050919050565b60a5816094565b82525050565b600060208201905060be6000830184609e565b9291505056fea264697066735822122004d0ac1993d4db65d9aff533e23fe270bb1e47c8341f51f778e3e59c767c881964736f6c63430008110033", + opcodes: + "PUSH1 0x80 PUSH1 0x40 MSTORE CALLVALUE DUP1 ISZERO PUSH2 0x10 JUMPI PUSH1 0x0 DUP1 REVERT JUMPDEST POP NUMBER PUSH1 0x0 DUP2 SWAP1 SSTORE POP PUSH1 0xFA DUP1 PUSH2 0x26 PUSH1 0x0 CODECOPY PUSH1 0x0 RETURN INVALID PUSH1 0x80 PUSH1 0x40 MSTORE CALLVALUE DUP1 ISZERO PUSH1 0xF JUMPI PUSH1 0x0 DUP1 REVERT JUMPDEST POP PUSH1 0x4 CALLDATASIZE LT PUSH1 0x3C JUMPI PUSH1 0x0 CALLDATALOAD PUSH1 0xE0 SHR DUP1 PUSH4 0x42CBB15C EQ PUSH1 0x41 JUMPI DUP1 PUSH4 0x57E871E7 EQ PUSH1 0x5B JUMPI DUP1 PUSH4 0xFBF6EAA5 EQ PUSH1 0x75 JUMPI JUMPDEST PUSH1 0x0 DUP1 REVERT JUMPDEST PUSH1 0x47 PUSH1 0x7D JUMP JUMPDEST PUSH1 0x40 MLOAD PUSH1 0x52 SWAP2 SWAP1 PUSH1 0xAB JUMP JUMPDEST PUSH1 0x40 MLOAD DUP1 SWAP2 SUB SWAP1 RETURN JUMPDEST PUSH1 0x61 PUSH1 0x85 JUMP JUMPDEST PUSH1 0x40 MLOAD PUSH1 0x6C SWAP2 SWAP1 PUSH1 0xAB JUMP JUMPDEST PUSH1 0x40 MLOAD DUP1 SWAP2 SUB SWAP1 RETURN JUMPDEST PUSH1 0x7B PUSH1 0x8B JUMP JUMPDEST STOP JUMPDEST PUSH1 0x0 NUMBER SWAP1 POP SWAP1 JUMP JUMPDEST PUSH1 0x0 SLOAD DUP2 JUMP JUMPDEST NUMBER PUSH1 0x0 DUP2 SWAP1 SSTORE POP JUMP JUMPDEST PUSH1 0x0 DUP2 SWAP1 POP SWAP2 SWAP1 POP JUMP JUMPDEST PUSH1 0xA5 DUP2 PUSH1 0x94 JUMP JUMPDEST DUP3 MSTORE POP POP JUMP 
JUMPDEST PUSH1 0x0 PUSH1 0x20 DUP3 ADD SWAP1 POP PUSH1 0xBE PUSH1 0x0 DUP4 ADD DUP5 PUSH1 0x9E JUMP JUMPDEST SWAP3 SWAP2 POP POP JUMP INVALID LOG2 PUSH5 0x6970667358 0x22 SLT KECCAK256 DIV 0xD0 0xAC NOT SWAP4 0xD4 0xDB PUSH6 0xD9AFF533E23F 0xE2 PUSH17 0xBB1E47C8341F51F778E3E59C767C881964 PUSH20 0x6F6C634300081100330000000000000000000000 ", + sourceMap: "25:263:0:-:0;;;74:51;;;;;;;;;;108:12;94:11;:26;;;;25:263;;;;;;", + }, + abi: [ + { inputs: [], stateMutability: "nonpayable", type: "constructor" }, + { + inputs: [], + name: "blockNumber", + outputs: [{ internalType: "uint256", name: "", type: "uint256" }], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "getBlockNumber", + outputs: [{ internalType: "uint256", name: "", type: "uint256" }], + stateMutability: "view", + type: "function", + }, + { + inputs: [], + name: "setBlockNumber", + outputs: [], + stateMutability: "nonpayable", + type: "function", + }, + ], + selectors: { + blockNumber: "0x57e871e7", + getBlockNumber: "0x42cbb15c", + setBlockNumber: "0xfbf6eaa5", + }, + topics: {}, +}; + export const EXAMPLE_CHAIN_ID_CONTRACT = { sourceCode: `pragma solidity 0.8.14; diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/hardforks.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/hardforks.ts index 11a170f404..2c61ad9cae 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/hardforks.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/hardforks.ts @@ -692,7 +692,7 @@ describe("Eth module - hardfork dependant tests", function () { }); describe("Receipts formatting", function () { - describe("Before byzantium", function () { + describe.skip("Before byzantium", function () { useProviderAndCommon("spuriousDragon"); it("Should have a root field, and shouldn't have a status one nor type", async function () { diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/methods/sendTransaction.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/methods/sendTransaction.ts index 16cdc7ff5b..5e5df08e25 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/methods/sendTransaction.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/methods/sendTransaction.ts @@ -6,6 +6,7 @@ import { numberToRpcQuantity, rpcQuantityToNumber, rpcQuantityToBigInt, + rpcDataToNumber, } from "../../../../../../../src/internal/core/jsonrpc/types/base-types"; import { InvalidInputError } from "../../../../../../../src/internal/core/providers/errors"; import { workaroundWindowsCiFailures } from "../../../../../../utils/workaround-windows-ci-failures"; @@ -14,7 +15,10 @@ import { assertReceiptMatchesGethOne, assertTransactionFailure, } from "../../../../helpers/assertions"; -import { EXAMPLE_REVERT_CONTRACT } from "../../../../helpers/contracts"; +import { + EXAMPLE_BLOCK_NUMBER_CONTRACT, + EXAMPLE_REVERT_CONTRACT, +} from "../../../../helpers/contracts"; import { setCWD } from "../../../../helpers/cwd"; import { getPendingBaseFeePerGas } from "../../../../helpers/getPendingBaseFeePerGas"; import { @@ -1103,6 +1107,36 @@ describe("Eth module", function () { // assert: assert.equal(await getChainIdFromContract(this.provider), chainId); }); + + it("Should use the correct value of block.number", async function () { + const contractAddress = await deployContract( + this.provider, + 
`0x${EXAMPLE_BLOCK_NUMBER_CONTRACT.bytecode.object}` + ); + + const blockNumberBeforeTx = rpcQuantityToNumber( + await this.provider.send("eth_blockNumber") + ); + + await this.provider.send("eth_sendTransaction", [ + { + to: contractAddress, + from: DEFAULT_ACCOUNTS_ADDRESSES[0], + data: `${EXAMPLE_BLOCK_NUMBER_CONTRACT.selectors.setBlockNumber}`, + }, + ]); + + const contractBlockNumber = rpcDataToNumber( + await this.provider.send("eth_call", [ + { + to: contractAddress, + data: `${EXAMPLE_BLOCK_NUMBER_CONTRACT.selectors.blockNumber}`, + }, + ]) + ); + + assert.equal(contractBlockNumber, blockNumberBeforeTx + 1); + }); }); describe("eth_sendTransaction with minGasPrice", function () { diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts index 6236761dfb..50c84697fa 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts @@ -69,7 +69,8 @@ export async function runFullBlock( } ); - forkedNode["_vmTracer"].disableTracing(); + // TODO uncomment and fix this + // forkedNode["_vmTracer"].disableTracing(); const afterBlockEvent = await runBlockAndGetAfterBlockEvent( // TODO remove "as any" and make this work with VMAdapter @@ -88,7 +89,7 @@ export async function runFullBlock( await (forkedNode["_vm"] as any).putBlock(modifiedBlock); await forkedNode["_saveBlockAsSuccessfullyRun"]( modifiedBlock, - afterBlockEvent + afterBlockEvent as any // TODO remove this as any ); const newBlock = await forkedNode.getBlockByNumber(blockToRun); diff --git a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts index 0eca7effe6..8ecfde4755 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/stack-traces/execution.ts @@ -11,11 +11,10 @@ import abi from "ethereumjs-abi"; import { HardhatBlockchain } from "../../../../src/internal/hardhat-network/provider/HardhatBlockchain"; import { VMAdapter } from "../../../../src/internal/hardhat-network/provider/vm/vm-adapter"; -import { DualModeAdapter } from "../../../../src/internal/hardhat-network/provider/vm/dual"; import { MessageTrace } from "../../../../src/internal/hardhat-network/stack-traces/message-trace"; -import { VMTracer } from "../../../../src/internal/hardhat-network/stack-traces/vm-tracer"; import { defaultHardhatNetworkParams } from "../../../../src/internal/core/config/default-config"; import { BlockBuilder } from "../../../../src/internal/hardhat-network/provider/vm/block-builder"; +import { createVm } from "../../../../src/internal/hardhat-network/provider/vm/creation"; const senderPrivateKey = Buffer.from( "e331b6d69882b4cb4ea581d88e0b604039a3de5967688d3dcffdd2270c0fd109", @@ -36,7 +35,7 @@ export async function instantiateVm(): Promise<[VMAdapter, Common]> { }) ); - const vm = await DualModeAdapter.create( + const vm = await createVm( common, blockchain, { @@ -106,9 +105,6 @@ export async function traceTransaction( const signedTx = tx.sign(senderPrivateKey); - const vmTracer = new VMTracer(vm as any, common); - vmTracer.enableTracing(); - try { const blockBuilder = new BlockBuilder(vm, common, { parentBlock: Block.fromBlockData( @@ -126,14 +122,13 @@ export async function traceTransaction( await 
blockBuilder.addRewards([]); await blockBuilder.seal(); - const messageTrace = vmTracer.getLastTopLevelMessageTrace(); - if (messageTrace === undefined) { - const lastError = vmTracer.getLastError(); - throw lastError ?? new Error("Cannot get last top level message trace"); + const { trace, error } = vm.getLastTrace(); + if (trace === undefined) { + throw error ?? new Error("Cannot get last top level message trace"); } - return messageTrace; + return trace; } finally { - vmTracer.disableTracing(); + vm.clearLastError(); } } From 3a3bc7f1eacadf6809f316fc421b382041c63fd7 Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 13:53:55 -0600 Subject: [PATCH 026/406] refactor: remove unnecessary dependencies (#3683) --- crates/rethnet_eth/src/signature.rs | 19 ++++++++++- crates/rethnet_evm/Cargo.toml | 4 --- crates/rethnet_evm_napi/Cargo.toml | 1 - crates/rethnet_evm_napi/src/lib.rs | 34 ------------------- crates/rethnet_evm_napi/src/state.rs | 8 ++--- .../src/transaction/result.rs | 4 --- 6 files changed, 22 insertions(+), 48 deletions(-) diff --git a/crates/rethnet_eth/src/signature.rs b/crates/rethnet_eth/src/signature.rs index 95857c0262..55bc325406 100644 --- a/crates/rethnet_eth/src/signature.rs +++ b/crates/rethnet_eth/src/signature.rs @@ -8,7 +8,7 @@ use std::str::FromStr; use secp256k1::{ ecdsa::{RecoverableSignature, RecoveryId}, - PublicKey, Secp256k1, ThirtyTwoByteHash, + PublicKey, Secp256k1, SecretKey, SignOnly, ThirtyTwoByteHash, }; use sha3::{Digest, Keccak256}; use thiserror::Error; @@ -22,6 +22,23 @@ pub fn public_key_to_address(public_key: PublicKey) -> Address { Address::from_slice(&hash[12..]) } +/// Converts a private to an address using the provided context. +pub fn private_key_to_address( + context: &Secp256k1, + private_key: &str, +) -> Result { + private_to_public_key(context, private_key).map(public_key_to_address) +} + +fn private_to_public_key( + context: &Secp256k1, + private_key: &str, +) -> Result { + let private_key = private_key.strip_prefix("0x").unwrap_or(private_key); + + SecretKey::from_str(private_key).map(|secret_key| secret_key.public_key(context)) +} + /// An error involving a signature. 
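The Rust helper above follows the usual Ethereum address derivation: keccak-256 of the uncompressed public key, keeping the lower 160 bits. For comparison, a minimal TypeScript sketch of the same derivation on the consumer side, assuming a 0x-prefixed hex private key (illustrative only, not part of the diff):

    import { Address, toBuffer } from "@nomicfoundation/ethereumjs-util";

    // keccak-256 of the uncompressed public key, low 20 bytes of the hash.
    function privateKeyToAddress(privateKeyHex: string): Address {
      return Address.fromPrivateKey(toBuffer(privateKeyHex));
    }
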
#[derive(Debug, Error)] pub enum SignatureError { diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 74a83a5274..5aa495b2d3 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -5,16 +5,12 @@ edition = "2021" [dependencies] auto_impl = { version = "1.0.1", default-features = false } -ethers-signers = { version = "1.0.0", default-features = false } hashbrown = { version = "0.13", default-features = false, features = ["ahash", "serde"] } log = { version = "0.4.17", default-features = false } parking_lot = { version = "0.12.1", default-features = false } rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth", features = ["serde"] } revm = { git = "https://github.com/bluealloy/revm", rev = "8e6f4f2", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } # revm = { path = "../../../revm/crates/revm", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } -secp256k1 = { version = "0.24.1", default-features = false, features = ["alloc"] } -sha3 = { version = "0.10.4", default-features = false } -signature = { version = "1.6.4", default-features = false, features = ["std"] } thiserror = { version = "1.0.38", default-features = false } tokio = { version = "1.21.2", default-features = false, features = ["rt-multi-thread", "sync"] } diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index 87c3decf4b..214621f301 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -16,7 +16,6 @@ rethnet_evm = { version = "0.1.0-dev", path = "../rethnet_evm" } rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth" } secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc"] } serde_json = { version = "1.0.85", default-features = false, features = ["alloc"] } -sha3 = { version = "0.10.5", default-features = false } [build-dependencies] napi-build = "2.0.1" diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index 68a7db8099..fd188fdd7a 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -17,37 +17,3 @@ mod threadsafe_function; mod trace; mod tracer; mod transaction; - -use std::str::FromStr; - -use napi::Status; -use rethnet_eth::Address; -use secp256k1::{PublicKey, Secp256k1, SecretKey, SignOnly}; -use sha3::{Digest, Keccak256}; - -use crate::cast::TryCast; - -fn private_key_to_address( - context: &Secp256k1, - private_key: String, -) -> napi::Result
{ - private_to_public_key(context, private_key).map(public_key_to_address) -} - -fn private_to_public_key( - context: &Secp256k1, - private_key: String, -) -> napi::Result { - let private_key = private_key.strip_prefix("0x").unwrap_or(&private_key); - - SecretKey::from_str(private_key).map_or_else( - |e| Err(napi::Error::new(Status::InvalidArg, e.to_string())), - |secret_key| Ok(secret_key.public_key(context)), - ) -} - -fn public_key_to_address(public_key: PublicKey) -> Address { - let hash = Keccak256::digest(&public_key.serialize_uncompressed()[1..]); - // Only take the lower 160 bits of the hash - Address::from_slice(&hash[12..]) -} diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 3dabc526d6..8a8bcd1d31 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -5,7 +5,7 @@ use std::sync::{ use napi::{bindgen_prelude::*, JsFunction, JsObject, NapiRaw, Status}; use napi_derive::napi; -use rethnet_eth::{Address, B256, U256}; +use rethnet_eth::{signature::private_key_to_address, Address, B256, U256}; use rethnet_evm::{ state::{AsyncState, LayeredState, RethnetLayer, StateError, SyncState}, AccountInfo, Bytecode, HashMap, StateDebug, @@ -14,10 +14,9 @@ use secp256k1::Secp256k1; use crate::{ account::{Account, AccountData}, - private_key_to_address, + cast::TryCast, sync::{await_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, - TryCast, }; struct ModifyAccountCall { @@ -57,7 +56,8 @@ impl StateManager { let genesis_accounts = accounts .into_iter() .map(|account| { - let address = private_key_to_address(&context, account.private_key)?; + let address = private_key_to_address(&context, &account.private_key) + .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string()))?; account.balance.try_cast().map(|balance| { let account_info = AccountInfo { balance, diff --git a/crates/rethnet_evm_napi/src/transaction/result.rs b/crates/rethnet_evm_napi/src/transaction/result.rs index 3013593e78..eea97b41aa 100644 --- a/crates/rethnet_evm_napi/src/transaction/result.rs +++ b/crates/rethnet_evm_napi/src/transaction/result.rs @@ -71,7 +71,6 @@ pub struct RevertResult { pub enum ExceptionalHalt { OutOfGas, OpcodeNotFound, - // CallNotAllowedInsideStatic, InvalidFEOpcode, InvalidJump, NotActivated, @@ -93,9 +92,6 @@ impl From for ExceptionalHalt { match halt { rethnet_evm::Halt::OutOfGas(..) 
=> ExceptionalHalt::OutOfGas, rethnet_evm::Halt::OpcodeNotFound => ExceptionalHalt::OpcodeNotFound, - // rethnet_evm::Halt::CallNotAllowedInsideStatic => { - // ExceptionalHalt::CallNotAllowedInsideStatic - // } rethnet_evm::Halt::InvalidFEOpcode => ExceptionalHalt::InvalidFEOpcode, rethnet_evm::Halt::InvalidJump => ExceptionalHalt::InvalidJump, rethnet_evm::Halt::NotActivated => ExceptionalHalt::NotActivated, From 99b852492e99dbf4e745361a07214d63f5252ca5 Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 14:05:14 -0600 Subject: [PATCH 027/406] feat: add trace when a custom inspector is used (#3685) --- crates/rethnet_evm/src/evm.rs | 22 ++-- crates/rethnet_evm/src/inspector.rs | 176 ++++++++++++++++++++++------ crates/rethnet_evm/src/trace.rs | 56 ++++++++- 3 files changed, 211 insertions(+), 43 deletions(-) diff --git a/crates/rethnet_evm/src/evm.rs b/crates/rethnet_evm/src/evm.rs index d1af33f84c..572a2c5130 100644 --- a/crates/rethnet_evm/src/evm.rs +++ b/crates/rethnet_evm/src/evm.rs @@ -8,8 +8,11 @@ use revm::{ use tokio::{runtime::Runtime, task::JoinHandle}; use crate::{ - blockchain::AsyncBlockchain, inspector::RethnetInspector, runtime::AsyncDatabase, - state::AsyncState, trace::Trace, + blockchain::AsyncBlockchain, + inspector::DualInspector, + runtime::AsyncDatabase, + state::AsyncState, + trace::{Trace, TraceCollector}, }; /// Creates an evm from the provided database, config, transaction, and block. @@ -53,14 +56,19 @@ where { runtime.spawn(async move { let mut evm = build_evm(blockchain, state, cfg, transaction, block); - if let Some(mut inspector) = inspector { + + let (result, state, tracer) = if let Some(inspector) = inspector { + let mut inspector = DualInspector::new(TraceCollector::default(), inspector); + let ResultAndState { result, state } = evm.inspect(&mut inspector)?; - Ok((result, state, Trace::default())) + (result, state, inspector.into_parts().0) } else { - let mut inspector = RethnetInspector::default(); + let mut inspector = TraceCollector::default(); let ResultAndState { result, state } = evm.inspect(&mut inspector)?; - Ok((result, state, inspector.into_trace())) - } + (result, state, inspector) + }; + + Ok((result, state, tracer.into_trace())) }) } diff --git a/crates/rethnet_evm/src/inspector.rs b/crates/rethnet_evm/src/inspector.rs index df91ed8829..c8f99c9014 100644 --- a/crates/rethnet_evm/src/inspector.rs +++ b/crates/rethnet_evm/src/inspector.rs @@ -1,56 +1,162 @@ -use revm::{ - interpreter::{opcode, InstructionResult, Interpreter}, - Database, EVMData, Inspector, -}; +use std::marker::PhantomData; -use crate::trace::Trace; +use revm::{Database, Inspector}; -#[derive(Default)] -pub struct RethnetInspector { - trace: Trace, - opcode_stack: Vec, +// TODO: Improve this design by introducing a InspectorMut trait + +/// Inspector that allows two inspectors to operate side-by-side. The immutable inspector runs +/// first, followed by the mutable inspector. To ensure both inspectors observe a valid state, you +/// have to ensure that only the mutable inspector modifies state. The returned values are solely +/// determined by the mutable inspector. +pub struct DualInspector +where + A: Inspector, + B: Inspector, + DB: Database, +{ + immutable: A, + mutable: B, + phantom: PhantomData, } -impl RethnetInspector { - /// Converts the [`RethnetInspector`] into its [`Trace`]. 
- pub fn into_trace(self) -> Trace { - self.trace +impl DualInspector +where + A: Inspector, + B: Inspector, + DB: Database, +{ + /// Constructs a `DualInspector` from the provided inspectors. + pub fn new(immutable: A, mutable: B) -> Self { + Self { + immutable, + mutable, + phantom: PhantomData, + } + } + + /// Returns the two inspectors wrapped by the `DualInspector`. + pub fn into_parts(self) -> (A, B) { + (self.immutable, self.mutable) } } -impl Inspector for RethnetInspector +impl Inspector for DualInspector where - D: Database, + A: Inspector, + B: Inspector, + DB: Database, { + fn initialize_interp( + &mut self, + interp: &mut revm::interpreter::Interpreter, + data: &mut revm::EVMData<'_, DB>, + is_static: bool, + ) -> revm::interpreter::InstructionResult { + self.immutable.initialize_interp(interp, data, is_static); + self.mutable.initialize_interp(interp, data, is_static) + } + fn step( &mut self, - interp: &mut Interpreter, - _data: &mut EVMData<'_, D>, - _is_static: bool, - ) -> InstructionResult { - self.opcode_stack.push(interp.current_opcode()); + interp: &mut revm::interpreter::Interpreter, + data: &mut revm::EVMData<'_, DB>, + is_static: bool, + ) -> revm::interpreter::InstructionResult { + self.immutable.step(interp, data, is_static); + self.mutable.step(interp, data, is_static) + } - InstructionResult::Continue + fn log( + &mut self, + evm_data: &mut revm::EVMData<'_, DB>, + address: &rethnet_eth::B160, + topics: &[rethnet_eth::B256], + data: &rethnet_eth::Bytes, + ) { + self.immutable.log(evm_data, address, topics, data); + self.mutable.log(evm_data, address, topics, data) } fn step_end( &mut self, - interp: &mut Interpreter, - _data: &mut EVMData<'_, D>, - _is_static: bool, - exit_code: InstructionResult, - ) -> InstructionResult { - let opcode = self - .opcode_stack - .pop() - .expect("There must always be an opcode when ending a step"); + interp: &mut revm::interpreter::Interpreter, + data: &mut revm::EVMData<'_, DB>, + is_static: bool, + eval: revm::interpreter::InstructionResult, + ) -> revm::interpreter::InstructionResult { + self.immutable.step_end(interp, data, is_static, eval); + self.mutable.step_end(interp, data, is_static, eval) + } - self.trace.add_step(opcode, interp.gas(), exit_code); + fn call( + &mut self, + data: &mut revm::EVMData<'_, DB>, + inputs: &mut revm::interpreter::CallInputs, + is_static: bool, + ) -> ( + revm::interpreter::InstructionResult, + revm::interpreter::Gas, + rethnet_eth::Bytes, + ) { + self.immutable.call(data, inputs, is_static); + self.mutable.call(data, inputs, is_static) + } - if opcode == opcode::RETURN || opcode == opcode::REVERT { - self.trace.return_value = interp.return_value(); - } + fn call_end( + &mut self, + data: &mut revm::EVMData<'_, DB>, + inputs: &revm::interpreter::CallInputs, + remaining_gas: revm::interpreter::Gas, + ret: revm::interpreter::InstructionResult, + out: rethnet_eth::Bytes, + is_static: bool, + ) -> ( + revm::interpreter::InstructionResult, + revm::interpreter::Gas, + rethnet_eth::Bytes, + ) { + self.immutable + .call_end(data, inputs, remaining_gas, ret, out.clone(), is_static); + self.mutable + .call_end(data, inputs, remaining_gas, ret, out, is_static) + } + + fn create( + &mut self, + data: &mut revm::EVMData<'_, DB>, + inputs: &mut revm::interpreter::CreateInputs, + ) -> ( + revm::interpreter::InstructionResult, + Option, + revm::interpreter::Gas, + rethnet_eth::Bytes, + ) { + self.immutable.create(data, inputs); + self.mutable.create(data, inputs) + } + + fn create_end( + &mut self, + data: 
&mut revm::EVMData<'_, DB>, + inputs: &revm::interpreter::CreateInputs, + ret: revm::interpreter::InstructionResult, + address: Option, + remaining_gas: revm::interpreter::Gas, + out: rethnet_eth::Bytes, + ) -> ( + revm::interpreter::InstructionResult, + Option, + revm::interpreter::Gas, + rethnet_eth::Bytes, + ) { + self.immutable + .create_end(data, inputs, ret, address, remaining_gas, out.clone()); + self.mutable + .create_end(data, inputs, ret, address, remaining_gas, out) + } - InstructionResult::Continue + fn selfdestruct(&mut self) { + self.immutable.selfdestruct(); + self.mutable.selfdestruct(); } } diff --git a/crates/rethnet_evm/src/trace.rs b/crates/rethnet_evm/src/trace.rs index e2adf69c28..106e53ba6c 100644 --- a/crates/rethnet_evm/src/trace.rs +++ b/crates/rethnet_evm/src/trace.rs @@ -1,5 +1,8 @@ use rethnet_eth::Bytes; -use revm::interpreter::{Gas, InstructionResult}; +use revm::{ + interpreter::{opcode, Gas, InstructionResult, Interpreter}, + Database, EVMData, Inspector, +}; /// A trace for an EVM call. #[derive(Default)] @@ -45,3 +48,54 @@ impl Trace { self.steps.push(step); } } + +/// Object that gathers trace information during EVM execution and can be turned into a trace upon completion. +#[derive(Default)] +pub struct TraceCollector { + trace: Trace, + opcode_stack: Vec, +} + +impl TraceCollector { + /// Converts the [`Tracer`] into its [`Trace`]. + pub fn into_trace(self) -> Trace { + self.trace + } +} + +impl Inspector for TraceCollector +where + D: Database, +{ + fn step( + &mut self, + interp: &mut Interpreter, + _data: &mut EVMData<'_, D>, + _is_static: bool, + ) -> InstructionResult { + self.opcode_stack.push(interp.current_opcode()); + + InstructionResult::Continue + } + + fn step_end( + &mut self, + interp: &mut Interpreter, + _data: &mut EVMData<'_, D>, + _is_static: bool, + exit_code: InstructionResult, + ) -> InstructionResult { + let opcode = self + .opcode_stack + .pop() + .expect("There must always be an opcode when ending a step"); + + self.trace.add_step(opcode, interp.gas(), exit_code); + + if opcode == opcode::RETURN || opcode == opcode::REVERT { + self.trace.return_value = interp.return_value(); + } + + exit_code + } +} From 7687a382b2378d30f37f646a8e92aaf61cad0e7f Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 14:07:03 -0600 Subject: [PATCH 028/406] bump: upgrade napi to 2.11.1 (#3687) --- crates/rethnet_evm_napi/Cargo.toml | 2 +- crates/rethnet_evm_napi/src/config.rs | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index 214621f301..8a940cdfea 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -8,7 +8,7 @@ crate-type = ["cdylib"] [dependencies] crossbeam-channel = { version = "0.5.6", default-features = false } -napi = { version = "= 2.10.2", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } +napi = { version = "= 2.11.1", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } napi-derive = "2.9.3" once_cell = "1.15.0" pretty_env_logger = "0.4.0" diff --git a/crates/rethnet_evm_napi/src/config.rs b/crates/rethnet_evm_napi/src/config.rs index 760b144ad6..96cd5fe3b8 100644 --- a/crates/rethnet_evm_napi/src/config.rs +++ b/crates/rethnet_evm_napi/src/config.rs @@ -105,8 +105,7 @@ impl TryFrom for CfgEnv { .map_or(default.spec_id, |spec_id| spec_id.into()); let limit_contract_code_size = value.limit_contract_code_size.map_or(Ok(None), 
|size| { - // TODO: the lossless check in get_u64 is broken: https://github.com/napi-rs/napi-rs/pull/1348 - if let (false, size, _lossless) = size.get_u64() { + if let (false, size, true) = size.get_u64() { usize::try_from(size).map_or_else( |e| Err(napi::Error::new(Status::InvalidArg, e.to_string())), |size| Ok(Some(size)), From e654fc161cb37ed7bce3c2337f58c5357ba313f1 Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 14:09:49 -0600 Subject: [PATCH 029/406] fix(rethnet): remove second LayeredDatabase::remove_account function (#3693) --- crates/rethnet_evm/src/state/layered_db.rs | 22 ++++++++-------------- 1 file changed, 8 insertions(+), 14 deletions(-) diff --git a/crates/rethnet_evm/src/state/layered_db.rs b/crates/rethnet_evm/src/state/layered_db.rs index bc71f4ad62..b7e3fe8640 100644 --- a/crates/rethnet_evm/src/state/layered_db.rs +++ b/crates/rethnet_evm/src/state/layered_db.rs @@ -215,12 +215,14 @@ impl LayeredState { } /// Removes the [`AccountInfo`] corresponding to the specified address. - pub fn remove_account(&mut self, address: &Address) { + fn remove_account(&mut self, address: &Address) -> Option { let account_info = self .iter() - .find_map(|layer| layer.account_infos.get(address)); + .find_map(|layer| layer.account_infos.get(address)) + .cloned() + .flatten(); - if let Some(Some(account_info)) = account_info { + if let Some(account_info) = &account_info { debug_assert!(account_info.code.is_none()); let code_hash = account_info.code_hash; @@ -239,6 +241,8 @@ impl LayeredState { // Write None to signal that the account's storage was deleted self.last_layer_mut().storage.insert(*address, None); } + + account_info } } @@ -399,17 +403,7 @@ impl StateDebug for LayeredState { } fn remove_account(&mut self, address: Address) -> Result, Self::Error> { - // Set None to indicate the account was deleted - if let Some(account_info) = self.last_layer_mut().account_infos.get_mut(&address) { - let old_account_info = account_info.clone(); - - *account_info = None; - - Ok(old_account_info) - } else { - self.last_layer_mut().account_infos.insert(address, None); - Ok(None) - } + Ok(self.remove_account(&address)) } fn remove_snapshot(&mut self, state_root: &B256) -> bool { From ea52368b9e2abdfc2fa59e5b6a7e756fd365268e Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 14:11:49 -0600 Subject: [PATCH 030/406] improvement: reduce build time by annotating types in napi macros (#3686) --- crates/rethnet_evm_napi/src/block/builder.rs | 19 ++++----- crates/rethnet_evm_napi/src/runtime.rs | 45 +++++++++++--------- crates/rethnet_evm_napi/src/state.rs | 10 ++--- 3 files changed, 38 insertions(+), 36 deletions(-) diff --git a/crates/rethnet_evm_napi/src/block/builder.rs b/crates/rethnet_evm_napi/src/block/builder.rs index 4628bf68ef..bab132b1b0 100644 --- a/crates/rethnet_evm_napi/src/block/builder.rs +++ b/crates/rethnet_evm_napi/src/block/builder.rs @@ -6,8 +6,8 @@ use napi::{ Status, }; use napi_derive::napi; -use rethnet_eth::{Address, U256}; -use rethnet_evm::state::StateError; +use rethnet_eth::{block::Header, Address, U256}; +use rethnet_evm::{state::StateError, CfgEnv, HeaderData, TxEnv}; use crate::{ blockchain::Blockchain, @@ -35,9 +35,9 @@ impl BlockBuilder { parent: BlockHeader, block: BlockConfig, ) -> napi::Result { - let config = config.try_into()?; - let parent = parent.try_into()?; - let block = block.try_into()?; + let config = CfgEnv::try_from(config)?; + let parent = Header::try_from(parent)?; + let block = HeaderData::try_from(block)?; let builder = 
rethnet_evm::BlockBuilder::new( blockchain.as_inner().clone(), @@ -60,7 +60,7 @@ impl BlockBuilder { ) -> napi::Result { let mut builder = self.builder.lock().await; if let Some(builder) = builder.as_mut() { - let transaction = transaction.try_into()?; + let transaction = TxEnv::try_from(transaction)?; let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); @@ -78,17 +78,16 @@ impl BlockBuilder { } } - #[napi] /// This call consumes the [`BlockBuilder`] object in Rust. Afterwards, you can no longer call /// methods on the JS object. + #[napi] pub async fn finalize(&self, rewards: Vec<(Buffer, BigInt)>) -> napi::Result<()> { let mut builder = self.builder.lock().await; if let Some(builder) = builder.take() { let rewards = rewards .into_iter() .map(|(address, reward)| { - reward - .try_cast() + TryCast::::try_cast(reward) .map(|reward| (Address::from_slice(&address), reward)) }) .collect::>>()?; @@ -105,9 +104,9 @@ impl BlockBuilder { } } - #[napi] /// This call consumes the [`BlockBuilder`] object in Rust. Afterwards, you can no longer call /// methods on the JS object. + #[napi] pub async fn abort(&self) -> napi::Result<()> { let mut builder = self.builder.lock().await; if let Some(builder) = builder.take() { diff --git a/crates/rethnet_evm_napi/src/runtime.rs b/crates/rethnet_evm_napi/src/runtime.rs index 43cff7941c..afffabf149 100644 --- a/crates/rethnet_evm_napi/src/runtime.rs +++ b/crates/rethnet_evm_napi/src/runtime.rs @@ -1,7 +1,9 @@ use napi::Status; use napi_derive::napi; use once_cell::sync::OnceCell; -use rethnet_evm::{state::StateError, InvalidTransaction, TransactionError, TxEnv}; +use rethnet_evm::{ + state::StateError, BlockEnv, CfgEnv, InvalidTransaction, TransactionError, TxEnv, +}; use crate::{ block::BlockConfig, @@ -41,7 +43,7 @@ impl Rethnet { Logger }); - let cfg = cfg.try_into()?; + let cfg = CfgEnv::try_from(cfg)?; let runtime = rethnet_evm::Rethnet::new( blockchain.as_inner().clone(), @@ -60,16 +62,17 @@ impl Rethnet { block: BlockConfig, tracer: Option<&Tracer>, ) -> napi::Result { - let transaction = transaction.try_into()?; - let block = block.try_into()?; + let transaction = TxEnv::try_from(transaction)?; + let block = BlockEnv::try_from(block)?; let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); - self.runtime - .dry_run(transaction, block, inspector) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? - .try_into() + TransactionResult::try_from( + self.runtime + .dry_run(transaction, block, inspector) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?, + ) } /// Executes the provided transaction without changing state, ignoring validation checks in the process. @@ -80,16 +83,17 @@ impl Rethnet { block: BlockConfig, tracer: Option<&Tracer>, ) -> napi::Result { - let transaction = transaction.try_into()?; - let block = block.try_into()?; + let transaction = TxEnv::try_from(transaction)?; + let block = BlockEnv::try_from(block)?; let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); - self.runtime - .guaranteed_dry_run(transaction, block, inspector) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))? - .try_into() + TransactionResult::try_from( + self.runtime + .guaranteed_dry_run(transaction, block, inspector) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?, + ) } /// Executes the provided transaction, changing state in the process. 
@@ -100,12 +104,12 @@ impl Rethnet { block: BlockConfig, tracer: Option<&Tracer>, ) -> napi::Result { - let transaction: TxEnv = transaction.try_into()?; - let block = block.try_into()?; + let transaction = TxEnv::try_from(transaction)?; + let block = BlockEnv::try_from(block)?; let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); - Ok(self + Ok(ExecutionResult::from(self .runtime .run(transaction, block, inspector) .await @@ -119,7 +123,6 @@ impl Rethnet { e => e.to_string(), }, ) - })? - .into()) + })?)) } } diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 8a8bcd1d31..193060b738 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -58,7 +58,7 @@ impl StateManager { .map(|account| { let address = private_key_to_address(&context, &account.private_key) .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string()))?; - account.balance.try_cast().map(|balance| { + TryCast::::try_cast(account.balance).map(|balance| { let account_info = AccountInfo { balance, ..Default::default() @@ -148,7 +148,7 @@ impl StateManager { index: BigInt, ) -> napi::Result { let address = Address::from_slice(&address); - let index = BigInt::try_cast(index)?; + let index: U256 = BigInt::try_cast(index)?; self.state .account_storage_slot(address, index) @@ -187,7 +187,7 @@ impl StateManager { #[napi] pub async fn insert_account(&self, address: Buffer, account: Account) -> napi::Result<()> { let address = Address::from_slice(&address); - let account = account.try_cast()?; + let account: AccountInfo = account.try_cast()?; self.state .insert_account(address, account) @@ -320,8 +320,8 @@ impl StateManager { value: BigInt, ) -> napi::Result<()> { let address = Address::from_slice(&address); - let index = BigInt::try_cast(index)?; - let value = BigInt::try_cast(value)?; + let index: U256 = BigInt::try_cast(index)?; + let value: U256 = BigInt::try_cast(value)?; self.state .set_account_storage_slot(address, index, value) From 8fd9af9cd5bc2bb46b9aefec2d203362c198bcad Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 14:13:54 -0600 Subject: [PATCH 031/406] refactor: combine code and hash in bytecode (#3684) --- crates/rethnet_evm_napi/src/account.rs | 83 ++++++++++++++----- crates/rethnet_evm_napi/src/cast.rs | 44 +--------- crates/rethnet_evm_napi/src/state.rs | 70 +++++++++------- .../hardhat-network/provider/RethnetState.ts | 69 ++++----------- .../hardhat-network/provider/vm/dual.ts | 9 -- .../hardhat-network/provider/vm/ethereumjs.ts | 5 +- .../hardhat-network/provider/vm/rethnet.ts | 74 ++++++++++++++++- 7 files changed, 189 insertions(+), 165 deletions(-) diff --git a/crates/rethnet_evm_napi/src/account.rs b/crates/rethnet_evm_napi/src/account.rs index dded796189..cb74f91462 100644 --- a/crates/rethnet_evm_napi/src/account.rs +++ b/crates/rethnet_evm_napi/src/account.rs @@ -2,59 +2,96 @@ use std::fmt::Debug; use napi::bindgen_prelude::{BigInt, Buffer}; use napi_derive::napi; -use rethnet_evm::AccountInfo; +use rethnet_eth::Bytes; +use rethnet_evm::{AccountInfo, KECCAK_EMPTY}; + +use crate::cast::TryCast; #[napi(object)] -pub struct Account { - /// Account balance - #[napi(readonly)] - pub balance: BigInt, - /// Account nonce - #[napi(readonly)] - pub nonce: BigInt, +pub struct Bytecode { /// 256-bit code hash #[napi(readonly)] - pub code_hash: Buffer, - /// Optionally, byte code + pub hash: Buffer, + /// Byte code #[napi(readonly)] - pub code: Option, + pub code: Buffer, } #[napi(object)] -pub struct 
AccountData { +#[derive(Debug)] +pub struct Account { /// Account balance #[napi(readonly)] pub balance: BigInt, /// Account nonce #[napi(readonly)] pub nonce: BigInt, - /// Optionally, byte code + /// Optionally, byte code. Otherwise, hash is equal to `KECCAK_EMPTY` #[napi(readonly)] - pub code: Option, + pub code: Option, } -impl Debug for Account { +impl Debug for Bytecode { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Account") - .field("balance", &self.balance) - .field("nonce", &self.nonce) - .field("code_hash", &self.code_hash.as_ref()) + f.debug_struct("Bytecode") + .field("code_hash", &self.hash.as_ref()) + .field("code", &self.code.as_ref()) .finish() } } +impl From for Bytecode { + fn from(bytecode: rethnet_evm::Bytecode) -> Self { + Self { + hash: Buffer::from(bytecode.hash().as_bytes()), + code: Buffer::from(&bytecode.bytes()[..bytecode.len()]), + } + } +} + impl From for Account { fn from(account_info: AccountInfo) -> Self { + let code = if account_info.code_hash == KECCAK_EMPTY { + None + } else { + // We expect the code to always be provided + // TODO: Make this explicit in the type? + let code = account_info.code.unwrap(); + Some(code.into()) + }; + Self { balance: BigInt { sign_bit: false, words: account_info.balance.as_limbs().to_vec(), }, nonce: BigInt::from(account_info.nonce), - code_hash: Buffer::from(account_info.code_hash.as_bytes()), - code: account_info - .code - .map(|code| Buffer::from(&code.bytes()[..code.len()])), + code, } } } + +impl TryCast for Account { + type Error = napi::Error; + + fn try_cast(self) -> std::result::Result { + let code = self.code.map_or(rethnet_evm::Bytecode::default(), |code| { + let code_hash = rethnet_eth::B256::from_slice(&code.hash); + let code = Bytes::copy_from_slice(&code.code); + + debug_assert_eq!( + code_hash, + rethnet_evm::Bytecode::new_raw(code.clone()).hash() + ); + + unsafe { rethnet_evm::Bytecode::new_raw_with_hash(code, code_hash) } + }); + + Ok(AccountInfo { + balance: self.balance.try_cast()?, + nonce: self.nonce.get_u64().1, + code_hash: code.hash(), + code: Some(code), + }) + } +} diff --git a/crates/rethnet_evm_napi/src/cast.rs b/crates/rethnet_evm_napi/src/cast.rs index b17f7a0a64..ff00cb0cf3 100644 --- a/crates/rethnet_evm_napi/src/cast.rs +++ b/crates/rethnet_evm_napi/src/cast.rs @@ -2,10 +2,7 @@ use napi::{ bindgen_prelude::{BigInt, Buffer}, Status, }; -use rethnet_eth::{Bytes, B256, U256}; -use rethnet_evm::{AccountInfo, Bytecode}; - -use crate::account::{Account, AccountData}; +use rethnet_eth::{B256, U256}; /// An attempted conversion that consumes `self`, which may or may not be /// expensive. 
It is identical to [`TryInto`], but it allows us to implement @@ -18,35 +15,6 @@ pub trait TryCast: Sized { fn try_cast(self) -> Result; } -impl TryCast for Account { - type Error = napi::Error; - - fn try_cast(self) -> std::result::Result { - Ok(AccountInfo { - balance: self.balance.try_cast()?, - nonce: self.nonce.get_u64().1, - code_hash: B256::from_slice(&self.code_hash), - code: self - .code - .map(|code| Bytecode::new_raw(Bytes::copy_from_slice(&code))), - }) - } -} - -impl TryCast<(U256, u64, Option)> for AccountData { - type Error = napi::Error; - - fn try_cast(self) -> Result<(U256, u64, Option), Self::Error> { - let balance = self.balance.try_cast()?; - let nonce = self.nonce.get_u64().1; - let code = self - .code - .map(|code| Bytecode::new_raw(Bytes::copy_from_slice(&code))); - - Ok((balance, nonce, code)) - } -} - impl TryCast for Buffer { type Error = napi::Error; @@ -55,16 +23,6 @@ impl TryCast for Buffer { } } -impl TryCast for Buffer { - type Error = napi::Error; - - fn try_cast(self) -> Result { - let bytes = Bytes::copy_from_slice(&self); - - Ok(Bytecode::new_raw(bytes)) - } -} - impl TryCast for BigInt { type Error = napi::Error; diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 193060b738..7885dc39e6 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -8,12 +8,12 @@ use napi_derive::napi; use rethnet_eth::{signature::private_key_to_address, Address, B256, U256}; use rethnet_evm::{ state::{AsyncState, LayeredState, RethnetLayer, StateError, SyncState}, - AccountInfo, Bytecode, HashMap, StateDebug, + AccountInfo, Bytecode, HashMap, StateDebug, KECCAK_EMPTY, }; use secp256k1::Secp256k1; use crate::{ - account::{Account, AccountData}, + account::Account, cast::TryCast, sync::{await_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, @@ -23,7 +23,7 @@ struct ModifyAccountCall { pub balance: U256, pub nonce: u64, pub code: Option, - pub sender: Sender)>>, + pub sender: Sender>, } /// An account that needs to be created during the genesis block. @@ -123,10 +123,24 @@ impl StateManager { pub async fn get_account_by_address(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); - self.state.account_by_address(address).await.map_or_else( - |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), - |account_info| Ok(account_info.map(Account::from)), - ) + let mut account_info = self + .state + .account_by_address(address) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + if let Some(account_info) = &mut account_info { + if account_info.code.is_none() && account_info.code_hash != KECCAK_EMPTY { + account_info.code = Some( + self.state + .code_by_hash(account_info.code_hash) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?, + ); + } + } + + Ok(account_info.map(Account::from)) } /// Retrieves the storage root of the account at the specified address. @@ -164,16 +178,6 @@ impl StateManager { ) } - #[napi] - pub async fn get_code_by_hash(&self, code_hash: Buffer) -> napi::Result { - let code_hash = B256::from_slice(&code_hash); - - self.state.code_by_hash(code_hash).await.map_or_else( - |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), - |code| Ok(Buffer::from(&code.bytes()[..code.len()])), - ) - } - /// Retrieves the storage root of the database. 
#[napi] pub async fn get_state_root(&self) -> napi::Result { @@ -203,14 +207,14 @@ impl StateManager { /// Modifies the account with the provided address using the specified modifier function. /// The modifier function receives the current values as individual parameters and will update the account's values - /// to the returned `AccountData` values. + /// to the returned `Account` values. #[napi(ts_return_type = "Promise")] pub fn modify_account( &self, env: Env, address: Buffer, #[napi( - ts_arg_type = "(balance: bigint, nonce: bigint, code: Buffer | undefined) => Promise" + ts_arg_type = "(balance: bigint, nonce: bigint, code: Bytecode | undefined) => Promise" )] modify_account_fn: JsFunction, ) -> napi::Result { @@ -234,19 +238,24 @@ impl StateManager { .into_unknown()?; let code = if let Some(code) = ctx.value.code { + let mut bytecode = ctx.env.create_object()?; + ctx.env - .create_buffer_copy(&code.bytes()[..code.len()])? - .into_unknown() + .create_buffer_copy(code.hash()) + .and_then(|hash| bytecode.set_named_property("hash", hash.into_raw()))?; + + ctx.env + .create_buffer_copy(&code.bytes()[..code.len()]) + .and_then(|code| bytecode.set_named_property("code", code.into_raw()))?; + + bytecode.into_unknown() } else { ctx.env.get_undefined()?.into_unknown() }; let promise = ctx.callback.call(None, &[balance, nonce, code])?; - let result = await_promise::)>( - ctx.env, - promise, - ctx.value.sender, - ); + let result = + await_promise::(ctx.env, promise, ctx.value.sender); handle_error(sender, result) }, @@ -274,12 +283,11 @@ impl StateManager { ); assert_eq!(status, Status::Ok); - let (new_balance, new_nonce, new_code) = - receiver.recv().unwrap().expect("Failed to commit"); + let new_account = receiver.recv().unwrap().expect("Failed to commit"); - *balance = new_balance; - *nonce = new_nonce; - *code = new_code; + *balance = new_account.balance; + *nonce = new_account.nonce; + *code = new_account.code; }, ), ) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts index 2c2963caf4..77c36ca862 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts @@ -1,11 +1,9 @@ import { - Account, Address, bufferToBigInt, - KECCAK256_NULL, toBuffer, } from "@nomicfoundation/ethereumjs-util"; -import { StateManager, AccountData } from "rethnet-evm"; +import { StateManager, Account, Bytecode } from "rethnet-evm"; import { GenesisAccount } from "./node-types"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ @@ -38,23 +36,12 @@ export class RethnetStateManager { return account !== null; } - public async getAccount(address: Address): Promise { - const account = await this._state.getAccountByAddress(address.buf); - const storageRoot = await this._state.getAccountStorageRoot(address.buf); - return new Account( - account?.nonce, - account?.balance, - storageRoot ?? 
undefined, - account?.codeHash - ); + public async getAccount(address: Address): Promise { + return this._state.getAccountByAddress(address.buf); } - public async putAccount(address: Address, account: Account): Promise { - await this._state.insertAccount(address.buf, { - balance: account.balance, - nonce: account.nonce, - codeHash: account.codeHash, - }); + public async getAccountStorageRoot(address: Address): Promise { + return this._state.getAccountStorageRoot(address.buf); } public async accountIsEmpty(address: Address): Promise { @@ -63,7 +50,7 @@ export class RethnetStateManager { account === null || (account.balance === 0n && account.nonce === 0n && - account.codeHash.equals(KECCAK256_NULL)) + account.code === undefined) ); } @@ -75,41 +62,15 @@ export class RethnetStateManager { return this._state.makeSnapshot(); } - public async modifyAccountFields( + public async modifyAccount( address: Address, - accountFields: Partial> + modifyAccountFn: ( + balance: bigint, + nonce: bigint, + code: Bytecode | undefined + ) => Promise ): Promise { - await this._state.modifyAccount( - address.buf, - async function ( - balance: bigint, - nonce: bigint, - code: Buffer | undefined - ): Promise { - return { - balance: accountFields.balance ?? balance, - nonce: accountFields.nonce ?? nonce, - code, - }; - } - ); - } - - public async putContractCode(address: Address, value: Buffer): Promise { - await this._state.modifyAccount( - address.buf, - async function ( - balance: bigint, - nonce: bigint, - _code: Buffer | undefined - ): Promise { - return { - balance, - nonce, - code: value, - }; - } - ); + await this._state.modifyAccount(address.buf, modifyAccountFn); } public async getContractCode(address: Address): Promise { @@ -119,10 +80,10 @@ export class RethnetStateManager { } if (account.code !== undefined) { - return account.code; + return account.code.code; } - return this._state.getCodeByHash(account.codeHash); + return Buffer.from([]); } public async getContractStorage( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index 80ea092e0f..b80bb4e42c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -476,15 +476,6 @@ export class DualModeAdapter implements VMAdapter { throw new Error("Different step contract nonce"); } - if ( - !ethereumJSStep.contract.codeHash.equals(rethnetStep.contract.codeHash) - ) { - console.trace( - `Different steps[${stepIdx}] contract codeHash: ${ethereumJSStep.contract.codeHash} !== ${rethnetStep.contract.codeHash}` - ); - throw new Error("Different step contract codeHash"); - } - // Code can be stored separately from the account in Rethnet // const ethereumJSCode = ethereumJSStep.contract.code; // const rethnetCode = rethnetStep.contract.code; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index ad4610f1fd..56a9293600 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -524,7 +524,10 @@ export class EthereumJSAdapter implements VMAdapter { contract: { balance: step.account.balance, nonce: step.account.nonce, - codeHash: step.account.codeHash, + code: { + hash: step.account.codeHash, + code: Buffer.from([]), + }, }, 
contractAddress: step.address.buf, }); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index e9221cd6b1..2fa3160a7c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -1,10 +1,16 @@ import { Block } from "@nomicfoundation/ethereumjs-block"; import { Common } from "@nomicfoundation/ethereumjs-common"; -import { Account, Address } from "@nomicfoundation/ethereumjs-util"; +import { + Account, + Address, + KECCAK256_NULL, +} from "@nomicfoundation/ethereumjs-util"; import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import { + Account as RethnetAccount, BlockBuilder, Blockchain, + Bytecode, Rethnet, Tracer, TracingMessage, @@ -20,6 +26,7 @@ import { rethnetResultToRunTxResult, } from "../utils/convertToRethnet"; import { hardforkGte, HardforkName } from "../../../util/hardforks"; +import { keccak256 } from "../../../util/keccak"; import { RpcDebugTraceOutput } from "../output"; import { RethnetStateManager } from "../RethnetState"; import { RpcDebugTracingConfig } from "../../../core/jsonrpc/types/input/debugTraceTransaction"; @@ -138,7 +145,14 @@ export class RethnetAdapter implements VMAdapter { * Get the account info for the given address. */ public async getAccount(address: Address): Promise { - return this._state.getAccount(address); + const account = await this._state.getAccount(address); + const storageRoot = await this._state.getAccountStorageRoot(address); + return new Account( + account?.nonce, + account?.balance, + storageRoot ?? undefined, + account?.code?.hash + ); } /** @@ -162,14 +176,66 @@ export class RethnetAdapter implements VMAdapter { * Update the account info for the given address. */ public async putAccount(address: Address, account: Account): Promise { - return this._state.putAccount(address, account); + const contractCode = + account.codeHash === KECCAK256_NULL + ? undefined + : await this._state.getContractCode(address); + + return this._state.modifyAccount( + address, + async function ( + balance: bigint, + nonce: bigint, + code: Bytecode | undefined + ): Promise { + const newCode: Bytecode | undefined = + account.codeHash === KECCAK256_NULL + ? undefined + : account.codeHash === code?.hash + ? code + : { + hash: account.codeHash, + code: contractCode!, + }; + + return { + balance: account.balance, + nonce: account.nonce, + code: newCode, + }; + } + ); } /** * Update the contract code for the given address. */ public async putContractCode(address: Address, value: Buffer): Promise { - return this._state.putContractCode(address, value); + const codeHash = keccak256(value); + return this._state.modifyAccount( + address, + async function ( + balance: bigint, + nonce: bigint, + code: Bytecode | undefined + ): Promise { + const newCode: Bytecode | undefined = + codeHash === KECCAK256_NULL + ? undefined + : codeHash === code?.hash + ? 
code + : { + hash: codeHash, + code: value, + }; + + return { + balance, + nonce, + code: newCode, + }; + } + ); } /** From 229885628c327ca8e54d3f87077d2d231fdec0c8 Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 14:17:32 -0600 Subject: [PATCH 032/406] refactor: rename state-related files (#3688) --- crates/rethnet_evm/src/lib.rs | 2 -- crates/rethnet_evm/src/state.rs | 12 ++++++++---- crates/rethnet_evm/src/{ => state}/debug.rs | 10 +++++++--- .../src/state/{layered_db.rs => layered_state.rs} | 4 +--- crates/rethnet_evm/src/state/request.rs | 4 ++-- crates/rethnet_evm/src/state/sync.rs | 8 ++++++-- crates/rethnet_evm_napi/src/state.rs | 4 ++-- 7 files changed, 26 insertions(+), 18 deletions(-) rename crates/rethnet_evm/src/{ => state}/debug.rs (87%) rename crates/rethnet_evm/src/state/{layered_db.rs => layered_state.rs} (99%) diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs index 0f73af3fde..ff2167a5d3 100644 --- a/crates/rethnet_evm/src/lib.rs +++ b/crates/rethnet_evm/src/lib.rs @@ -21,7 +21,6 @@ pub use revm::{ pub use crate::{ block::{BlockBuilder, HeaderData}, - debug::StateDebug, runtime::{AsyncDatabase, Rethnet}, transaction::{PendingTransaction, TransactionError}, }; @@ -36,7 +35,6 @@ pub mod state; pub mod trace; mod block; -mod debug; pub(crate) mod evm; mod inspector; pub(crate) mod random; diff --git a/crates/rethnet_evm/src/state.rs b/crates/rethnet_evm/src/state.rs index ef1954c132..5a6c631b01 100644 --- a/crates/rethnet_evm/src/state.rs +++ b/crates/rethnet_evm/src/state.rs @@ -1,13 +1,17 @@ -mod layered_db; +mod debug; +mod layered_state; mod remote; mod request; mod sync; use rethnet_eth::B256; -pub use self::layered_db::{LayeredState, RethnetLayer}; -pub use self::remote::RemoteDatabase; -pub use self::sync::{AsyncState, SyncState}; +pub use self::{ + debug::{AccountModifierFn, StateDebug}, + layered_state::{LayeredState, RethnetLayer}, + remote::RemoteDatabase, + sync::{AsyncState, SyncState}, +}; /// Combinatorial error for the database API #[derive(Debug, thiserror::Error)] diff --git a/crates/rethnet_evm/src/debug.rs b/crates/rethnet_evm/src/state/debug.rs similarity index 87% rename from crates/rethnet_evm/src/debug.rs rename to crates/rethnet_evm/src/state/debug.rs index cf10ca7afa..489f62ef7b 100644 --- a/crates/rethnet_evm/src/debug.rs +++ b/crates/rethnet_evm/src/state/debug.rs @@ -2,7 +2,8 @@ use auto_impl::auto_impl; use rethnet_eth::{Address, B256, U256}; use revm::primitives::{AccountInfo, Bytecode}; -pub type ModifierFn = Box) + Send>; +/// Function type for modifying account information. +pub type AccountModifierFn = Box) + Send>; /// A trait for debug operation on a database. #[auto_impl(Box)] @@ -21,8 +22,11 @@ pub trait StateDebug { ) -> Result<(), Self::Error>; /// Modifies the account at the specified address using the provided function. - fn modify_account(&mut self, address: Address, modifier: ModifierFn) - -> Result<(), Self::Error>; + fn modify_account( + &mut self, + address: Address, + modifier: AccountModifierFn, + ) -> Result<(), Self::Error>; /// Removes and returns the account at the specified address, if it exists. 
fn remove_account(&mut self, address: Address) -> Result, Self::Error>; diff --git a/crates/rethnet_evm/src/state/layered_db.rs b/crates/rethnet_evm/src/state/layered_state.rs similarity index 99% rename from crates/rethnet_evm/src/state/layered_db.rs rename to crates/rethnet_evm/src/state/layered_state.rs index b7e3fe8640..8357271fc0 100644 --- a/crates/rethnet_evm/src/state/layered_db.rs +++ b/crates/rethnet_evm/src/state/layered_state.rs @@ -11,9 +11,7 @@ use revm::{ DatabaseCommit, }; -use crate::StateDebug; - -use super::StateError; +use super::{StateDebug, StateError}; #[derive(Clone, Debug)] struct RevertedLayers { diff --git a/crates/rethnet_evm/src/state/request.rs b/crates/rethnet_evm/src/state/request.rs index 44c97a98ea..4c8fa9dd04 100644 --- a/crates/rethnet_evm/src/state/request.rs +++ b/crates/rethnet_evm/src/state/request.rs @@ -9,7 +9,7 @@ use revm::{ }; use tokio::sync::oneshot; -use crate::{debug::ModifierFn, StateDebug}; +use crate::state::{AccountModifierFn, StateDebug}; /// The request type used internally by a [`SyncDatabase`]. pub enum Request @@ -45,7 +45,7 @@ where }, ModifyAccount { address: Address, - modifier: ModifierFn, + modifier: AccountModifierFn, sender: oneshot::Sender>, }, RemoveAccount { diff --git a/crates/rethnet_evm/src/state/sync.rs b/crates/rethnet_evm/src/state/sync.rs index b8d970cc5f..1915fd3c05 100644 --- a/crates/rethnet_evm/src/state/sync.rs +++ b/crates/rethnet_evm/src/state/sync.rs @@ -16,7 +16,7 @@ use tokio::{ task::{self, JoinHandle}, }; -use crate::{debug::ModifierFn, StateDebug}; +use crate::state::{AccountModifierFn, StateDebug}; use super::request::Request; @@ -181,7 +181,11 @@ where } /// Modifies the account at the specified address using the provided function. - pub async fn modify_account(&self, address: Address, modifier: ModifierFn) -> Result<(), E> { + pub async fn modify_account( + &self, + address: Address, + modifier: AccountModifierFn, + ) -> Result<(), E> { let (sender, receiver) = oneshot::channel(); self.request_sender diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 7885dc39e6..3198a95c39 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -7,8 +7,8 @@ use napi::{bindgen_prelude::*, JsFunction, JsObject, NapiRaw, Status}; use napi_derive::napi; use rethnet_eth::{signature::private_key_to_address, Address, B256, U256}; use rethnet_evm::{ - state::{AsyncState, LayeredState, RethnetLayer, StateError, SyncState}, - AccountInfo, Bytecode, HashMap, StateDebug, KECCAK_EMPTY, + state::{AsyncState, LayeredState, RethnetLayer, StateDebug, StateError, SyncState}, + AccountInfo, Bytecode, HashMap, KECCAK_EMPTY, }; use secp256k1::Secp256k1; From f9f6830eed14a374a612fa973c50aafa8a442fe2 Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 21 Feb 2023 14:20:55 -0600 Subject: [PATCH 033/406] docs(rethnet): clarify that modify_account operates on empty accounts (#3694) --- crates/rethnet_evm/src/state/debug.rs | 3 ++- crates/rethnet_evm/src/state/layered_state.rs | 1 - 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/crates/rethnet_evm/src/state/debug.rs b/crates/rethnet_evm/src/state/debug.rs index 489f62ef7b..24fe6ee394 100644 --- a/crates/rethnet_evm/src/state/debug.rs +++ b/crates/rethnet_evm/src/state/debug.rs @@ -21,7 +21,8 @@ pub trait StateDebug { account_info: AccountInfo, ) -> Result<(), Self::Error>; - /// Modifies the account at the specified address using the provided function. 
+ /// Modifies the account at the specified address using the provided function. If the address + /// points to an empty account, that will be modified instead. fn modify_account( &mut self, address: Address, diff --git a/crates/rethnet_evm/src/state/layered_state.rs b/crates/rethnet_evm/src/state/layered_state.rs index 8357271fc0..c777c58cdd 100644 --- a/crates/rethnet_evm/src/state/layered_state.rs +++ b/crates/rethnet_evm/src/state/layered_state.rs @@ -372,7 +372,6 @@ impl StateDebug for LayeredState { address: Address, modifier: Box) + Send>, ) -> Result<(), Self::Error> { - // TODO: Move account insertion out of LayeredState when forking let account_info = self.account_or_insert_mut(&address); let old_code_hash = account_info.code_hash; From a109786c2b52d6d854360bc69696fd099dcf0561 Mon Sep 17 00:00:00 2001 From: "F. Eugene Aumson" Date: Fri, 10 Mar 2023 17:44:42 -0500 Subject: [PATCH 034/406] feat: add ForkDatabase (#3575) --- crates/rethnet_evm/src/db.rs | 12 + crates/rethnet_evm/src/state/fork.rs | 298 ++++++++++++++++++ crates/rethnet_evm/src/state/layered_state.rs | 8 +- crates/rethnet_evm/src/state/remote.rs | 8 + 4 files changed, 319 insertions(+), 7 deletions(-) create mode 100644 crates/rethnet_evm/src/db.rs create mode 100644 crates/rethnet_evm/src/state/fork.rs diff --git a/crates/rethnet_evm/src/db.rs b/crates/rethnet_evm/src/db.rs new file mode 100644 index 0000000000..079e6e7e30 --- /dev/null +++ b/crates/rethnet_evm/src/db.rs @@ -0,0 +1,12 @@ +mod fork; +mod layered_db; +mod remote; +mod request; +mod sync; + +pub use sync::{AsyncDatabase, SyncDatabase}; + +pub use fork::{ForkDatabase, ForkDatabaseError}; +pub use layered_db::{LayeredDatabase, RethnetLayer}; + +pub use remote::RemoteDatabase; diff --git a/crates/rethnet_evm/src/state/fork.rs b/crates/rethnet_evm/src/state/fork.rs new file mode 100644 index 0000000000..404695c277 --- /dev/null +++ b/crates/rethnet_evm/src/state/fork.rs @@ -0,0 +1,298 @@ +use hashbrown::HashMap; +use revm::{db::DatabaseRef, Account, AccountInfo, Bytecode}; + +use rethnet_eth::{Address, B256, U256}; + +use crate::db::{ + layered_db::{LayeredDatabase, RethnetLayer}, + remote::{RemoteDatabase, RemoteDatabaseError}, +}; + +/// A database integrating the state from a remote node and the state from a local layered +/// database. 
+pub struct ForkDatabase { + layered_db: LayeredDatabase, + remote_db: RemoteDatabase, + account_info_cache: HashMap, + code_by_hash_cache: HashMap, + storage_cache: HashMap<(Address, U256), U256>, + fork_block_number: u64, + fork_block_state_root_cache: Option, +} + +/// An error emitted by ForkDatabase +#[derive(thiserror::Error, Debug)] +pub enum ForkDatabaseError { + /// An error from the underlying RemoteDatabase + #[error(transparent)] + RemoteDatabase(#[from] RemoteDatabaseError), + + /// An error from the underlying LayeredDatabase + #[error(transparent)] + LayeredDatabase(#[from] as revm::Database>::Error), + + /// Code hash not found in cache of remote database + #[error("Cache of remote database does not contain contract with code hash: {0}.")] + NoSuchCodeHash(B256), + + /// Some other error from an underlying dependency + #[error(transparent)] + OtherError(#[from] std::io::Error), +} + +impl ForkDatabase { + /// instantiate a new ForkDatabase + pub fn new(url: &str, fork_block_number: u64) -> Self { + let remote_db = RemoteDatabase::new(url); + + let layered_db = LayeredDatabase::::default(); + + Self { + layered_db, + remote_db, + account_info_cache: HashMap::new(), + code_by_hash_cache: HashMap::new(), + storage_cache: HashMap::new(), + fork_block_number, + fork_block_state_root_cache: None, + } + } +} + +impl revm::Database for ForkDatabase { + type Error = ForkDatabaseError; + + fn basic(&mut self, address: Address) -> Result, Self::Error> { + if let Some(layered) = self + .layered_db + .basic(address) + .map_err(ForkDatabaseError::LayeredDatabase)? + { + Ok(Some(layered)) + } else if let Some(cached) = self.account_info_cache.get(&address) { + Ok(Some(cached.clone())) + } else if let Some(remote) = self + .remote_db + .basic(address) + .map_err(ForkDatabaseError::RemoteDatabase)? + { + self.account_info_cache.insert(address, remote.clone()); + + if remote.code.is_some() { + self.code_by_hash_cache + .insert(remote.code_hash, remote.code.clone().unwrap()); + } + + Ok(Some(remote)) + } else { + Ok(None) + } + } + + fn code_by_hash(&mut self, code_hash: B256) -> Result { + if let Ok(layered) = self.layered_db.code_by_hash(code_hash) { + Ok(layered) + } else if let Some(cached) = self.code_by_hash_cache.get(&code_hash) { + Ok(cached.clone()) + } else { + // remote_db doesn't support code_by_hash, so there's no delegation to it here. + Err(ForkDatabaseError::NoSuchCodeHash(code_hash)) + } + } + + fn storage(&mut self, address: Address, index: U256) -> Result { + let layered = self + .layered_db + .storage(address, index) + .map_err(ForkDatabaseError::LayeredDatabase)?; + + if layered != U256::from(0) { + Ok(layered) + } else if let Some(cached) = self.storage_cache.get(&(address, index)) { + Ok(*cached) + } else { + let remote = self + .remote_db + .storage(address, index) + .map_err(ForkDatabaseError::RemoteDatabase)?; + + self.storage_cache.insert((address, index), remote); + + Ok(remote) + } + } +} + +impl revm::DatabaseCommit for ForkDatabase { + fn commit(&mut self, changes: HashMap) { + self.layered_db.commit(changes) + } +} + +impl crate::DatabaseDebug for ForkDatabase { + type Error = ForkDatabaseError; + + fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + self.layered_db + .account_storage_root(address) + .map_err(ForkDatabaseError::LayeredDatabase) + } + + /// Inserts an account with the specified address. 
+ fn insert_account( + &mut self, + address: Address, + account_info: AccountInfo, + ) -> Result<(), Self::Error> { + self.layered_db + .insert_account(address, account_info) + .map_err(ForkDatabaseError::LayeredDatabase) + } + + /// Modifies the account at the specified address using the provided function. + fn modify_account( + &mut self, + address: Address, + modifier: crate::debug::ModifierFn, + ) -> Result<(), Self::Error> { + use revm::Database; // for basic() + + if (self + .layered_db + .basic(address) + .map_err(ForkDatabaseError::LayeredDatabase)?) + .is_none() + { + let account_info = if let Some(cached) = self.account_info_cache.get(&address) { + Some(cached.clone()) + } else if let Some(remote) = self + .remote_db + .basic(address) + .map_err(ForkDatabaseError::RemoteDatabase)? + { + self.account_info_cache.insert(address, remote.clone()); + + if remote.code.is_some() { + self.code_by_hash_cache + .insert(remote.code_hash, remote.code.clone().unwrap()); + } + + Some(remote) + } else { + None + }; + if let Some(account_info) = account_info { + self.layered_db.insert_account(address, account_info)? + } + } + self.layered_db + .modify_account(address, modifier) + .map_err(ForkDatabaseError::LayeredDatabase) + } + + /// Removes and returns the account at the specified address, if it exists. + fn remove_account(&mut self, address: Address) -> Result, Self::Error> { + crate::DatabaseDebug::remove_account(&mut self.layered_db, address) + .map_err(ForkDatabaseError::LayeredDatabase) + } + + /// Sets the storage slot at the specified address and index to the provided value. + fn set_account_storage_slot( + &mut self, + address: Address, + index: U256, + value: U256, + ) -> Result<(), Self::Error> { + self.layered_db + .set_account_storage_slot(address, index, value) + .map_err(ForkDatabaseError::LayeredDatabase) + } + + /// Reverts the state to match the specified state root. + fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { + self.layered_db + .set_state_root(state_root) + .map_err(ForkDatabaseError::LayeredDatabase) + } + + /// Retrieves the storage root of the database. + fn state_root(&mut self) -> Result { + if self.layered_db.iter().next().is_some() { + Ok(self + .layered_db + .state_root() + .map_err(ForkDatabaseError::LayeredDatabase)?) + } else if let Some(cached) = self.fork_block_state_root_cache { + Ok(cached) + } else { + self.fork_block_state_root_cache = Some( + self.remote_db + .state_root(self.fork_block_number) + .map_err(ForkDatabaseError::RemoteDatabase) + .map_err(|e| anyhow::anyhow!(e))?, + ); + Ok(self.fork_block_state_root_cache.unwrap()) + } + } + + /// Creates a checkpoint that can be reverted to using [`revert`]. + fn checkpoint(&mut self) -> Result<(), Self::Error> { + self.layered_db + .checkpoint() + .map_err(ForkDatabaseError::LayeredDatabase) + } + + /// Reverts to the previous checkpoint, created using [`checkpoint`]. + fn revert(&mut self) -> Result<(), Self::Error> { + self.layered_db + .revert() + .map_err(ForkDatabaseError::LayeredDatabase) + } + + /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. + fn make_snapshot(&mut self) -> B256 { + self.layered_db.make_snapshot() + } + + /// Removes the snapshot corresponding to the specified id, if it exists. Returns whether a snapshot was removed. 
+ fn remove_snapshot(&mut self, state_root: &B256) -> bool { + self.layered_db.remove_snapshot(state_root) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + use std::str::FromStr; + + fn get_alchemy_url() -> Result { + Ok(std::env::var_os("ALCHEMY_URL") + .expect("ALCHEMY_URL environment variable not defined") + .into_string() + .expect("couldn't convert OsString into a String")) + } + + #[test_with::env(ALCHEMY_URL)] + #[test] + fn basic_success() { + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + let mut fork_db = ForkDatabase::new( + &get_alchemy_url().expect("failed to get alchemy url"), + 16220843, + ); + let account_info = + revm::Database::basic(&mut fork_db, dai_address).expect("should have succeeded"); + + assert!(account_info.is_some()); + let account_info = account_info.unwrap(); + assert_eq!(account_info.balance, U256::from(0)); + assert_eq!(account_info.nonce, 1); + assert_eq!( + account_info.code_hash, + B256::from_str("0x74280a6e975486b18c8a65edee16b3b7a2f4c24398a094648552810549cbf864") + .expect("failed to parse") + ); + } +} diff --git a/crates/rethnet_evm/src/state/layered_state.rs b/crates/rethnet_evm/src/state/layered_state.rs index c777c58cdd..7da12286fd 100644 --- a/crates/rethnet_evm/src/state/layered_state.rs +++ b/crates/rethnet_evm/src/state/layered_state.rs @@ -254,13 +254,7 @@ impl State for LayeredState { .cloned() .flatten(); - // TODO: Move this out of LayeredState when forking - Ok(account.or(Some(AccountInfo { - balance: U256::ZERO, - nonce: 0, - code_hash: KECCAK_EMPTY, - code: None, - }))) + Ok(account) } fn code_by_hash(&mut self, code_hash: B256) -> Result { diff --git a/crates/rethnet_evm/src/state/remote.rs b/crates/rethnet_evm/src/state/remote.rs index 370ebad7ee..06c0b420bc 100644 --- a/crates/rethnet_evm/src/state/remote.rs +++ b/crates/rethnet_evm/src/state/remote.rs @@ -38,6 +38,14 @@ impl RemoteDatabase { .expect("failed to construct async runtime"), } } + + /// Retrieve the state root of the given block + pub fn state_root(&self, block_number: u64) -> Result { + Ok(self + .runtime + .block_on(self.client.get_block_by_number(block_number, false))? + .state_root) + } } impl StateRef for RemoteDatabase { From 2c8afd98c9201910f9c9f817afce45b74bcde155 Mon Sep 17 00:00:00 2001 From: "F. 
Eugene Aumson" Date: Sun, 12 Mar 2023 12:02:26 -0400 Subject: [PATCH 035/406] fix: Rethnet CI (#3761) --- .github/workflows/rethnet-ci.yml | 7 ++++--- crates/rethnet_evm_napi/package.json | 3 ++- crates/rethnet_evm_napi/test/evm/RethnetDb.ts | 4 +--- crates/rethnet_evm_napi/test/evm/StateManager.ts | 15 +++++---------- 4 files changed, 12 insertions(+), 17 deletions(-) diff --git a/.github/workflows/rethnet-ci.yml b/.github/workflows/rethnet-ci.yml index 901bb3444e..7ae33e77d4 100644 --- a/.github/workflows/rethnet-ci.yml +++ b/.github/workflows/rethnet-ci.yml @@ -96,11 +96,12 @@ jobs: strategy: fail-fast: false matrix: - os: ["ubuntu-latest", "windows-latest", "macOS-latest"] + # "windows-latest" removed from the following lines pending https://github.com/napi-rs/napi-rs/issues/1405 + os: ["ubuntu-latest", "macOS-latest"] include: - RUSTFLAGS: "-Dwarnings" - - os: "windows-latest" - RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" + #- os: "windows-latest" + # RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" steps: - uses: actions/checkout@v3 diff --git a/crates/rethnet_evm_napi/package.json b/crates/rethnet_evm_napi/package.json index 34de16a557..1b360dc4c2 100644 --- a/crates/rethnet_evm_napi/package.json +++ b/crates/rethnet_evm_napi/package.json @@ -11,7 +11,8 @@ "scripts": { "build": "napi build --release", "build:debug": "napi build", - "test": "mocha --recursive \"test/**/*.ts\" --exit" + "test": "yarn tsc && mocha --recursive \"test/**/*.ts\" --exit", + "clean": "rm -rf rethnet-evm.node" }, "devDependencies": { "@napi-rs/cli": "^2.11.4", diff --git a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts index 083b2d90ef..993a0d81b4 100644 --- a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts +++ b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts @@ -1,8 +1,7 @@ import { expect } from "chai"; -import { Address, KECCAK256_NULL } from "@nomicfoundation/ethereumjs-util"; +import { Address } from "@nomicfoundation/ethereumjs-util"; import { - AccountData, Blockchain, BlockConfig, Config, @@ -47,7 +46,6 @@ describe("Rethnet", () => { await stateManager.insertAccount(caller.buf, { nonce: 0n, balance: BigInt("0xffffffff"), - codeHash: KECCAK256_NULL, }); // send some value diff --git a/crates/rethnet_evm_napi/test/evm/StateManager.ts b/crates/rethnet_evm_napi/test/evm/StateManager.ts index b771b0877a..fc3be4e425 100644 --- a/crates/rethnet_evm_napi/test/evm/StateManager.ts +++ b/crates/rethnet_evm_napi/test/evm/StateManager.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; import { Address, KECCAK256_NULL } from "@nomicfoundation/ethereumjs-util"; -import { AccountData, StateManager } from "../.."; +import { Account, Bytecode, StateManager } from "../.."; describe("State Manager", () => { const caller = Address.fromString( @@ -22,7 +22,6 @@ describe("State Manager", () => { await stateManager.insertAccount(caller.buf, { nonce: 0n, balance: 0n, - codeHash: KECCAK256_NULL, }); let account = await stateManager.getAccountByAddress(caller.buf); @@ -34,7 +33,6 @@ describe("State Manager", () => { await stateManager.insertAccount(caller.buf, { nonce: 0n, balance: 0n, - codeHash: KECCAK256_NULL, }); await stateManager.modifyAccount( @@ -42,8 +40,8 @@ describe("State Manager", () => { async function ( _balance: bigint, nonce: bigint, - code: Buffer | undefined - ): Promise { + code: Bytecode | undefined + ): Promise { return { balance: 100n, nonce, @@ -56,14 +54,12 @@ describe("State Manager", () => { expect(account?.balance).to.equal(100n); 
expect(account?.nonce).to.equal(0n); - expect(account?.codeHash).to.eql(KECCAK256_NULL); }); it("setAccountNonce", async () => { await stateManager.insertAccount(caller.buf, { nonce: 0n, balance: 0n, - codeHash: KECCAK256_NULL, }); await stateManager.modifyAccount( @@ -71,8 +67,8 @@ describe("State Manager", () => { async function ( balance: bigint, nonce: bigint, - code: Buffer | undefined - ): Promise { + code: Bytecode | undefined + ): Promise { return { balance, nonce: 5n, @@ -85,6 +81,5 @@ describe("State Manager", () => { expect(account?.balance).to.equal(0n); expect(account?.nonce).to.equal(5n); - expect(account?.codeHash).to.eql(KECCAK256_NULL); }); }); From 081b702b2eaf5593505866acd0cfd6e6a2aa9c67 Mon Sep 17 00:00:00 2001 From: "F. Eugene Aumson" Date: Mon, 20 Mar 2023 09:34:13 -0400 Subject: [PATCH 036/406] bump: upgrade napi to 2.11.4 (#3780) --- crates/rethnet_evm_napi/Cargo.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index 8a940cdfea..f056598ca5 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -8,7 +8,7 @@ crate-type = ["cdylib"] [dependencies] crossbeam-channel = { version = "0.5.6", default-features = false } -napi = { version = "= 2.11.1", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } +napi = { version = "= 2.11.4", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } napi-derive = "2.9.3" once_cell = "1.15.0" pretty_env_logger = "0.4.0" From 0bd71f62fe1c6db2dd1589b6e905591166527388 Mon Sep 17 00:00:00 2001 From: "F. Eugene Aumson" Date: Mon, 20 Mar 2023 15:16:11 -0400 Subject: [PATCH 037/406] chore: unpin napi version (#3784) --- crates/rethnet_evm_napi/Cargo.toml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index f056598ca5..4cfe495427 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -8,7 +8,8 @@ crate-type = ["cdylib"] [dependencies] crossbeam-channel = { version = "0.5.6", default-features = false } -napi = { version = "= 2.11.4", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } +# if ever napi needs to be pinned, be sure to pin napi-derive to the same version +napi = { version = "2.11.4", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } napi-derive = "2.9.3" once_cell = "1.15.0" pretty_env_logger = "0.4.0" From d87a1fe42730211e31f91a6bee26227552a79bed Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Mon, 3 Apr 2023 09:41:26 +0200 Subject: [PATCH 038/406] Add tests for accounts with the same bytecode --- .../hardhat-network/helpers/assertions.ts | 14 +++++ .../provider/modules/hardhat.ts | 51 ++++++++++++++++++- .../hardhat-network/provider/selfdestruct.ts | 46 ++++++++++++++++- 3 files changed, 109 insertions(+), 2 deletions(-) diff --git a/packages/hardhat-core/test/internal/hardhat-network/helpers/assertions.ts b/packages/hardhat-core/test/internal/hardhat-network/helpers/assertions.ts index 0b9888dab5..bdc96763f6 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/helpers/assertions.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/helpers/assertions.ts @@ -354,3 +354,17 @@ export async function assertAddressBalance( ); assert.equal(value, expectedValue); } + +export async function assertEqualCode( + provider: 
EthereumProvider, + address1: string, + address2: string +) { + const code1 = await provider.send("eth_getCode", [address1]); + const code2 = await provider.send("eth_getCode", [address2]); + assert.equal( + code1, + code2, + `Expected code in accounts ${address1} and ${address2} to be equal` + ); +} diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/modules/hardhat.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/modules/hardhat.ts index 7e266f950a..3d4f04c18f 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/modules/hardhat.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/modules/hardhat.ts @@ -14,6 +14,7 @@ import { expectErrorAsync } from "../../../../helpers/errors"; import { INFURA_URL } from "../../../../setup"; import { workaroundWindowsCiFailures } from "../../../../utils/workaround-windows-ci-failures"; import { + assertEqualCode, assertInternalError, assertInvalidArgumentsError, assertInvalidInputError, @@ -29,7 +30,10 @@ import { compileLiteral } from "../../stack-traces/compilation"; import { getPendingBaseFeePerGas } from "../../helpers/getPendingBaseFeePerGas"; import { RpcBlockOutput } from "../../../../../src/internal/hardhat-network/provider/output"; import * as BigIntUtils from "../../../../../src/internal/util/bigint"; -import { EXAMPLE_DIFFICULTY_CONTRACT } from "../../helpers/contracts"; +import { + EXAMPLE_CONTRACT, + EXAMPLE_DIFFICULTY_CONTRACT, +} from "../../helpers/contracts"; import { HardhatMetadata } from "../../../../../src/internal/core/jsonrpc/types/output/metadata"; import { useFixtureProject } from "../../../../helpers/project"; import { useEnvironment } from "../../../../helpers/environment"; @@ -1893,6 +1897,51 @@ describe("Hardhat module", function () { ).stateRoot; assert.equal(newStateRoot, oldStateRoot); }); + + it("modifying an account's code shouldn't affect another account with the same code", async function () { + // deploy two contracts with the same bytecode + const contractAddress1 = await deployContract( + this.provider, + `0x${EXAMPLE_CONTRACT.bytecode.object}`, + DEFAULT_ACCOUNTS_ADDRESSES[0] + ); + + const contractAddress2 = await deployContract( + this.provider, + `0x${EXAMPLE_CONTRACT.bytecode.object}`, + DEFAULT_ACCOUNTS_ADDRESSES[0] + ); + + await assertEqualCode( + this.provider, + contractAddress1, + contractAddress2 + ); + const contractCode1Before = await this.provider.send("eth_getCode", [ + contractAddress1, + ]); + + // modify the code of the second one + await this.provider.send("hardhat_setCode", [ + contractAddress2, + "0xff", + ]); + + // check that only the second one was affected + const contractCode1 = await this.provider.send("eth_getCode", [ + contractAddress1, + ]); + assert.notEqual(contractCode1.toLowerCase(), "0xff"); + assert.equal( + contractCode1.toLowerCase(), + contractCode1Before.toLowerCase() + ); + + const contractCode2 = await this.provider.send("eth_getCode", [ + contractAddress2, + ]); + assert.equal(contractCode2.toLowerCase(), "0xff"); + }); }); describe("hardhat_setNonce", function () { diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/selfdestruct.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/selfdestruct.ts index 0c69c3dd5e..4c9027e7d2 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/selfdestruct.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/selfdestruct.ts @@ -1,5 +1,8 @@ import { assert } from "chai"; -import { 
assertContractFieldEqualNumber } from "../helpers/assertions"; +import { + assertContractFieldEqualNumber, + assertEqualCode, +} from "../helpers/assertions"; import { CALL_SELFDESTRUCT_CONTRACT, @@ -166,6 +169,47 @@ describe("selfdestruct", function () { assert.equal(contractCodeAfterTx, "0x"); assert.equal(BigInt(receiverAddressBalanceAfterTx), 1000n); }); + + it("a selfdestruct shouldn't affect another account with the same code", async function () { + // deploy two contracts with the same bytecode + const contractAddress1 = await deployContract( + this.provider, + `0x${SELFDESTRUCT_CONTRACT.bytecode.object}`, + DEFAULT_ACCOUNTS_ADDRESSES[0] + ); + const contractAddress2 = await deployContract( + this.provider, + `0x${SELFDESTRUCT_CONTRACT.bytecode.object}`, + DEFAULT_ACCOUNTS_ADDRESSES[0] + ); + + await assertEqualCode( + this.provider, + contractAddress1, + contractAddress2 + ); + + // call self-destruct in one of them + await this.provider.send("eth_sendTransaction", [ + { + from: DEFAULT_ACCOUNTS_ADDRESSES[0], + to: contractAddress1, + data: `${SELFDESTRUCT_CONTRACT.selectors.sd}0000000000000000000000000000000000000000000000000000000000000000`, + }, + ]); + + // check that the first contract doesn't have code but the second one + // does + const contractCode1 = await this.provider.send("eth_getCode", [ + contractAddress1, + ]); + assert.equal(contractCode1, "0x"); + + const contractCode2 = await this.provider.send("eth_getCode", [ + contractAddress2, + ]); + assert.notEqual(contractCode2, "0x"); + }); }); }); }); From e5a03d944a0ccd7a5d951b699fc0d89764521a08 Mon Sep 17 00:00:00 2001 From: Wodann Date: Tue, 11 Apr 2023 10:44:56 -0500 Subject: [PATCH 039/406] refactor: explicitly make snapshots in block builder (#3810) --- crates/rethnet_evm/src/state/debug.rs | 5 +- crates/rethnet_evm/src/state/layered_state.rs | 89 ++++--------------- crates/rethnet_evm/src/state/request.rs | 2 +- crates/rethnet_evm/src/state/sync.rs | 4 +- crates/rethnet_evm_napi/Cargo.toml | 4 +- crates/rethnet_evm_napi/src/config.rs | 2 +- crates/rethnet_evm_napi/src/state.rs | 18 +++- .../rethnet_evm_napi/src/tracer/js_tracer.rs | 14 +-- .../src/transaction/result.rs | 2 +- .../hardhat-network/provider/RethnetState.ts | 9 +- .../internal/hardhat-network/provider/node.ts | 32 ++++--- .../hardhat-network/provider/vm/dual.ts | 18 +++- .../hardhat-network/provider/vm/ethereumjs.ts | 9 +- .../hardhat-network/provider/vm/rethnet.ts | 6 +- .../hardhat-network/provider/vm/vm-adapter.ts | 3 +- 15 files changed, 105 insertions(+), 112 deletions(-) diff --git a/crates/rethnet_evm/src/state/debug.rs b/crates/rethnet_evm/src/state/debug.rs index 24fe6ee394..ecaa084d82 100644 --- a/crates/rethnet_evm/src/state/debug.rs +++ b/crates/rethnet_evm/src/state/debug.rs @@ -52,8 +52,9 @@ pub trait StateDebug { /// Reverts to the previous checkpoint, created using [`checkpoint`]. fn revert(&mut self) -> Result<(), Self::Error>; - /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. - fn make_snapshot(&mut self) -> B256; + /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier and whether + /// that snapshot already existed. + fn make_snapshot(&mut self) -> (B256, bool); /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. 
fn remove_snapshot(&mut self, state_root: &B256) -> bool; diff --git a/crates/rethnet_evm/src/state/layered_state.rs b/crates/rethnet_evm/src/state/layered_state.rs index 7da12286fd..2b815f869e 100644 --- a/crates/rethnet_evm/src/state/layered_state.rs +++ b/crates/rethnet_evm/src/state/layered_state.rs @@ -13,20 +13,10 @@ use revm::{ use super::{StateDebug, StateError}; -#[derive(Clone, Debug)] -struct RevertedLayers { - /// The parent layer's state root - pub parent_state_root: B256, - /// The reverted layers - pub stack: Vec, -} - /// A state consisting of layers. #[derive(Clone, Debug)] pub struct LayeredState { stack: Vec, - /// The old parent layer state root and the reverted layers - reverted_layers: Option>, /// Snapshots snapshots: HashMap>, // naive implementation } @@ -36,7 +26,6 @@ impl LayeredState { pub fn with_layer(layer: Layer) -> Self { Self { stack: vec![layer], - reverted_layers: None, snapshots: HashMap::new(), } } @@ -85,7 +74,6 @@ impl Default for LayeredState { fn default() -> Self { Self { stack: vec![Layer::default()], - reverted_layers: None, snapshots: HashMap::new(), } } @@ -348,17 +336,21 @@ impl StateDebug for LayeredState { Ok(()) } - fn make_snapshot(&mut self) -> B256 { + fn make_snapshot(&mut self) -> (B256, bool) { let state_root = self.state_root().unwrap(); - let mut snapshot = self.stack.clone(); - if let Some(layer) = snapshot.last_mut() { - layer.state_root.replace(state_root); - } - // Currently overwrites old snapshots - self.snapshots.insert(state_root, snapshot); + let mut exists = true; + self.snapshots.entry(state_root).or_insert_with(|| { + exists = false; - state_root + let mut snapshot = self.stack.clone(); + if let Some(layer) = snapshot.last_mut() { + layer.state_root.replace(state_root); + } + snapshot + }); + + (state_root, exists) } fn modify_account( @@ -435,47 +427,12 @@ impl StateDebug for LayeredState { self.last_layer_mut().state_root.replace(state_root); } - if let Some(snapshot) = self.snapshots.get(state_root) { - // Retain all layers except the first - self.reverted_layers = Some(RevertedLayers { - parent_state_root: self.stack.first().unwrap().state_root.unwrap(), - stack: self.stack.split_off(1), - }); - self.stack = snapshot.clone(); + if let Some(snapshot) = self.snapshots.remove(state_root) { + self.stack = snapshot; return Ok(()); } - // Check whether the state root is contained in the previously reverted layers - let reinstated_layers = self.reverted_layers.take().and_then(|mut reverted_layers| { - let layer_id = - reverted_layers - .stack - .iter() - .enumerate() - .find_map(|(layer_id, layer)| { - if layer.state_root.unwrap() == *state_root { - Some(layer_id) - } else { - None - } - }); - - if let Some(layer_id) = layer_id { - reverted_layers.stack.truncate(layer_id + 1); - - Some(reverted_layers) - } else { - None - } - }); - - let state_root = reinstated_layers - .as_ref() - .map_or(state_root, |reinstated_layers| { - &reinstated_layers.parent_state_root - }); - let layer_id = self.stack.iter().enumerate().find_map(|(layer_id, layer)| { if layer.state_root.unwrap() == *state_root { Some(layer_id) @@ -485,23 +442,7 @@ impl StateDebug for LayeredState { }); if let Some(layer_id) = layer_id { - let reverted_layers = self.stack.split_off(layer_id + 1); - let parent_state_root = self.stack.last().unwrap().state_root.unwrap(); - - if let Some(mut reinstated_layers) = reinstated_layers { - self.stack.append(&mut reinstated_layers.stack); - } - - self.add_layer_default(); - - self.reverted_layers = if 
reverted_layers.is_empty() { - None - } else { - Some(RevertedLayers { - parent_state_root, - stack: reverted_layers, - }) - }; + self.stack.truncate(layer_id + 1); Ok(()) } else { diff --git a/crates/rethnet_evm/src/state/request.rs b/crates/rethnet_evm/src/state/request.rs index 4c8fa9dd04..f6a1c6f8ed 100644 --- a/crates/rethnet_evm/src/state/request.rs +++ b/crates/rethnet_evm/src/state/request.rs @@ -41,7 +41,7 @@ where sender: oneshot::Sender>, }, MakeSnapshot { - sender: oneshot::Sender, + sender: oneshot::Sender<(B256, bool)>, }, ModifyAccount { address: Address, diff --git a/crates/rethnet_evm/src/state/sync.rs b/crates/rethnet_evm/src/state/sync.rs index 1915fd3c05..f47381e34c 100644 --- a/crates/rethnet_evm/src/state/sync.rs +++ b/crates/rethnet_evm/src/state/sync.rs @@ -170,7 +170,7 @@ where } /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. - pub async fn make_snapshot(&self) -> B256 { + pub async fn make_snapshot(&self) -> (B256, bool) { let (sender, receiver) = oneshot::channel(); self.request_sender @@ -405,7 +405,7 @@ where task::block_in_place(move || self.runtime.block_on(AsyncState::revert(*self))) } - fn make_snapshot(&mut self) -> B256 { + fn make_snapshot(&mut self) -> (B256, bool) { task::block_in_place(move || self.runtime.block_on(AsyncState::make_snapshot(*self))) } diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index 4cfe495427..1d4cfc2660 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -9,8 +9,8 @@ crate-type = ["cdylib"] [dependencies] crossbeam-channel = { version = "0.5.6", default-features = false } # if ever napi needs to be pinned, be sure to pin napi-derive to the same version -napi = { version = "2.11.4", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } -napi-derive = "2.9.3" +napi = { version = "2.12.4", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } +napi-derive = "2.12.3" once_cell = "1.15.0" pretty_env_logger = "0.4.0" rethnet_evm = { version = "0.1.0-dev", path = "../rethnet_evm" } diff --git a/crates/rethnet_evm_napi/src/config.rs b/crates/rethnet_evm_napi/src/config.rs index 96cd5fe3b8..16fe3906bb 100644 --- a/crates/rethnet_evm_napi/src/config.rs +++ b/crates/rethnet_evm_napi/src/config.rs @@ -1,5 +1,5 @@ use napi::{ - bindgen_prelude::{BigInt, ToNapiValue}, + bindgen_prelude::{BigInt, FromNapiValue, ToNapiValue}, Status, }; use napi_derive::napi; diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 3198a95c39..1a909019fb 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -35,6 +35,15 @@ pub struct GenesisAccount { pub balance: BigInt, } +/// An identifier for a snapshot of the state +#[napi(object)] +pub struct SnapshotId { + /// Snapshot's state root + pub state_root: Buffer, + /// Whether the snapshot already existed. + pub existed: bool, +} + /// The Rethnet state #[napi] pub struct StateManager { @@ -201,8 +210,13 @@ impl StateManager { /// Makes a snapshot of the database that's retained until [`removeSnapshot`] is called. Returns the snapshot's identifier. 
#[napi] - pub async fn make_snapshot(&self) -> Buffer { - >::as_ref(&self.state.make_snapshot().await).into() + pub async fn make_snapshot(&self) -> SnapshotId { + let (state_root, existed) = self.state.make_snapshot().await; + + SnapshotId { + state_root: >::as_ref(&state_root).into(), + existed, + } } /// Modifies the account with the provided address using the specified modifier function. diff --git a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs index ae95886d7d..c2f704ea45 100644 --- a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs +++ b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs @@ -546,17 +546,21 @@ where } }) .unwrap_or_else(|| { - let account = data.db.basic(inputs.context.code_address).unwrap().unwrap(); - account - .code - .unwrap_or_else(|| data.db.code_by_hash(account.code_hash).unwrap()) + data.db.basic(inputs.context.code_address).unwrap().map_or( + Bytecode::new(), + |account_info| { + account_info.code.unwrap_or_else(|| { + data.db.code_by_hash(account_info.code_hash).unwrap() + }) + }, + ) }); self.pending_before = Some(BeforeMessage { depth: data.journaled_state.depth, to: Some(inputs.context.address), data: inputs.input.clone(), - value: inputs.transfer.value, + value: inputs.context.apparent_value, code_address: Some(inputs.context.code_address), code: Some(code), }); diff --git a/crates/rethnet_evm_napi/src/transaction/result.rs b/crates/rethnet_evm_napi/src/transaction/result.rs index eea97b41aa..b11042cbbb 100644 --- a/crates/rethnet_evm_napi/src/transaction/result.rs +++ b/crates/rethnet_evm_napi/src/transaction/result.rs @@ -1,5 +1,5 @@ use napi::{ - bindgen_prelude::{BigInt, Buffer, Either3, ToNapiValue}, + bindgen_prelude::{BigInt, Buffer, Either3, FromNapiValue, ToNapiValue}, Either, }; use napi_derive::napi; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts index 77c36ca862..04f3873b4d 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts @@ -58,8 +58,13 @@ export class RethnetStateManager { await this._state.removeAccount(address.buf); } - public async makeSnapshot(): Promise { - return this._state.makeSnapshot(); + public async makeSnapshot(): Promise<[Buffer, boolean]> { + const snapshot = await this._state.makeSnapshot(); + return [snapshot.stateRoot, snapshot.existed]; + } + + public async removeSnapshot(stateRoot: Buffer): Promise { + return this._state.removeSnapshot(stateRoot); } public async modifyAccount( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index de821c12a1..9d459b677c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -974,7 +974,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu id, date: new Date(), latestBlock: await this.getLatestBlock(), - stateRoot: await this._vm.makeSnapshot(), + stateRoot: (await this._vm.makeSnapshot())[0], txPoolSnapshotId: this._txPool.snapshot(), blockTimeOffsetSeconds: this.getTimeIncrement(), nextBlockTimestamp: this.getNextBlockTimestamp(), @@ -1042,7 +1042,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu // We delete this and the following 
snapshots, as they can only be used // once in Ganache - this._snapshots.splice(snapshotIndex); + await this._removeSnapshot(snapshotIndex); return true; } @@ -1520,18 +1520,22 @@ Hardhat Network's forking functionality only works with blocks from at least spu private async _mineTransactionAndPending( tx: TypedTransaction ): Promise { - const snapshotId = await this.takeSnapshot(); + const id = await this.takeSnapshot(); let result; try { const txHash = await this._addPendingTransaction(tx); result = await this._mineBlocksUntilTransactionIsIncluded(txHash); } catch (err) { - await this.revertToSnapshot(snapshotId); + await this.revertToSnapshot(id); throw err; } - this._removeSnapshot(snapshotId); + const snapshotIndex = this._getSnapshotIndex(id); + if (snapshotIndex !== undefined) { + await this._removeSnapshot(id); + } + return result; } @@ -1779,12 +1783,12 @@ Hardhat Network's forking functionality only works with blocks from at least spu return undefined; } - private _removeSnapshot(id: number) { - const snapshotIndex = this._getSnapshotIndex(id); - if (snapshotIndex === undefined) { - return; + private async _removeSnapshot(snapshotIndex: number) { + const deletedSnapshots = this._snapshots.splice(snapshotIndex); + + for (const deletedSnapshot of deletedSnapshots) { + await this._vm.removeSnapshot(deletedSnapshot.stateRoot); } - this._snapshots.splice(snapshotIndex); } private _initLocalAccounts(genesisAccounts: GenesisAccount[]) { @@ -2064,12 +2068,16 @@ Hardhat Network's forking functionality only works with blocks from at least spu ); } - const currentStateRoot = await this._vm.getStateRoot(); + const [snapshot, existed] = await this._vm.makeSnapshot(); await this._setBlockContext(block); try { return await action(); } finally { - await this._vm.restoreContext(currentStateRoot); + await this._vm.restoreContext(snapshot); + + if (!existed) { + await this._vm.removeSnapshot(snapshot); + } } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index b80bb4e42c..c9839d28c1 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -299,9 +299,10 @@ export class DualModeAdapter implements VMAdapter { return this._ethereumJSAdapter.revertBlock(); } - public async makeSnapshot(): Promise { - const ethereumJSRoot = await this._ethereumJSAdapter.makeSnapshot(); - const rethnetRoot = await this._rethnetAdapter.makeSnapshot(); + public async makeSnapshot(): Promise<[Buffer, boolean]> { + const [ethereumJSRoot, _] = await this._ethereumJSAdapter.makeSnapshot(); + const [rethnetRoot, rethnetExisted] = + await this._rethnetAdapter.makeSnapshot(); if (!ethereumJSRoot.equals(rethnetRoot)) { console.trace( @@ -312,7 +313,16 @@ export class DualModeAdapter implements VMAdapter { throw new Error("Different snapshot state root"); } - return rethnetRoot; + return [rethnetRoot, rethnetExisted]; + } + + public async removeSnapshot(stateRoot: Buffer): Promise { + const _ethereumJSSuccess = await this._ethereumJSAdapter.removeSnapshot( + stateRoot + ); + const rethnetSuccess = await this._rethnetAdapter.removeSnapshot(stateRoot); + + return rethnetSuccess; } public getLastTrace(): { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index 56a9293600..8c10fe784b 100644 --- 
a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -438,8 +438,13 @@ export class EthereumJSAdapter implements VMAdapter { this._blockStartStateRoot = undefined; } - public async makeSnapshot(): Promise { - return this.getStateRoot(); + public async makeSnapshot(): Promise<[Buffer, boolean]> { + return [await this.getStateRoot(), false]; + } + + public async removeSnapshot(_stateRoot: Buffer): Promise { + // No way of deleting snapshot + return true; } public getLastTrace(): { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 2fa3160a7c..172bfae15e 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -389,10 +389,14 @@ export class RethnetAdapter implements VMAdapter { throw new Error("traceTransaction not implemented for Rethnet"); } - public async makeSnapshot(): Promise { + public async makeSnapshot(): Promise<[Buffer, boolean]> { return this._state.makeSnapshot(); } + public async removeSnapshot(stateRoot: Buffer): Promise { + return this._state.removeSnapshot(stateRoot); + } + public getLastTrace(): { trace: MessageTrace | undefined; error: Error | undefined; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index 30a166d797..9dcd16b5d3 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -82,5 +82,6 @@ export interface VMAdapter { ): Promise; // methods for snapshotting - makeSnapshot(): Promise; + makeSnapshot(): Promise<[Buffer, boolean]>; + removeSnapshot(stateRoot: Buffer): Promise; } From 5699afa73ce6b8a5a539923e4e7b7b627e161a03 Mon Sep 17 00:00:00 2001 From: Wodann Date: Wed, 19 Apr 2023 12:38:01 -0500 Subject: [PATCH 040/406] improvement: minimize data copied to JS (#3837) BREAKING CHANGE: requires node version >= 18 --- .changeset/forty-seals-occur.md | 5 + .../workflows/LATEST_DEPENDENCY_VERSIONS.yml | 2 +- .github/workflows/comment-on-linter-error.yml | 2 +- .github/workflows/e2e-ci.yml | 2 +- .../workflows/hardhat-chai-matchers-ci.yml | 10 +- .github/workflows/hardhat-core-ci.yml | 10 +- .github/workflows/hardhat-ethers-ci.yml | 10 +- .github/workflows/hardhat-etherscan-ci.yml | 10 +- .github/workflows/hardhat-foundry-ci.yml | 10 +- .github/workflows/hardhat-ganache-ci.yml | 10 +- .../workflows/hardhat-network-forking-ci.yml | 2 +- .../workflows/hardhat-network-helpers-ci.yml | 10 +- ...dhat-network-tracing-all-solc-versions.yml | 2 +- .../workflows/hardhat-network-tracing-ci.yml | 2 +- .github/workflows/hardhat-shorthand-ci.yml | 10 +- .github/workflows/hardhat-solhint-ci.yml | 10 +- .github/workflows/hardhat-solpp-ci.yml | 6 +- .github/workflows/hardhat-toolbox-ci.yml | 10 +- .github/workflows/hardhat-truffle4-ci.yml | 10 +- .github/workflows/hardhat-truffle5-ci.yml | 10 +- .github/workflows/hardhat-vyper-ci.yml | 2 +- .github/workflows/hardhat-waffle-ci.yml | 10 +- .github/workflows/hardhat-web3-ci.yml | 10 +- .github/workflows/hardhat-web3-legacy-ci.yml | 10 +- .github/workflows/lint.yml | 2 +- .github/workflows/pre-release.yml | 4 +- .github/workflows/release.yml | 4 +- 
.github/workflows/rethnet-ci.yml | 2 +- .../workflows/test-recent-mainnet-block.yml | 2 +- crates/rethnet_eth/Cargo.toml | 2 +- crates/rethnet_evm/Cargo.toml | 2 +- crates/rethnet_evm_napi/Cargo.toml | 2 +- crates/rethnet_evm_napi/src/runtime.rs | 35 ++- crates/rethnet_evm_napi/src/state.rs | 34 ++- .../rethnet_evm_napi/src/tracer/js_tracer.rs | 259 +++++++++++------- packages/hardhat-core/package.json | 6 +- .../hardhat-network/provider/vm/dual.ts | 118 ++++---- .../hardhat-network/provider/vm/ethereumjs.ts | 30 +- .../hardhat-network/provider/vm/rethnet.ts | 4 +- 39 files changed, 389 insertions(+), 292 deletions(-) create mode 100644 .changeset/forty-seals-occur.md diff --git a/.changeset/forty-seals-occur.md b/.changeset/forty-seals-occur.md new file mode 100644 index 0000000000..d57e0b94c1 --- /dev/null +++ b/.changeset/forty-seals-occur.md @@ -0,0 +1,5 @@ +--- +"hardhat": major +--- + +Node 18 or higher required to avoid bug with external memory diff --git a/.github/workflows/LATEST_DEPENDENCY_VERSIONS.yml b/.github/workflows/LATEST_DEPENDENCY_VERSIONS.yml index 8247aaffcf..1e95e8cdd3 100644 --- a/.github/workflows/LATEST_DEPENDENCY_VERSIONS.yml +++ b/.github/workflows/LATEST_DEPENDENCY_VERSIONS.yml @@ -15,7 +15,7 @@ jobs: steps: - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 - uses: actions/checkout@v2 - name: Delete yarn.lock run: "rm yarn.lock" diff --git a/.github/workflows/comment-on-linter-error.yml b/.github/workflows/comment-on-linter-error.yml index 5b281a58b9..b1304a6ec6 100644 --- a/.github/workflows/comment-on-linter-error.yml +++ b/.github/workflows/comment-on-linter-error.yml @@ -22,7 +22,7 @@ jobs: ref: "refs/pull/${{ github.event.number }}/merge" - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile diff --git a/.github/workflows/e2e-ci.yml b/.github/workflows/e2e-ci.yml index 1409159af5..e7d129c312 100644 --- a/.github/workflows/e2e-ci.yml +++ b/.github/workflows/e2e-ci.yml @@ -33,7 +33,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-chai-matchers-ci.yml b/.github/workflows/hardhat-chai-matchers-ci.yml index 35e720e80a..df2fe7f674 100644 --- a/.github/workflows/hardhat-chai-matchers-ci.yml +++ b/.github/workflows/hardhat-chai-matchers-ci.yml @@ -25,13 +25,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-chai-matchers on Windows with Node 14 + name: Test hardhat-chai-matchers on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test:ci test_on_macos: - name: Test hardhat-chai-matchers on MacOS with Node 14 + name: Test hardhat-chai-matchers on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -67,7 +67,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-core-ci.yml b/.github/workflows/hardhat-core-ci.yml index 75a5c3ef26..a2b3870eae 100644 
--- a/.github/workflows/hardhat-core-ci.yml +++ b/.github/workflows/hardhat-core-ci.yml @@ -25,13 +25,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-core on Windows with Node 14 + name: Test hardhat-core on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -44,7 +44,7 @@ jobs: run: yarn test:except-tracing test_on_macos: - name: Test hardhat-core on MacOS with Node 14 + name: Test hardhat-core on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -52,7 +52,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -69,7 +69,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-ethers-ci.yml b/.github/workflows/hardhat-ethers-ci.yml index 457743ec24..d8413f2f76 100644 --- a/.github/workflows/hardhat-ethers-ci.yml +++ b/.github/workflows/hardhat-ethers-ci.yml @@ -27,13 +27,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-ethers on Windows with Node 14 + name: Test hardhat-ethers on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-ethers on MacOS with Node 14 + name: Test hardhat-ethers on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -65,7 +65,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-etherscan-ci.yml b/.github/workflows/hardhat-etherscan-ci.yml index b97a30aa77..4e60011dfd 100644 --- a/.github/workflows/hardhat-etherscan-ci.yml +++ b/.github/workflows/hardhat-etherscan-ci.yml @@ -27,13 +27,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-etherscan on Windows with Node 14 + name: Test hardhat-etherscan on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-etherscan on MacOS with Node 14 + name: Test hardhat-etherscan on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -65,7 +65,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-foundry-ci.yml b/.github/workflows/hardhat-foundry-ci.yml index f52e14fc2b..989808eb7e 100644 --- 
a/.github/workflows/hardhat-foundry-ci.yml +++ b/.github/workflows/hardhat-foundry-ci.yml @@ -25,13 +25,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-foundry on Windows with Node 14 + name: Test hardhat-foundry on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-foundry on MacOS with Node 14 + name: Test hardhat-foundry on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -67,7 +67,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-ganache-ci.yml b/.github/workflows/hardhat-ganache-ci.yml index 147d47629a..f0215c8049 100644 --- a/.github/workflows/hardhat-ganache-ci.yml +++ b/.github/workflows/hardhat-ganache-ci.yml @@ -27,13 +27,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-ganache on Windows with Node 14 + name: Test hardhat-ganache on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-ganache on MacOS with Node 14 + name: Test hardhat-ganache on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -65,7 +65,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-network-forking-ci.yml b/.github/workflows/hardhat-network-forking-ci.yml index 6b6b96064b..17f377fe9a 100644 --- a/.github/workflows/hardhat-network-forking-ci.yml +++ b/.github/workflows/hardhat-network-forking-ci.yml @@ -39,7 +39,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile diff --git a/.github/workflows/hardhat-network-helpers-ci.yml b/.github/workflows/hardhat-network-helpers-ci.yml index c2bc454b93..abc1c0c6ab 100644 --- a/.github/workflows/hardhat-network-helpers-ci.yml +++ b/.github/workflows/hardhat-network-helpers-ci.yml @@ -25,13 +25,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-network-helpers on Windows with Node 14 + name: Test hardhat-network-helpers on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-network-helpers on MacOS with Node 14 + name: Test hardhat-network-helpers on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false 
}} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -67,7 +67,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-network-tracing-all-solc-versions.yml b/.github/workflows/hardhat-network-tracing-all-solc-versions.yml index 0704096987..fdddcbb601 100644 --- a/.github/workflows/hardhat-network-tracing-all-solc-versions.yml +++ b/.github/workflows/hardhat-network-tracing-all-solc-versions.yml @@ -19,7 +19,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-network-tracing-ci.yml b/.github/workflows/hardhat-network-tracing-ci.yml index c5c8f07963..87549528ec 100644 --- a/.github/workflows/hardhat-network-tracing-ci.yml +++ b/.github/workflows/hardhat-network-tracing-ci.yml @@ -30,7 +30,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-shorthand-ci.yml b/.github/workflows/hardhat-shorthand-ci.yml index f07472ab0a..56df6b8bff 100644 --- a/.github/workflows/hardhat-shorthand-ci.yml +++ b/.github/workflows/hardhat-shorthand-ci.yml @@ -27,13 +27,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-shorthand on Windows with Node 14 + name: Test hardhat-shorthand on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-shorthand on MacOS with Node 14 + name: Test hardhat-shorthand on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -65,7 +65,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-solhint-ci.yml b/.github/workflows/hardhat-solhint-ci.yml index b548aae14e..075593e9bf 100644 --- a/.github/workflows/hardhat-solhint-ci.yml +++ b/.github/workflows/hardhat-solhint-ci.yml @@ -27,13 +27,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-solhint on Windows with Node 14 + name: Test hardhat-solhint on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-solhint on MacOS with Node 14 + name: Test hardhat-solhint on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -65,7 +65,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + 
node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-solpp-ci.yml b/.github/workflows/hardhat-solpp-ci.yml index ac5f545700..bafe268771 100644 --- a/.github/workflows/hardhat-solpp-ci.yml +++ b/.github/workflows/hardhat-solpp-ci.yml @@ -27,7 +27,7 @@ concurrency: jobs: test_on_macos: - name: Test hardhat-solpp on MacOS with Node 14 + name: Test hardhat-solpp on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -35,7 +35,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -49,7 +49,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-toolbox-ci.yml b/.github/workflows/hardhat-toolbox-ci.yml index d10cddefc0..f90d7fb797 100644 --- a/.github/workflows/hardhat-toolbox-ci.yml +++ b/.github/workflows/hardhat-toolbox-ci.yml @@ -35,13 +35,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-toolbox on Windows with Node 14 + name: Test hardhat-toolbox on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -51,7 +51,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-toolbox on MacOS with Node 14 + name: Test hardhat-toolbox on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -59,7 +59,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -73,7 +73,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-truffle4-ci.yml b/.github/workflows/hardhat-truffle4-ci.yml index 46b7d5d003..3a5c85c8c4 100644 --- a/.github/workflows/hardhat-truffle4-ci.yml +++ b/.github/workflows/hardhat-truffle4-ci.yml @@ -29,13 +29,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-truffle4 on Windows with Node 14 + name: Test hardhat-truffle4 on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -45,7 +45,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-truffle4 on MacOS with Node 14 + name: Test hardhat-truffle4 on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -53,7 +53,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -67,7 +67,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-truffle5-ci.yml b/.github/workflows/hardhat-truffle5-ci.yml index 70e873cc04..906dc1330e 100644 --- a/.github/workflows/hardhat-truffle5-ci.yml +++ b/.github/workflows/hardhat-truffle5-ci.yml @@ -29,13 +29,13 @@ concurrency: jobs: test_on_windows: - name: Test 
hardhat-truffle5 on Windows with Node 14 + name: Test hardhat-truffle5 on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -45,7 +45,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-truffle5 on MacOS with Node 14 + name: Test hardhat-truffle5 on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -53,7 +53,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -67,7 +67,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-vyper-ci.yml b/.github/workflows/hardhat-vyper-ci.yml index 528e027713..a078ea6f06 100644 --- a/.github/workflows/hardhat-vyper-ci.yml +++ b/.github/workflows/hardhat-vyper-ci.yml @@ -31,7 +31,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-waffle-ci.yml b/.github/workflows/hardhat-waffle-ci.yml index b8db4e000b..4068094aea 100644 --- a/.github/workflows/hardhat-waffle-ci.yml +++ b/.github/workflows/hardhat-waffle-ci.yml @@ -29,13 +29,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-waffle on Windows with Node 14 + name: Test hardhat-waffle on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -45,7 +45,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-waffle on MacOS with Node 14 + name: Test hardhat-waffle on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -53,7 +53,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -67,7 +67,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-web3-ci.yml b/.github/workflows/hardhat-web3-ci.yml index 33a8ead269..1124c214f8 100644 --- a/.github/workflows/hardhat-web3-ci.yml +++ b/.github/workflows/hardhat-web3-ci.yml @@ -27,13 +27,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-web3 on Windows with Node 14 + name: Test hardhat-web3 on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-web3 on MacOS with Node 14 + name: Test hardhat-web3 on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -65,7 +65,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: 
actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/hardhat-web3-legacy-ci.yml b/.github/workflows/hardhat-web3-legacy-ci.yml index 221110aa34..1883b285ad 100644 --- a/.github/workflows/hardhat-web3-legacy-ci.yml +++ b/.github/workflows/hardhat-web3-legacy-ci.yml @@ -27,13 +27,13 @@ concurrency: jobs: test_on_windows: - name: Test hardhat-web3-legacy on Windows with Node 14 + name: Test hardhat-web3-legacy on Windows with Node 18 runs-on: windows-latest steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-web3-legacy on MacOS with Node 14 + name: Test hardhat-web3-legacy on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -51,7 +51,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -65,7 +65,7 @@ jobs: runs-on: ubuntu-latest strategy: matrix: - node: [14, 16, 18] + node: [18] steps: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 2c37c9bb8a..67f321d498 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -19,7 +19,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn cache-dependency-path: | yarn.lock diff --git a/.github/workflows/pre-release.yml b/.github/workflows/pre-release.yml index eead734896..6b517e7421 100644 --- a/.github/workflows/pre-release.yml +++ b/.github/workflows/pre-release.yml @@ -17,7 +17,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile @@ -49,7 +49,7 @@ jobs: - uses: actions/checkout@v2 - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install run: yarn --frozen-lockfile diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 6d196e0990..a5d71f63f0 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -16,10 +16,10 @@ jobs: # This makes Actions fetch all Git history so that Changesets can generate changelogs with the correct commits fetch-depth: 0 - - name: Setup Node.js 14.x + - name: Setup Node.js 18.x uses: actions/setup-node@v2 with: - node-version: 14.x + node-version: 18.x cache: "yarn" - name: Install Dependencies diff --git a/.github/workflows/rethnet-ci.yml b/.github/workflows/rethnet-ci.yml index 7ae33e77d4..c4bf5a7eb0 100644 --- a/.github/workflows/rethnet-ci.yml +++ b/.github/workflows/rethnet-ci.yml @@ -68,7 +68,7 @@ jobs: - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 cache: yarn - name: Install node dependencies diff --git a/.github/workflows/test-recent-mainnet-block.yml b/.github/workflows/test-recent-mainnet-block.yml index 2d22215aea..b4ef2dc1af 100644 --- a/.github/workflows/test-recent-mainnet-block.yml +++ b/.github/workflows/test-recent-mainnet-block.yml @@ -16,7 +16,7 @@ jobs: steps: - uses: actions/setup-node@v2 with: - node-version: 14 + node-version: 18 - uses: actions/checkout@v2 - name: Install run: yarn --frozen-lockfile diff --git a/crates/rethnet_eth/Cargo.toml b/crates/rethnet_eth/Cargo.toml index 45594fb6b8..559440fe93 
100644 --- a/crates/rethnet_eth/Cargo.toml +++ b/crates/rethnet_eth/Cargo.toml @@ -14,7 +14,7 @@ hex-literal = { version = "0.3", default-features = false } open-fastrlp = { version = "0.1.2", default-features = false, features = ["derive"], optional = true } primitive-types = { version = "0.11.1", default-features = false, features = ["rlp"] } reqwest = { version = "0.11", features = ["blocking", "json"] } -revm-primitives = { git = "https://github.com/bluealloy/revm", rev = "8e6f4f2", version = "1.0", default-features = false } +revm-primitives = { git = "https://github.com/bluealloy/revm", rev = "3789509", version = "1.0", default-features = false } # revm-primitives = { path = "../../../revm/crates/primitives", version = "1.0", default-features = false } rlp = { version = "0.5.2", default-features = false, features = ["derive"] } ruint = { version = "1.7.0", default-features = false } diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 5aa495b2d3..b85cfa0e66 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -9,7 +9,7 @@ hashbrown = { version = "0.13", default-features = false, features = ["ahash", " log = { version = "0.4.17", default-features = false } parking_lot = { version = "0.12.1", default-features = false } rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth", features = ["serde"] } -revm = { git = "https://github.com/bluealloy/revm", rev = "8e6f4f2", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } +revm = { git = "https://github.com/bluealloy/revm", rev = "3789509", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } # revm = { path = "../../../revm/crates/revm", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } thiserror = { version = "1.0.38", default-features = false } tokio = { version = "1.21.2", default-features = false, features = ["rt-multi-thread", "sync"] } diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index 1d4cfc2660..ef01898c36 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -8,7 +8,7 @@ crate-type = ["cdylib"] [dependencies] crossbeam-channel = { version = "0.5.6", default-features = false } -# if ever napi needs to be pinned, be sure to pin napi-derive to the same version +# when napi is pinned, be sure to pin napi-derive to the same version napi = { version = "2.12.4", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } napi-derive = "2.12.3" once_cell = "1.15.0" diff --git a/crates/rethnet_evm_napi/src/runtime.rs b/crates/rethnet_evm_napi/src/runtime.rs index afffabf149..ebd0f896a4 100644 --- a/crates/rethnet_evm_napi/src/runtime.rs +++ b/crates/rethnet_evm_napi/src/runtime.rs @@ -11,10 +11,7 @@ use crate::{ config::Config, state::StateManager, tracer::Tracer, - transaction::{ - result::{ExecutionResult, TransactionResult}, - Transaction, - }, + transaction::{result::ExecutionResult, Transaction}, }; struct Logger; @@ -61,18 +58,19 @@ impl Rethnet { transaction: Transaction, block: BlockConfig, tracer: Option<&Tracer>, - ) -> napi::Result { + ) -> napi::Result { let transaction = TxEnv::try_from(transaction)?; let block = BlockEnv::try_from(block)?; let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); - TransactionResult::try_from( - self.runtime - .dry_run(transaction, block, inspector) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?, - ) + 
let (result, _state, trace) = self + .runtime + .dry_run(transaction, block, inspector) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + Ok(ExecutionResult::from((result, trace))) } /// Executes the provided transaction without changing state, ignoring validation checks in the process. @@ -82,18 +80,19 @@ impl Rethnet { transaction: Transaction, block: BlockConfig, tracer: Option<&Tracer>, - ) -> napi::Result { + ) -> napi::Result { let transaction = TxEnv::try_from(transaction)?; let block = BlockEnv::try_from(block)?; let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); - TransactionResult::try_from( - self.runtime - .guaranteed_dry_run(transaction, block, inspector) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?, - ) + let (result, _state, trace) = self + .runtime + .guaranteed_dry_run(transaction, block, inspector) + .await + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + Ok(ExecutionResult::from((result, trace))) } /// Executes the provided transaction, changing state in the process. diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 1a909019fb..9b2c10278d 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -1,11 +1,14 @@ -use std::sync::{ - mpsc::{channel, Sender}, - Arc, +use std::{ + mem, + sync::{ + mpsc::{channel, Sender}, + Arc, + }, }; use napi::{bindgen_prelude::*, JsFunction, JsObject, NapiRaw, Status}; use napi_derive::napi; -use rethnet_eth::{signature::private_key_to_address, Address, B256, U256}; +use rethnet_eth::{signature::private_key_to_address, Address, Bytes, B256, U256}; use rethnet_evm::{ state::{AsyncState, LayeredState, RethnetLayer, StateDebug, StateError, SyncState}, AccountInfo, Bytecode, HashMap, KECCAK_EMPTY, @@ -238,7 +241,7 @@ impl StateManager { env.raw(), unsafe { modify_account_fn.raw() }, 0, - |ctx: ThreadSafeCallContext| { + |mut ctx: ThreadSafeCallContext| { let sender = ctx.value.sender.clone(); let balance = ctx @@ -258,9 +261,26 @@ impl StateManager { .create_buffer_copy(code.hash()) .and_then(|hash| bytecode.set_named_property("hash", hash.into_raw()))?; + let code = code.original_bytes(); + ctx.env - .create_buffer_copy(&code.bytes()[..code.len()]) - .and_then(|code| bytecode.set_named_property("code", code.into_raw()))?; + .adjust_external_memory(code.len() as i64) + .expect("Failed to adjust external memory"); + + unsafe { + ctx.env.create_buffer_with_borrowed_data( + code.as_ptr(), + code.len(), + code, + |code: Bytes, mut env| { + env.adjust_external_memory(-(code.len() as i64)) + .expect("Failed to adjust external memory"); + + mem::forget(code); + }, + ) + } + .and_then(|code| bytecode.set_named_property("code", code.into_raw()))?; bytecode.into_unknown() } else { diff --git a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs index c2f704ea45..005e3ea183 100644 --- a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs +++ b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs @@ -1,5 +1,6 @@ use std::{ fmt::Debug, + mem, sync::mpsc::{channel, Sender}, }; @@ -9,12 +10,9 @@ use napi::{ }; use napi_derive::napi; use rethnet_eth::{Address, Bytes, U256}; -use rethnet_evm::{ - opcode, return_revert, Bytecode, Gas, InstructionResult, SuccessOrHalt, OPCODE_JUMPMAP, -}; +use rethnet_evm::{opcode, return_revert, Bytecode, Gas, InstructionResult, SuccessOrHalt}; use crate::{ - account::Account, 
sync::{await_void_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, transaction::result::{ExceptionalHalt, ExecutionResult}, @@ -56,33 +54,33 @@ pub struct TracingStep { /// The program counter #[napi(readonly)] pub pc: BigInt, - /// The executed op code - #[napi(readonly)] - pub opcode: String, + // /// The executed op code + // #[napi(readonly)] + // pub opcode: String, // /// The return value of the step // #[napi(readonly)] // pub return_value: u8, - /// The amount of gas that was used by the step - #[napi(readonly)] - pub gas_cost: BigInt, - /// The amount of gas that was refunded by the step - #[napi(readonly)] - pub gas_refunded: BigInt, - /// The amount of gas left - #[napi(readonly)] - pub gas_left: BigInt, - /// The stack - #[napi(readonly)] - pub stack: Vec, - /// The memory - #[napi(readonly)] - pub memory: Buffer, - /// The contract being executed - #[napi(readonly)] - pub contract: Account, - /// The address of the contract - #[napi(readonly)] - pub contract_address: Buffer, + // /// The amount of gas that was used by the step + // #[napi(readonly)] + // pub gas_cost: BigInt, + // /// The amount of gas that was refunded by the step + // #[napi(readonly)] + // pub gas_refunded: BigInt, + // /// The amount of gas left + // #[napi(readonly)] + // pub gas_left: BigInt, + // /// The stack + // #[napi(readonly)] + // pub stack: Vec, + // /// The memory + // #[napi(readonly)] + // pub memory: Buffer, + // /// The contract being executed + // #[napi(readonly)] + // pub contract: Account, + // /// The address of the contract + // #[napi(readonly)] + // pub contract_address: Buffer, // /// The address of the code being executed // #[napi(readonly)] // pub code_address: Buffer, @@ -168,7 +166,7 @@ impl JsTracer { env.raw(), unsafe { callbacks.before_message.raw() }, 0, - |ctx: ThreadSafeCallContext| { + |mut ctx: ThreadSafeCallContext| { let sender = ctx.value.sender.clone(); let mut tracing_message = ctx.env.create_object()?; @@ -191,9 +189,26 @@ impl JsTracer { ) .and_then(|to| tracing_message.set_named_property("to", to))?; + let data = ctx.value.message.data; + ctx.env - .create_buffer_copy(&ctx.value.message.data) - .and_then(|data| tracing_message.set_named_property("data", data.into_raw()))?; + .adjust_external_memory(data.len() as i64) + .expect("Failed to adjust external memory"); + + unsafe { + ctx.env.create_buffer_with_borrowed_data( + data.as_ptr(), + data.len(), + data, + |data: Bytes, mut env| { + env.adjust_external_memory(-(data.len() as i64)) + .expect("Failed to adjust external memory"); + + mem::forget(data); + }, + ) + } + .and_then(|data| tracing_message.set_named_property("data", data.into_raw()))?; ctx.env .create_bigint_from_words(false, ctx.value.message.value.as_limbs().to_vec()) @@ -215,21 +230,32 @@ impl JsTracer { tracing_message.set_named_property("codeAddress", code_address) })?; - ctx.value - .message - .code - .as_ref() - .map_or_else( - || ctx.env.get_undefined().map(JsUndefined::into_unknown), - |code| { - ctx.env - .create_buffer_copy(&code.bytes()[..code.len()]) - .map(JsBufferValue::into_unknown) - }, - ) - .and_then(|code_address| { - tracing_message.set_named_property("code", code_address) - })?; + if let Some(code) = &ctx.value.message.code { + let code = code.original_bytes(); + ctx.env + .adjust_external_memory(code.len() as i64) + .expect("Failed to adjust external memory"); + + unsafe { + ctx.env.create_buffer_with_borrowed_data( + code.as_ptr(), + code.len(), + code, + 
|code: Bytes, mut env| { + env.adjust_external_memory(-(code.len() as i64)) + .expect("Failed to adjust external memory"); + + mem::forget(code); + }, + ) + } + .map(JsBufferValue::into_unknown) + } else { + ctx.env.get_undefined().map(JsUndefined::into_unknown) + } + .and_then(|code_address| { + tracing_message.set_named_property("code", code_address) + })?; let next = ctx.env.create_object()?; @@ -257,9 +283,9 @@ impl JsTracer { .create_bigint_from_u64(ctx.value.pc) .and_then(|pc| tracing_step.set_named_property("pc", pc))?; - ctx.env - .create_string(OPCODE_JUMPMAP[usize::from(ctx.value.opcode)].unwrap_or("")) - .and_then(|opcode| tracing_step.set_named_property("opcode", opcode))?; + // ctx.env + // .create_string(OPCODE_JUMPMAP[usize::from(ctx.value.opcode)].unwrap_or("")) + // .and_then(|opcode| tracing_step.set_named_property("opcode", opcode))?; // ctx.env // .create_uint32((ctx.value.return_value as u8).into()) @@ -303,44 +329,44 @@ impl JsTracer { // tracing_step.set_named_property("memory", memory.into_raw()) // })?; - let mut contract = ctx.env.create_object()?; + // let mut contract = ctx.env.create_object()?; - ctx.env - .create_bigint_from_words(false, ctx.value.contract.balance.as_limbs().to_vec()) - .and_then(|balance| contract.set_named_property("balance", balance))?; + // ctx.env + // .create_bigint_from_words(false, ctx.value.contract.balance.as_limbs().to_vec()) + // .and_then(|balance| contract.set_named_property("balance", balance))?; - let nonce = ctx.env.create_bigint_from_u64(ctx.value.contract.nonce)?; - contract.set_named_property("nonce", nonce)?; + // let nonce = ctx.env.create_bigint_from_u64(ctx.value.contract.nonce)?; + // contract.set_named_property("nonce", nonce)?; - ctx.env - .create_buffer_copy(ctx.value.contract.code_hash) - .and_then(|code_hash| { - contract.set_named_property("codeHash", code_hash.into_unknown()) - })?; - - ctx.value - .contract - .code - .as_ref() - .map_or_else( - || ctx.env.get_undefined().map(JsUndefined::into_unknown), - |code| { - ctx.env - .create_buffer_copy(&code.bytes()[..code.len()]) - .map(|code| code.into_unknown()) - }, - ) - .and_then(|code| contract.set_named_property("code", code))?; - - tracing_step.set_named_property("contract", contract)?; + // ctx.env + // .create_buffer_copy(ctx.value.contract.code_hash) + // .and_then(|code_hash| { + // contract.set_named_property("codeHash", code_hash.into_unknown()) + // })?; - let contract_address = &ctx.value.contract_address; - ctx.env - .create_buffer_copy(contract_address) - .and_then(|contract_address| { - tracing_step - .set_named_property("contractAddress", contract_address.into_unknown()) - })?; + // ctx.value + // .contract + // .code + // .as_ref() + // .map_or_else( + // || ctx.env.get_undefined().map(JsUndefined::into_unknown), + // |code| { + // ctx.env + // .create_buffer_copy(&code.bytes()[..code.len()]) + // .map(|code| code.into_unknown()) + // }, + // ) + // .and_then(|code| contract.set_named_property("code", code))?; + + // tracing_step.set_named_property("contract", contract)?; + + // let contract_address = &ctx.value.contract_address; + // ctx.env + // .create_buffer_copy(contract_address) + // .and_then(|contract_address| { + // tracing_step + // .set_named_property("contractAddress", contract_address.into_unknown()) + // })?; let next = ctx.env.create_object()?; @@ -355,7 +381,7 @@ impl JsTracer { env.raw(), unsafe { callbacks.after_message.raw() }, 0, - |ctx: ThreadSafeCallContext| { + |mut ctx: ThreadSafeCallContext| { let sender = 
ctx.value.sender.clone(); let mut tracing_message_result = ctx.env.create_object()?; @@ -412,7 +438,24 @@ impl JsTracer { log_object.set_named_property("topics", topics) })?; - ctx.env.create_buffer_copy(&log.data).and_then(|data| { + ctx.env + .adjust_external_memory(log.data.len() as i64) + .expect("Failed to adjust external memory"); + + unsafe { + ctx.env.create_buffer_with_borrowed_data( + log.data.as_ptr(), + log.data.len(), + log.data, + |data: Bytes, mut env| { + env.adjust_external_memory(-(data.len() as i64)) + .expect("Failed to adjust external memory"); + + mem::forget(data); + }, + ) + } + .and_then(|data| { log_object.set_named_property("data", data.into_raw()) })?; @@ -431,11 +474,26 @@ impl JsTracer { let mut transaction_output = ctx.env.create_object()?; ctx.env - .create_buffer_copy(output) - .map(JsBufferValue::into_unknown) - .and_then(|output| { - transaction_output.set_named_property("returnValue", output) - })?; + .adjust_external_memory(output.len() as i64) + .expect("Failed to adjust external memory"); + + unsafe { + ctx.env.create_buffer_with_borrowed_data( + output.as_ptr(), + output.len(), + output, + |output: Bytes, mut env| { + env.adjust_external_memory(-(output.len() as i64)) + .expect("Failed to adjust external memory"); + + mem::forget(output); + }, + ) + } + .map(JsBufferValue::into_unknown) + .and_then(|output| { + transaction_output.set_named_property("returnValue", output) + })?; address .map_or_else( @@ -456,9 +514,24 @@ impl JsTracer { } rethnet_evm::ExecutionResult::Revert { gas_used, output } => { ctx.env - .create_buffer_copy(output) - .map(JsBufferValue::into_unknown) - .and_then(|output| result.set_named_property("output", output))?; + .adjust_external_memory(output.len() as i64) + .expect("Failed to adjust external memory"); + + unsafe { + ctx.env.create_buffer_with_borrowed_data( + output.as_ptr(), + output.len(), + output, + |output: Bytes, mut env| { + env.adjust_external_memory(-(output.len() as i64)) + .expect("Failed to adjust external memory"); + + mem::forget(output); + }, + ) + } + .map(JsBufferValue::into_unknown) + .and_then(|output| result.set_named_property("output", output))?; gas_used } diff --git a/packages/hardhat-core/package.json b/packages/hardhat-core/package.json index 23f149860d..7cef62209f 100644 --- a/packages/hardhat-core/package.json +++ b/packages/hardhat-core/package.json @@ -29,9 +29,9 @@ "eslint": "eslint 'src/**/*.ts' 'test/**/*.ts'", "prettier": "prettier \"**/*.{js,md,json}\"", "test": "mocha --recursive \"test/**/*.ts\" --exit", - "test:except-tracing": "mocha --recursive \"test/**/*.ts\" --invert --grep \"Stack traces\"", - "test:tracing": "mocha --recursive \"test/internal/hardhat-network/{helpers,stack-traces}/**/*.ts\"", - "test:forking": "mocha --recursive \"test/internal/hardhat-network/{helpers,jsonrpc,provider}/**/*.ts\"", + "test:except-tracing": "mocha --recursive \"test/**/*.ts\" --invert --grep \"Stack traces\" --exit", + "test:tracing": "mocha --recursive \"test/internal/hardhat-network/{helpers,stack-traces}/**/*.ts\" --exit", + "test:forking": "mocha --recursive \"test/internal/hardhat-network/{helpers,jsonrpc,provider}/**/*.ts\" --exit", "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", "build": "tsc --build .", "prepublishOnly": "yarn build", diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index c9839d28c1..699ffc809c 100644 --- 
a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -424,67 +424,67 @@ export class DualModeAdapter implements VMAdapter { throw new Error("Different step pc"); } - if (ethereumJSStep.opcode !== rethnetStep.opcode) { - console.trace( - `Different steps[${stepIdx}] opcode: ${ethereumJSStep.opcode} !== ${rethnetStep.opcode}` - ); - throw new Error("Different step opcode"); - } + // if (ethereumJSStep.opcode !== rethnetStep.opcode) { + // console.trace( + // `Different steps[${stepIdx}] opcode: ${ethereumJSStep.opcode} !== ${rethnetStep.opcode}` + // ); + // throw new Error("Different step opcode"); + // } - if (ethereumJSStep.gasCost !== rethnetStep.gasCost) { - console.trace( - `Different steps[${stepIdx}] gasCost: ${ethereumJSStep.gasCost} !== ${rethnetStep.gasCost}` - ); - throw new Error("Different step gasCost"); - } + // if (ethereumJSStep.gasCost !== rethnetStep.gasCost) { + // console.trace( + // `Different steps[${stepIdx}] gasCost: ${ethereumJSStep.gasCost} !== ${rethnetStep.gasCost}` + // ); + // throw new Error("Different step gasCost"); + // } - if (ethereumJSStep.gasLeft !== rethnetStep.gasLeft) { - console.trace( - `Different steps[${stepIdx}] gasLeft: ${ethereumJSStep.gasLeft} !== ${rethnetStep.gasLeft}` - ); - throw new Error("Different step gasLeft"); - } + // if (ethereumJSStep.gasLeft !== rethnetStep.gasLeft) { + // console.trace( + // `Different steps[${stepIdx}] gasLeft: ${ethereumJSStep.gasLeft} !== ${rethnetStep.gasLeft}` + // ); + // throw new Error("Different step gasLeft"); + // } - const ethereumJSStack = ethereumJSStep.stack; - const rethnetStack = rethnetStep.stack; - if (ethereumJSStack.length !== rethnetStack.length) { - throw new Error( - `Different number of stack elements in tracers: ${ethereumJSStack.length} !== ${rethnetStack.length}` - ); - } + // const ethereumJSStack = ethereumJSStep.stack; + // const rethnetStack = rethnetStep.stack; + // if (ethereumJSStack.length !== rethnetStack.length) { + // throw new Error( + // `Different number of stack elements in tracers: ${ethereumJSStack.length} !== ${rethnetStack.length}` + // ); + // } - for (let stackIdx = 0; stackIdx < ethereumJSSteps.length; ++stackIdx) { - const ethereumJSStackElement = ethereumJSStack[stackIdx]; - const rethnetStackElement = rethnetStack[stackIdx]; + // for (let stackIdx = 0; stackIdx < ethereumJSSteps.length; ++stackIdx) { + // const ethereumJSStackElement = ethereumJSStack[stackIdx]; + // const rethnetStackElement = rethnetStack[stackIdx]; - if (ethereumJSStackElement !== rethnetStackElement) { - console.trace( - `Different steps[${stepIdx}] stack[${stackIdx}]: ${ethereumJSStackElement} !== ${rethnetStackElement}` - ); - throw new Error("Different step stack element"); - } - } + // if (ethereumJSStackElement !== rethnetStackElement) { + // console.trace( + // `Different steps[${stepIdx}] stack[${stackIdx}]: ${ethereumJSStackElement} !== ${rethnetStackElement}` + // ); + // throw new Error("Different step stack element"); + // } + // } - if (!ethereumJSStep.memory.equals(rethnetStep.memory)) { - console.trace( - `Different steps[${stepIdx}] memory: ${ethereumJSStep.memory} !== ${rethnetStep.memory}` - ); - throw new Error("Different step memory"); - } + // if (!ethereumJSStep.memory.equals(rethnetStep.memory)) { + // console.trace( + // `Different steps[${stepIdx}] memory: ${ethereumJSStep.memory} !== ${rethnetStep.memory}` + // ); + // throw new Error("Different step memory"); + // 
} - if (ethereumJSStep.contract.balance !== rethnetStep.contract.balance) { - console.trace( - `Different steps[${stepIdx}] contract balance: ${ethereumJSStep.contract.balance} !== ${rethnetStep.contract.balance}` - ); - throw new Error("Different step contract balance"); - } + // if (ethereumJSStep.contract.balance !== rethnetStep.contract.balance) { + // console.trace( + // `Different steps[${stepIdx}] contract balance: ${ethereumJSStep.contract.balance} !== ${rethnetStep.contract.balance}` + // ); + // throw new Error("Different step contract balance"); + // } - if (ethereumJSStep.contract.nonce !== rethnetStep.contract.nonce) { - console.trace( - `Different steps[${stepIdx}] contract nonce: ${ethereumJSStep.contract.nonce} !== ${rethnetStep.contract.nonce}` - ); - throw new Error("Different step contract nonce"); - } + // if (ethereumJSStep.contract.nonce !== rethnetStep.contract.nonce) { + // console.trace( + // `Different steps[${stepIdx}] contract nonce: ${ethereumJSStep.contract.nonce} !== ${rethnetStep.contract.nonce}` + // ); + // throw new Error("Different step contract nonce"); + // } // Code can be stored separately from the account in Rethnet // const ethereumJSCode = ethereumJSStep.contract.code; @@ -518,12 +518,12 @@ export class DualModeAdapter implements VMAdapter { // } // } - if (!ethereumJSStep.contractAddress.equals(rethnetStep.contractAddress)) { - console.trace( - `Different steps[${stepIdx}] contract address: ${ethereumJSStep.contractAddress} !== ${rethnetStep.contractAddress}` - ); - throw new Error("Different step contract address"); - } + // if (!ethereumJSStep.contractAddress.equals(rethnetStep.contractAddress)) { + // console.trace( + // `Different steps[${stepIdx}] contract address: ${ethereumJSStep.contractAddress} !== ${rethnetStep.contractAddress}` + // ); + // throw new Error("Different step contract address"); + // } } // TODO: compare each step diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index 8c10fe784b..a6129bb513 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -519,22 +519,22 @@ export class EthereumJSAdapter implements VMAdapter { await this._vmTracer.addStep({ depth: step.depth, pc: BigInt(step.pc), - opcode: step.opcode.name, + // opcode: step.opcode.name, // returnValue: 0, // Do we have error values in ethereumjs? - gasCost: BigInt(step.opcode.fee) + (step.opcode.dynamicFee ?? 0n), - gasRefunded: step.gasRefund, - gasLeft: step.gasLeft, - stack: step.stack, - memory: step.memory, - contract: { - balance: step.account.balance, - nonce: step.account.nonce, - code: { - hash: step.account.codeHash, - code: Buffer.from([]), - }, - }, - contractAddress: step.address.buf, + // gasCost: BigInt(step.opcode.fee) + (step.opcode.dynamicFee ?? 
0n), + // gasRefunded: step.gasRefund, + // gasLeft: step.gasLeft, + // stack: step.stack, + // memory: step.memory, + // contract: { + // balance: step.account.balance, + // nonce: step.account.nonce, + // code: { + // hash: step.account.codeHash, + // code: Buffer.from([]), + // }, + // }, + // contractAddress: step.address.buf, }); return next(); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 172bfae15e..bcfe420f62 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -130,10 +130,10 @@ export class RethnetAdapter implements VMAdapter { try { const result = rethnetResultToRunTxResult( - rethnetResult.execResult, + rethnetResult, blockContext.header.gasUsed ); - return [result, rethnetResult.execResult.trace]; + return [result, rethnetResult.trace]; } catch (e) { // console.log("Rethnet trace"); // console.log(rethnetResult.execResult.trace); From 0219b5787b10a7ee5f11ea9f43932cfb8db43b79 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 15:56:38 -0500 Subject: [PATCH 041/406] fix: TxPool memory leak --- .../hardhat-network/provider/TxPool.ts | 49 +- .../internal/hardhat-network/provider/node.ts | 26 +- .../hardhat-network/provider/TxPool.ts | 538 ++++++++++++++---- 3 files changed, 471 insertions(+), 142 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/TxPool.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/TxPool.ts index d11d956a9e..60584ca8f8 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/TxPool.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/TxPool.ts @@ -100,23 +100,33 @@ export class TxPool { tx: SerializedTransaction ) => OrderedTransaction; - constructor( - private readonly _getAccount: (address: Address) => Promise, - blockGasLimit: bigint, - common: Common - ) { + constructor(blockGasLimit: bigint, common: Common) { this._state = makePoolState({ blockGasLimit: BigIntUtils.toHex(blockGasLimit), }); this._deserializeTransaction = (tx) => deserializeTransaction(tx, common); } - public async addTransaction(tx: TypedTransaction) { + public async addTransaction( + getAccount: (address: Address) => Promise, + tx: TypedTransaction + ) { const senderAddress = this._getSenderAddress(tx); - const nextConfirmedNonce = await this._getNextConfirmedNonce(senderAddress); - const nextPendingNonce = await this.getNextPendingNonce(senderAddress); + const nextConfirmedNonce = await this._getNextConfirmedNonce( + getAccount, + senderAddress + ); + const nextPendingNonce = await this.getNextPendingNonce( + getAccount, + senderAddress + ); - await this._validateTransaction(tx, senderAddress, nextConfirmedNonce); + await this._validateTransaction( + getAccount, + tx, + senderAddress, + nextConfirmedNonce + ); const txNonce = tx.nonce; @@ -245,12 +255,15 @@ export class TxPool { * Returns the next available nonce for an address, taking into account * its pending transactions. 
*/ - public async getNextPendingNonce(accountAddress: Address): Promise { + public async getNextPendingNonce( + getAccount: (address: Address) => Promise, + accountAddress: Address + ): Promise { const pendingTxs = this._getPendingForAddress(accountAddress.toString()); const lastPendingTx = pendingTxs?.last(undefined); if (lastPendingTx === undefined) { - return this._getNextConfirmedNonce(accountAddress); + return this._getNextConfirmedNonce(getAccount, accountAddress); } const lastPendingTxNonce = @@ -273,12 +286,14 @@ export class TxPool { /** * Updates the pending and queued list of all addresses */ - public async updatePendingAndQueued() { + public async updatePendingAndQueued( + getAccount: (address: Address) => Promise + ) { let newPending = this._getPending(); // update pending transactions for (const [address, txs] of newPending) { - const senderAccount = await this._getAccount(Address.fromString(address)); + const senderAccount = await getAccount(Address.fromString(address)); const senderNonce = senderAccount.nonce; const senderBalance = senderAccount.balance; @@ -314,7 +329,7 @@ export class TxPool { // update queued addresses let newQueued = this._getQueued(); for (const [address, txs] of newQueued) { - const senderAccount = await this._getAccount(Address.fromString(address)); + const senderAccount = await getAccount(Address.fromString(address)); const senderNonce = senderAccount.nonce; const senderBalance = senderAccount.balance; @@ -421,6 +436,7 @@ export class TxPool { } private async _validateTransaction( + getAccount: (address: Address) => Promise, tx: TypedTransaction, senderAddress: Address, senderNonce: bigint @@ -440,7 +456,7 @@ export class TxPool { ); } - const senderAccount = await this._getAccount(senderAddress); + const senderAccount = await getAccount(senderAddress); const senderBalance = senderAccount.balance; const maxFee = "gasPrice" in tx ? tx.gasPrice : tx.maxFeePerGas; @@ -584,9 +600,10 @@ export class TxPool { * pending transactions. 
*/ private async _getNextConfirmedNonce( + getAccount: (address: Address) => Promise, accountAddress: Address ): Promise { - const account = await this._getAccount(accountAddress); + const account = await getAccount(accountAddress); return account.nonce; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 9d459b677c..36f72d7d62 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -239,11 +239,7 @@ export class HardhatNode extends EventEmitter { ); } - const txPool = new TxPool( - (address) => vm.getAccount(address), - BigInt(blockGasLimit), - common - ); + const txPool = new TxPool(BigInt(blockGasLimit), common); const instanceId = bufferToBigInt(randomBytes(32)); @@ -647,7 +643,10 @@ Hardhat Network's forking functionality only works with blocks from at least spu } public async getAccountNextPendingNonce(address: Address): Promise { - return this._txPool.getNextPendingNonce(address); + return this._txPool.getNextPendingNonce( + this._vm.getAccount.bind(this._vm), + address + ); } public async getCodeFromTrace( @@ -1225,7 +1224,9 @@ Hardhat Network's forking functionality only works with blocks from at least spu public async setBlockGasLimit(gasLimit: bigint | number) { this._txPool.setBlockGasLimit(gasLimit); - await this._txPool.updatePendingAndQueued(); + await this._txPool.updatePendingAndQueued( + this._vm.getAccount.bind(this._vm) + ); } public async setMinGasPrice(minGasPrice: bigint) { @@ -1505,7 +1506,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu } private async _addPendingTransaction(tx: TypedTransaction): Promise { - await this._txPool.addTransaction(tx); + await this._txPool.addTransaction(this._vm.getAccount.bind(this._vm), tx); await this._notifyPendingTransaction(tx); return bufferToHex(tx.hash()); } @@ -1601,7 +1602,10 @@ Hardhat Network's forking functionality only works with blocks from at least spu } // validate nonce - const nextPendingNonce = await this._txPool.getNextPendingNonce(sender); + const nextPendingNonce = await this._txPool.getNextPendingNonce( + this._vm.getAccount.bind(this._vm), + sender + ); const txNonce = tx.nonce; const expectedNonceMsg = `Expected nonce to be ${nextPendingNonce.toString()} but got ${txNonce.toString()}.`; @@ -1717,7 +1721,9 @@ Hardhat Network's forking functionality only works with blocks from at least spu const block = await blockBuilder.seal(); await this._blockchain.putBlock(block); - await this._txPool.updatePendingAndQueued(); + await this._txPool.updatePendingAndQueued( + this._vm.getAccount.bind(this._vm) + ); return { block, diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/TxPool.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/TxPool.ts index 8f8aba0dde..0f580f7e94 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/TxPool.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/TxPool.ts @@ -36,11 +36,7 @@ describe("Tx Pool", () => { beforeEach(() => { stateManager = new DefaultStateManager(); const common = new Common({ chain: "mainnet" }); - txPool = new TxPool( - (address) => stateManager.getAccount(address), - blockGasLimit, - common - ); + txPool = new TxPool(blockGasLimit, common); }); describe("addTransaction", () => { @@ -58,7 +54,10 @@ describe("Tx Pool", () => { from: address, nonce: 0, }); 
- await txPool.addTransaction(tx); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx + ); const pendingTxs = txPool.getPendingTransactions(); assert.lengthOf(txMapToArray(pendingTxs), 1); @@ -76,7 +75,10 @@ describe("Tx Pool", () => { from: address, nonce: 3, }); - await txPool.addTransaction(tx); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx + ); const pendingTxs = txPool.getPendingTransactions(); assert.equal(pendingTxs.size, 0); @@ -95,7 +97,10 @@ describe("Tx Pool", () => { }); await assert.isRejected( - txPool.addTransaction(tx), + txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx + ), Error, "Nonce too low" ); @@ -121,8 +126,14 @@ describe("Tx Pool", () => { from: address, nonce: 1, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); const pendingTxs = txPool.getPendingTransactions(); assert.sameDeepMembers( @@ -145,9 +156,18 @@ describe("Tx Pool", () => { nonce: 1, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); - await txPool.addTransaction(tx3); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3 + ); const pendingTxs = txPool.getPendingTransactions(); assert.sameDeepMembers( @@ -174,10 +194,22 @@ describe("Tx Pool", () => { nonce: 1, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); - await txPool.addTransaction(tx3); - await txPool.addTransaction(tx4); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx4 + ); const pendingTxs = txPool.getPendingTransactions(); assert.sameDeepMembers( @@ -197,8 +229,14 @@ describe("Tx Pool", () => { from: address, nonce: 2, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); const pendingTxs = txPool.getPendingTransactions(); assert.sameDeepMembers( @@ -219,7 +257,10 @@ describe("Tx Pool", () => { nonce: 0, }); await assert.isRejected( - txPool.addTransaction(tx), + txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx + ), Error, "Nonce too low" ); @@ -242,8 +283,14 @@ describe("Tx Pool", () => { nonce: 0, gasPrice: 10, }); - await txPool.addTransaction(tx1a); - await txPool.addTransaction(tx1b); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1a + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1b + ); const pendingTxs = txPool.getPendingTransactions(); assert.sameDeepMembers( @@ -267,8 +314,14 @@ describe("Tx Pool", () => { nonce: 1, gasPrice: 10, }); - await txPool.addTransaction(tx2a); - await txPool.addTransaction(tx2b); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2a + ); + await 
txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2b + ); const queuedTxs = txPool.getQueuedTransactions(); @@ -290,7 +343,10 @@ describe("Tx Pool", () => { gasPrice: 20, }); - await txPool.addTransaction(tx1a); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1a + ); const tx1b = createTestFakeTransaction({ from: address, @@ -299,7 +355,10 @@ describe("Tx Pool", () => { }); await assert.isRejected( - txPool.addTransaction(tx1b), + txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1b + ), InvalidInputError, `Replacement transaction underpriced. A gasPrice/maxFeePerGas of at least 22 is necessary to replace the existing transaction with nonce 0.` ); @@ -312,7 +371,10 @@ describe("Tx Pool", () => { }); await assert.isRejected( - txPool.addTransaction(tx1c), + txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1c + ), InvalidInputError, `Replacement transaction underpriced. A gasPrice/maxFeePerGas of at least 22 is necessary to replace the existing transaction with nonce 0.` ); @@ -325,7 +387,10 @@ describe("Tx Pool", () => { }); await assert.isRejected( - txPool.addTransaction(tx1d), + txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1d + ), InvalidInputError, `Replacement transaction underpriced. A gasPrice/maxPriorityFeePerGas of at least 22 is necessary to replace the existing transaction with nonce 0.` ); @@ -352,9 +417,15 @@ describe("Tx Pool", () => { nonce: 1, gasPrice: 21, }); - await txPool.addTransaction(tx2a); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2a + ); await assert.isRejected( - txPool.addTransaction(tx2b), + txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2b + ), InvalidInputError, `Replacement transaction underpriced. 
A gasPrice/maxFeePerGas of at least 22 is necessary to replace the existing transaction with nonce 1` ); @@ -395,8 +466,14 @@ describe("Tx Pool", () => { nonce: 0, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); const pendingTxs = txPool.getPendingTransactions(); @@ -425,10 +502,22 @@ describe("Tx Pool", () => { nonce: 1, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); - await txPool.addTransaction(tx3); - await txPool.addTransaction(tx4); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx4 + ); const pendingTxs = txPool.getPendingTransactions(); @@ -460,11 +549,26 @@ describe("Tx Pool", () => { nonce: 1, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); - await txPool.addTransaction(tx3); - await txPool.addTransaction(tx4); - await txPool.addTransaction(tx5); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx4 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx5 + ); const pendingTxs = txPool.getPendingTransactions(); @@ -504,13 +608,34 @@ describe("Tx Pool", () => { nonce: 1, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); - await txPool.addTransaction(tx3); - await txPool.addTransaction(tx4); - await txPool.addTransaction(tx5); - await txPool.addTransaction(tx6); - await txPool.addTransaction(tx7); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx4 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx5 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx6 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx7 + ); const pendingTxs = txPool.getPendingTransactions(); @@ -531,10 +656,16 @@ describe("Tx Pool", () => { const signedTx1 = tx1.sign(toBuffer(DEFAULT_ACCOUNTS[0].privateKey)); const signedTx2 = tx2.sign(toBuffer(DEFAULT_ACCOUNTS[0].privateKey)); - await txPool.addTransaction(signedTx1); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + signedTx1 + ); await assert.isRejected( - txPool.addTransaction(signedTx2), + txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + signedTx2 + ), InvalidInputError, `Known transaction: ${bufferToHex(signedTx1.hash())}` ); @@ -544,7 +675,7 @@ describe("Tx Pool", () => { const gasLimit = 15_000_000; const tx = createTestFakeTransaction({ gasLimit }); await assert.isRejected( - txPool.addTransaction(tx), + 
txPool.addTransaction(stateManager.getAccount.bind(stateManager), tx), InvalidInputError, `Transaction gas limit is ${gasLimit} and exceeds block gas limit of ${blockGasLimit}` ); @@ -553,7 +684,7 @@ describe("Tx Pool", () => { it("rejects if transaction is not signed", async () => { const tx = createUnsignedTestTransaction(); await assert.isRejected( - txPool.addTransaction(tx), + txPool.addTransaction(stateManager.getAccount.bind(stateManager), tx), InvalidInputError, "Invalid Signature" ); @@ -572,7 +703,7 @@ describe("Tx Pool", () => { }); await assert.isRejected( - txPool.addTransaction(tx), + txPool.addTransaction(stateManager.getAccount.bind(stateManager), tx), InvalidInputError, "Nonce too low" ); @@ -582,7 +713,7 @@ describe("Tx Pool", () => { const gasLimit = 100; const tx = createTestFakeTransaction({ gasLimit }); await assert.isRejected( - txPool.addTransaction(tx), + txPool.addTransaction(stateManager.getAccount.bind(stateManager), tx), InvalidInputError, `Transaction requires at least 21000 gas but got ${gasLimit}` ); @@ -593,7 +724,7 @@ describe("Tx Pool", () => { to: undefined, }); await assert.isRejected( - txPool.addTransaction(tx), + txPool.addTransaction(stateManager.getAccount.bind(stateManager), tx), InvalidInputError, "contract creation without any data provided" ); @@ -616,7 +747,7 @@ describe("Tx Pool", () => { value: 5, }); await assert.isRejected( - txPool.addTransaction(tx), + txPool.addTransaction(stateManager.getAccount.bind(stateManager), tx), InvalidInputError, "sender doesn't have enough funds to send tx" ); @@ -628,7 +759,10 @@ describe("Tx Pool", () => { value: 5, }); await assert.isRejected( - txPool.addTransaction(tx2), + txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ), InvalidInputError, "sender doesn't have enough funds to send tx" ); @@ -667,10 +801,22 @@ describe("Tx Pool", () => { nonce: 0, }); - await txPool.addTransaction(txA.data); - await txPool.addTransaction(txB.data); - await txPool.addTransaction(txC.data); - await txPool.addTransaction(txD.data); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + txA.data + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + txB.data + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + txC.data + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + txD.data + ); const pendingTxs = txPool.getPendingTransactions(); assertEqualTransactionMaps( @@ -690,7 +836,10 @@ describe("Tx Pool", () => { gasLimit: 21_000, }); - await txPool.addTransaction(tx); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx + ); const txFromTxPool = txPool.getTransactionByHash(tx.hash()); @@ -705,7 +854,10 @@ describe("Tx Pool", () => { gasLimit: 21_000, }); - await txPool.addTransaction(tx); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx + ); const txFromTxPool = txPool.getTransactionByHash(tx.hash()); @@ -721,7 +873,10 @@ describe("Tx Pool", () => { const signedTx = tx.sign(toBuffer(DEFAULT_ACCOUNTS[0].privateKey)); - await txPool.addTransaction(signedTx); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + signedTx + ); const oldTxFromTxPool = txPool.getTransactionByHash(signedTx.hash()); @@ -735,7 +890,9 @@ describe("Tx Pool", () => { }) ); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const actualTxFromTxPool = 
txPool.getTransactionByHash(signedTx.hash()); @@ -751,7 +908,10 @@ describe("Tx Pool", () => { const signedTx = tx.sign(toBuffer(DEFAULT_ACCOUNTS[0].privateKey)); - await txPool.addTransaction(signedTx); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + signedTx + ); const oldTxFromTxPool = txPool.getTransactionByHash(signedTx.hash()); @@ -765,7 +925,9 @@ describe("Tx Pool", () => { }) ); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const actualTxFromTxPool = txPool.getTransactionByHash(signedTx.hash()); @@ -789,9 +951,17 @@ describe("Tx Pool", () => { nonce: 0, }); - await txPool.addTransaction(tx1); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); - assert.isTrue((await txPool.getNextPendingNonce(address)) === 1n); + assert.isTrue( + (await txPool.getNextPendingNonce( + stateManager.getAccount.bind(stateManager), + address + )) === 1n + ); }); it("is not affected by queued transactions", async () => { @@ -804,10 +974,21 @@ describe("Tx Pool", () => { nonce: 2, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); - assert.isTrue((await txPool.getNextPendingNonce(address)) === 1n); + assert.isTrue( + (await txPool.getNextPendingNonce( + stateManager.getAccount.bind(stateManager), + address + )) === 1n + ); }); it("returns correct nonce after all queued transactions are moved to pending", async () => { @@ -824,11 +1005,25 @@ describe("Tx Pool", () => { nonce: 1, }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); - await txPool.addTransaction(tx3); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3 + ); - assert.isTrue((await txPool.getNextPendingNonce(address)) === 3n); + assert.isTrue( + (await txPool.getNextPendingNonce( + stateManager.getAccount.bind(stateManager), + address + )) === 3n + ); }); it("returns correct nonce after some queued transactions are moved to pending", async () => { @@ -837,12 +1032,29 @@ describe("Tx Pool", () => { const tx3 = createTestFakeTransaction({ from: address, nonce: 5 }); const tx4 = createTestFakeTransaction({ from: address, nonce: 1 }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); - await txPool.addTransaction(tx3); - await txPool.addTransaction(tx4); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx4 + ); - assert.isTrue((await txPool.getNextPendingNonce(address)) === 3n); + assert.isTrue( + (await txPool.getNextPendingNonce( + stateManager.getAccount.bind(stateManager), + address + )) === 3n + ); }); }); @@ -870,11 +1082,16 @@ describe("Tx Pool", () => { const tx1 = createTestTransaction({ nonce: 0, gasLimit: 9_500_000 }); const signedTx1 = tx1.sign(toBuffer(DEFAULT_ACCOUNTS[0].privateKey)); - await txPool.addTransaction(signedTx1); + await 
txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + signedTx1 + ); txPool.setBlockGasLimit(5_000_000); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const pendingTransactions = txPool.getPendingTransactions(); assertEqualTransactionMaps(pendingTransactions, makeOrderedTxMap([])); @@ -884,11 +1101,16 @@ describe("Tx Pool", () => { const tx1 = createTestTransaction({ nonce: 1, gasLimit: 9_500_000 }); const signedTx1 = tx1.sign(toBuffer(DEFAULT_ACCOUNTS[0].privateKey)); - await txPool.addTransaction(signedTx1); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + signedTx1 + ); txPool.setBlockGasLimit(5_000_000); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const queuedTransactions = txPool.getQueuedTransactions(); assertEqualTransactionMaps(queuedTransactions, makeOrderedTxMap([])); @@ -920,10 +1142,22 @@ describe("Tx Pool", () => { from: address2, }); - await txPool.addTransaction(tx1.data); - await txPool.addTransaction(tx2.data); - await txPool.addTransaction(tx3.data); - await txPool.addTransaction(tx4.data); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1.data + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2.data + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3.data + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx4.data + ); await stateManager.putAccount( address1, @@ -940,7 +1174,9 @@ describe("Tx Pool", () => { }) ); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const pendingTransactions = txPool.getPendingTransactions(); assertEqualTransactionMaps( @@ -957,14 +1193,19 @@ describe("Tx Pool", () => { }); const signedTx1 = tx1.sign(toBuffer(DEFAULT_ACCOUNTS[0].privateKey)); - await txPool.addTransaction(signedTx1); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + signedTx1 + ); await stateManager.putAccount( address1, Account.fromAccountData({ nonce: 0n, balance: 0n }) ); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const pendingTransactions = txPool.getPendingTransactions(); assertEqualTransactionMaps(pendingTransactions, makeOrderedTxMap([])); @@ -978,14 +1219,19 @@ describe("Tx Pool", () => { }); const signedTx1 = tx1.sign(toBuffer(DEFAULT_ACCOUNTS[0].privateKey)); - await txPool.addTransaction(signedTx1); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + signedTx1 + ); await stateManager.putAccount( address1, Account.fromAccountData({ nonce: 0n, balance: 0n }) ); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const queuedTransactions = txPool.getQueuedTransactions(); assertEqualTransactionMaps(queuedTransactions, makeOrderedTxMap([])); @@ -1027,11 +1273,26 @@ describe("Tx Pool", () => { from: sender, }); - await txPool.addTransaction(tx0); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); - await txPool.addTransaction(tx4); - await txPool.addTransaction(tx5); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx0 + ); + await txPool.addTransaction( + 
stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx4 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx5 + ); // pending: [0, 1, 2] // queued: [4, 5] @@ -1048,7 +1309,9 @@ describe("Tx Pool", () => { // this should drop tx1 txPool.setBlockGasLimit(150_000); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); // pending: [0] // queued: [2, 4, 5] @@ -1071,7 +1334,10 @@ describe("Tx Pool", () => { gasLimit: 100_000, from: sender, }); - await txPool.addTransaction(tx1); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); let pendingTxs = txPool.getPendingTransactions(); assert.lengthOf(txMapToArray(pendingTxs), 1); @@ -1081,14 +1347,19 @@ describe("Tx Pool", () => { assert.lengthOf(txMapToArray(queuedTxs), 0); txPool.setBlockGasLimit(90_000); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const tx2 = createTestFakeTransaction({ gasLimit: 80_000, from: sender, nonce: 0, }); - await txPool.addTransaction(tx2); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); pendingTxs = txPool.getPendingTransactions(); assert.lengthOf(txMapToArray(pendingTxs), 1); @@ -1121,9 +1392,18 @@ describe("Tx Pool", () => { from: sender, }); - await txPool.addTransaction(tx0); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx0 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); // pending: [0, 1, 2] // queued: [0] @@ -1138,13 +1418,18 @@ describe("Tx Pool", () => { // this should drop tx1 txPool.setBlockGasLimit(100_000); - await txPool.updatePendingAndQueued(); + await txPool.updatePendingAndQueued( + stateManager.getAccount.bind(stateManager) + ); const tx3 = createTestFakeTransaction({ nonce: 3, from: sender, }); - await txPool.addTransaction(tx3); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx3 + ); // pending: [0, 1, 2, 3] // queued: [] @@ -1177,7 +1462,7 @@ describe("Tx Pool", () => { txPool.setBlockGasLimit(21_000); const tx = createTestFakeTransaction({ gasLimit: 50_000 }); await assert.isRejected( - txPool.addTransaction(tx), + txPool.addTransaction(stateManager.getAccount.bind(stateManager), tx), InvalidInputError, "Transaction gas limit is 50000 and exceeds block gas limit of 21000" ); @@ -1193,7 +1478,10 @@ describe("Tx Pool", () => { it("returns a bigger snapshot id if the state changed", async () => { const id1 = txPool.snapshot(); const tx = createTestFakeTransaction(); - await txPool.addTransaction(tx); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx + ); const id2 = txPool.snapshot(); assert.isAbove(id2, id1); }); @@ -1219,7 +1507,10 @@ describe("Tx Pool", () => { orderId: 0, nonce: 0, }); - await txPool.addTransaction(tx1.data); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1.data + ); const id = txPool.snapshot(); @@ -1228,7 +1519,10 @@ describe("Tx Pool", () => { orderId: 1, nonce: 1, }); - await txPool.addTransaction(tx2.data); + 
await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2.data + ); txPool.revert(id); const pendingTransactions = txPool.getPendingTransactions(); @@ -1252,18 +1546,30 @@ describe("Tx Pool", () => { const tx1 = createTestFakeTransaction({ nonce: 0 }); const tx2 = createTestFakeTransaction({ nonce: 0 }); - await txPool.addTransaction(tx1); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); assert.isTrue(txPool.hasPendingTransactions()); - await txPool.addTransaction(tx2); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); assert.isTrue(txPool.hasPendingTransactions()); }); it("returns false when there are only queued transactions", async () => { const tx1 = createTestFakeTransaction({ nonce: 1 }); const tx2 = createTestFakeTransaction({ nonce: 1 }); - await txPool.addTransaction(tx1); - await txPool.addTransaction(tx2); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx1 + ); + await txPool.addTransaction( + stateManager.getAccount.bind(stateManager), + tx2 + ); assert.isFalse(txPool.hasPendingTransactions()); }); From 394ef359c0e612c9128a728a13521a2d4388eb41 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 15:57:42 -0500 Subject: [PATCH 042/406] fix: stored logging closure memory leak --- .../src/internal/hardhat-network/provider/modules/hardhat.ts | 5 +---- 1 file changed, 1 insertion(+), 4 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/modules/hardhat.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/modules/hardhat.ts index 428a92b3c5..14398880ca 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/modules/hardhat.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/modules/hardhat.ts @@ -40,9 +40,6 @@ export class HardhatModule { constructor( private readonly _node: HardhatNode, private readonly _resetCallback: (forkConfig?: ForkConfig) => Promise, - private readonly _setLoggingEnabledCallback: ( - loggingEnabled: boolean - ) => void, private readonly _logger: ModulesLogger, private readonly _experimentalHardhatNetworkMessageTraceHooks: BoundExperimentalHardhatNetworkMessageTraceHook[] = [] ) {} @@ -241,7 +238,7 @@ export class HardhatModule { private async _setLoggingEnabledAction( loggingEnabled: boolean ): Promise { - this._setLoggingEnabledCallback(loggingEnabled); + this._logger.setEnabled(loggingEnabled); return true; } From 345e47524019aa843680d2da468ec9d9a8cd97df Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 16:00:27 -0500 Subject: [PATCH 043/406] fix: WeakRef memory leak fixes --- .../src/internal/hardhat-network/provider/provider.ts | 11 ++++++----- 1 file changed, 6 insertions(+), 5 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/provider.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/provider.ts index 33abfcacbb..e2ce9ff039 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/provider.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/provider.ts @@ -272,12 +272,12 @@ export class HardhatNetworkProvider this._logger, this._experimentalHardhatNetworkMessageTraceHooks ); + + const provider = new WeakRef(this); this._hardhatModule = new HardhatModule( node, - (forkConfig?: ForkConfig) => this._reset(miningTimer, forkConfig), - (loggingEnabled: boolean) => { - this._logger.setEnabled(loggingEnabled); - }, + (forkConfig?: ForkConfig) => + 
provider.deref()!._reset(miningTimer, forkConfig), this._logger, this._experimentalHardhatNetworkMessageTraceHooks ); @@ -320,9 +320,10 @@ export class HardhatNetworkProvider } private _makeMiningTimer(): MiningTimer { + const provider = new WeakRef(this); const miningTimer = new MiningTimer(this._intervalMining, async () => { try { - await this.request({ method: "hardhat_intervalMine" }); + await provider.deref()!.request({ method: "hardhat_intervalMine" }); } catch (e) { console.error("Unexpected error calling hardhat_intervalMine:", e); } From bc9d82c77b6db17205c66cf0c0ac53b3dd116f43 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 16:00:59 -0500 Subject: [PATCH 044/406] fix: stored closure self reference --- .../src/internal/hardhat-network/provider/provider.ts | 8 ++++---- 1 file changed, 4 insertions(+), 4 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/provider.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/provider.ts index e2ce9ff039..14c6dc8fda 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/provider.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/provider.ts @@ -347,19 +347,19 @@ export class HardhatNetworkProvider } private _forwardNodeEvents(node: HardhatNode) { - node.addListener("ethEvent", this._ethEventListener); + node.addListener("ethEvent", this._ethEventListener.bind(this)); } private _stopForwardingNodeEvents(node: HardhatNode) { - node.removeListener("ethEvent", this._ethEventListener); + node.removeListener("ethEvent", this._ethEventListener.bind(this)); } - private _ethEventListener = (payload: { filterId: bigint; result: any }) => { + private _ethEventListener(payload: { filterId: bigint; result: any }) { const subscription = `0x${payload.filterId.toString(16)}`; const result = payload.result; this._emitLegacySubscriptionEvent(subscription, result); this._emitEip1193SubscriptionEvent(subscription, result); - }; + } private _emitLegacySubscriptionEvent(subscription: string, result: any) { this.emit("notification", { From bcaf94eedb28f95fd59c127644a1fa95f14ccf77 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 16:01:25 -0500 Subject: [PATCH 045/406] fix: self references in vm adapters --- .../hardhat-network/provider/vm/ethereumjs.ts | 30 +++++++++++++------ .../hardhat-network/provider/vm/rethnet.ts | 28 ++++++++--------- 2 files changed, 35 insertions(+), 23 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index a6129bb513..73d28562ed 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -67,9 +67,15 @@ export class EthereumJSAdapter implements VMAdapter { "EVM should have an 'events' property" ); - this._vm.evm.events.on("beforeMessage", this._beforeMessageHandler); - this._vm.evm.events.on("step", this._stepHandler); - this._vm.evm.events.on("afterMessage", this._afterMessageHandler); + this._vm.evm.events.on( + "beforeMessage", + this._beforeMessageHandler.bind(this) + ); + this._vm.evm.events.on("step", this._stepHandler.bind(this)); + this._vm.evm.events.on( + "afterMessage", + this._afterMessageHandler.bind(this) + ); } public static async create( @@ -494,7 +500,10 @@ export class EthereumJSAdapter implements VMAdapter { return this._common.gteHardfork("london"); } - private 
_beforeMessageHandler = async (message: Message, next: any) => { + private async _beforeMessageHandler( + message: Message, + next: any + ): Promise { try { const code = message.to !== undefined @@ -512,9 +521,9 @@ export class EthereumJSAdapter implements VMAdapter { } catch (e) { return next(e); } - }; + } - private _stepHandler = async (step: InterpreterStep, next: any) => { + private async _stepHandler(step: InterpreterStep, next: any): Promise { try { await this._vmTracer.addStep({ depth: step.depth, @@ -541,9 +550,12 @@ export class EthereumJSAdapter implements VMAdapter { } catch (e) { return next(e); } - }; + } - private _afterMessageHandler = async (result: EVMResult, next: any) => { + private async _afterMessageHandler( + result: EVMResult, + next: any + ): Promise { try { const gasUsed = result.execResult.executionGasUsed; @@ -611,5 +623,5 @@ export class EthereumJSAdapter implements VMAdapter { } catch (e) { return next(e); } - }; + } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index bcfe420f62..b5ef329071 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -108,9 +108,9 @@ export class RethnetAdapter implements VMAdapter { ); const tracer = new Tracer({ - beforeMessage: this._beforeMessageHandler, - step: this._stepHandler, - afterMessage: this._afterMessageHandler, + beforeMessage: this._beforeMessageHandler.bind(this), + step: this._stepHandler.bind(this), + afterMessage: this._afterMessageHandler.bind(this), }); const rethnetResult = await this._rethnet.guaranteedDryRun( @@ -302,9 +302,9 @@ export class RethnetAdapter implements VMAdapter { ); const tracer = new Tracer({ - beforeMessage: this._beforeMessageHandler, - step: this._stepHandler, - afterMessage: this._afterMessageHandler, + beforeMessage: this._beforeMessageHandler.bind(this), + step: this._stepHandler.bind(this), + afterMessage: this._afterMessageHandler.bind(this), }); const rethnetResult = await this._rethnet.run( @@ -447,21 +447,21 @@ export class RethnetAdapter implements VMAdapter { return undefined; } - private _beforeMessageHandler = async ( + private async _beforeMessageHandler( message: TracingMessage, next: any - ) => { + ): Promise { await this._vmTracer.addBeforeMessage(message); - }; + } - private _stepHandler = async (step: TracingStep, _next: any) => { + private async _stepHandler(step: TracingStep, _next: any): Promise { await this._vmTracer.addStep(step); - }; + } - private _afterMessageHandler = async ( + private async _afterMessageHandler( result: TracingMessageResult, _next: any - ) => { + ): Promise { await this._vmTracer.addAfterMessage(result); - }; + } } From 31496f50d54c54b90050b7a58f1632023be36eba Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 16:40:48 -0500 Subject: [PATCH 046/406] feat: add tracing instrumentation (#3839) --- crates/rethnet_eth/src/remote.rs | 1 + crates/rethnet_evm/Cargo.toml | 4 + crates/rethnet_evm/src/block/builder.rs | 14 ++- crates/rethnet_evm/src/blockchain/sync.rs | 1 + crates/rethnet_evm/src/evm.rs | 12 +- crates/rethnet_evm/src/lib.rs | 1 + crates/rethnet_evm/src/runtime.rs | 19 ++- crates/rethnet_evm/src/state/debug.rs | 33 +++++- crates/rethnet_evm/src/state/layered_state.rs | 4 +- crates/rethnet_evm/src/state/remote.rs | 4 + crates/rethnet_evm/src/state/request.rs | 108 +----------------- 
crates/rethnet_evm/src/state/sync.rs | 47 ++++++-- crates/rethnet_evm_napi/Cargo.toml | 7 +- crates/rethnet_evm_napi/package.json | 1 + crates/rethnet_evm_napi/src/blockchain.rs | 8 +- crates/rethnet_evm_napi/src/lib.rs | 1 + crates/rethnet_evm_napi/src/logger.rs | 39 +++++++ crates/rethnet_evm_napi/src/runtime.rs | 17 +-- crates/rethnet_evm_napi/src/state.rs | 34 +++++- crates/rethnet_evm_napi/src/tracer.rs | 6 +- .../rethnet_evm_napi/src/tracer/js_tracer.rs | 17 ++- packages/hardhat-core/package.json | 2 + 22 files changed, 230 insertions(+), 150 deletions(-) create mode 100644 crates/rethnet_evm_napi/src/logger.rs diff --git a/crates/rethnet_eth/src/remote.rs b/crates/rethnet_eth/src/remote.rs index ac5a638367..5995b5a0d4 100644 --- a/crates/rethnet_eth/src/remote.rs +++ b/crates/rethnet_eth/src/remote.rs @@ -47,6 +47,7 @@ pub enum RpcClientError { } /// A client for executing RPC methods on a remote Ethereum node +#[derive(Debug)] pub struct RpcClient { url: String, client: reqwest::Client, diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index b85cfa0e66..04793616f0 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -13,6 +13,10 @@ revm = { git = "https://github.com/bluealloy/revm", rev = "3789509", version = " # revm = { path = "../../../revm/crates/revm", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } thiserror = { version = "1.0.38", default-features = false } tokio = { version = "1.21.2", default-features = false, features = ["rt-multi-thread", "sync"] } +tracing = { version = "0.1.37", features = ["attributes", "std"], optional = true } [dev-dependencies] test-with = { version = "0.9.1", default-features = false } + +[features] +tracing = ["dep:tracing"] diff --git a/crates/rethnet_evm/src/block/builder.rs b/crates/rethnet_evm/src/block/builder.rs index 2afed9532e..31c6f2dcd1 100644 --- a/crates/rethnet_evm/src/block/builder.rs +++ b/crates/rethnet_evm/src/block/builder.rs @@ -7,13 +7,15 @@ use rethnet_eth::{ use revm::{ db::DatabaseComponentError, primitives::{BlockEnv, CfgEnv, EVMError, ExecutionResult, InvalidTransaction, SpecId, TxEnv}, - Inspector, }; use tokio::runtime::Runtime; use crate::{ - blockchain::AsyncBlockchain, evm::run_transaction, runtime::AsyncDatabase, state::AsyncState, - trace::Trace, HeaderData, + blockchain::AsyncBlockchain, + evm::{run_transaction, AsyncInspector}, + state::{AccountModifierFn, AsyncState}, + trace::Trace, + HeaderData, }; #[derive(Debug, thiserror::Error)] @@ -116,7 +118,7 @@ where pub async fn add_transaction( &mut self, transaction: TxEnv, - inspector: Option> + Send>>, + inspector: Option>>, ) -> Result<(ExecutionResult, Trace), BlockTransactionError> { // transaction's gas limit cannot be greater than the remaining gas in the block if U256::from(transaction.gas_limit) > self.gas_remaining() { @@ -165,7 +167,9 @@ where self.state .modify_account( address, - Box::new(move |balance, _nonce, _code| *balance += reward), + AccountModifierFn::new(Box::new(move |balance, _nonce, _code| { + *balance += reward; + })), ) .await?; } diff --git a/crates/rethnet_evm/src/blockchain/sync.rs b/crates/rethnet_evm/src/blockchain/sync.rs index dd7c8a8c35..4bef0acfeb 100644 --- a/crates/rethnet_evm/src/blockchain/sync.rs +++ b/crates/rethnet_evm/src/blockchain/sync.rs @@ -30,6 +30,7 @@ where /// A helper class for converting a synchronous blockchain into an asynchronous blockchain. /// /// Requires the inner blockchain to implement [`Blockchain`]. 
+#[derive(Debug)] pub struct AsyncBlockchain where E: Debug + Send, diff --git a/crates/rethnet_evm/src/evm.rs b/crates/rethnet_evm/src/evm.rs index 572a2c5130..9cf6eadbff 100644 --- a/crates/rethnet_evm/src/evm.rs +++ b/crates/rethnet_evm/src/evm.rs @@ -15,8 +15,17 @@ use crate::{ trace::{Trace, TraceCollector}, }; +/// Super trait for an inspector of an `AsyncDatabase` that's debuggable. +pub trait AsyncInspector: Inspector> + Debug + Send +where + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, +{ +} + /// Creates an evm from the provided database, config, transaction, and block. #[allow(clippy::type_complexity)] +#[cfg_attr(feature = "tracing", tracing::instrument)] fn build_evm( blockchain: Arc>, state: Arc>, @@ -41,6 +50,7 @@ where } #[allow(clippy::type_complexity)] +#[cfg_attr(feature = "tracing", tracing::instrument)] pub fn run_transaction( runtime: &Runtime, blockchain: Arc>, @@ -48,7 +58,7 @@ pub fn run_transaction( cfg: CfgEnv, transaction: TxEnv, block: BlockEnv, - inspector: Option> + Send>>, + inspector: Option>>, ) -> JoinHandle>>> where BE: Debug + Send + 'static, diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs index ff2167a5d3..d89b9fb640 100644 --- a/crates/rethnet_evm/src/lib.rs +++ b/crates/rethnet_evm/src/lib.rs @@ -21,6 +21,7 @@ pub use revm::{ pub use crate::{ block::{BlockBuilder, HeaderData}, + evm::AsyncInspector, runtime::{AsyncDatabase, Rethnet}, transaction::{PendingTransaction, TransactionError}, }; diff --git a/crates/rethnet_evm/src/runtime.rs b/crates/rethnet_evm/src/runtime.rs index 41d5752f0d..a135fada88 100644 --- a/crates/rethnet_evm/src/runtime.rs +++ b/crates/rethnet_evm/src/runtime.rs @@ -3,18 +3,22 @@ use std::{fmt::Debug, sync::Arc}; use revm::{ db::DatabaseComponents, primitives::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}, - Inspector, }; use crate::{ - blockchain::AsyncBlockchain, evm::run_transaction, state::AsyncState, trace::Trace, - transaction::TransactionError, State, + blockchain::AsyncBlockchain, + evm::{run_transaction, AsyncInspector}, + state::AsyncState, + trace::Trace, + transaction::TransactionError, + State, }; /// Asynchronous implementation of the Database super-trait pub type AsyncDatabase = DatabaseComponents>, Arc>>; /// The asynchronous Rethnet runtime. +#[derive(Debug)] pub struct Rethnet where BE: Debug + Send + 'static, @@ -40,11 +44,12 @@ where } /// Runs a transaction without committing the state. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn dry_run( &self, transaction: TxEnv, block: BlockEnv, - inspector: Option> + Send>>, + inspector: Option>>, ) -> Result<(ExecutionResult, State, Trace), TransactionError> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); @@ -65,11 +70,12 @@ where } /// Runs a transaction without committing the state, while disabling balance checks and creating accounts for new addresses. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn guaranteed_dry_run( &self, transaction: TxEnv, block: BlockEnv, - inspector: Option> + Send>>, + inspector: Option>>, ) -> Result<(ExecutionResult, State, Trace), TransactionError> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); @@ -93,11 +99,12 @@ where } /// Runs a transaction, committing the state in the process. 
+ #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn run( &self, transaction: TxEnv, block: BlockEnv, - inspector: Option> + Send>>, + inspector: Option>>, ) -> Result<(ExecutionResult, Trace), TransactionError> { let (result, changes, trace) = self.dry_run(transaction, block, inspector).await?; diff --git a/crates/rethnet_evm/src/state/debug.rs b/crates/rethnet_evm/src/state/debug.rs index ecaa084d82..bf1f9aec87 100644 --- a/crates/rethnet_evm/src/state/debug.rs +++ b/crates/rethnet_evm/src/state/debug.rs @@ -1,9 +1,38 @@ +use std::{fmt::Debug, ops::Deref}; + use auto_impl::auto_impl; use rethnet_eth::{Address, B256, U256}; use revm::primitives::{AccountInfo, Bytecode}; -/// Function type for modifying account information. -pub type AccountModifierFn = Box) + Send>; +/// Debuggable function type for modifying account information. +pub struct AccountModifierFn { + inner: Box) + Send>, +} + +impl AccountModifierFn { + /// Constructs an [`AccountModifierDebuggableFn`] from the provided function. + pub fn new(func: Box) + Send>) -> Self { + Self { inner: func } + } +} + +impl Debug for AccountModifierFn { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!( + f, + "{}", + std::any::type_name::)>() + ) + } +} + +impl Deref for AccountModifierFn { + type Target = dyn Fn(&mut U256, &mut u64, &mut Option); + + fn deref(&self) -> &Self::Target { + self.inner.as_ref() + } +} /// A trait for debug operation on a database. #[auto_impl(Box)] diff --git a/crates/rethnet_evm/src/state/layered_state.rs b/crates/rethnet_evm/src/state/layered_state.rs index 2b815f869e..b6a0f8564a 100644 --- a/crates/rethnet_evm/src/state/layered_state.rs +++ b/crates/rethnet_evm/src/state/layered_state.rs @@ -11,7 +11,7 @@ use revm::{ DatabaseCommit, }; -use super::{StateDebug, StateError}; +use super::{AccountModifierFn, StateDebug, StateError}; /// A state consisting of layers. #[derive(Clone, Debug)] @@ -356,7 +356,7 @@ impl StateDebug for LayeredState { fn modify_account( &mut self, address: Address, - modifier: Box) + Send>, + modifier: AccountModifierFn, ) -> Result<(), Self::Error> { let account_info = self.account_or_insert_mut(&address); let old_code_hash = account_info.code_hash; diff --git a/crates/rethnet_evm/src/state/remote.rs b/crates/rethnet_evm/src/state/remote.rs index 06c0b420bc..b8da9b5dba 100644 --- a/crates/rethnet_evm/src/state/remote.rs +++ b/crates/rethnet_evm/src/state/remote.rs @@ -10,6 +10,7 @@ use rethnet_eth::{ }; /// An revm database backed by a remote Ethereum node +#[derive(Debug)] pub struct RemoteDatabase { client: RpcClient, runtime: Runtime, @@ -28,6 +29,7 @@ pub enum RemoteDatabaseError { impl RemoteDatabase { /// Construct a new RemoteDatabse given the URL of a remote Ethereum node. 
+ #[cfg_attr(feature = "tracing", tracing::instrument)] pub fn new(url: &str) -> Self { Self { client: RpcClient::new(url), @@ -51,6 +53,7 @@ impl RemoteDatabase { impl StateRef for RemoteDatabase { type Error = RemoteDatabaseError; + #[cfg_attr(feature = "tracing", tracing::instrument)] fn basic(&self, address: Address) -> Result, Self::Error> { Ok(Some( self.runtime @@ -64,6 +67,7 @@ impl StateRef for RemoteDatabase { unimplemented!(); } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn storage(&self, address: Address, index: U256) -> Result { self.runtime .block_on(self.client.get_storage_at(&address, index, None)) diff --git a/crates/rethnet_evm/src/state/request.rs b/crates/rethnet_evm/src/state/request.rs index f6a1c6f8ed..6a1fe0bac1 100644 --- a/crates/rethnet_evm/src/state/request.rs +++ b/crates/rethnet_evm/src/state/request.rs @@ -12,10 +12,8 @@ use tokio::sync::oneshot; use crate::state::{AccountModifierFn, StateDebug}; /// The request type used internally by a [`SyncDatabase`]. -pub enum Request -where - E: Debug, -{ +#[derive(Debug)] +pub enum Request { AccountByAddress { address: Address, sender: oneshot::Sender, E>>, @@ -84,9 +82,10 @@ impl Request where E: Debug, { + #[cfg_attr(feature = "tracing", tracing::instrument)] pub fn handle(self, state: &mut S) -> bool where - S: State + DatabaseCommit + StateDebug, + S: State + DatabaseCommit + StateDebug + Debug, { match self { Request::AccountByAddress { address, sender } => { @@ -148,102 +147,3 @@ where true } } - -impl Debug for Request -where - E: Debug, -{ - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - match self { - Self::AccountByAddress { address, sender } => f - .debug_struct("AccountByAddress") - .field("address", address) - .field("sender", sender) - .finish(), - Self::AccountStorageRoot { address, sender } => f - .debug_struct("AccountStorageRoot") - .field("address", address) - .field("sender", sender) - .finish(), - Self::Checkpoint { sender } => f - .debug_struct("Checkpoint") - .field("sender", sender) - .finish(), - Self::CodeByHash { code_hash, sender } => f - .debug_struct("CodeByHash") - .field("code_hash", code_hash) - .field("sender", sender) - .finish(), - Self::Commit { changes, sender } => f - .debug_struct("Commit") - .field("changes", changes) - .field("sender", sender) - .finish(), - Self::InsertAccount { - address, - account_info, - sender, - } => f - .debug_struct("InsertAccount") - .field("address", address) - .field("account_info", account_info) - .field("sender", sender) - .finish(), - Self::MakeSnapshot { sender } => f - .debug_struct("MakeSnapshot") - .field("sender", sender) - .finish(), - Self::ModifyAccount { - address, - modifier: _modifier, - sender, - } => f - .debug_struct("ModifyAccount") - .field("address", address) - .field("sender", sender) - .finish(), - Self::RemoveAccount { address, sender } => f - .debug_struct("RemoveAccount") - .field("address", address) - .field("sender", sender) - .finish(), - Self::RemoveSnapshot { state_root, sender } => f - .debug_struct("RemoveSnapshot") - .field("state_root", state_root) - .field("sender", sender) - .finish(), - Self::Revert { sender } => f.debug_struct("Revert").field("sender", sender).finish(), - Self::SetStorageSlot { - address, - index, - value, - sender, - } => f - .debug_struct("SetStorageSlot") - .field("address", address) - .field("index", index) - .field("value", value) - .field("sender", sender) - .finish(), - Self::SetStateRoot { state_root, sender } => f - .debug_struct("SetStateRoot") - 
.field("state_root", state_root) - .field("sender", sender) - .finish(), - Self::StateRoot { sender } => { - f.debug_struct("StateRoot").field("sender", sender).finish() - } - Self::StorageSlot { - address, - index, - sender, - } => f - .debug_struct("StorageSlot") - .field("address", address) - .field("index", index) - .field("sender", sender) - .finish(), - Self::Terminate => write!(f, "Terminate"), - } - } -} diff --git a/crates/rethnet_evm/src/state/sync.rs b/crates/rethnet_evm/src/state/sync.rs index f47381e34c..1b09e331df 100644 --- a/crates/rethnet_evm/src/state/sync.rs +++ b/crates/rethnet_evm/src/state/sync.rs @@ -22,7 +22,7 @@ use super::request::Request; /// Trait that meets all requirements for a synchronous database that can be used by [`AsyncDatabase`]. pub trait SyncState: - State + DatabaseCommit + StateDebug + Send + Sync + 'static + State + DatabaseCommit + StateDebug + Debug + Send + Sync + 'static where E: Debug + Send, { @@ -30,7 +30,7 @@ where impl SyncState for S where - S: State + DatabaseCommit + StateDebug + Send + Sync + 'static, + S: State + DatabaseCommit + StateDebug + Debug + Send + Sync + 'static, E: Debug + Send, { } @@ -38,6 +38,8 @@ where /// A helper class for converting a synchronous database into an asynchronous database. /// /// Requires the inner database to implement [`Database`], [`DatabaseCommit`], and [`DatabaseDebug`]. + +#[derive(Debug)] pub struct AsyncState where E: Debug + Send, @@ -52,6 +54,7 @@ where E: Debug + Send + 'static, { /// Constructs an [`AsyncDatabase`] instance with the provided database. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub fn new>(mut state: S) -> io::Result { let runtime = Builder::new_multi_thread().build()?; @@ -78,6 +81,7 @@ where } /// Retrieves the account corresponding to the specified address. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn account_by_address(&self, address: Address) -> Result, E> { let (sender, receiver) = oneshot::channel(); @@ -89,6 +93,7 @@ where } /// Retrieves the storage root of the account at the specified address. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn account_storage_root(&self, address: &Address) -> Result, E> { let (sender, receiver) = oneshot::channel(); @@ -103,6 +108,7 @@ where } /// Retrieves the storage slot corresponding to the specified address and index. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn account_storage_slot(&self, address: Address, index: U256) -> Result { let (sender, receiver) = oneshot::channel(); @@ -118,6 +124,7 @@ where } /// Applies the provided changes to the state. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn apply(&self, changes: HashMap) { let (sender, receiver) = oneshot::channel(); @@ -129,6 +136,7 @@ where } /// Creates a state checkpoint that can be reverted to using [`revert`]. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn checkpoint(&self) -> Result<(), E> { let (sender, receiver) = oneshot::channel(); @@ -140,6 +148,7 @@ where } /// Retrieves the code corresponding to the specified hash. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn code_by_hash(&self, code_hash: B256) -> Result { let (sender, receiver) = oneshot::channel(); @@ -151,6 +160,7 @@ where } /// Inserts the specified account into the state. 
+ #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn insert_account( &self, address: Address, @@ -170,6 +180,7 @@ where } /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn make_snapshot(&self) -> (B256, bool) { let (sender, receiver) = oneshot::channel(); @@ -181,6 +192,7 @@ where } /// Modifies the account at the specified address using the provided function. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn modify_account( &self, address: Address, @@ -200,6 +212,7 @@ where } /// Removes and returns the account at the specified address, if it exists. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn remove_account(&self, address: Address) -> Result, E> { let (sender, receiver) = oneshot::channel(); @@ -211,6 +224,7 @@ where } /// Removes the snapshot corresponding to the specified id, if it exists. Returns whether a snapshot was removed. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn remove_snapshot(&self, state_root: B256) -> bool { let (sender, receiver) = oneshot::channel(); @@ -222,6 +236,7 @@ where } /// Reverts to the previous checkpoint, created using [`checkpoint`]. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn revert(&self) -> Result<(), E> { let (sender, receiver) = oneshot::channel(); @@ -233,6 +248,7 @@ where } /// Sets the storage slot at the specified address and index to the provided value. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn set_account_storage_slot( &self, address: Address, @@ -254,6 +270,7 @@ where } /// Reverts the state to match the specified state root. + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn set_state_root(&self, state_root: &B256) -> Result<(), E> { let (sender, receiver) = oneshot::channel(); @@ -268,6 +285,7 @@ where } /// Retrieves the state's root. 
+ #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn state_root(&self) -> Result { let (sender, receiver) = oneshot::channel(); @@ -283,6 +301,7 @@ impl Drop for AsyncState where E: Debug + Send, { + #[cfg_attr(feature = "tracing", tracing::instrument)] fn drop(&mut self) { if let Some(handle) = self.db_handle.take() { self.request_sender @@ -300,6 +319,7 @@ where { type Error = E; + #[cfg_attr(feature = "tracing", tracing::instrument)] fn basic(&self, address: Address) -> Result, Self::Error> { task::block_in_place(move || { self.runtime @@ -307,6 +327,7 @@ where }) } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn code_by_hash(&self, code_hash: B256) -> Result { task::block_in_place(move || { self.runtime @@ -314,6 +335,7 @@ where }) } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn storage(&self, address: Address, index: U256) -> Result { task::block_in_place(move || { self.runtime @@ -326,6 +348,7 @@ impl<'d, E> DatabaseCommit for &'d AsyncState where E: Debug + Send + 'static, { + #[cfg_attr(feature = "tracing", tracing::instrument)] fn commit(&mut self, changes: HashMap) { task::block_in_place(move || self.runtime.block_on(self.apply(changes))) } @@ -337,6 +360,7 @@ where { type Error = E; + #[cfg_attr(feature = "tracing", tracing::instrument)] fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { task::block_in_place(move || { self.runtime @@ -344,6 +368,7 @@ where }) } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn insert_account( &mut self, address: Address, @@ -355,10 +380,11 @@ where }) } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn modify_account( &mut self, address: Address, - modifier: Box) + Send>, + modifier: AccountModifierFn, ) -> Result<(), Self::Error> { task::block_in_place(move || { self.runtime @@ -366,6 +392,7 @@ where }) } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn remove_account(&mut self, address: Address) -> Result, Self::Error> { task::block_in_place(move || { self.runtime @@ -386,6 +413,12 @@ where }) } + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn state_root(&mut self) -> Result { + task::block_in_place(move || self.runtime.block_on(AsyncState::state_root(*self))) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { task::block_in_place(move || { self.runtime @@ -393,22 +426,22 @@ where }) } - fn state_root(&mut self) -> Result { - task::block_in_place(move || self.runtime.block_on(AsyncState::state_root(*self))) - } - + #[cfg_attr(feature = "tracing", tracing::instrument)] fn checkpoint(&mut self) -> Result<(), Self::Error> { task::block_in_place(move || self.runtime.block_on(AsyncState::checkpoint(*self))) } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn revert(&mut self) -> Result<(), Self::Error> { task::block_in_place(move || self.runtime.block_on(AsyncState::revert(*self))) } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn make_snapshot(&mut self) -> (B256, bool) { task::block_in_place(move || self.runtime.block_on(AsyncState::make_snapshot(*self))) } + #[cfg_attr(feature = "tracing", tracing::instrument)] fn remove_snapshot(&mut self, state_root: &B256) -> bool { task::block_in_place(move || { self.runtime diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index ef01898c36..e05efaf300 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -12,11 +12,16 @@ 
crossbeam-channel = { version = "0.5.6", default-features = false } napi = { version = "2.12.4", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } napi-derive = "2.12.3" once_cell = "1.15.0" -pretty_env_logger = "0.4.0" rethnet_evm = { version = "0.1.0-dev", path = "../rethnet_evm" } rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth" } secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc"] } serde_json = { version = "1.0.85", default-features = false, features = ["alloc"] } +tracing = { version = "0.1.37", default-features = false, features = ["std"] } +tracing-flame = { version = "0.2.0", default-features = false, features = ["smallvec"] } +tracing-subscriber = { version = "0.3.16", default-features = false, features = ["ansi", "env-filter", "fmt", "parking_lot", "smallvec", "std"] } [build-dependencies] napi-build = "2.0.1" + +[features] +tracing = ["rethnet_evm/tracing"] diff --git a/crates/rethnet_evm_napi/package.json b/crates/rethnet_evm_napi/package.json index 1b360dc4c2..f47230e241 100644 --- a/crates/rethnet_evm_napi/package.json +++ b/crates/rethnet_evm_napi/package.json @@ -11,6 +11,7 @@ "scripts": { "build": "napi build --release", "build:debug": "napi build", + "build:tracing": "napi build --release --features tracing", "test": "yarn tsc && mocha --recursive \"test/**/*.ts\" --exit", "clean": "rm -rf rethnet-evm.node" }, diff --git a/crates/rethnet_evm_napi/src/blockchain.rs b/crates/rethnet_evm_napi/src/blockchain.rs index 4ddea105aa..cec1916abc 100644 --- a/crates/rethnet_evm_napi/src/blockchain.rs +++ b/crates/rethnet_evm_napi/src/blockchain.rs @@ -1,6 +1,6 @@ mod js_blockchain; -use std::sync::Arc; +use std::{fmt::Debug, sync::Arc}; use napi::{bindgen_prelude::Buffer, Env, JsFunction, NapiRaw, Status}; use napi_derive::napi; @@ -8,6 +8,7 @@ use rethnet_eth::B256; use rethnet_evm::blockchain::{AsyncBlockchain, SyncBlockchain}; use crate::{ + logger::enable_logging, sync::{await_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction}, }; @@ -16,6 +17,7 @@ use self::js_blockchain::{GetBlockHashCall, JsBlockchain}; /// The Rethnet blockchain #[napi] +#[derive(Debug)] pub struct Blockchain { inner: Arc>, } @@ -31,11 +33,14 @@ impl Blockchain { impl Blockchain { /// Constructs a new blockchain that queries the blockhash using a callback. 
#[napi(constructor)] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn new( env: Env, #[napi(ts_arg_type = "(blockNumber: bigint) => Promise")] get_block_hash_fn: JsFunction, ) -> napi::Result { + enable_logging(); + let get_block_hash_fn = ThreadsafeFunction::create( env.raw(), unsafe { get_block_hash_fn.raw() }, @@ -57,6 +62,7 @@ impl Blockchain { Self::with_blockchain(JsBlockchain { get_block_hash_fn }) } + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn with_blockchain(blockchain: B) -> napi::Result where B: SyncBlockchain, diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index fd188fdd7a..be2f80bc41 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -8,6 +8,7 @@ mod blockchain; mod cast; mod config; mod log; +mod logger; mod receipt; /// Rethnet runtime for executing individual transactions mod runtime; diff --git a/crates/rethnet_evm_napi/src/logger.rs b/crates/rethnet_evm_napi/src/logger.rs new file mode 100644 index 0000000000..af5da362c8 --- /dev/null +++ b/crates/rethnet_evm_napi/src/logger.rs @@ -0,0 +1,39 @@ +use once_cell::sync::OnceCell; +use tracing_subscriber::{prelude::*, EnvFilter, Registry}; + +struct Logger { + #[cfg(feature = "tracing")] + _guard: tracing_flame::FlushGuard>, +} + +unsafe impl Sync for Logger {} + +static LOGGER: OnceCell = OnceCell::new(); + +pub fn enable_logging() { + let _logger = LOGGER.get_or_init(|| { + let fmt_layer = tracing_subscriber::fmt::layer() + .with_file(true) + .with_line_number(true) + .with_thread_ids(true) + .with_target(false) + .with_level(true) + .with_filter(EnvFilter::from_default_env()); + + #[cfg(feature = "tracing")] + let (flame_layer, _guard) = tracing_flame::FlameLayer::with_file("tracing.folded").unwrap(); + + let subscriber = Registry::default().with(fmt_layer); + + #[cfg(feature = "tracing")] + let subscriber = subscriber.with(flame_layer); + + tracing::subscriber::set_global_default(subscriber) + .expect("Could not set global default tracing subscriber"); + + Logger { + #[cfg(feature = "tracing")] + _guard, + } + }); +} diff --git a/crates/rethnet_evm_napi/src/runtime.rs b/crates/rethnet_evm_napi/src/runtime.rs index ebd0f896a4..472ae5614e 100644 --- a/crates/rethnet_evm_napi/src/runtime.rs +++ b/crates/rethnet_evm_napi/src/runtime.rs @@ -1,6 +1,5 @@ use napi::Status; use napi_derive::napi; -use once_cell::sync::OnceCell; use rethnet_evm::{ state::StateError, BlockEnv, CfgEnv, InvalidTransaction, TransactionError, TxEnv, }; @@ -14,14 +13,9 @@ use crate::{ transaction::{result::ExecutionResult, Transaction}, }; -struct Logger; - -unsafe impl Sync for Logger {} - -static LOGGER: OnceCell = OnceCell::new(); - /// The Rethnet runtime, which can execute individual transactions. #[napi] +#[derive(Debug)] pub struct Rethnet { runtime: rethnet_evm::Rethnet, } @@ -30,16 +24,12 @@ pub struct Rethnet { impl Rethnet { /// Constructs a `Rethnet` runtime. #[napi(constructor)] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn new( blockchain: &Blockchain, state_manager: &StateManager, cfg: Config, ) -> napi::Result { - let _logger = LOGGER.get_or_init(|| { - pretty_env_logger::init(); - Logger - }); - let cfg = CfgEnv::try_from(cfg)?; let runtime = rethnet_evm::Rethnet::new( @@ -53,6 +43,7 @@ impl Rethnet { /// Executes the provided transaction without changing state. 
#[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn dry_run( &self, transaction: Transaction, @@ -75,6 +66,7 @@ impl Rethnet { /// Executes the provided transaction without changing state, ignoring validation checks in the process. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn guaranteed_dry_run( &self, transaction: Transaction, @@ -97,6 +89,7 @@ impl Rethnet { /// Executes the provided transaction, changing state in the process. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn run( &self, transaction: Transaction, diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 9b2c10278d..359943838d 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -10,7 +10,10 @@ use napi::{bindgen_prelude::*, JsFunction, JsObject, NapiRaw, Status}; use napi_derive::napi; use rethnet_eth::{signature::private_key_to_address, Address, Bytes, B256, U256}; use rethnet_evm::{ - state::{AsyncState, LayeredState, RethnetLayer, StateDebug, StateError, SyncState}, + state::{ + AccountModifierFn, AsyncState, LayeredState, RethnetLayer, StateDebug, StateError, + SyncState, + }, AccountInfo, Bytecode, HashMap, KECCAK_EMPTY, }; use secp256k1::Secp256k1; @@ -18,6 +21,7 @@ use secp256k1::Secp256k1; use crate::{ account::Account, cast::TryCast, + logger::enable_logging, sync::{await_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, }; @@ -49,6 +53,7 @@ pub struct SnapshotId { /// The Rethnet state #[napi] +#[derive(Debug)] pub struct StateManager { pub(super) state: Arc>, } @@ -57,12 +62,14 @@ pub struct StateManager { impl StateManager { /// Constructs a [`StateManager`] with an empty state. #[napi(constructor)] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn new() -> napi::Result { Self::with_accounts(HashMap::default()) } /// Constructs a [`StateManager`] with the provided accounts present in the genesis state. #[napi(factory)] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn with_genesis_accounts(accounts: Vec) -> napi::Result { let context = Secp256k1::signing_only(); let genesis_accounts = accounts @@ -84,6 +91,7 @@ impl StateManager { Self::with_accounts(genesis_accounts) } + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn with_accounts(mut accounts: HashMap) -> napi::Result { // Mimic precompiles activation for idx in 1..=8 { @@ -99,10 +107,13 @@ impl StateManager { Self::with_state(state) } + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn with_state(state: S) -> napi::Result where S: SyncState, { + enable_logging(); + let state: Box> = Box::new(state); let state = AsyncState::new(state) .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; @@ -114,15 +125,19 @@ impl StateManager { /// Creates a state checkpoint that can be reverted to using [`revert`]. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn checkpoint(&self) -> napi::Result<()> { self.state .checkpoint() .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + Ok(()) } /// Reverts to the previous checkpoint, created using [`checkpoint`]. 
#[napi] + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn revert(&self) -> napi::Result<()> { self.state .revert() @@ -132,6 +147,7 @@ impl StateManager { /// Retrieves the account corresponding to the specified address. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn get_account_by_address(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); @@ -157,6 +173,7 @@ impl StateManager { /// Retrieves the storage root of the account at the specified address. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn get_account_storage_root(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); @@ -168,6 +185,7 @@ impl StateManager { /// Retrieves the storage slot at the specified address and index. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn get_account_storage_slot( &self, address: Buffer, @@ -192,6 +210,7 @@ impl StateManager { /// Retrieves the storage root of the database. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn get_state_root(&self) -> napi::Result { self.state.state_root().await.map_or_else( |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), @@ -201,6 +220,7 @@ impl StateManager { /// Inserts the provided account at the specified address. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn insert_account(&self, address: Buffer, account: Account) -> napi::Result<()> { let address = Address::from_slice(&address); let account: AccountInfo = account.try_cast()?; @@ -213,6 +233,7 @@ impl StateManager { /// Makes a snapshot of the database that's retained until [`removeSnapshot`] is called. Returns the snapshot's identifier. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn make_snapshot(&self) -> SnapshotId { let (state_root, existed) = self.state.make_snapshot().await; @@ -226,6 +247,7 @@ impl StateManager { /// The modifier function receives the current values as individual parameters and will update the account's values /// to the returned `Account` values. #[napi(ts_return_type = "Promise")] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn modify_account( &self, env: Env, @@ -302,7 +324,7 @@ impl StateManager { let result = db .modify_account( address, - Box::new( + AccountModifierFn::new(Box::new( move |balance: &mut U256, nonce: &mut u64, code: &mut Option| { let (sender, receiver) = channel(); @@ -323,7 +345,7 @@ impl StateManager { *nonce = new_account.nonce; *code = new_account.code; }, - ), + )), ) .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())); @@ -336,6 +358,7 @@ impl StateManager { /// Removes and returns the account at the specified address, if it exists. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn remove_account(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); @@ -347,6 +370,7 @@ impl StateManager { /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn remove_snapshot(&self, state_root: Buffer) -> bool { let state_root = B256::from_slice(&state_root); @@ -355,6 +379,7 @@ impl StateManager { /// Sets the storage slot at the specified address and index to the provided value. 
#[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn set_account_storage_slot( &self, address: Buffer, @@ -373,6 +398,7 @@ impl StateManager { /// Reverts the state to match the specified state root. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn set_state_root(&self, state_root: Buffer) -> napi::Result<()> { let state_root = B256::from_slice(&state_root); diff --git a/crates/rethnet_evm_napi/src/tracer.rs b/crates/rethnet_evm_napi/src/tracer.rs index 32be8cf5c8..1de31c2d8a 100644 --- a/crates/rethnet_evm_napi/src/tracer.rs +++ b/crates/rethnet_evm_napi/src/tracer.rs @@ -2,7 +2,7 @@ mod js_tracer; use napi::Env; use napi_derive::napi; -use rethnet_evm::{state::StateError, AsyncDatabase, Inspector}; +use rethnet_evm::{state::StateError, AsyncInspector}; use self::js_tracer::{JsTracer, TracingCallbacks}; @@ -12,9 +12,7 @@ pub struct Tracer { } impl Tracer { - pub fn as_dyn_inspector( - &self, - ) -> Box<(dyn Inspector> + Send + 'static)> { + pub fn as_dyn_inspector(&self) -> Box> { self.inner.clone() } } diff --git a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs index 005e3ea183..21a3459b21 100644 --- a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs +++ b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs @@ -10,7 +10,9 @@ use napi::{ }; use napi_derive::napi; use rethnet_eth::{Address, Bytes, U256}; -use rethnet_evm::{opcode, return_revert, Bytecode, Gas, InstructionResult, SuccessOrHalt}; +use rethnet_evm::{ + opcode, return_revert, AsyncInspector, Bytecode, Gas, InstructionResult, SuccessOrHalt, +}; use crate::{ sync::{await_void_promise, handle_error}, @@ -594,6 +596,19 @@ impl JsTracer { } } +impl Debug for JsTracer { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("JsTracer").finish() + } +} + +impl AsyncInspector for JsTracer +where + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, +{ +} + impl rethnet_evm::Inspector for JsTracer where D: rethnet_evm::Database, diff --git a/packages/hardhat-core/package.json b/packages/hardhat-core/package.json index 7cef62209f..4ed94f1c8d 100644 --- a/packages/hardhat-core/package.json +++ b/packages/hardhat-core/package.json @@ -33,7 +33,9 @@ "test:tracing": "mocha --recursive \"test/internal/hardhat-network/{helpers,stack-traces}/**/*.ts\" --exit", "test:forking": "mocha --recursive \"test/internal/hardhat-network/{helpers,jsonrpc,provider}/**/*.ts\" --exit", "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", + "prebuild:tracing": "cd ../../crates/rethnet_evm_napi && yarn build:tracing", "build": "tsc --build .", + "build:tracing": "tsc --build .", "prepublishOnly": "yarn build", "clean": "rimraf builtin-tasks internal types utils *.d.ts *.map *.js build-test tsconfig.tsbuildinfo test/internal/hardhat-network/provider/.hardhat_node_test_cache" }, From b459e84b468b1fdb31b6750935be81867d75aeee Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 16:46:02 -0500 Subject: [PATCH 047/406] feat: add hybrid state management (#3840) Hybrid state management uses a Merkle-Patricia trie for the latest state and layers for history. 
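A rough, self-contained sketch of that idea (simplified stand-in types, not the exact API added in this patch; the actual types introduced below are `TrieState`, `LayeredChanges` and `HybridState`): every commit is applied to both views, so the trie can always answer queries about the latest state while the layers retain enough information to restore earlier state roots.

    // Sketch only: `Latest` stands in for the Merkle-Patricia trie view and
    // `Layers` for the layered change history; both are updated on every commit.
    use std::collections::HashMap;

    type Address = [u8; 20];

    #[derive(Clone, Default)]
    struct Account { balance: u128, nonce: u64 }

    // Fast queries for the current state (the role played by the trie).
    #[derive(Default)]
    struct Latest { accounts: HashMap<Address, Account> }

    // Change sets kept around for checkpoints/snapshots (the role played by the layers).
    #[derive(Default)]
    struct Layers { layers: Vec<HashMap<Address, Account>> }

    #[derive(Default)]
    struct HybridSketch { latest: Latest, history: Layers }

    impl HybridSketch {
        // Every commit updates both views.
        fn commit(&mut self, changes: HashMap<Address, Account>) {
            self.history.layers.push(changes.clone()); // record the change set
            self.latest.accounts.extend(changes);      // update the queryable view
        }

        // Reads never have to walk the history.
        fn basic(&self, address: &Address) -> Option<&Account> {
            self.latest.accounts.get(address)
        }
    }

    fn main() {
        let mut state = HybridSketch::default();
        let addr = [0u8; 20];
        state.commit(HashMap::from([(addr, Account { balance: 1, nonce: 0 })]));
        assert_eq!(state.basic(&addr).unwrap().balance, 1);
    }

The tradeoff, as I read the diff, is that every write is bookkept twice, in exchange for cheap reads of the latest state and cheap snapshot/revert through the layered history.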
--- crates/rethnet_eth/Cargo.toml | 1 - crates/rethnet_eth/src/account.rs | 34 +- crates/rethnet_eth/src/state.rs | 14 +- crates/rethnet_evm/Cargo.toml | 5 + crates/rethnet_evm/src/state.rs | 11 +- crates/rethnet_evm/src/state/account.rs | 17 + crates/rethnet_evm/src/state/contract.rs | 118 ++++ crates/rethnet_evm/src/state/debug.rs | 25 +- crates/rethnet_evm/src/state/history.rs | 25 + crates/rethnet_evm/src/state/hybrid.rs | 277 ++++++++ crates/rethnet_evm/src/state/layered.rs | 277 ++++++++ .../rethnet_evm/src/state/layered/changes.rs | 329 +++++++++ crates/rethnet_evm/src/state/layered_state.rs | 515 -------------- crates/rethnet_evm/src/state/request.rs | 14 +- crates/rethnet_evm/src/state/sync.rs | 47 +- crates/rethnet_evm/src/state/trie.rs | 234 +++++++ crates/rethnet_evm/src/state/trie/account.rs | 658 ++++++++++++++++++ crates/rethnet_evm_napi/src/state.rs | 14 +- .../hardhat-network/provider/RethnetState.ts | 4 + .../provider/fork/ForkStateManager.ts | 9 + .../hardhat-network/provider/vm/dual.ts | 9 + .../hardhat-network/provider/vm/ethereumjs.ts | 146 +++- .../hardhat-network/provider/vm/rethnet.ts | 4 + .../hardhat-network/provider/vm/vm-adapter.ts | 3 + 24 files changed, 2228 insertions(+), 562 deletions(-) create mode 100644 crates/rethnet_evm/src/state/account.rs create mode 100644 crates/rethnet_evm/src/state/contract.rs create mode 100644 crates/rethnet_evm/src/state/history.rs create mode 100644 crates/rethnet_evm/src/state/hybrid.rs create mode 100644 crates/rethnet_evm/src/state/layered.rs create mode 100644 crates/rethnet_evm/src/state/layered/changes.rs delete mode 100644 crates/rethnet_evm/src/state/layered_state.rs create mode 100644 crates/rethnet_evm/src/state/trie.rs create mode 100644 crates/rethnet_evm/src/state/trie/account.rs diff --git a/crates/rethnet_eth/Cargo.toml b/crates/rethnet_eth/Cargo.toml index 559440fe93..cde44e5161 100644 --- a/crates/rethnet_eth/Cargo.toml +++ b/crates/rethnet_eth/Cargo.toml @@ -10,7 +10,6 @@ hash-db = { version = "0.15.2", default-features = false } hash256-std-hasher = { version = "0.15.2", default-features = false } hashbrown = { version = "0.13", default-features = false, features = ["ahash"] } hex = { version = "0.4.3", default-features = false, features = ["alloc"] } -hex-literal = { version = "0.3", default-features = false } open-fastrlp = { version = "0.1.2", default-features = false, features = ["derive"], optional = true } primitive-types = { version = "0.11.1", default-features = false, features = ["rlp"] } reqwest = { version = "0.11", features = ["blocking", "json"] } diff --git a/crates/rethnet_eth/src/account.rs b/crates/rethnet_eth/src/account.rs index a24252e941..4c79ca73df 100644 --- a/crates/rethnet_eth/src/account.rs +++ b/crates/rethnet_eth/src/account.rs @@ -5,14 +5,10 @@ //! Ethereum account types -use hex_literal::hex; - use crate::{trie::KECCAK_NULL_RLP, B256, U256}; -/// The KECCAK for empty code. -pub const KECCAK_EMPTY: revm_primitives::B256 = revm_primitives::B256(hex!( - "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" -)); +use revm_primitives::AccountInfo; +pub use revm_primitives::KECCAK_EMPTY; /// Basic account type. #[derive(Debug, Clone, PartialEq, Eq)] @@ -22,7 +18,7 @@ pub const KECCAK_EMPTY: revm_primitives::B256 = revm_primitives::B256(hex!( )] pub struct BasicAccount { /// Nonce of the account. - pub nonce: U256, + pub nonce: u64, /// Balance of the account. pub balance: U256, /// Storage root of the account. 
@@ -35,13 +31,35 @@ impl Default for BasicAccount { fn default() -> Self { BasicAccount { balance: U256::ZERO, - nonce: U256::ZERO, + nonce: 0, code_hash: KECCAK_EMPTY, storage_root: KECCAK_NULL_RLP, } } } +impl From for AccountInfo { + fn from(account: BasicAccount) -> Self { + Self { + balance: account.balance, + nonce: account.nonce, + code_hash: account.code_hash, + code: None, + } + } +} + +impl From<(&AccountInfo, B256)> for BasicAccount { + fn from((account_info, storage_root): (&AccountInfo, B256)) -> Self { + Self { + nonce: account_info.nonce, + balance: account_info.balance, + storage_root, + code_hash: account_info.code_hash, + } + } +} + impl rlp::Encodable for BasicAccount { fn rlp_append(&self, stream: &mut rlp::RlpStream) { stream.begin_list(4); diff --git a/crates/rethnet_eth/src/state.rs b/crates/rethnet_eth/src/state.rs index 9f264bdaf0..f0cd101807 100644 --- a/crates/rethnet_eth/src/state.rs +++ b/crates/rethnet_eth/src/state.rs @@ -9,16 +9,22 @@ pub type State = HashMap; pub type Storage = HashMap; /// Calculates the state root hash of the provided state. -pub fn state_root(state: &State) -> B256 { - sec_trie_root(state.iter().map(|(address, account)| { +pub fn state_root<'a, I>(state: I) -> B256 +where + I: IntoIterator, +{ + sec_trie_root(state.into_iter().map(|(address, account)| { let account = rlp::encode(account); (address, account) })) } /// Calculates the storage root hash of the provided storage. -pub fn storage_root(storage: &Storage) -> B256 { - sec_trie_root(storage.iter().map(|(index, value)| { +pub fn storage_root<'a, I>(storage: I) -> B256 +where + I: IntoIterator, +{ + sec_trie_root(storage.into_iter().map(|(index, value)| { let value = rlp::encode(value); (index.to_be_bytes::<32>(), value) })) diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 04793616f0..0026673ffd 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -5,12 +5,17 @@ edition = "2021" [dependencies] auto_impl = { version = "1.0.1", default-features = false } +cita_trie = { git = "https://github.com/Wodann/cita-trie", rev = "60efef5", version = "4.0.0", default-features = false } hashbrown = { version = "0.13", default-features = false, features = ["ahash", "serde"] } +hasher = { git = "https://github.com/Wodann/hasher", rev = "89d3fc9", version = "0.1.4", default-features = false, features = ["hash-keccak"] } log = { version = "0.4.17", default-features = false } parking_lot = { version = "0.12.1", default-features = false } rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth", features = ["serde"] } revm = { git = "https://github.com/bluealloy/revm", rev = "3789509", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } # revm = { path = "../../../revm/crates/revm", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } +rlp = { version = "0.5.2", default-features = false } +serde = { version = "1.0.158", default-features = false, features = ["std"] } +serde_json = { version = "1.0.94", default-features = false, features = ["std"] } thiserror = { version = "1.0.38", default-features = false } tokio = { version = "1.21.2", default-features = false, features = ["rt-multi-thread", "sync"] } tracing = { version = "0.1.37", features = ["attributes", "std"], optional = true } diff --git a/crates/rethnet_evm/src/state.rs b/crates/rethnet_evm/src/state.rs index 5a6c631b01..c86606cfb7 100644 --- a/crates/rethnet_evm/src/state.rs +++ b/crates/rethnet_evm/src/state.rs @@ -1,14 
+1,21 @@ +mod account; +mod contract; mod debug; -mod layered_state; +mod history; +mod hybrid; +mod layered; mod remote; mod request; mod sync; +mod trie; use rethnet_eth::B256; pub use self::{ debug::{AccountModifierFn, StateDebug}, - layered_state::{LayeredState, RethnetLayer}, + history::StateHistory, + hybrid::HybridState, + layered::{LayeredState, RethnetLayer}, remote::RemoteDatabase, sync::{AsyncState, SyncState}, }; diff --git a/crates/rethnet_evm/src/state/account.rs b/crates/rethnet_evm/src/state/account.rs new file mode 100644 index 0000000000..c451e3d5a9 --- /dev/null +++ b/crates/rethnet_evm/src/state/account.rs @@ -0,0 +1,17 @@ +use rethnet_eth::state::Storage; +use revm::primitives::AccountInfo; + +#[derive(Clone, Debug, Default)] +pub struct RethnetAccount { + pub info: AccountInfo, + pub storage: Storage, +} + +impl From for RethnetAccount { + fn from(info: AccountInfo) -> Self { + Self { + info, + storage: Default::default(), + } + } +} diff --git a/crates/rethnet_evm/src/state/contract.rs b/crates/rethnet_evm/src/state/contract.rs new file mode 100644 index 0000000000..a136648a72 --- /dev/null +++ b/crates/rethnet_evm/src/state/contract.rs @@ -0,0 +1,118 @@ +use hashbrown::HashMap; +use rethnet_eth::{account::KECCAK_EMPTY, B256}; +use revm::primitives::Bytecode; + +use super::{layered::LayeredChanges, RethnetLayer}; + +#[derive(Clone, Debug)] +struct ContractEntry { + code: Bytecode, + occurences: usize, +} + +impl ContractEntry { + pub fn new(code: Bytecode) -> Self { + Self { + code, + occurences: 1, + } + } + + /// Increments the number of occurences. + pub fn increment(&mut self) { + self.occurences += 1; + } + + /// Decrements the number of occurences. If no occurences are left, the [`ContractEntry`] + /// is consumed. + pub fn decrement(mut self) -> Option { + self.occurences -= 1; + + if !DELETE_UNUSED_CODE || self.occurences > 0 { + Some(self) + } else { + None + } + } +} + +#[derive(Clone, Debug)] +pub struct ContractStorage { + contracts: HashMap>, +} + +impl ContractStorage { + /// Inserts new code or, if it already exists, increments the number of occurences of + /// the code. + pub fn insert_code(&mut self, code: Bytecode) { + self.contracts + .entry(code.hash()) + .and_modify(|entry| entry.increment()) + .or_insert_with(|| ContractEntry::new(code)); + } + + /// Decremenents the number of occurences of the code corresponding to the provided code hash, + /// if it exists, and removes unused code. + pub fn remove_code(&mut self, code_hash: &B256) { + self.contracts + .entry(*code_hash) + .and_replace_entry_with(|_code, entry| entry.decrement()); + } +} + +impl ContractStorage { + /// Retrieves the contract code corresponding to the provided hash. + pub fn get(&self, code_hash: &B256) -> Option<&Bytecode> { + self.contracts.get(code_hash).map(|entry| &entry.code) + } +} + +impl ContractStorage { + /// Retrieves the contract code corresponding to the provided hash. 
+ pub fn get(&self, code_hash: &B256) -> Option<&Bytecode> { + self.contracts.get(code_hash).and_then(|entry| { + if entry.occurences > 0 { + Some(&entry.code) + } else { + None + } + }) + } +} + +impl Default for ContractStorage { + fn default() -> Self { + let mut contracts = HashMap::new(); + contracts.insert(KECCAK_EMPTY, ContractEntry::new(Bytecode::new())); + + Self { contracts } + } +} + +impl From<&LayeredChanges> for ContractStorage { + fn from(changes: &LayeredChanges) -> Self { + let mut storage = Self::default(); + + changes.iter().for_each(|layer| { + layer + .contracts() + .contracts + .iter() + .for_each(|(code_hash, entry)| { + if entry.occurences > 0 { + storage.contracts.insert( + *code_hash, + ContractEntry { + code: entry.code.clone(), + occurences: entry.occurences, + }, + ); + } else { + storage.contracts.remove(code_hash); + } + }) + }); + + storage + } +} diff --git a/crates/rethnet_evm/src/state/debug.rs b/crates/rethnet_evm/src/state/debug.rs index bf1f9aec87..bc97960d49 100644 --- a/crates/rethnet_evm/src/state/debug.rs +++ b/crates/rethnet_evm/src/state/debug.rs @@ -4,14 +4,16 @@ use auto_impl::auto_impl; use rethnet_eth::{Address, B256, U256}; use revm::primitives::{AccountInfo, Bytecode}; +type BoxedAccountModifierFn = Box) + Send>; + /// Debuggable function type for modifying account information. pub struct AccountModifierFn { - inner: Box) + Send>, + inner: BoxedAccountModifierFn, } impl AccountModifierFn { /// Constructs an [`AccountModifierDebuggableFn`] from the provided function. - pub fn new(func: Box) + Send>) -> Self { + pub fn new(func: BoxedAccountModifierFn) -> Self { Self { inner: func } } } @@ -61,6 +63,9 @@ pub trait StateDebug { /// Removes and returns the account at the specified address, if it exists. fn remove_account(&mut self, address: Address) -> Result, Self::Error>; + /// Serializes the state using ordering of addresses and storage indices. + fn serialize(&mut self) -> String; + /// Sets the storage slot at the specified address and index to the provided value. fn set_account_storage_slot( &mut self, @@ -69,22 +74,6 @@ pub trait StateDebug { value: U256, ) -> Result<(), Self::Error>; - /// Reverts the state to match the specified state root. - fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error>; - /// Retrieves the storage root of the database. fn state_root(&mut self) -> Result; - - /// Creates a checkpoint that can be reverted to using [`revert`]. - fn checkpoint(&mut self) -> Result<(), Self::Error>; - - /// Reverts to the previous checkpoint, created using [`checkpoint`]. - fn revert(&mut self) -> Result<(), Self::Error>; - - /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier and whether - /// that snapshot already existed. - fn make_snapshot(&mut self) -> (B256, bool); - - /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. - fn remove_snapshot(&mut self, state_root: &B256) -> bool; } diff --git a/crates/rethnet_evm/src/state/history.rs b/crates/rethnet_evm/src/state/history.rs new file mode 100644 index 0000000000..55590aa1b0 --- /dev/null +++ b/crates/rethnet_evm/src/state/history.rs @@ -0,0 +1,25 @@ +use auto_impl::auto_impl; +use rethnet_eth::B256; + +/// A trait for debug operation on a database. +#[auto_impl(Box)] +pub trait StateHistory { + /// The database's error type. + type Error; + + /// Reverts the state to match the specified state root. 
+ fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error>; + + /// Creates a checkpoint that can be reverted to using [`revert`]. + fn checkpoint(&mut self) -> Result<(), Self::Error>; + + /// Reverts to the previous checkpoint, created using [`checkpoint`]. + fn revert(&mut self) -> Result<(), Self::Error>; + + /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier and whether + /// that snapshot already existed. + fn make_snapshot(&mut self) -> (B256, bool); + + /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. + fn remove_snapshot(&mut self, state_root: &B256) -> bool; +} diff --git a/crates/rethnet_evm/src/state/hybrid.rs b/crates/rethnet_evm/src/state/hybrid.rs new file mode 100644 index 0000000000..53530210d3 --- /dev/null +++ b/crates/rethnet_evm/src/state/hybrid.rs @@ -0,0 +1,277 @@ +use std::fmt::Debug; + +use hashbrown::HashMap; +use rethnet_eth::{Address, B256, U256}; +use revm::{ + db::StateRef, + primitives::{Account, AccountInfo, Bytecode, KECCAK_EMPTY}, + DatabaseCommit, +}; + +use super::{ + history::StateHistory, + layered::LayeredChanges, + trie::{AccountTrie, TrieState}, + AccountModifierFn, RethnetLayer, StateDebug, StateError, +}; + +#[derive(Debug)] +struct Snapshot { + pub changes: LayeredChanges, + pub trie: TrieState, +} + +/// A state consisting of layers. +#[derive(Debug, Default)] +pub struct HybridState { + trie: TrieState, + changes: LayeredChanges, + snapshots: HashMap>, +} + +impl>> HybridState { + /// Creates a [`HybridState`] with the provided layer at the bottom. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn with_accounts(accounts: HashMap) -> Self { + let latest_state = TrieState::with_accounts(AccountTrie::with_accounts(&accounts)); + let layer = accounts.into(); + + Self { + trie: latest_state, + changes: LayeredChanges::with_layer(layer), + snapshots: HashMap::new(), + } + } +} + +impl StateRef for HybridState { + type Error = StateError; + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn basic(&self, address: Address) -> Result, Self::Error> { + self.trie.basic(address) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn code_by_hash(&self, code_hash: B256) -> Result { + self.trie.code_by_hash(code_hash) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn storage(&self, address: Address, index: U256) -> Result { + self.trie.storage(address, index) + } +} + +impl DatabaseCommit for HybridState { + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn commit(&mut self, changes: HashMap) { + self.changes.apply(&changes); + self.trie.commit(changes); + } +} + +impl StateDebug for HybridState { + type Error = StateError; + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + self.trie.account_storage_root(address) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn insert_account( + &mut self, + address: Address, + account_info: AccountInfo, + ) -> Result<(), Self::Error> { + self.trie.insert_account(address, account_info.clone())?; + self.changes.account_or_insert_mut(&address).info = account_info; + + Ok(()) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn modify_account( + &mut self, + address: Address, + modifier: AccountModifierFn, + ) -> Result<(), Self::Error> { + let mut account_info = 
self.trie.basic(address)?.map_or_else( + || AccountInfo { + code: None, + ..AccountInfo::default() + }, + |mut account_info| { + // Fill the bytecode + if account_info.code_hash != KECCAK_EMPTY { + account_info.code = Some( + self.trie + .code_by_hash(account_info.code_hash) + .expect("Code must exist"), + ); + } + + account_info + }, + ); + + let old_code_hash = account_info.code_hash; + + modifier( + &mut account_info.balance, + &mut account_info.nonce, + &mut account_info.code, + ); + + let new_code = account_info.code.take(); + let new_code_hash = new_code.as_ref().map_or(KECCAK_EMPTY, |code| code.hash()); + account_info.code_hash = new_code_hash; + + let code_changed = old_code_hash != new_code_hash; + if code_changed { + if let Some(new_code) = new_code { + self.trie.insert_code(new_code.clone()); + self.changes.insert_code(new_code); + } + + self.trie.remove_code(&old_code_hash); + self.changes.remove_code(&old_code_hash); + } + + self.trie.insert_account(address, account_info.clone())?; + self.changes.account_or_insert_mut(&address).info = account_info; + + Ok(()) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn remove_account(&mut self, address: Address) -> Result, Self::Error> { + Ok(if self.trie.remove_account(address).unwrap().is_some() { + self.changes.remove_account(&address) + } else { + None + }) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn serialize(&mut self) -> String { + self.trie.serialize() + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn set_account_storage_slot( + &mut self, + address: Address, + index: U256, + value: U256, + ) -> Result<(), Self::Error> { + self.trie.set_account_storage_slot(address, index, value)?; + + self.changes + .account_or_insert_mut(&address) + .storage + .insert(index, value); + + Ok(()) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn state_root(&mut self) -> Result { + self.trie.state_root() + } +} + +impl StateHistory for HybridState { + type Error = StateError; + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn make_snapshot(&mut self) -> (B256, bool) { + let state_root = self.state_root().unwrap(); + + let mut exists = true; + self.snapshots.entry(state_root).or_insert_with(|| { + exists = false; + + let mut changes = self.changes.clone(); + changes.last_layer_mut().set_state_root(state_root); + + Snapshot { + changes, + trie: self.trie.clone(), + } + }); + + (state_root, exists) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn remove_snapshot(&mut self, state_root: &B256) -> bool { + self.snapshots.remove(state_root).is_some() + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { + // Ensure the last layer has a state root + if !self.changes.last_layer_mut().has_state_root() { + let state_root = self.state_root()?; + self.changes.last_layer_mut().set_state_root(state_root); + } + + if let Some(Snapshot { + changes, + trie: latest_state, + }) = self.snapshots.remove(state_root) + { + self.trie = latest_state; + self.changes = changes; + + return Ok(()); + } + + let inverted_layer_id = self + .changes + .iter() + .enumerate() + .find_map(|(layer_id, layer)| { + if *layer.state_root().unwrap() == *state_root { + Some(layer_id) + } else { + None + } + }); + + if let Some(layer_id) = inverted_layer_id { + let layer_id = self.changes.last_layer_id() - layer_id; + + self.changes.revert_to_layer(layer_id); + self.trie = TrieState::from(&self.changes); 
+ + Ok(()) + } else { + Err(StateError::InvalidStateRoot(*state_root)) + } + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn checkpoint(&mut self) -> Result<(), Self::Error> { + let state_root = self.state_root()?; + self.changes.last_layer_mut().set_state_root(state_root); + + self.changes.add_layer_default(); + + Ok(()) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn revert(&mut self) -> Result<(), Self::Error> { + let last_layer_id = self.changes.last_layer_id(); + if last_layer_id > 0 { + self.changes.revert_to_layer(last_layer_id - 1); + self.trie = TrieState::from(&self.changes); + Ok(()) + } else { + Err(StateError::CannotRevert) + } + } +} diff --git a/crates/rethnet_evm/src/state/layered.rs b/crates/rethnet_evm/src/state/layered.rs new file mode 100644 index 0000000000..b84e966ba4 --- /dev/null +++ b/crates/rethnet_evm/src/state/layered.rs @@ -0,0 +1,277 @@ +mod changes; + +pub use changes::{LayeredChanges, RethnetLayer}; + +use std::fmt::Debug; + +use hashbrown::HashMap; +use rethnet_eth::{ + account::BasicAccount, + state::{state_root, storage_root}, + Address, B256, U256, +}; +use revm::{ + db::StateRef, + primitives::{Account, AccountInfo, Bytecode, KECCAK_EMPTY}, + DatabaseCommit, +}; + +use super::{history::StateHistory, AccountModifierFn, StateDebug, StateError}; + +/// A state consisting of layers. +#[derive(Debug, Default)] +pub struct LayeredState { + changes: LayeredChanges, + /// Snapshots + snapshots: HashMap>, +} + +impl>> LayeredState { + /// Creates a [`LayeredState`] with the provided layer at the bottom. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn with_accounts(accounts: HashMap) -> Self { + let layer = accounts.into(); + + Self { + changes: LayeredChanges::with_layer(layer), + snapshots: HashMap::new(), + } + } +} + +impl StateRef for LayeredState { + type Error = StateError; + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn basic(&self, address: Address) -> Result, Self::Error> { + Ok(self + .changes + .account(&address) + .map(|account| account.info.clone())) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn code_by_hash(&self, code_hash: B256) -> Result { + self.changes + .code_by_hash(&code_hash) + .map(Clone::clone) + .ok_or(StateError::InvalidCodeHash(code_hash)) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn storage(&self, address: Address, index: U256) -> Result { + Ok(self + .changes + .account(&address) + .and_then(|account| account.storage.get(&index)) + .cloned() + .unwrap_or(U256::ZERO)) + } +} + +impl DatabaseCommit for LayeredState { + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn commit(&mut self, changes: HashMap) { + self.changes.apply(&changes); + } +} + +impl StateDebug for LayeredState { + type Error = StateError; + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + Ok(self + .changes + .account(address) + .map(|account| storage_root(&account.storage))) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn insert_account( + &mut self, + address: Address, + account_info: AccountInfo, + ) -> Result<(), Self::Error> { + self.changes.account_or_insert_mut(&address).info = account_info; + + Ok(()) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn modify_account( + &mut self, + address: Address, + modifier: AccountModifierFn, + ) -> Result<(), Self::Error> { + let mut account_info = 
self.changes.account_or_insert_mut(&address).info.clone(); + + // Fill the bytecode + if account_info.code_hash != KECCAK_EMPTY { + account_info.code = Some( + self.changes + .code_by_hash(&account_info.code_hash) + .cloned() + .expect("Code must exist"), + ); + } + + let old_code_hash = account_info.code_hash; + + modifier( + &mut account_info.balance, + &mut account_info.nonce, + &mut account_info.code, + ); + + let new_code = account_info.code.take(); + let new_code_hash = new_code.as_ref().map_or(KECCAK_EMPTY, |code| code.hash()); + account_info.code_hash = new_code_hash; + + let code_change = old_code_hash != new_code_hash; + if code_change { + if let Some(new_code) = new_code { + self.changes.insert_code(new_code); + } + + self.changes.remove_code(&old_code_hash); + } + + self.changes.account_or_insert_mut(&address).info = account_info; + + Ok(()) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn remove_account(&mut self, address: Address) -> Result, Self::Error> { + Ok(self.changes.remove_account(&address)) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn serialize(&mut self) -> String { + self.changes.serialize() + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn set_account_storage_slot( + &mut self, + address: Address, + index: U256, + value: U256, + ) -> Result<(), Self::Error> { + self.changes + .account_or_insert_mut(&address) + .storage + .insert(index, value); + + Ok(()) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn state_root(&mut self) -> Result { + let mut state = HashMap::new(); + + self.changes + .iter() + .flat_map(|layer| layer.accounts()) + .for_each(|(address, account)| { + state + .entry(*address) + .or_insert(account.as_ref().map(|account| BasicAccount { + nonce: account.info.nonce, + balance: account.info.balance, + storage_root: storage_root(&account.storage), + code_hash: account.info.code_hash, + })); + }); + + let state = state + .iter() + .filter_map(|(address, account)| account.as_ref().map(|account| (address, account))); + + Ok(state_root(state)) + } +} + +impl StateHistory for LayeredState { + type Error = StateError; + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn make_snapshot(&mut self) -> (B256, bool) { + let state_root = self.state_root().unwrap(); + + let mut exists = true; + self.snapshots.entry(state_root).or_insert_with(|| { + exists = false; + + let mut snapshot = self.changes.clone(); + snapshot.last_layer_mut().set_state_root(state_root); + + snapshot + }); + + (state_root, exists) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn remove_snapshot(&mut self, state_root: &B256) -> bool { + self.snapshots.remove(state_root).is_some() + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { + // Ensure the last layer has a state root + if !self.changes.last_layer_mut().has_state_root() { + let state_root = self.state_root()?; + self.changes.last_layer_mut().set_state_root(state_root); + } + + if let Some(snapshot) = self.snapshots.remove(state_root) { + self.changes = snapshot; + + return Ok(()); + } + + let inverted_layer_id = self + .changes + .iter() + .enumerate() + .find_map(|(layer_id, layer)| { + if *layer.state_root().unwrap() == *state_root { + Some(layer_id) + } else { + None + } + }); + + if let Some(inverted_layer_id) = inverted_layer_id { + let layer_id = self.changes.last_layer_id() - inverted_layer_id; + self.changes.revert_to_layer(layer_id); + + 
Ok(()) + } else { + Err(StateError::InvalidStateRoot(*state_root)) + } + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn checkpoint(&mut self) -> Result<(), Self::Error> { + let state_root = self.state_root()?; + self.changes.last_layer_mut().set_state_root(state_root); + + self.changes.add_layer_default(); + + Ok(()) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn revert(&mut self) -> Result<(), Self::Error> { + let last_layer_id = self.changes.last_layer_id(); + if last_layer_id > 0 { + self.changes.revert_to_layer(last_layer_id - 1); + Ok(()) + } else { + Err(StateError::CannotRevert) + } + } +} diff --git a/crates/rethnet_evm/src/state/layered/changes.rs b/crates/rethnet_evm/src/state/layered/changes.rs new file mode 100644 index 0000000000..8d02e80b17 --- /dev/null +++ b/crates/rethnet_evm/src/state/layered/changes.rs @@ -0,0 +1,329 @@ +use std::{collections::BTreeMap, fmt::Debug}; + +use cita_trie::Hasher; +use hashbrown::HashMap; +use hasher::HasherKeccak; +use rethnet_eth::{account::KECCAK_EMPTY, state::storage_root, Address, B256, U256}; +use revm::primitives::{Account, AccountInfo, Bytecode}; + +use crate::state::{account::RethnetAccount, contract::ContractStorage}; + +#[derive(Clone, Debug)] +pub struct LayeredChanges { + stack: Vec, +} + +impl LayeredChanges { + /// Creates [`LayeredChanges`] with the provided layer at the bottom. + pub fn with_layer(layer: Layer) -> Self { + Self { stack: vec![layer] } + } + + /// Returns the index of the top layer. + pub fn last_layer_id(&self) -> usize { + self.stack.len() - 1 + } + + /// Returns a mutable reference to the top layer. + pub fn last_layer_mut(&mut self) -> &mut Layer { + // The `LayeredState` always has at least one layer + self.stack.last_mut().unwrap() + } + + /// Returns an iterator over the object's layers. + pub fn iter(&self) -> impl Iterator { + self.stack.iter().rev() + } + + /// Returns a reverse iterator over the object's layers, oldest to newest. + pub fn rev(&self) -> impl Iterator { + self.stack.iter() + } +} + +impl LayeredChanges { + /// Adds the provided layer to the top, returning its index and a + /// mutable reference to the layer. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn add_layer(&mut self, layer: Layer) -> (usize, &mut Layer) { + let layer_id = self.stack.len(); + self.stack.push(layer); + (layer_id, self.stack.last_mut().unwrap()) + } + + /// Reverts to the layer with specified `layer_id`, removing all + /// layers above it. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn revert_to_layer(&mut self, layer_id: usize) { + assert!(layer_id < self.stack.len(), "Invalid layer id."); + self.stack.truncate(layer_id + 1); + } +} + +impl LayeredChanges { + /// Adds a default layer to the top, returning its index and a + /// mutable reference to the layer. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn add_layer_default(&mut self) -> (usize, &mut Layer) { + self.add_layer(Layer::default()) + } +} + +impl Default for LayeredChanges { + fn default() -> Self { + Self { + stack: vec![Layer::default()], + } + } +} + +/// A layer with information needed for [`Rethnet`]. +#[derive(Clone, Debug, Default)] +pub struct RethnetLayer { + /// Accounts, where the Option signals deletion. + accounts: HashMap>, + /// Code hash -> Address + contracts: ContractStorage, + /// Cached state root + state_root: Option, +} + +impl RethnetLayer { + /// Retrieves an iterator over all accounts. 
+ pub fn accounts(&self) -> impl Iterator)> { + self.accounts.iter() + } + + /// Retrieves the contract storage + pub fn contracts(&self) -> &ContractStorage { + &self.contracts + } + + /// Returns whether the layer has a state root. + pub fn has_state_root(&self) -> bool { + self.state_root.is_some() + } + + /// Retrieves the layer's state root. + pub fn state_root(&self) -> Option<&B256> { + self.state_root.as_ref() + } + + /// Sets the layer's state root. + pub fn set_state_root(&mut self, state_root: B256) { + self.state_root = Some(state_root); + } +} + +impl From> for RethnetLayer { + fn from(accounts: HashMap) -> Self { + let mut accounts: HashMap> = accounts + .into_iter() + .map(|(address, account_info)| (address, Some(account_info.into()))) + .collect(); + + let mut contracts = ContractStorage::default(); + + accounts + .values_mut() + .filter_map(|account| { + account + .as_mut() + .and_then(|account| account.info.code.take()) + }) + .for_each(|code| { + if code.hash() != KECCAK_EMPTY { + contracts.insert_code(code); + } + }); + + Self { + accounts, + contracts, + state_root: None, + } + } +} + +impl LayeredChanges { + /// Retrieves a reference to the account corresponding to the address, if it exists. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn account(&self, address: &Address) -> Option<&RethnetAccount> { + self.iter() + .find_map(|layer| layer.accounts.get(address).map(Option::as_ref)) + .flatten() + } + + /// Retrieves a mutable reference to the account corresponding to the address, if it exists. + /// Otherwise, inserts a new account. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn account_or_insert_mut(&mut self, address: &Address) -> &mut RethnetAccount { + // WORKAROUND: https://blog.rust-lang.org/2022/08/05/nll-by-default.html + if self.last_layer_mut().accounts.contains_key(address) { + let was_deleted = self + .last_layer_mut() + .accounts + .get(address) + .unwrap() + .is_none(); + + if !was_deleted { + return self + .last_layer_mut() + .accounts + .get_mut(address) + .unwrap() + .as_mut() + .unwrap(); + } + } + + let account = self.account(address).cloned().unwrap_or_default(); + + self.last_layer_mut() + .accounts + .insert_unique_unchecked(*address, Some(account)) + .1 + .as_mut() + .unwrap() + } + + /// Applies the provided changes to the state. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn apply(&mut self, changes: &HashMap) { + changes.iter().for_each(|(address, account)| { + if account.is_destroyed || account.is_empty() { + // Removes account only if it exists, so safe to use for empty, touched accounts + self.remove_account(address); + } else { + let old_account = self.account_or_insert_mut(address); + + if account.storage_cleared { + old_account.storage.clear(); + } + + account.storage.iter().for_each(|(index, value)| { + let value = value.present_value(); + if value == U256::ZERO { + old_account.storage.remove(index); + } else { + old_account.storage.insert(*index, value); + } + }); + + let mut account_info = account.info.clone(); + + let old_code_hash = old_account.info.code_hash; + let code_changed = old_code_hash != account_info.code_hash; + + let new_code = account_info.code.take(); + old_account.info = account_info; + + if code_changed { + if let Some(new_code) = new_code { + self.insert_code(new_code); + } + + self.remove_code(&old_code_hash); + } + } + }); + } + + /// Retrieves the code corresponding to the specified code hash. 
+ #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn code_by_hash(&self, code_hash: &B256) -> Option<&Bytecode> { + self.iter().find_map(|layer| layer.contracts.get(code_hash)) + } + + /// Removes the [`AccountInfo`] corresponding to the specified address. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn remove_account(&mut self, address: &Address) -> Option { + if let Some(account) = self.account(address) { + let account_info = account.info.clone(); + + if account.info.code_hash != KECCAK_EMPTY { + debug_assert!(account.info.code.is_none()); + + let code_hash = account.info.code_hash; + + self.last_layer_mut().contracts.remove_code(&code_hash); + } + + // Insert `None` to signal that the account was deleted + self.last_layer_mut().accounts.insert(*address, None); + + return Some(account_info); + } + + None + } + + /// Serializes the state using ordering of addresses and storage indices. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn serialize(&self) -> String { + let mut state = BTreeMap::new(); + + #[derive(serde::Serialize)] + struct StateAccount { + /// Balance of the account. + pub balance: U256, + /// Code hash of the account. + pub code_hash: B256, + /// Nonce of the account. + pub nonce: u64, + /// Storage + pub storage: BTreeMap, + /// Storage root of the account. + pub storage_root: B256, + } + + self.iter() + .flat_map(|layer| layer.accounts()) + .for_each(|(address, account)| { + state.entry(*address).or_insert_with(|| { + account.as_ref().map(|account| { + let storage_root = storage_root(&account.storage); + + // Sort entries + let storage: BTreeMap = account + .storage + .iter() + .map(|(index, value)| { + let hashed_index = + HasherKeccak::new().digest(&index.to_be_bytes::<32>()); + + (B256::from_slice(&hashed_index), *value) + }) + .collect(); + + StateAccount { + balance: account.info.balance, + nonce: account.info.nonce, + code_hash: account.info.code_hash, + storage_root, + storage, + } + }) + }); + }); + + // Remove deleted entries + let state: BTreeMap<_, _> = state + .into_iter() + .filter_map(|(address, account)| account.map(|account| (address, account))) + .collect(); + + serde_json::to_string_pretty(&state).unwrap() + } + + /// Inserts the provided bytecode using its hash, potentially overwriting an existing value. + pub fn insert_code(&mut self, code: Bytecode) { + self.last_layer_mut().contracts.insert_code(code); + } + + /// Removes the code corresponding to the provided hash, if it exists. + pub fn remove_code(&mut self, code_hash: &B256) { + self.last_layer_mut().contracts.remove_code(code_hash); + } +} diff --git a/crates/rethnet_evm/src/state/layered_state.rs b/crates/rethnet_evm/src/state/layered_state.rs deleted file mode 100644 index b6a0f8564a..0000000000 --- a/crates/rethnet_evm/src/state/layered_state.rs +++ /dev/null @@ -1,515 +0,0 @@ -use hashbrown::HashMap; -use rethnet_eth::{ - account::BasicAccount, - state::{state_root, storage_root}, - trie::KECCAK_NULL_RLP, - Address, B256, U256, -}; -use revm::{ - db::State, - primitives::{Account, AccountInfo, Bytecode, KECCAK_EMPTY}, - DatabaseCommit, -}; - -use super::{AccountModifierFn, StateDebug, StateError}; - -/// A state consisting of layers. -#[derive(Clone, Debug)] -pub struct LayeredState { - stack: Vec, - /// Snapshots - snapshots: HashMap>, // naive implementation -} - -impl LayeredState { - /// Creates a [`LayeredState`] with the provided layer at the bottom. 
- pub fn with_layer(layer: Layer) -> Self { - Self { - stack: vec![layer], - snapshots: HashMap::new(), - } - } - - /// Returns the index of the top layer. - pub fn last_layer_id(&self) -> usize { - self.stack.len() - 1 - } - - /// Returns a mutable reference to the top layer. - pub fn last_layer_mut(&mut self) -> &mut Layer { - // The `LayeredState` always has at least one layer - self.stack.last_mut().unwrap() - } - - /// Adds the provided layer to the top, returning its index and a - /// mutable reference to the layer. - pub fn add_layer(&mut self, layer: Layer) -> (usize, &mut Layer) { - let layer_id = self.stack.len(); - self.stack.push(layer); - (layer_id, self.stack.last_mut().unwrap()) - } - - /// Reverts to the layer with specified `layer_id`, removing all - /// layers above it. - pub fn revert_to_layer(&mut self, layer_id: usize) { - assert!(layer_id < self.stack.len(), "Invalid layer id."); - self.stack.truncate(layer_id + 1); - } - - /// Returns an iterator over the object's layers. - pub fn iter(&self) -> impl Iterator { - self.stack.iter().rev() - } -} - -impl LayeredState { - /// Adds a default layer to the top, returning its index and a - /// mutable reference to the layer. - pub fn add_layer_default(&mut self) -> (usize, &mut Layer) { - self.add_layer(Layer::default()) - } -} - -impl Default for LayeredState { - fn default() -> Self { - Self { - stack: vec![Layer::default()], - snapshots: HashMap::new(), - } - } -} - -/// A layer with information needed for [`Rethnet`]. -#[derive(Clone, Debug, Default)] -pub struct RethnetLayer { - /// Address -> AccountInfo - account_infos: HashMap>, - /// Address -> Storage - storage: HashMap>>, - /// Code hash -> Address - contracts: HashMap, - /// Cached state root - state_root: Option, -} - -impl RethnetLayer { - /// Creates a `RethnetLayer` with the provided genesis accounts. - pub fn with_genesis_accounts(genesis_accounts: HashMap) -> Self { - let genesis_accounts = genesis_accounts - .into_iter() - .map(|(address, account_info)| (address, Some(account_info))) - .collect(); - - Self { - account_infos: genesis_accounts, - ..Default::default() - } - } - - /// Returns whether the layer has a state root. - pub fn has_state_root(&self) -> bool { - self.state_root.is_some() - } - - /// Insert the provided `AccountInfo` at the specified `address`. - pub fn insert_account(&mut self, address: Address, mut account_info: AccountInfo) { - if let Some(code) = account_info.code.take() { - if !code.is_empty() { - account_info.code_hash = code.hash(); - self.contracts.insert(code.hash(), code); - } - } - - if account_info.code_hash.is_zero() { - account_info.code_hash = KECCAK_EMPTY; - } - - self.account_infos.insert(address, Some(account_info)); - } -} - -impl LayeredState { - /// Retrieves a reference to the account corresponding to the address, if it exists. - pub fn account(&self, address: &Address) -> Option<&AccountInfo> { - self.iter() - .find_map(|layer| { - layer - .account_infos - .get(address) - .map(|account_info| account_info.as_ref()) - }) - .flatten() - } - - /// Retrieves a mutable reference to the account corresponding to the address, if it exists. 
- pub fn account_mut(&mut self, address: &Address) -> Option<&mut AccountInfo> { - // WORKAROUND: https://blog.rust-lang.org/2022/08/05/nll-by-default.html - if self.last_layer_mut().account_infos.contains_key(address) { - return self - .last_layer_mut() - .account_infos - .get_mut(address) - .and_then(|account_info| account_info.as_mut()); - } - - self.account(address).cloned().map(|account_info| { - self.last_layer_mut() - .account_infos - .insert_unique_unchecked(*address, Some(account_info)) - .1 - .as_mut() - .unwrap() - }) - } - - /// Retrieves a mutable reference to the account corresponding to the address, if it exists. - /// Otherwise, inserts a new account. - pub fn account_or_insert_mut(&mut self, address: &Address) -> &mut AccountInfo { - // WORKAROUND: https://blog.rust-lang.org/2022/08/05/nll-by-default.html - if self.last_layer_mut().account_infos.contains_key(address) { - let was_deleted = self - .last_layer_mut() - .account_infos - .get(address) - .unwrap() - .is_none(); - - if !was_deleted { - return self - .last_layer_mut() - .account_infos - .get_mut(address) - .unwrap() - .as_mut() - .unwrap(); - } - } - - let account_info = self.account(address).cloned().unwrap_or(AccountInfo { - balance: U256::ZERO, - nonce: 0, - code_hash: KECCAK_EMPTY, - code: None, - }); - - self.last_layer_mut() - .account_infos - .insert_unique_unchecked(*address, Some(account_info)) - .1 - .as_mut() - .unwrap() - } - - /// Removes the [`AccountInfo`] corresponding to the specified address. - fn remove_account(&mut self, address: &Address) -> Option { - let account_info = self - .iter() - .find_map(|layer| layer.account_infos.get(address)) - .cloned() - .flatten(); - - if let Some(account_info) = &account_info { - debug_assert!(account_info.code.is_none()); - - let code_hash = account_info.code_hash; - - self.last_layer_mut() - .contracts - .insert(code_hash, Bytecode::new()); - - // Write None to signal that the account was deleted - self.last_layer_mut().account_infos.insert(*address, None); - } - - let storage = self.iter().find_map(|layer| layer.storage.get(address)); - - if let Some(Some(_)) = storage { - // Write None to signal that the account's storage was deleted - self.last_layer_mut().storage.insert(*address, None); - } - - account_info - } -} - -impl State for LayeredState { - type Error = StateError; - - fn basic(&mut self, address: Address) -> Result, Self::Error> { - let account = self - .iter() - .find_map(|layer| layer.account_infos.get(&address)) - .cloned() - .flatten(); - - Ok(account) - } - - fn code_by_hash(&mut self, code_hash: B256) -> Result { - if code_hash == KECCAK_EMPTY { - return Ok(Bytecode::new()); - } - - self.iter() - .find_map(|layer| layer.contracts.get(&code_hash).cloned()) - .ok_or(StateError::InvalidCodeHash(code_hash)) - } - - fn storage(&mut self, address: Address, index: U256) -> Result { - Ok(self - .iter() - .find_map(|layer| layer.storage.get(&address).map(|storage| storage.as_ref())) - .flatten() - .and_then(|storage| storage.get(&index)) - .cloned() - .unwrap_or(U256::ZERO)) - } -} - -impl DatabaseCommit for LayeredState { - fn commit(&mut self, changes: HashMap) { - changes.into_iter().for_each(|(address, account)| { - if account.is_empty() || account.is_destroyed { - self.remove_account(&address); - } else { - self.last_layer_mut().insert_account(address, account.info); - - let storage = if self.last_layer_mut().storage.contains_key(&address) { - let storage = self.last_layer_mut().storage.get_mut(&address).unwrap(); - - let was_deleted = 
storage.is_none(); - if was_deleted { - storage.replace(HashMap::new()); - } - - storage.as_mut().unwrap() - } else { - let storage = self - .iter() - .find_map(|layer| layer.storage.get(&address)) - .cloned() - .flatten() - .unwrap_or_default(); - - self.last_layer_mut() - .storage - .insert_unique_unchecked(address, Some(storage)) - .1 - .as_mut() - .unwrap() - }; - - if account.storage_cleared { - storage.clear(); - } - - account.storage.into_iter().for_each(|(index, value)| { - let value = value.present_value(); - if value == U256::ZERO { - storage.remove(&index); - } else { - storage.insert(index, value); - } - }); - } - }); - } -} - -impl StateDebug for LayeredState { - type Error = StateError; - - fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { - Ok(self - .iter() - .find_map(|layer| layer.storage.get(address)) - .map(|storage| storage.as_ref().map_or(KECCAK_NULL_RLP, storage_root))) - } - - fn insert_account( - &mut self, - address: Address, - account_info: AccountInfo, - ) -> Result<(), Self::Error> { - self.last_layer_mut().insert_account(address, account_info); - - Ok(()) - } - - fn make_snapshot(&mut self) -> (B256, bool) { - let state_root = self.state_root().unwrap(); - - let mut exists = true; - self.snapshots.entry(state_root).or_insert_with(|| { - exists = false; - - let mut snapshot = self.stack.clone(); - if let Some(layer) = snapshot.last_mut() { - layer.state_root.replace(state_root); - } - snapshot - }); - - (state_root, exists) - } - - fn modify_account( - &mut self, - address: Address, - modifier: AccountModifierFn, - ) -> Result<(), Self::Error> { - let account_info = self.account_or_insert_mut(&address); - let old_code_hash = account_info.code_hash; - - modifier( - &mut account_info.balance, - &mut account_info.nonce, - &mut account_info.code, - ); - - if let Some(code) = account_info.code.take() { - let new_code_hash = code.hash(); - - if old_code_hash != new_code_hash { - account_info.code_hash = new_code_hash; - - let last_layer = self.last_layer_mut(); - - // The old contract should now return empty bytecode - last_layer.contracts.insert(old_code_hash, Bytecode::new()); - - last_layer.contracts.insert(new_code_hash, code); - } - } - - Ok(()) - } - - fn remove_account(&mut self, address: Address) -> Result, Self::Error> { - Ok(self.remove_account(&address)) - } - - fn remove_snapshot(&mut self, state_root: &B256) -> bool { - self.snapshots.remove(state_root).is_some() - } - - fn set_account_storage_slot( - &mut self, - address: Address, - index: U256, - value: U256, - ) -> Result<(), Self::Error> { - self.last_layer_mut() - .storage - .entry(address) - .and_modify(|entry| { - let was_deleted = entry.is_none(); - if was_deleted { - entry.replace(HashMap::new()); - } - - entry.as_mut().unwrap().insert(index, value); - }) - .or_insert_with(|| { - let mut account_storage = HashMap::new(); - account_storage.insert(index, value); - - Some(account_storage) - }); - - Ok(()) - } - - fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { - // Ensure the last layer has a state root - if !self.last_layer_mut().has_state_root() { - let state_root = self.state_root()?; - self.last_layer_mut().state_root.replace(state_root); - } - - if let Some(snapshot) = self.snapshots.remove(state_root) { - self.stack = snapshot; - - return Ok(()); - } - - let layer_id = self.stack.iter().enumerate().find_map(|(layer_id, layer)| { - if layer.state_root.unwrap() == *state_root { - Some(layer_id) - } else { - None - } - }); - - if let 
Some(layer_id) = layer_id { - self.stack.truncate(layer_id + 1); - - Ok(()) - } else { - Err(StateError::InvalidStateRoot(*state_root)) - } - } - - fn state_root(&mut self) -> Result { - let mut storage = HashMap::new(); - - self.iter().flat_map(|layer| layer.storage.iter()).for_each( - |(address, account_storage)| { - storage.entry(*address).or_insert(account_storage.clone()); - }, - ); - - let storage_roots: HashMap = storage - .into_iter() - .filter_map(|(address, storage)| { - storage.map(|storage| (address, storage_root(&storage))) - }) - .collect(); - - let mut state = HashMap::new(); - - self.iter() - .flat_map(|layer| layer.account_infos.iter()) - .for_each(|(address, account_info)| { - let storage_root = storage_roots - .get(address) - .cloned() - .unwrap_or(KECCAK_NULL_RLP); - - state - .entry(*address) - .or_insert(account_info.as_ref().map(|account_info| BasicAccount { - nonce: U256::from(account_info.nonce), - balance: account_info.balance, - storage_root, - code_hash: account_info.code_hash, - })); - }); - - let state: HashMap = state - .into_iter() - .filter_map(|(address, account)| account.map(|account| (address, account))) - .collect(); - - Ok(state_root(&state)) - } - - fn checkpoint(&mut self) -> Result<(), Self::Error> { - let state_root = self.state_root()?; - self.last_layer_mut().state_root.replace(state_root); - - self.add_layer_default(); - - Ok(()) - } - - fn revert(&mut self) -> Result<(), Self::Error> { - let last_layer_id = self.last_layer_id(); - if last_layer_id > 0 { - self.revert_to_layer(last_layer_id - 1); - Ok(()) - } else { - Err(StateError::CannotRevert) - } - } -} diff --git a/crates/rethnet_evm/src/state/request.rs b/crates/rethnet_evm/src/state/request.rs index 6a1fe0bac1..2c5ca0b10e 100644 --- a/crates/rethnet_evm/src/state/request.rs +++ b/crates/rethnet_evm/src/state/request.rs @@ -3,7 +3,7 @@ use std::fmt::Debug; use hashbrown::HashMap; use rethnet_eth::{Address, B256, U256}; use revm::{ - db::State, + db::StateRef, primitives::{Account, AccountInfo, Bytecode}, DatabaseCommit, }; @@ -11,6 +11,8 @@ use tokio::sync::oneshot; use crate::state::{AccountModifierFn, StateDebug}; +use super::history::StateHistory; + /// The request type used internally by a [`SyncDatabase`]. 
#[derive(Debug)] pub enum Request { @@ -57,6 +59,9 @@ pub enum Request { Revert { sender: oneshot::Sender>, }, + Serialize { + sender: oneshot::Sender, + }, SetStorageSlot { address: Address, index: U256, @@ -85,7 +90,11 @@ where #[cfg_attr(feature = "tracing", tracing::instrument)] pub fn handle(self, state: &mut S) -> bool where - S: State + DatabaseCommit + StateDebug + Debug, + S: StateRef + + DatabaseCommit + + StateDebug + + StateHistory + + Debug, { match self { Request::AccountByAddress { address, sender } => { @@ -124,6 +133,7 @@ where sender.send(state.remove_snapshot(&state_root)).unwrap() } Request::Revert { sender } => sender.send(state.revert()).unwrap(), + Request::Serialize { sender } => sender.send(state.serialize()).unwrap(), Request::SetStorageSlot { address, index, diff --git a/crates/rethnet_evm/src/state/sync.rs b/crates/rethnet_evm/src/state/sync.rs index 1b09e331df..ee1e00e383 100644 --- a/crates/rethnet_evm/src/state/sync.rs +++ b/crates/rethnet_evm/src/state/sync.rs @@ -3,7 +3,7 @@ use std::{fmt::Debug, io}; use hashbrown::HashMap; use rethnet_eth::{Address, B256, U256}; use revm::{ - db::{State, StateRef}, + db::StateRef, primitives::{Account, AccountInfo, Bytecode}, DatabaseCommit, }; @@ -18,11 +18,18 @@ use tokio::{ use crate::state::{AccountModifierFn, StateDebug}; -use super::request::Request; +use super::{history::StateHistory, request::Request}; /// Trait that meets all requirements for a synchronous database that can be used by [`AsyncDatabase`]. pub trait SyncState: - State + DatabaseCommit + StateDebug + Debug + Send + Sync + 'static + StateRef + + DatabaseCommit + + StateDebug + + StateHistory + + Debug + + Send + + Sync + + 'static where E: Debug + Send, { @@ -30,7 +37,14 @@ where impl SyncState for S where - S: State + DatabaseCommit + StateDebug + Debug + Send + Sync + 'static, + S: StateRef + + DatabaseCommit + + StateDebug + + StateHistory + + Debug + + Send + + Sync + + 'static, E: Debug + Send, { } @@ -247,6 +261,18 @@ where receiver.await.unwrap() } + /// Serializes the state using ordering of addresses and storage indices. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub async fn serialize(&self) -> String { + let (sender, receiver) = oneshot::channel(); + + self.request_sender + .send(Request::Serialize { sender }) + .expect("Failed to send request"); + + receiver.await.unwrap() + } + /// Sets the storage slot at the specified address and index to the provided value. 
#[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn set_account_storage_slot( @@ -400,6 +426,12 @@ where }) } + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn serialize(&mut self) -> String { + task::block_in_place(move || self.runtime.block_on(AsyncState::serialize(*self))) + } + + #[cfg_attr(feature = "tracing", tracing::instrument)] fn set_account_storage_slot( &mut self, address: Address, @@ -417,6 +449,13 @@ where fn state_root(&mut self) -> Result { task::block_in_place(move || self.runtime.block_on(AsyncState::state_root(*self))) } +} + +impl<'d, E> StateHistory for &'d AsyncState +where + E: Debug + Send + 'static, +{ + type Error = E; #[cfg_attr(feature = "tracing", tracing::instrument)] fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { diff --git a/crates/rethnet_evm/src/state/trie.rs b/crates/rethnet_evm/src/state/trie.rs new file mode 100644 index 0000000000..bb29da80e5 --- /dev/null +++ b/crates/rethnet_evm/src/state/trie.rs @@ -0,0 +1,234 @@ +mod account; + +pub use self::account::AccountTrie; + +use hashbrown::HashMap; +use rethnet_eth::{ + account::{BasicAccount, KECCAK_EMPTY}, + trie::KECCAK_NULL_RLP, + Address, B160, B256, U256, +}; +use revm::{ + db::StateRef, + primitives::{Account, AccountInfo, Bytecode}, + DatabaseCommit, +}; + +use super::{ + contract::ContractStorage, layered::LayeredChanges, RethnetLayer, StateDebug, StateError, +}; + +/// An implementation of revm's state that uses a trie. +#[derive(Clone, Debug, Default)] +pub struct TrieState { + accounts: AccountTrie, + contracts: ContractStorage, +} + +impl TrieState { + /// Constructs a [`TrieState`] from the provided [`AccountTrie`]. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn with_accounts(accounts: AccountTrie) -> Self { + Self { + accounts, + ..TrieState::default() + } + } + + /// Inserts the provided bytecode using its hash, potentially overwriting an existing value. + pub fn insert_code(&mut self, code: Bytecode) { + self.contracts.insert_code(code); + } + + /// Removes the code corresponding to the provided hash, if it exists. + pub fn remove_code(&mut self, code_hash: &B256) { + if *code_hash != KECCAK_EMPTY { + self.contracts.remove_code(code_hash); + } + } +} + +impl StateRef for TrieState { + type Error = StateError; + + fn basic(&self, address: Address) -> Result, Self::Error> { + Ok(self.accounts.account(&address).map(AccountInfo::from)) + } + + fn code_by_hash(&self, code_hash: B256) -> Result { + self.contracts + .get(&code_hash) + .cloned() + .ok_or(StateError::InvalidCodeHash(code_hash)) + } + + fn storage(&self, address: B160, index: U256) -> Result { + Ok(self + .accounts + .account_storage_slot(&address, &index) + .unwrap_or(U256::ZERO)) + } +} + +impl DatabaseCommit for TrieState { + fn commit(&mut self, mut changes: HashMap) { + changes.iter_mut().for_each(|(address, account)| { + if account.is_destroyed { + self.remove_code(&account.info.code_hash); + } else if account.is_empty() { + // Don't do anything. 
Account was merely touched + } else { + let old_code_hash = self + .accounts + .account(address) + .map_or(KECCAK_EMPTY, |old_account| old_account.code_hash); + + let code_changed = old_code_hash != account.info.code_hash; + if code_changed { + if let Some(new_code) = account.info.code.take() { + self.insert_code(new_code); + } + + self.remove_code(&old_code_hash); + } + } + }); + + self.accounts.commit(&changes); + } +} + +impl StateDebug for TrieState { + type Error = StateError; + + fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + Ok(self.accounts.storage_root(address)) + } + + fn insert_account( + &mut self, + address: Address, + mut account_info: AccountInfo, + ) -> Result<(), Self::Error> { + if let Some(code) = account_info.code.take() { + account_info.code_hash = code.hash; + + self.insert_code(code); + } + + self.accounts.set_account(&address, &account_info); + + Ok(()) + } + + fn modify_account( + &mut self, + address: Address, + modifier: super::AccountModifierFn, + ) -> Result<(), Self::Error> { + let mut account_info = self.accounts.account(&address).map_or_else( + || AccountInfo { + code: None, + ..AccountInfo::default() + }, + |account| { + let mut account_info = AccountInfo::from(account); + + // Fill the bytecode + if account_info.code_hash != KECCAK_EMPTY { + account_info.code = Some( + self.code_by_hash(account_info.code_hash) + .expect("Code must exist"), + ); + } + + account_info + }, + ); + + let old_code_hash = account_info.code_hash; + + modifier( + &mut account_info.balance, + &mut account_info.nonce, + &mut account_info.code, + ); + + // Strip the bytecode + let new_code = account_info.code.take(); + + let new_code_hash = new_code.as_ref().map_or(KECCAK_EMPTY, |code| code.hash()); + account_info.code_hash = new_code_hash; + + let code_changed = new_code_hash != old_code_hash; + if code_changed { + if let Some(new_code) = new_code { + self.insert_code(new_code); + } + + self.remove_code(&old_code_hash); + } + + self.accounts.set_account(&address, &account_info); + + Ok(()) + } + + fn remove_account(&mut self, address: Address) -> Result, Self::Error> { + Ok(self.accounts.remove_account(&address).map(|account| { + self.remove_code(&account.code_hash); + + AccountInfo { + balance: account.balance, + nonce: account.nonce, + code_hash: account.code_hash, + code: None, + } + })) + } + + fn serialize(&mut self) -> String { + self.accounts.serialize() + } + + fn set_account_storage_slot( + &mut self, + address: Address, + index: U256, + value: U256, + ) -> Result<(), Self::Error> { + self.accounts + .set_account_storage_slot(&address, &index, &value); + + Ok(()) + } + + fn state_root(&mut self) -> Result { + Ok(self.accounts.state_root()) + } +} + +impl From<&LayeredChanges> for TrieState { + fn from(changes: &LayeredChanges) -> Self { + let accounts = AccountTrie::from_changes(changes.rev().map(|layer| { + layer.accounts().map(|(address, account)| { + ( + address, + account.as_ref().map(|account| { + ( + BasicAccount::from((&account.info, KECCAK_NULL_RLP)), + &account.storage, + ) + }), + ) + }) + })); + + let contracts = ContractStorage::from(changes); + + Self { + accounts, + contracts, + } + } +} diff --git a/crates/rethnet_evm/src/state/trie/account.rs b/crates/rethnet_evm/src/state/trie/account.rs new file mode 100644 index 0000000000..3c2e89253e --- /dev/null +++ b/crates/rethnet_evm/src/state/trie/account.rs @@ -0,0 +1,658 @@ +use std::{collections::BTreeMap, fmt::Debug, sync::Arc}; + +use cita_trie::{MemoryDB, PatriciaTrie, Trie 
as CitaTrie}; +use hashbrown::HashMap; +use hasher::{Hasher, HasherKeccak}; +use rethnet_eth::{account::BasicAccount, Address, B160, B256, U256}; +use revm::primitives::{Account, AccountInfo}; + +/// A change to the account, where `None` implies deletion. +pub type AccountChange<'a> = (&'a Address, Option<(BasicAccount, &'a HashMap)>); + +type AccountStorageTries = HashMap, B256)>; + +type Trie = PatriciaTrie; + +/// A trie for maintaining the state of accounts and their storage. +#[derive(Debug)] +pub struct AccountTrie { + state_root: B256, + state_trie_db: Arc, + storage_trie_dbs: AccountStorageTries, +} + +impl AccountTrie { + /// Constructs a `TrieState` from an (address -> account) mapping. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn with_accounts(accounts: &HashMap) -> Self { + let state_trie_db = Arc::new(MemoryDB::new(true)); + let hasher = Arc::new(HasherKeccak::new()); + + let mut storage_trie_dbs = HashMap::new(); + + let state_root = { + let mut state_trie = Trie::new(state_trie_db.clone(), hasher.clone()); + accounts.iter().for_each(|(address, account_info)| { + let storage_trie_db = Arc::new(MemoryDB::new(true)); + let storage_root = { + let mut storage_trie = Trie::new(storage_trie_db.clone(), hasher.clone()); + + B256::from_slice(&storage_trie.root().unwrap()) + }; + storage_trie_dbs.insert(*address, (storage_trie_db, storage_root)); + + Self::set_account_in(address, account_info, storage_root, &mut state_trie); + }); + + B256::from_slice(&state_trie.root().unwrap()) + }; + + Self { + state_root, + state_trie_db, + storage_trie_dbs, + } + } + + /// Constructs a `TrieState` from layers of changes. + #[cfg_attr(feature = "tracing", tracing::instrument(skip(layers)))] + pub fn from_changes<'a, I, C>(layers: I) -> Self + where + I: IntoIterator, + C: IntoIterator>, + { + let state_trie_db = Arc::new(MemoryDB::new(true)); + let hasher = Arc::new(HasherKeccak::new()); + + let mut storage_trie_dbs = HashMap::new(); + + let state_root = { + let mut state_trie = Trie::new(state_trie_db.clone(), hasher.clone()); + + layers.into_iter().for_each(|layer| { + layer.into_iter().for_each(|(address, change)| { + if let Some((mut account, storage)) = change { + let storage_trie_db = Arc::new(MemoryDB::new(true)); + + let storage_root = { + let mut storage_trie = + Trie::new(storage_trie_db.clone(), hasher.clone()); + + storage.iter().for_each(|(index, value): (&U256, &U256)| { + Self::set_account_storage_slot_in(index, value, &mut storage_trie); + }); + + B256::from_slice(&storage_trie.root().unwrap()) + }; + + // Overwrites any existing storage in the process, as we receive the complete storage every change + storage_trie_dbs.insert(*address, (storage_trie_db, storage_root)); + + account.storage_root = storage_root; + + let hashed_address = HasherKeccak::new().digest(address.as_bytes()); + state_trie + .insert(hashed_address, rlp::encode(&account).to_vec()) + .unwrap(); + } else { + Self::remove_account_in(address, &mut state_trie, &mut storage_trie_dbs); + } + }) + }); + + B256::from_slice(&state_trie.root().unwrap()) + }; + + Self { + state_root, + state_trie_db, + storage_trie_dbs, + } + } + + /// Retrieves an account corresponding to the specified address from the state. 
+ #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn account(&self, address: &Address) -> Option { + let state_trie = Trie::from( + self.state_trie_db.clone(), + Arc::new(HasherKeccak::new()), + self.state_root.as_bytes(), + ) + .expect("Invalid state root"); + + Self::account_in(address, &state_trie) + } + + fn account_in(address: &Address, state_trie: &Trie) -> Option { + let hashed_address = HasherKeccak::new().digest(address.as_bytes()); + + state_trie + .get(&hashed_address) + .unwrap() + .map(|encoded_account| rlp::decode::(&encoded_account).unwrap()) + } + + /// Retrieves the storage storage corresponding to the account at the specified address and the specified index, if they exist. + pub fn account_storage_slot(&self, address: &Address, index: &U256) -> Option { + self.storage_trie_dbs + .get(address) + .and_then(|(storage_trie_db, storage_root)| { + let storage_trie = Trie::from( + storage_trie_db.clone(), + Arc::new(HasherKeccak::new()), + storage_root.as_bytes(), + ) + .expect("Invalid storage root"); + + let hashed_index = HasherKeccak::new().digest(&index.to_be_bytes::<32>()); + storage_trie + .get(&hashed_index) + .unwrap() + .map(|decode_value| rlp::decode::(&decode_value).unwrap()) + }) + } + + /// Commits changes to the state. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn commit(&mut self, changes: &HashMap) { + let mut state_trie = Trie::from( + self.state_trie_db.clone(), + Arc::new(HasherKeccak::new()), + self.state_root.as_bytes(), + ) + .expect("Invalid state root"); + + changes.iter().for_each(|(address, account)| { + if account.is_destroyed || account.is_empty() { + // Removes account only if it exists, so safe to use for empty, touched accounts + Self::remove_account_in(address, &mut state_trie, &mut self.storage_trie_dbs); + } else { + if account.storage_cleared { + // We can simply remove the storage trie db, as it will get reinitialized in the next operation + self.storage_trie_dbs.remove(address); + } + + let (storage_trie_db, storage_root) = + self.storage_trie_dbs.entry(*address).or_insert_with(|| { + let storage_trie_db = Arc::new(MemoryDB::new(true)); + let storage_root = { + let mut storage_trie = + Trie::new(storage_trie_db.clone(), Arc::new(HasherKeccak::new())); + + B256::from_slice(&storage_trie.root().unwrap()) + }; + + (storage_trie_db, storage_root) + }); + + let storage_changed = account.storage_cleared || !account.storage.is_empty(); + if storage_changed { + let mut storage_trie = Trie::from( + storage_trie_db.clone(), + Arc::new(HasherKeccak::new()), + storage_root.as_bytes(), + ) + .expect("Invalid storage root"); + + account.storage.iter().for_each(|(index, value)| { + Self::set_account_storage_slot_in( + index, + &value.present_value, + &mut storage_trie, + ); + }); + + *storage_root = B256::from_slice(&storage_trie.root().unwrap()); + } + + Self::set_account_in(address, &account.info, *storage_root, &mut state_trie); + } + }); + + self.state_root = B256::from_slice(&state_trie.root().unwrap()); + } + + /// Sets the provided account at the specified address. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn set_account(&mut self, address: &Address, account_info: &AccountInfo) { + let mut state_trie = Trie::from( + self.state_trie_db.clone(), + Arc::new(HasherKeccak::new()), + self.state_root.as_bytes(), + ) + .expect("Invalid state root"); + + // Check whether the account already existed. If so, use its storage root. 
+ let (_db, storage_root) = self.storage_trie_dbs.entry(*address).or_insert_with(|| { + let storage_trie_db = Arc::new(MemoryDB::new(true)); + let storage_root = { + let mut storage_trie = + Trie::new(storage_trie_db.clone(), Arc::new(HasherKeccak::new())); + B256::from_slice(&storage_trie.root().unwrap()) + }; + + (storage_trie_db, storage_root) + }); + + Self::set_account_in(address, account_info, *storage_root, &mut state_trie); + + self.state_root = B256::from_slice(&state_trie.root().unwrap()); + } + + /// Helper function for setting the account at the specified address into the provided state trie. + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn set_account_in( + address: &Address, + account_info: &AccountInfo, + storage_root: B256, + state_trie: &mut Trie, + ) { + let account = BasicAccount::from((account_info, storage_root)); + + let hashed_address = HasherKeccak::new().digest(address.as_bytes()); + state_trie + .insert(hashed_address, rlp::encode(&account).to_vec()) + .unwrap(); + } + + /// Removes the account at the specified address, if it exists. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn remove_account(&mut self, address: &Address) -> Option { + let mut state_trie = Trie::from( + self.state_trie_db.clone(), + Arc::new(HasherKeccak::new()), + self.state_root.as_bytes(), + ) + .expect("Invalid state root"); + + let account = Self::remove_account_in(address, &mut state_trie, &mut self.storage_trie_dbs); + + self.state_root = B256::from_slice(&state_trie.root().unwrap()); + + account + } + + /// Helper function for removing the account at the specified address from the provided state trie and storage tries, if it exists. + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn remove_account_in( + address: &Address, + state_trie: &mut Trie, + storage_trie_dbs: &mut AccountStorageTries, + ) -> Option { + let account = Self::account_in(address, state_trie); + + if account.is_some() { + let hashed_address = HasherKeccak::new().digest(address.as_bytes()); + state_trie.remove(&hashed_address).unwrap(); + + storage_trie_dbs.remove(address); + } + + account + } + + /// Serializes the state using ordering of addresses and storage indices. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn serialize(&self) -> String { + let state_trie = Trie::from( + self.state_trie_db.clone(), + Arc::new(HasherKeccak::new()), + self.state_root.as_bytes(), + ) + .expect("Invalid state root"); + + #[derive(serde::Serialize)] + struct StateAccount { + /// Balance of the account. + pub balance: U256, + /// Code hash of the account. + pub code_hash: B256, + /// Nonce of the account. + pub nonce: u64, + /// Storage + pub storage: BTreeMap, + /// Storage root of the account. 
+ pub storage_root: B256, + } + + let state: BTreeMap = self + .storage_trie_dbs + .iter() + .map(|(address, (storage_trie_db, storage_root))| { + let hashed_address = HasherKeccak::new().digest(address.as_bytes()); + let account = state_trie + .get(&hashed_address) + .unwrap() + .unwrap_or_else(|| panic!("Account with address '{}' and hashed address '{:?}' must exist in state, if a storage trie is stored for it", address, hashed_address)); + + let account: BasicAccount = rlp::decode(&account).unwrap(); + + let storage_trie = Trie::from( + storage_trie_db.clone(), + Arc::new(HasherKeccak::new()), + storage_root.as_bytes(), + ) + .expect("Invalid storage root"); + + let storage = storage_trie + .iter() + .map(|(hashed_index, encoded_value)| { + let value: U256 = rlp::decode(&encoded_value).unwrap(); + assert_eq!(hashed_index.len(), 32); + (B256::from_slice(&hashed_index), value) + }) + .collect(); + + let account = StateAccount { + balance: account.balance, + code_hash: account.code_hash, + nonce: account.nonce, + storage, + storage_root: *storage_root, + }; + + (*address, account) + }) + .collect(); + + serde_json::to_string_pretty(&state).unwrap() + } + + /// Sets the storage slot at the specified address and index to the provided value. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn set_account_storage_slot(&mut self, address: &Address, index: &U256, value: &U256) { + let (storage_trie_db, storage_root) = + self.storage_trie_dbs.entry(*address).or_insert_with(|| { + let storage_trie_db = Arc::new(MemoryDB::new(true)); + let storage_root = { + let mut storage_trie = + Trie::new(storage_trie_db.clone(), Arc::new(HasherKeccak::new())); + B256::from_slice(&storage_trie.root().unwrap()) + }; + + (storage_trie_db, storage_root) + }); + + { + let mut storage_trie = Trie::from( + storage_trie_db.clone(), + Arc::new(HasherKeccak::new()), + storage_root.as_bytes(), + ) + .expect("Invalid storage root"); + + Self::set_account_storage_slot_in(index, value, &mut storage_trie); + + *storage_root = B256::from_slice(&storage_trie.root().unwrap()); + }; + + let mut state_trie = Trie::from( + self.state_trie_db.clone(), + Arc::new(HasherKeccak::new()), + self.state_root.as_bytes(), + ) + .expect("Invalid state root"); + + let hashed_address = HasherKeccak::new().digest(address.as_bytes()); + let account = state_trie.get(&hashed_address).unwrap().map_or( + BasicAccount { + storage_root: *storage_root, + ..BasicAccount::default() + }, + |account| { + let mut account: BasicAccount = rlp::decode(&account).unwrap(); + account.storage_root = *storage_root; + account + }, + ); + + state_trie + .insert(hashed_address, rlp::encode(&account).to_vec()) + .unwrap(); + + self.state_root = B256::from_slice(&state_trie.root().unwrap()); + } + + /// Helper function for setting the storage slot at the specified address and index to the provided value. + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn set_account_storage_slot_in(index: &U256, value: &U256, storage_trie: &mut Trie) { + let hashed_index = HasherKeccak::new().digest(&index.to_be_bytes::<32>()); + if *value == U256::ZERO { + if storage_trie.contains(&hashed_index).unwrap() { + storage_trie.remove(&hashed_index).unwrap(); + } + } else { + storage_trie + .insert(hashed_index, rlp::encode(value).to_vec()) + .unwrap(); + } + } + + /// Retrieves the trie's state root. 
+ #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn state_root(&self) -> B256 { + self.state_root + } + + /// Retrieves the storage root of the account at the specified address. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn storage_root(&self, address: &Address) -> Option { + self.storage_trie_dbs.get(address).map(|(_db, root)| *root) + } +} + +impl Clone for AccountTrie { + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn clone(&self) -> Self { + let state_trie_db = Arc::new((*self.state_trie_db).clone()); + + let storage_trie_dbs = self + .storage_trie_dbs + .iter() + .map(|(address, (storage_trie_db, storage_root))| { + let storage_trie_db = Arc::new((**storage_trie_db).clone()); + + (*address, (storage_trie_db, *storage_root)) + }) + .collect(); + + Self { + state_root: self.state_root, + state_trie_db, + storage_trie_dbs, + } + } +} + +impl Default for AccountTrie { + #[cfg_attr(feature = "tracing", tracing::instrument)] + fn default() -> Self { + let state_trie_db = Arc::new(MemoryDB::new(true)); + let state_root = { + let mut state_trie = Trie::new(state_trie_db.clone(), Arc::new(HasherKeccak::new())); + + B256::from_slice(&state_trie.root().unwrap()) + }; + + Self { + state_root, + state_trie_db, + storage_trie_dbs: HashMap::new(), + } + } +} + +#[cfg(test)] +mod tests { + use rethnet_eth::{ + account::KECCAK_EMPTY, + state::{state_root, Storage}, + trie::KECCAK_NULL_RLP, + }; + + use super::*; + + fn precompiled_contracts() -> HashMap { + let mut accounts = HashMap::new(); + + // Mimic precompiles activation + for idx in 1..=8 { + let mut address = Address::zero(); + address.0[19] = idx; + accounts.insert(address, AccountInfo::default()); + } + + accounts + } + + #[test] + fn clone_empty() { + let state = AccountTrie::default(); + let cloned_state = state.clone(); + + assert_eq!(state.state_root(), cloned_state.state_root()); + } + + #[test] + fn clone_precompiles() { + let accounts = precompiled_contracts(); + + let state = AccountTrie::with_accounts(&accounts); + let cloned_state = state.clone(); + + assert_eq!(state.state_root(), cloned_state.state_root()); + } + + #[test] + fn default_empty() { + let state = AccountTrie::default(); + + assert_eq!(state.state_root(), KECCAK_NULL_RLP); + } + + #[test] + fn with_accounts_empty() { + let accounts = HashMap::new(); + let state = AccountTrie::with_accounts(&accounts); + + assert_eq!(state.state_root(), KECCAK_NULL_RLP); + } + + #[test] + fn with_accounts_precompiles() { + let accounts = precompiled_contracts(); + + let old: HashMap<_, _> = accounts + .iter() + .map(|(address, account_info)| { + ( + *address, + BasicAccount { + nonce: account_info.nonce, + balance: account_info.balance, + storage_root: KECCAK_NULL_RLP, + code_hash: account_info.code_hash, + }, + ) + }) + .collect(); + + let old = state_root(old.iter()); + + let state = AccountTrie::with_accounts(&accounts); + + assert_eq!(state.state_root(), old); + } + + #[test] + fn from_changes_empty() { + let changes: Vec>> = Vec::new(); + let state = AccountTrie::from_changes(changes); + + assert_eq!(state.state_root(), KECCAK_NULL_RLP); + } + + #[test] + fn from_changes_one_layer() { + const DUMMY_ADDRESS: [u8; 20] = [1u8; 20]; + + let expected_address = Address::from(DUMMY_ADDRESS); + let expected_storage = Storage::new(); + + let expected_account = BasicAccount { + nonce: 1, + balance: U256::from(100u32), + storage_root: KECCAK_NULL_RLP, + code_hash: KECCAK_EMPTY, + }; + + let changes: Vec>> = vec![vec![( + &expected_address, + 
Some((expected_account.clone(), &expected_storage)), + )]]; + let state = AccountTrie::from_changes(changes); + + let state_trie = Trie::from( + state.state_trie_db.clone(), + Arc::new(HasherKeccak::new()), + state.state_root.as_bytes(), + ) + .expect("Invalid state root"); + + let account = state_trie + .get(&HasherKeccak::new().digest(expected_address.as_bytes())) + .unwrap() + .expect("Account must exist"); + + let account: BasicAccount = rlp::decode(&account).expect("Failed to decode account"); + + assert_eq!(account, expected_account); + } + + #[test] + fn from_changes_two_layers() { + const DUMMY_ADDRESS: [u8; 20] = [1u8; 20]; + + let expected_address = Address::from(DUMMY_ADDRESS); + let expected_storage = Storage::new(); + + let account_layer1 = BasicAccount { + nonce: 1, + balance: U256::from(100u32), + storage_root: KECCAK_NULL_RLP, + code_hash: KECCAK_EMPTY, + }; + + let account_layer2 = BasicAccount { + nonce: 2, + balance: U256::from(200u32), + storage_root: KECCAK_NULL_RLP, + code_hash: KECCAK_EMPTY, + }; + + let changes: Vec>> = vec![ + vec![(&expected_address, Some((account_layer1, &expected_storage)))], + vec![( + &expected_address, + Some((account_layer2.clone(), &expected_storage)), + )], + ]; + let state = AccountTrie::from_changes(changes); + + let state_trie = Trie::from( + state.state_trie_db.clone(), + Arc::new(HasherKeccak::new()), + state.state_root.as_bytes(), + ) + .expect("Invalid state root"); + + let account = state_trie + .get(&HasherKeccak::new().digest(expected_address.as_bytes())) + .unwrap() + .expect("Account must exist"); + + let account: BasicAccount = rlp::decode(&account).expect("Failed to decode account"); + + assert_eq!(account, account_layer2); + } +} diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 359943838d..5892120123 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -10,10 +10,7 @@ use napi::{bindgen_prelude::*, JsFunction, JsObject, NapiRaw, Status}; use napi_derive::napi; use rethnet_eth::{signature::private_key_to_address, Address, Bytes, B256, U256}; use rethnet_evm::{ - state::{ - AccountModifierFn, AsyncState, LayeredState, RethnetLayer, StateDebug, StateError, - SyncState, - }, + state::{AccountModifierFn, AsyncState, HybridState, StateError, StateHistory, SyncState}, AccountInfo, Bytecode, HashMap, KECCAK_EMPTY, }; use secp256k1::Secp256k1; @@ -100,7 +97,7 @@ impl StateManager { accounts.insert(address, AccountInfo::default()); } - let mut state = LayeredState::with_layer(RethnetLayer::with_genesis_accounts(accounts)); + let mut state = HybridState::with_accounts(accounts); state.checkpoint().unwrap(); @@ -377,6 +374,13 @@ impl StateManager { self.state.remove_snapshot(state_root).await } + /// Serializes the state using ordering of addresses and storage indices. + #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub async fn serialize(&self) -> String { + self.state.serialize().await + } + /// Sets the storage slot at the specified address and index to the provided value. 
#[napi] #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts index 04f3873b4d..1d6eff5329 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts @@ -129,4 +129,8 @@ export class RethnetStateManager { public async setStateRoot(stateRoot: Buffer): Promise { return this._state.setStateRoot(stateRoot); } + + public async serialize(): Promise { + return this._state.serialize(); + } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/fork/ForkStateManager.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/fork/ForkStateManager.ts index 065c03d1eb..1d50a10114 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/fork/ForkStateManager.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/fork/ForkStateManager.ts @@ -43,6 +43,9 @@ const notSupportedError = (method: string) => new Error(`${method} is not supported when forking from remote network`); export class ForkStateManager implements StateManager { + // temporary, used to print the whole storage + // should be removed + public addresses: Set = new Set(); private _state: State = ImmutableMap>(); private _initialStateRoot: string = randomHash(); private _stateRoot: string = this._initialStateRoot; @@ -154,6 +157,7 @@ export class ForkStateManager implements StateManager { } public async putContractCode(address: Address, value: Buffer): Promise { + this.addresses.add(address.toString()); const hexAddress = address.toString(); const account = (this._state.get(hexAddress) ?? makeAccountState()).set( "code", @@ -211,6 +215,7 @@ export class ForkStateManager implements StateManager { key: Buffer, value: Buffer ): Promise { + this.addresses.add(address.toString()); if (key.length !== 32) { throw new Error("Storage key must be 32 bytes long"); } @@ -241,6 +246,7 @@ export class ForkStateManager implements StateManager { } public async clearContractStorage(address: Address): Promise { + this.addresses.add(address.toString()); const hexAddress = address.toString(); let account = this._state.get(hexAddress) ?? makeAccountState(); account = account @@ -366,6 +372,7 @@ export class ForkStateManager implements StateManager { // we set an empty account instead of deleting it to avoid // re-fetching the state from the remote node. // This is only valid post spurious dragon, but we don't support older hardforks when forking. + this.addresses.add(address.toString()); const emptyAccount = makeEmptyAccountState(); this._state = this._state.set(address.toString(), emptyAccount); } @@ -391,6 +398,7 @@ export class ForkStateManager implements StateManager { } private _putAccount(address: Address, account: Account): void { + this.addresses.add(address.toString()); // Because the vm only ever modifies the nonce, balance and codeHash using this // method we ignore the stateRoot property const hexAddress = address.toString(); @@ -429,6 +437,7 @@ export class ForkStateManager implements StateManager { address: Address, accountFields: any ): Promise { + this.addresses.add(address.toString()); // copied from BaseStateManager const account = await this.getAccount(address); account.nonce = accountFields.nonce ?? 
account.nonce; diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index 699ffc809c..3c84c377bd 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -133,6 +133,7 @@ export class DualModeAdapter implements VMAdapter { "hex" )} !== ${rethnetRoot.toString("hex")}` ); + await this.printState(); throw new Error("Different state root"); } @@ -310,6 +311,7 @@ export class DualModeAdapter implements VMAdapter { "hex" )} !== ${rethnetRoot.toString("hex")}` ); + await this.printState(); throw new Error("Different snapshot state root"); } @@ -539,6 +541,13 @@ export class DualModeAdapter implements VMAdapter { this._ethereumJSVMTracer.clearLastError(); this._rethnetVMTracer.clearLastError(); } + + public async printState() { + console.log("EthereumJS:"); + await this._ethereumJSAdapter.printState(); + console.log("Rethnet:"); + await this._rethnetAdapter.printState(); + } } function assertEqualRunTxResults( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index 73d28562ed..916ea5aea1 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -38,12 +38,73 @@ import { FakeSenderTransaction } from "../transactions/FakeSenderTransaction"; import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; import { Bloom } from "../utils/bloom"; import { makeForkClient } from "../utils/makeForkClient"; +import { makeAccount } from "../utils/makeAccount"; import { makeStateTrie } from "../utils/makeStateTrie"; import { Exit } from "./exit"; import { RunTxResult, Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ +// temporary wrapper class used to print the whole storage +class DefaultStateManagerWithAddresses extends DefaultStateManager { + public addresses: Set = new Set(); + + public putAccount(address: Address, account: Account): Promise { + this.addresses.add(address.toString()); + return super.putAccount(address, account); + } + + public deleteAccount(address: Address): Promise { + this.addresses.add(address.toString()); + return super.deleteAccount(address); + } + + public modifyAccountFields( + address: Address, + accountFields: any + ): Promise { + this.addresses.add(address.toString()); + return super.modifyAccountFields(address, accountFields); + } + + public putContractCode(address: Address, value: Buffer): Promise { + this.addresses.add(address.toString()); + return super.putContractCode(address, value); + } + + public putContractStorage( + address: Address, + key: Buffer, + value: Buffer + ): Promise { + this.addresses.add(address.toString()); + return super.putContractStorage(address, key, value); + } + + public clearContractStorage(address: Address): Promise { + this.addresses.add(address.toString()); + return super.clearContractStorage(address); + } +} + +interface Storage { + [address: string]: { + balance: string; + nonce: number; + // eslint-disable-next-line @typescript-eslint/naming-convention + code_hash: string; + // eslint-disable-next-line @typescript-eslint/naming-convention + storage_root: string; + storage: { + [storageSlot: string]: string; + }; + }; +} + 
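// Illustrative sketch only, not part of the original change: a hypothetical value
// conforming to the `Storage` shape that `printState` dumps. The address and the
// balance/nonce below are made up; the code_hash and storage_root constants are the
// empty-code and empty-storage-trie hashes already used in the skip check further down.
const exampleDump: Storage = {
  "0x1000000000000000000000000000000000000001": {
    balance: `0x${"1".padStart(64, "0")}`, // 64 hex chars, as produced by the padded formatting
    nonce: 1,
    code_hash:
      "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470",
    storage_root:
      "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
    storage: {},
  },
};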
+type StateManagerWithAddresses = StateManager & { + addresses: Set; +}; + export class EthereumJSAdapter implements VMAdapter { private _blockStartStateRoot: Buffer | undefined; @@ -51,7 +112,7 @@ export class EthereumJSAdapter implements VMAdapter { constructor( private readonly _vm: VM, - private readonly _stateManager: StateManager, + public readonly _stateManager: StateManagerWithAddresses, private readonly _blockchain: HardhatBlockchainInterface, private readonly _common: Common, private readonly _configNetworkId: number, @@ -84,7 +145,7 @@ export class EthereumJSAdapter implements VMAdapter { config: NodeConfig, selectHardfork: (blockNumber: bigint) => string ): Promise { - let stateManager: StateManager; + let stateManager: StateManagerWithAddresses; let forkBlockNum: bigint | undefined; let forkNetworkId: number | undefined; @@ -107,9 +168,14 @@ export class EthereumJSAdapter implements VMAdapter { } else { const stateTrie = await makeStateTrie(config.genesisAccounts); - stateManager = new DefaultStateManager({ + stateManager = new DefaultStateManagerWithAddresses({ trie: stateTrie, }); + + for (const genesisAccount of config.genesisAccounts) { + const { address } = makeAccount(genesisAccount); + stateManager.addresses.add(address.toString()); + } } const eei = new EEI(stateManager, common, blockchain); @@ -467,6 +533,80 @@ export class EthereumJSAdapter implements VMAdapter { this._vmTracer.clearLastError(); } + public async printState() { + const storage: Storage = {}; + + for (const address of this._stateManager.addresses) { + const account = await this._stateManager.getAccount( + Address.fromString(address) + ); + + const nonce = Number(account.nonce); + const balance = `0x${account.balance.toString(16).padStart(64, "0")}`; + const codeHash = `0x${account.codeHash + .toString("hex") + .padStart(64, "0")}`; + + const storageRoot = `0x${account.storageRoot + .toString("hex") + .padStart(64, "0")}`; + + const dumpedAccountStorage = await this._stateManager.dumpStorage( + Address.fromString(address) + ); + + const accountStorage: Record = {}; + + for (const [key, value] of Object.entries(dumpedAccountStorage)) { + accountStorage[`0x${key.padStart(64, "0")}`] = `0x${value.padStart( + 64, + "0" + )}`; + } + + if ( + nonce === 0 && + account.balance === 0n && + // empty code + codeHash === + "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" && + // empty storage + storageRoot === + "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" + ) { + if (Object.entries(accountStorage).length > 0) { + // sanity check + throw new Error( + "Assertion error: storage root is empty but storage has data" + ); + } + + // we don't add empty accounts + continue; + } + + storage[address] = { + nonce, + balance, + code_hash: codeHash, + storage_root: storageRoot, + storage: accountStorage, + }; + } + + const replacer = (_key: any, value: any) => + typeof value === "object" && !Array.isArray(value) && value !== null + ? 
Object.keys(value) + .sort() + .reduce((sorted: any, key: any) => { + sorted[key] = value[key]; + return sorted; + }, {}) + : value; + + console.log(JSON.stringify(storage, replacer, 2)); + } + private _getCommonForTracing(networkId: number, blockNumber: bigint): Common { try { const common = Common.custom( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index b5ef329071..2d5bda6907 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -411,6 +411,10 @@ export class RethnetAdapter implements VMAdapter { this._vmTracer.clearLastError(); } + public async printState() { + console.log(await this._state.serialize()); + } + private _getBlockEnvDifficulty( difficulty: bigint | undefined ): bigint | undefined { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index 9dcd16b5d3..0e1085725a 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -84,4 +84,7 @@ export interface VMAdapter { // methods for snapshotting makeSnapshot(): Promise<[Buffer, boolean]>; removeSnapshot(stateRoot: Buffer): Promise; + + // for debugging purposes + printState(): Promise; } From 294df5178af41c0e2284c594af7307f61cddcbb1 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 16:54:34 -0500 Subject: [PATCH 048/406] improvement: adjust napi external memory for StateManager (#3841) --- crates/rethnet_evm_napi/src/state.rs | 42 ++++++++++++++++++++++------ 1 file changed, 33 insertions(+), 9 deletions(-) diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 5892120123..4e601a3f0f 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -6,7 +6,10 @@ use std::{ }, }; -use napi::{bindgen_prelude::*, JsFunction, JsObject, NapiRaw, Status}; +use napi::{ + bindgen_prelude::{BigInt, Buffer, ObjectFinalize}, + Env, JsFunction, JsObject, NapiRaw, Status, +}; use napi_derive::napi; use rethnet_eth::{signature::private_key_to_address, Address, Bytes, B256, U256}; use rethnet_evm::{ @@ -23,6 +26,10 @@ use crate::{ threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, }; +// An arbitrarily large amount of memory to signal to the javascript garbage collector that it needs to +// attempt to free the state object's memory. +const STATE_MEMORY_SIZE: i64 = 10_000; + struct ModifyAccountCall { pub balance: U256, pub nonce: u64, @@ -49,7 +56,7 @@ pub struct SnapshotId { } /// The Rethnet state -#[napi] +#[napi(custom_finalize)] #[derive(Debug)] pub struct StateManager { pub(super) state: Arc>, @@ -60,14 +67,17 @@ impl StateManager { /// Constructs a [`StateManager`] with an empty state. #[napi(constructor)] #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - pub fn new() -> napi::Result { - Self::with_accounts(HashMap::default()) + pub fn new(mut env: Env) -> napi::Result { + Self::with_accounts(&mut env, HashMap::default()) } /// Constructs a [`StateManager`] with the provided accounts present in the genesis state. 
#[napi(factory)] #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - pub fn with_genesis_accounts(accounts: Vec) -> napi::Result { + pub fn with_genesis_accounts( + mut env: Env, + accounts: Vec, + ) -> napi::Result { let context = Secp256k1::signing_only(); let genesis_accounts = accounts .into_iter() @@ -85,11 +95,14 @@ impl StateManager { }) .collect::>>()?; - Self::with_accounts(genesis_accounts) + Self::with_accounts(&mut env, genesis_accounts) } #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn with_accounts(mut accounts: HashMap) -> napi::Result { + fn with_accounts( + env: &mut Env, + mut accounts: HashMap, + ) -> napi::Result { // Mimic precompiles activation for idx in 1..=8 { let mut address = Address::zero(); @@ -101,11 +114,11 @@ impl StateManager { state.checkpoint().unwrap(); - Self::with_state(state) + Self::with_state(env, state) } #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn with_state(state: S) -> napi::Result + fn with_state(env: &mut Env, state: S) -> napi::Result where S: SyncState, { @@ -115,6 +128,8 @@ impl StateManager { let state = AsyncState::new(state) .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + env.adjust_external_memory(STATE_MEMORY_SIZE)?; + Ok(Self { state: Arc::new(state), }) @@ -412,3 +427,12 @@ impl StateManager { .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } } + +impl ObjectFinalize for StateManager { + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] + fn finalize(self, mut env: Env) -> napi::Result<()> { + env.adjust_external_memory(-STATE_MEMORY_SIZE)?; + + Ok(()) + } +} From 41eafe25a646871cb91035f22972ce9c7a52bab3 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 17:09:18 -0500 Subject: [PATCH 049/406] refactor: replace background with short-lived tasks (#3842) --- crates/rethnet_eth/Cargo.toml | 2 +- crates/rethnet_evm/Cargo.toml | 2 +- crates/rethnet_evm/src/block/builder.rs | 60 +-- crates/rethnet_evm/src/blockchain.rs | 19 +- crates/rethnet_evm/src/blockchain/request.rs | 49 -- crates/rethnet_evm/src/blockchain/sync.rs | 124 ----- crates/rethnet_evm/src/evm.rs | 58 +-- crates/rethnet_evm/src/inspector.rs | 47 +- crates/rethnet_evm/src/lib.rs | 4 +- crates/rethnet_evm/src/runtime.rs | 78 +-- crates/rethnet_evm/src/state.rs | 35 +- crates/rethnet_evm/src/state/request.rs | 159 ------ crates/rethnet_evm/src/state/sync.rs | 490 ------------------ crates/rethnet_evm/src/trace.rs | 11 +- crates/rethnet_evm_napi/src/blockchain.rs | 20 +- .../src/blockchain/js_blockchain.rs | 17 +- crates/rethnet_evm_napi/src/context.rs | 79 +++ crates/rethnet_evm_napi/src/lib.rs | 1 + crates/rethnet_evm_napi/src/state.rs | 226 +++++--- crates/rethnet_evm_napi/src/tracer.rs | 4 +- .../rethnet_evm_napi/src/tracer/js_tracer.rs | 58 ++- .../src/transaction/result.rs | 12 +- crates/rethnet_evm_napi/test/evm/RethnetDb.ts | 195 +++---- .../rethnet_evm_napi/test/evm/StateManager.ts | 6 +- .../src/internal/core/providers/http.ts | 2 +- .../hardhat-network/provider/RethnetState.ts | 6 +- .../hardhat-network/provider/vm/rethnet.ts | 4 + 27 files changed, 585 insertions(+), 1183 deletions(-) delete mode 100644 crates/rethnet_evm/src/blockchain/request.rs delete mode 100644 crates/rethnet_evm/src/blockchain/sync.rs delete mode 100644 crates/rethnet_evm/src/state/request.rs delete mode 100644 crates/rethnet_evm/src/state/sync.rs create mode 100644 crates/rethnet_evm_napi/src/context.rs diff --git a/crates/rethnet_eth/Cargo.toml 
b/crates/rethnet_eth/Cargo.toml index cde44e5161..814d0aef3c 100644 --- a/crates/rethnet_eth/Cargo.toml +++ b/crates/rethnet_eth/Cargo.toml @@ -13,7 +13,7 @@ hex = { version = "0.4.3", default-features = false, features = ["alloc"] } open-fastrlp = { version = "0.1.2", default-features = false, features = ["derive"], optional = true } primitive-types = { version = "0.11.1", default-features = false, features = ["rlp"] } reqwest = { version = "0.11", features = ["blocking", "json"] } -revm-primitives = { git = "https://github.com/bluealloy/revm", rev = "3789509", version = "1.0", default-features = false } +revm-primitives = { git = "https://github.com/Wodann/revm", rev = "a4550cc", version = "1.1", default-features = false } # revm-primitives = { path = "../../../revm/crates/primitives", version = "1.0", default-features = false } rlp = { version = "0.5.2", default-features = false, features = ["derive"] } ruint = { version = "1.7.0", default-features = false } diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 0026673ffd..422d7213b8 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -11,7 +11,7 @@ hasher = { git = "https://github.com/Wodann/hasher", rev = "89d3fc9", version = log = { version = "0.4.17", default-features = false } parking_lot = { version = "0.12.1", default-features = false } rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth", features = ["serde"] } -revm = { git = "https://github.com/bluealloy/revm", rev = "3789509", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } +revm = { git = "https://github.com/Wodann/revm", rev = "a4550cc", version = "3.1", default-features = false, features = ["dev", "secp256k1", "serde", "std"] } # revm = { path = "../../../revm/crates/revm", version = "3.0", default-features = false, features = ["dev", "serde", "std"] } rlp = { version = "0.5.2", default-features = false } serde = { version = "1.0.158", default-features = false, features = ["std"] } diff --git a/crates/rethnet_evm/src/block/builder.rs b/crates/rethnet_evm/src/block/builder.rs index 31c6f2dcd1..e50e1c0a8b 100644 --- a/crates/rethnet_evm/src/block/builder.rs +++ b/crates/rethnet_evm/src/block/builder.rs @@ -8,12 +8,12 @@ use revm::{ db::DatabaseComponentError, primitives::{BlockEnv, CfgEnv, EVMError, ExecutionResult, InvalidTransaction, SpecId, TxEnv}, }; -use tokio::runtime::Runtime; +use tokio::sync::RwLock; use crate::{ - blockchain::AsyncBlockchain, - evm::{run_transaction, AsyncInspector}, - state::{AccountModifierFn, AsyncState}, + blockchain::SyncBlockchain, + evm::{build_evm, run_transaction, SyncInspector}, + state::{AccountModifierFn, SyncState}, trace::Trace, HeaderData, }; @@ -51,8 +51,8 @@ where BE: Debug + Send + 'static, SE: Debug + Send + 'static, { - blockchain: Arc>, - state: Arc>, + blockchain: Arc>>>, + state: Arc>>>, header: PartialHeader, transactions: Vec, cfg: CfgEnv, @@ -65,8 +65,8 @@ where { /// Creates an intance of [`BlockBuilder`], creating a checkpoint in the process. pub fn new( - blockchain: Arc>, - state: Arc>, + blockchain: Arc>>>, + state: Arc>>>, cfg: CfgEnv, parent: Header, header: HeaderData, @@ -91,11 +91,6 @@ where } } - /// Retrieves the runtime of the [`BlockBuilder`]. - pub fn runtime(&self) -> &Runtime { - self.state.runtime() - } - /// Retrieves the amount of gas used in the block, so far. 
pub fn gas_used(&self) -> U256 { self.header.gas_used @@ -118,7 +113,7 @@ where pub async fn add_transaction( &mut self, transaction: TxEnv, - inspector: Option>>, + inspector: Option>>, ) -> Result<(ExecutionResult, Trace), BlockTransactionError> { // transaction's gas limit cannot be greater than the remaining gas in the block if U256::from(transaction.gas_limit) > self.gas_remaining() { @@ -140,19 +135,14 @@ where }, }; - let (result, changes, trace) = run_transaction( - self.state.runtime(), - self.blockchain.clone(), - self.state.clone(), - self.cfg.clone(), - transaction, - block, - inspector, - ) - .await - .unwrap()?; + let mut state = self.state.write().await; + let blockchain = self.blockchain.read().await; + + let evm = build_evm(&*blockchain, &*state, self.cfg.clone(), transaction, block); + + let (result, changes, trace) = run_transaction(evm, inspector)?; - self.state.apply(changes).await; + state.commit(changes); self.header.gas_used += U256::from(result.gas_used()); @@ -163,15 +153,14 @@ where /// Finalizes the block, returning the state root. /// TODO: Build a full block pub async fn finalize(self, rewards: Vec<(Address, U256)>) -> Result<(), SE> { + let mut state = self.state.write().await; for (address, reward) in rewards { - self.state - .modify_account( - address, - AccountModifierFn::new(Box::new(move |balance, _nonce, _code| { - *balance += reward; - })), - ) - .await?; + state.modify_account( + address, + AccountModifierFn::new(Box::new(move |balance, _nonce, _code| { + *balance += reward; + })), + )?; } Ok(()) @@ -179,6 +168,7 @@ where /// Aborts building of the block, reverting all transactions in the process. pub async fn abort(self) -> Result<(), SE> { - self.state.revert().await + let mut state = self.state.write().await; + state.revert() } } diff --git a/crates/rethnet_evm/src/blockchain.rs b/crates/rethnet_evm/src/blockchain.rs index 6c7d91710c..bd5279df8d 100644 --- a/crates/rethnet_evm/src/blockchain.rs +++ b/crates/rethnet_evm/src/blockchain.rs @@ -1,4 +1,17 @@ -mod request; -mod sync; +use std::fmt::Debug; -pub use sync::{AsyncBlockchain, SyncBlockchain}; +use revm::db::BlockHashRef; + +/// Trait that meets all requirements for a synchronous database that can be used by [`AsyncBlockchain`]. +pub trait SyncBlockchain: BlockHashRef + Send + Sync + Debug + 'static +where + E: Debug + Send, +{ +} + +impl SyncBlockchain for B +where + B: BlockHashRef + Send + Sync + Debug + 'static, + E: Debug + Send, +{ +} diff --git a/crates/rethnet_evm/src/blockchain/request.rs b/crates/rethnet_evm/src/blockchain/request.rs deleted file mode 100644 index a1e0cb18bf..0000000000 --- a/crates/rethnet_evm/src/blockchain/request.rs +++ /dev/null @@ -1,49 +0,0 @@ -use std::fmt::Debug; - -use rethnet_eth::{B256, U256}; -use revm::db::BlockHash; -use tokio::sync::oneshot; - -/// The request type used internally by a [`SyncDatabase`]. 
-#[derive(Debug)] -pub enum Request -where - E: Debug, -{ - BlockHashByNumber { - number: U256, - sender: oneshot::Sender>, - }, - // InsertBlock { - // block_number: U256, - // block_hash: B256, - // sender: oneshot::Sender>, - // }, - Terminate, -} - -impl Request -where - E: Debug, -{ - pub fn handle(self, db: &mut D) -> bool - where - D: BlockHash, - { - match self { - Request::BlockHashByNumber { number, sender } => { - sender.send(db.block_hash(number)).unwrap() - } - // Request::InsertBlock { - // block_number, - // block_hash, - // sender, - // } => sender - // .send(db.insert_block(block_number, block_hash)) - // .unwrap(), - Request::Terminate => return false, - } - - true - } -} diff --git a/crates/rethnet_evm/src/blockchain/sync.rs b/crates/rethnet_evm/src/blockchain/sync.rs deleted file mode 100644 index 4bef0acfeb..0000000000 --- a/crates/rethnet_evm/src/blockchain/sync.rs +++ /dev/null @@ -1,124 +0,0 @@ -use std::{fmt::Debug, io}; - -use rethnet_eth::{B256, U256}; -use revm::db::{BlockHash, BlockHashRef}; -use tokio::{ - runtime::{Builder, Runtime}, - sync::{ - mpsc::{unbounded_channel, UnboundedSender}, - oneshot, - }, - task::{self, JoinHandle}, -}; - -use super::request::Request; - -/// Trait that meets all requirements for a synchronous database that can be used by [`AsyncBlockchain`]. -pub trait SyncBlockchain: BlockHash + Send + Sync + 'static -where - E: Debug + Send, -{ -} - -impl SyncBlockchain for B -where - B: BlockHash + Send + Sync + 'static, - E: Debug + Send, -{ -} - -/// A helper class for converting a synchronous blockchain into an asynchronous blockchain. -/// -/// Requires the inner blockchain to implement [`Blockchain`]. -#[derive(Debug)] -pub struct AsyncBlockchain -where - E: Debug + Send, -{ - runtime: Runtime, - request_sender: UnboundedSender>, - blockchain_handle: Option>, -} - -impl AsyncBlockchain -where - E: Debug + Send + 'static, -{ - /// Constructs an [`AsyncBlockchain`] instance with the provided database. - pub fn new>(mut blockchain: B) -> io::Result { - let runtime = Builder::new_multi_thread().build()?; - - let (sender, mut receiver) = unbounded_channel::>(); - - let blockchain_handle = runtime.spawn(async move { - while let Some(request) = receiver.recv().await { - if !request.handle(&mut blockchain) { - break; - } - } - }); - - Ok(Self { - runtime, - request_sender: sender, - blockchain_handle: Some(blockchain_handle), - }) - } - - /// Retrieves the runtime of the [`AsyncBlockchain`]. - pub fn runtime(&self) -> &Runtime { - &self.runtime - } - - /// Retrieves the hash of the block corresponding to the specified number. - pub async fn block_hash_by_number(&self, number: U256) -> Result { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::BlockHashByNumber { number, sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - // /// Inserts the specified block number and hash into the state. 
- // pub async fn insert_block(&self, block_number: U256, block_hash: B256) -> Result<(), E> { - // let (sender, receiver) = oneshot::channel(); - - // self.request_sender - // .send(Request::InsertBlock { - // block_number, - // block_hash, - // sender, - // }) - // .expect("Failed to send request"); - - // receiver.await.unwrap() - // } -} - -impl Drop for AsyncBlockchain -where - E: Debug + Send, -{ - fn drop(&mut self) { - if let Some(handle) = self.blockchain_handle.take() { - self.request_sender - .send(Request::Terminate) - .expect("Failed to send request"); - - self.runtime.block_on(handle).unwrap(); - } - } -} - -impl BlockHashRef for AsyncBlockchain -where - E: Debug + Send + 'static, -{ - type Error = E; - - fn block_hash(&self, number: U256) -> Result { - task::block_in_place(move || self.runtime.block_on(self.block_hash_by_number(number))) - } -} diff --git a/crates/rethnet_evm/src/evm.rs b/crates/rethnet_evm/src/evm.rs index 9cf6eadbff..b17c143bc7 100644 --- a/crates/rethnet_evm/src/evm.rs +++ b/crates/rethnet_evm/src/evm.rs @@ -1,22 +1,21 @@ -use std::{fmt::Debug, sync::Arc}; +use std::fmt::Debug; use revm::{ db::{DatabaseComponentError, DatabaseComponents}, primitives::{BlockEnv, CfgEnv, EVMError, ExecutionResult, ResultAndState, State, TxEnv}, Inspector, }; -use tokio::{runtime::Runtime, task::JoinHandle}; use crate::{ - blockchain::AsyncBlockchain, + blockchain::SyncBlockchain, inspector::DualInspector, - runtime::AsyncDatabase, - state::AsyncState, + state::SyncState, trace::{Trace, TraceCollector}, + SyncDatabase, }; /// Super trait for an inspector of an `AsyncDatabase` that's debuggable. -pub trait AsyncInspector: Inspector> + Debug + Send +pub trait SyncInspector: Inspector> + Debug + Send where BE: Debug + Send + 'static, SE: Debug + Send + 'static, @@ -24,15 +23,14 @@ where } /// Creates an evm from the provided database, config, transaction, and block. 
-#[allow(clippy::type_complexity)] #[cfg_attr(feature = "tracing", tracing::instrument)] -fn build_evm( - blockchain: Arc>, - state: Arc>, +pub fn build_evm<'b, 's, BE, SE>( + blockchain: &'b dyn SyncBlockchain, + state: &'s dyn SyncState, cfg: CfgEnv, transaction: TxEnv, block: BlockEnv, -) -> revm::EVM> +) -> revm::EVM> where BE: Debug + Send + 'static, SE: Debug + Send + 'static, @@ -49,36 +47,26 @@ where evm } -#[allow(clippy::type_complexity)] -#[cfg_attr(feature = "tracing", tracing::instrument)] +#[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn run_transaction( - runtime: &Runtime, - blockchain: Arc>, - state: Arc>, - cfg: CfgEnv, - transaction: TxEnv, - block: BlockEnv, - inspector: Option>>, -) -> JoinHandle>>> + evm: revm::EVM>, + inspector: Option>>, +) -> Result<(ExecutionResult, State, Trace), EVMError>> where BE: Debug + Send + 'static, SE: Debug + Send + 'static, { - runtime.spawn(async move { - let mut evm = build_evm(blockchain, state, cfg, transaction, block); - - let (result, state, tracer) = if let Some(inspector) = inspector { - let mut inspector = DualInspector::new(TraceCollector::default(), inspector); + let (result, state, tracer) = if let Some(inspector) = inspector { + let mut inspector = DualInspector::new(TraceCollector::default(), inspector); - let ResultAndState { result, state } = evm.inspect(&mut inspector)?; - (result, state, inspector.into_parts().0) - } else { - let mut inspector = TraceCollector::default(); - let ResultAndState { result, state } = evm.inspect(&mut inspector)?; + let ResultAndState { result, state } = evm.inspect_ref(&mut inspector)?; + (result, state, inspector.into_parts().0) + } else { + let mut inspector = TraceCollector::default(); + let ResultAndState { result, state } = evm.inspect_ref(&mut inspector)?; - (result, state, inspector) - }; + (result, state, inspector) + }; - Ok((result, state, tracer.into_trace())) - }) + Ok((result, state, tracer.into_trace())) } diff --git a/crates/rethnet_evm/src/inspector.rs b/crates/rethnet_evm/src/inspector.rs index c8f99c9014..8c7bc40eed 100644 --- a/crates/rethnet_evm/src/inspector.rs +++ b/crates/rethnet_evm/src/inspector.rs @@ -1,6 +1,6 @@ use std::marker::PhantomData; -use revm::{Database, Inspector}; +use revm::Inspector; // TODO: Improve this design by introducing a InspectorMut trait @@ -8,22 +8,20 @@ use revm::{Database, Inspector}; /// first, followed by the mutable inspector. To ensure both inspectors observe a valid state, you /// have to ensure that only the mutable inspector modifies state. The returned values are solely /// determined by the mutable inspector. -pub struct DualInspector +pub struct DualInspector where - A: Inspector, - B: Inspector, - DB: Database, + A: Inspector, + B: Inspector, { immutable: A, mutable: B, - phantom: PhantomData, + phantom: PhantomData, } -impl DualInspector +impl DualInspector where - A: Inspector, - B: Inspector, - DB: Database, + A: Inspector, + B: Inspector, { /// Constructs a `DualInspector` from the provided inspectors. 
pub fn new(immutable: A, mutable: B) -> Self { @@ -40,16 +38,15 @@ where } } -impl Inspector for DualInspector +impl Inspector for DualInspector where - A: Inspector, - B: Inspector, - DB: Database, + A: Inspector, + B: Inspector, { fn initialize_interp( &mut self, interp: &mut revm::interpreter::Interpreter, - data: &mut revm::EVMData<'_, DB>, + data: &mut dyn revm::EVMData, is_static: bool, ) -> revm::interpreter::InstructionResult { self.immutable.initialize_interp(interp, data, is_static); @@ -59,7 +56,7 @@ where fn step( &mut self, interp: &mut revm::interpreter::Interpreter, - data: &mut revm::EVMData<'_, DB>, + data: &mut dyn revm::EVMData, is_static: bool, ) -> revm::interpreter::InstructionResult { self.immutable.step(interp, data, is_static); @@ -68,7 +65,7 @@ where fn log( &mut self, - evm_data: &mut revm::EVMData<'_, DB>, + evm_data: &mut dyn revm::EVMData, address: &rethnet_eth::B160, topics: &[rethnet_eth::B256], data: &rethnet_eth::Bytes, @@ -80,7 +77,7 @@ where fn step_end( &mut self, interp: &mut revm::interpreter::Interpreter, - data: &mut revm::EVMData<'_, DB>, + data: &mut dyn revm::EVMData, is_static: bool, eval: revm::interpreter::InstructionResult, ) -> revm::interpreter::InstructionResult { @@ -90,7 +87,7 @@ where fn call( &mut self, - data: &mut revm::EVMData<'_, DB>, + data: &mut dyn revm::EVMData, inputs: &mut revm::interpreter::CallInputs, is_static: bool, ) -> ( @@ -104,7 +101,7 @@ where fn call_end( &mut self, - data: &mut revm::EVMData<'_, DB>, + data: &mut dyn revm::EVMData, inputs: &revm::interpreter::CallInputs, remaining_gas: revm::interpreter::Gas, ret: revm::interpreter::InstructionResult, @@ -123,7 +120,7 @@ where fn create( &mut self, - data: &mut revm::EVMData<'_, DB>, + data: &mut dyn revm::EVMData, inputs: &mut revm::interpreter::CreateInputs, ) -> ( revm::interpreter::InstructionResult, @@ -137,7 +134,7 @@ where fn create_end( &mut self, - data: &mut revm::EVMData<'_, DB>, + data: &mut dyn revm::EVMData, inputs: &revm::interpreter::CreateInputs, ret: revm::interpreter::InstructionResult, address: Option, @@ -155,8 +152,8 @@ where .create_end(data, inputs, ret, address, remaining_gas, out) } - fn selfdestruct(&mut self) { - self.immutable.selfdestruct(); - self.mutable.selfdestruct(); + fn selfdestruct(&mut self, contract: rethnet_eth::B160, target: rethnet_eth::B160) { + self.immutable.selfdestruct(contract, target); + self.mutable.selfdestruct(contract, target); } } diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs index d89b9fb640..514eba5ff6 100644 --- a/crates/rethnet_evm/src/lib.rs +++ b/crates/rethnet_evm/src/lib.rs @@ -21,8 +21,8 @@ pub use revm::{ pub use crate::{ block::{BlockBuilder, HeaderData}, - evm::AsyncInspector, - runtime::{AsyncDatabase, Rethnet}, + evm::SyncInspector, + runtime::{Rethnet, SyncDatabase}, transaction::{PendingTransaction, TransactionError}, }; diff --git a/crates/rethnet_evm/src/runtime.rs b/crates/rethnet_evm/src/runtime.rs index a135fada88..df273e8e62 100644 --- a/crates/rethnet_evm/src/runtime.rs +++ b/crates/rethnet_evm/src/runtime.rs @@ -4,18 +4,20 @@ use revm::{ db::DatabaseComponents, primitives::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}, }; +use tokio::sync::RwLock; use crate::{ - blockchain::AsyncBlockchain, - evm::{run_transaction, AsyncInspector}, - state::AsyncState, + blockchain::SyncBlockchain, + evm::{build_evm, run_transaction, SyncInspector}, + state::SyncState, trace::Trace, transaction::TransactionError, State, }; /// Asynchronous implementation of the Database 
super-trait -pub type AsyncDatabase = DatabaseComponents>, Arc>>; +pub type SyncDatabase<'b, 's, BE, SE> = + DatabaseComponents<&'s dyn SyncState, &'b dyn SyncBlockchain>; /// The asynchronous Rethnet runtime. #[derive(Debug)] @@ -24,8 +26,8 @@ where BE: Debug + Send + 'static, SE: Debug + Send + 'static, { - blockchain: Arc>, - state: Arc>, + blockchain: Arc>>>, + state: Arc>>>, cfg: CfgEnv, } @@ -35,10 +37,14 @@ where SE: Debug + Send + 'static, { /// Constructs a new [`Rethnet`] instance. - pub fn new(blockchain: Arc>, db: Arc>, cfg: CfgEnv) -> Self { + pub fn new( + blockchain: Arc>>>, + state: Arc>>>, + cfg: CfgEnv, + ) -> Self { Self { blockchain, - state: db, + state, cfg, } } @@ -49,24 +55,18 @@ where &self, transaction: TxEnv, block: BlockEnv, - inspector: Option>>, + inspector: Option>>, ) -> Result<(ExecutionResult, State, Trace), TransactionError> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); } - run_transaction( - self.state.runtime(), - self.blockchain.clone(), - self.state.clone(), - self.cfg.clone(), - transaction, - block, - inspector, - ) - .await - .unwrap() - .map_err(TransactionError::from) + let state = self.state.read().await; + let blockchain = self.blockchain.read().await; + + let evm = build_evm(&*blockchain, &*state, self.cfg.clone(), transaction, block); + + run_transaction(evm, inspector).map_err(TransactionError::from) } /// Runs a transaction without committing the state, while disabling balance checks and creating accounts for new addresses. @@ -75,7 +75,7 @@ where &self, transaction: TxEnv, block: BlockEnv, - inspector: Option>>, + inspector: Option>>, ) -> Result<(ExecutionResult, State, Trace), TransactionError> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); @@ -84,18 +84,12 @@ where let mut cfg = self.cfg.clone(); cfg.disable_balance_check = true; - run_transaction( - self.state.runtime(), - self.blockchain.clone(), - self.state.clone(), - cfg, - transaction, - block, - inspector, - ) - .await - .unwrap() - .map_err(TransactionError::from) + let state = self.state.read().await; + let blockchain = self.blockchain.read().await; + + let evm = build_evm(&*blockchain, &*state, cfg, transaction, block); + + run_transaction(evm, inspector).map_err(TransactionError::from) } /// Runs a transaction, committing the state in the process. 
@@ -104,11 +98,21 @@ where &self, transaction: TxEnv, block: BlockEnv, - inspector: Option>>, + inspector: Option>>, ) -> Result<(ExecutionResult, Trace), TransactionError> { - let (result, changes, trace) = self.dry_run(transaction, block, inspector).await?; + if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { + return Err(TransactionError::MissingPrevrandao); + } + + let mut state = self.state.write().await; + let blockchain = self.blockchain.read().await; + + let evm = build_evm(&*blockchain, &*state, self.cfg.clone(), transaction, block); + + let (result, changes, trace) = + run_transaction(evm, inspector).map_err(TransactionError::from)?; - self.state.apply(changes).await; + state.commit(changes); Ok((result, trace)) } diff --git a/crates/rethnet_evm/src/state.rs b/crates/rethnet_evm/src/state.rs index c86606cfb7..4eb3a03ca5 100644 --- a/crates/rethnet_evm/src/state.rs +++ b/crates/rethnet_evm/src/state.rs @@ -5,11 +5,12 @@ mod history; mod hybrid; mod layered; mod remote; -mod request; -mod sync; mod trie; +use std::fmt::Debug; + use rethnet_eth::B256; +use revm::{db::StateRef, DatabaseCommit}; pub use self::{ debug::{AccountModifierFn, StateDebug}, @@ -17,7 +18,6 @@ pub use self::{ hybrid::HybridState, layered::{LayeredState, RethnetLayer}, remote::RemoteDatabase, - sync::{AsyncState, SyncState}, }; /// Combinatorial error for the database API @@ -33,3 +33,32 @@ pub enum StateError { #[error("State root `{0}` does not exist.")] InvalidStateRoot(B256), } + +/// Trait that meets all requirements for a synchronous database that can be used by [`AsyncDatabase`]. +pub trait SyncState: + StateRef + + DatabaseCommit + + StateDebug + + StateHistory + + Debug + + Send + + Sync + + 'static +where + E: Debug + Send, +{ +} + +impl SyncState for S +where + S: StateRef + + DatabaseCommit + + StateDebug + + StateHistory + + Debug + + Send + + Sync + + 'static, + E: Debug + Send, +{ +} diff --git a/crates/rethnet_evm/src/state/request.rs b/crates/rethnet_evm/src/state/request.rs deleted file mode 100644 index 2c5ca0b10e..0000000000 --- a/crates/rethnet_evm/src/state/request.rs +++ /dev/null @@ -1,159 +0,0 @@ -use std::fmt::Debug; - -use hashbrown::HashMap; -use rethnet_eth::{Address, B256, U256}; -use revm::{ - db::StateRef, - primitives::{Account, AccountInfo, Bytecode}, - DatabaseCommit, -}; -use tokio::sync::oneshot; - -use crate::state::{AccountModifierFn, StateDebug}; - -use super::history::StateHistory; - -/// The request type used internally by a [`SyncDatabase`]. 
-#[derive(Debug)] -pub enum Request { - AccountByAddress { - address: Address, - sender: oneshot::Sender, E>>, - }, - AccountStorageRoot { - address: Address, - sender: oneshot::Sender, E>>, - }, - Checkpoint { - sender: oneshot::Sender>, - }, - CodeByHash { - code_hash: B256, - sender: oneshot::Sender>, - }, - Commit { - changes: HashMap, - sender: oneshot::Sender<()>, - }, - InsertAccount { - address: Address, - account_info: AccountInfo, - sender: oneshot::Sender>, - }, - MakeSnapshot { - sender: oneshot::Sender<(B256, bool)>, - }, - ModifyAccount { - address: Address, - modifier: AccountModifierFn, - sender: oneshot::Sender>, - }, - RemoveAccount { - address: Address, - sender: oneshot::Sender, E>>, - }, - RemoveSnapshot { - state_root: B256, - sender: oneshot::Sender, - }, - Revert { - sender: oneshot::Sender>, - }, - Serialize { - sender: oneshot::Sender, - }, - SetStorageSlot { - address: Address, - index: U256, - value: U256, - sender: oneshot::Sender>, - }, - SetStateRoot { - state_root: B256, - sender: oneshot::Sender>, - }, - StateRoot { - sender: oneshot::Sender>, - }, - StorageSlot { - address: Address, - index: U256, - sender: oneshot::Sender>, - }, - Terminate, -} - -impl Request -where - E: Debug, -{ - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub fn handle(self, state: &mut S) -> bool - where - S: StateRef - + DatabaseCommit - + StateDebug - + StateHistory - + Debug, - { - match self { - Request::AccountByAddress { address, sender } => { - sender.send(state.basic(address)).unwrap() - } - Request::AccountStorageRoot { address, sender } => { - sender.send(state.account_storage_root(&address)).unwrap() - } - Request::Checkpoint { sender } => sender.send(state.checkpoint()).unwrap(), - Request::CodeByHash { code_hash, sender } => { - sender.send(state.code_by_hash(code_hash)).unwrap() - } - Request::Commit { changes, sender } => { - state.commit(changes); - sender.send(()).unwrap() - } - Request::InsertAccount { - address, - account_info, - sender, - } => sender - .send(state.insert_account(address, account_info)) - .unwrap(), - Request::MakeSnapshot { sender } => sender.send(state.make_snapshot()).unwrap(), - Request::ModifyAccount { - address, - modifier, - sender, - } => sender - .send(state.modify_account(address, modifier)) - .unwrap(), - Request::RemoveAccount { address, sender } => { - sender.send(state.remove_account(address)).unwrap() - } - Request::RemoveSnapshot { state_root, sender } => { - sender.send(state.remove_snapshot(&state_root)).unwrap() - } - Request::Revert { sender } => sender.send(state.revert()).unwrap(), - Request::Serialize { sender } => sender.send(state.serialize()).unwrap(), - Request::SetStorageSlot { - address, - index, - value, - sender, - } => sender - .send(state.set_account_storage_slot(address, index, value)) - .unwrap(), - Request::SetStateRoot { state_root, sender } => { - sender.send(state.set_state_root(&state_root)).unwrap() - } - Request::StateRoot { sender } => sender.send(state.state_root()).unwrap(), - Request::StorageSlot { - address, - index, - sender, - } => sender.send(state.storage(address, index)).unwrap(), - Request::Terminate => return false, - } - - true - } -} diff --git a/crates/rethnet_evm/src/state/sync.rs b/crates/rethnet_evm/src/state/sync.rs deleted file mode 100644 index ee1e00e383..0000000000 --- a/crates/rethnet_evm/src/state/sync.rs +++ /dev/null @@ -1,490 +0,0 @@ -use std::{fmt::Debug, io}; - -use hashbrown::HashMap; -use rethnet_eth::{Address, B256, U256}; -use revm::{ - db::StateRef, - 
primitives::{Account, AccountInfo, Bytecode}, - DatabaseCommit, -}; -use tokio::{ - runtime::{Builder, Runtime}, - sync::{ - mpsc::{unbounded_channel, UnboundedSender}, - oneshot, - }, - task::{self, JoinHandle}, -}; - -use crate::state::{AccountModifierFn, StateDebug}; - -use super::{history::StateHistory, request::Request}; - -/// Trait that meets all requirements for a synchronous database that can be used by [`AsyncDatabase`]. -pub trait SyncState: - StateRef - + DatabaseCommit - + StateDebug - + StateHistory - + Debug - + Send - + Sync - + 'static -where - E: Debug + Send, -{ -} - -impl SyncState for S -where - S: StateRef - + DatabaseCommit - + StateDebug - + StateHistory - + Debug - + Send - + Sync - + 'static, - E: Debug + Send, -{ -} - -/// A helper class for converting a synchronous database into an asynchronous database. -/// -/// Requires the inner database to implement [`Database`], [`DatabaseCommit`], and [`DatabaseDebug`]. - -#[derive(Debug)] -pub struct AsyncState -where - E: Debug + Send, -{ - runtime: Runtime, - request_sender: UnboundedSender>, - db_handle: Option>, -} - -impl AsyncState -where - E: Debug + Send + 'static, -{ - /// Constructs an [`AsyncDatabase`] instance with the provided database. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub fn new>(mut state: S) -> io::Result { - let runtime = Builder::new_multi_thread().build()?; - - let (sender, mut receiver) = unbounded_channel::>(); - - let db_handle = runtime.spawn(async move { - while let Some(request) = receiver.recv().await { - if !request.handle(&mut state) { - break; - } - } - }); - - Ok(Self { - runtime, - request_sender: sender, - db_handle: Some(db_handle), - }) - } - - /// Retrieves the runtime of the [`AsyncDatabase`]. - pub fn runtime(&self) -> &Runtime { - &self.runtime - } - - /// Retrieves the account corresponding to the specified address. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn account_by_address(&self, address: Address) -> Result, E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::AccountByAddress { address, sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Retrieves the storage root of the account at the specified address. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn account_storage_root(&self, address: &Address) -> Result, E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::AccountStorageRoot { - address: *address, - sender, - }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Retrieves the storage slot corresponding to the specified address and index. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn account_storage_slot(&self, address: Address, index: U256) -> Result { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::StorageSlot { - address, - index, - sender, - }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Applies the provided changes to the state. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn apply(&self, changes: HashMap) { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Commit { changes, sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Creates a state checkpoint that can be reverted to using [`revert`]. 
- #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn checkpoint(&self) -> Result<(), E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Checkpoint { sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Retrieves the code corresponding to the specified hash. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn code_by_hash(&self, code_hash: B256) -> Result { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::CodeByHash { code_hash, sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Inserts the specified account into the state. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn insert_account( - &self, - address: Address, - account_info: AccountInfo, - ) -> Result<(), E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::InsertAccount { - address, - account_info, - sender, - }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn make_snapshot(&self) -> (B256, bool) { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::MakeSnapshot { sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Modifies the account at the specified address using the provided function. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn modify_account( - &self, - address: Address, - modifier: AccountModifierFn, - ) -> Result<(), E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::ModifyAccount { - address, - modifier, - sender, - }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Removes and returns the account at the specified address, if it exists. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn remove_account(&self, address: Address) -> Result, E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::RemoveAccount { address, sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Removes the snapshot corresponding to the specified id, if it exists. Returns whether a snapshot was removed. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn remove_snapshot(&self, state_root: B256) -> bool { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::RemoveSnapshot { state_root, sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Reverts to the previous checkpoint, created using [`checkpoint`]. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn revert(&self) -> Result<(), E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Revert { sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Serializes the state using ordering of addresses and storage indices. 
- #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn serialize(&self) -> String { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::Serialize { sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Sets the storage slot at the specified address and index to the provided value. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn set_account_storage_slot( - &self, - address: Address, - index: U256, - value: U256, - ) -> Result<(), E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::SetStorageSlot { - address, - index, - value, - sender, - }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Reverts the state to match the specified state root. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn set_state_root(&self, state_root: &B256) -> Result<(), E> { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::SetStateRoot { - state_root: *state_root, - sender, - }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } - - /// Retrieves the state's root. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn state_root(&self) -> Result { - let (sender, receiver) = oneshot::channel(); - - self.request_sender - .send(Request::StateRoot { sender }) - .expect("Failed to send request"); - - receiver.await.unwrap() - } -} - -impl Drop for AsyncState -where - E: Debug + Send, -{ - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn drop(&mut self) { - if let Some(handle) = self.db_handle.take() { - self.request_sender - .send(Request::Terminate) - .expect("Failed to send request"); - - self.runtime.block_on(handle).unwrap(); - } - } -} - -impl StateRef for AsyncState -where - E: Debug + Send + 'static, -{ - type Error = E; - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn basic(&self, address: Address) -> Result, Self::Error> { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::account_by_address(self, address)) - }) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn code_by_hash(&self, code_hash: B256) -> Result { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::code_by_hash(self, code_hash)) - }) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn storage(&self, address: Address, index: U256) -> Result { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::account_storage_slot(self, address, index)) - }) - } -} - -impl<'d, E> DatabaseCommit for &'d AsyncState -where - E: Debug + Send + 'static, -{ - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn commit(&mut self, changes: HashMap) { - task::block_in_place(move || self.runtime.block_on(self.apply(changes))) - } -} - -impl<'d, E> StateDebug for &'d AsyncState -where - E: Debug + Send + 'static, -{ - type Error = E; - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::account_storage_root(*self, address)) - }) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn insert_account( - &mut self, - address: Address, - account_info: AccountInfo, - ) -> Result<(), Self::Error> { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::insert_account(*self, address, account_info)) - }) - } - - 
#[cfg_attr(feature = "tracing", tracing::instrument)] - fn modify_account( - &mut self, - address: Address, - modifier: AccountModifierFn, - ) -> Result<(), Self::Error> { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::modify_account(*self, address, modifier)) - }) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn remove_account(&mut self, address: Address) -> Result, Self::Error> { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::remove_account(*self, address)) - }) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn serialize(&mut self) -> String { - task::block_in_place(move || self.runtime.block_on(AsyncState::serialize(*self))) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn set_account_storage_slot( - &mut self, - address: Address, - index: U256, - value: U256, - ) -> Result<(), Self::Error> { - task::block_in_place(move || { - self.runtime.block_on(AsyncState::set_account_storage_slot( - *self, address, index, value, - )) - }) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn state_root(&mut self) -> Result { - task::block_in_place(move || self.runtime.block_on(AsyncState::state_root(*self))) - } -} - -impl<'d, E> StateHistory for &'d AsyncState -where - E: Debug + Send + 'static, -{ - type Error = E; - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::set_state_root(*self, state_root)) - }) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn checkpoint(&mut self) -> Result<(), Self::Error> { - task::block_in_place(move || self.runtime.block_on(AsyncState::checkpoint(*self))) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn revert(&mut self) -> Result<(), Self::Error> { - task::block_in_place(move || self.runtime.block_on(AsyncState::revert(*self))) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn make_snapshot(&mut self) -> (B256, bool) { - task::block_in_place(move || self.runtime.block_on(AsyncState::make_snapshot(*self))) - } - - #[cfg_attr(feature = "tracing", tracing::instrument)] - fn remove_snapshot(&mut self, state_root: &B256) -> bool { - task::block_in_place(move || { - self.runtime - .block_on(AsyncState::remove_snapshot(*self, *state_root)) - }) - } -} diff --git a/crates/rethnet_evm/src/trace.rs b/crates/rethnet_evm/src/trace.rs index 106e53ba6c..ca5b8403f1 100644 --- a/crates/rethnet_evm/src/trace.rs +++ b/crates/rethnet_evm/src/trace.rs @@ -1,7 +1,7 @@ use rethnet_eth::Bytes; use revm::{ interpreter::{opcode, Gas, InstructionResult, Interpreter}, - Database, EVMData, Inspector, + EVMData, Inspector, }; /// A trace for an EVM call. 
@@ -63,14 +63,11 @@ impl TraceCollector { } } -impl Inspector for TraceCollector -where - D: Database, -{ +impl Inspector for TraceCollector { fn step( &mut self, interp: &mut Interpreter, - _data: &mut EVMData<'_, D>, + _data: &mut dyn EVMData, _is_static: bool, ) -> InstructionResult { self.opcode_stack.push(interp.current_opcode()); @@ -81,7 +78,7 @@ where fn step_end( &mut self, interp: &mut Interpreter, - _data: &mut EVMData<'_, D>, + _data: &mut dyn revm::EVMData, _is_static: bool, exit_code: InstructionResult, ) -> InstructionResult { diff --git a/crates/rethnet_evm_napi/src/blockchain.rs b/crates/rethnet_evm_napi/src/blockchain.rs index cec1916abc..dacaf2d353 100644 --- a/crates/rethnet_evm_napi/src/blockchain.rs +++ b/crates/rethnet_evm_napi/src/blockchain.rs @@ -2,10 +2,10 @@ mod js_blockchain; use std::{fmt::Debug, sync::Arc}; -use napi::{bindgen_prelude::Buffer, Env, JsFunction, NapiRaw, Status}; +use napi::{bindgen_prelude::Buffer, tokio::sync::RwLock, Env, JsFunction, NapiRaw}; use napi_derive::napi; use rethnet_eth::B256; -use rethnet_evm::blockchain::{AsyncBlockchain, SyncBlockchain}; +use rethnet_evm::blockchain::SyncBlockchain; use crate::{ logger::enable_logging, @@ -19,12 +19,12 @@ use self::js_blockchain::{GetBlockHashCall, JsBlockchain}; #[napi] #[derive(Debug)] pub struct Blockchain { - inner: Arc>, + inner: Arc>>>, } impl Blockchain { /// Provides immutable access to the inner implementation. - pub fn as_inner(&self) -> &Arc> { + pub fn as_inner(&self) -> &Arc>>> { &self.inner } } @@ -59,21 +59,19 @@ impl Blockchain { }, )?; - Self::with_blockchain(JsBlockchain { get_block_hash_fn }) + Ok(Self::with_blockchain(JsBlockchain { get_block_hash_fn })) } #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn with_blockchain(blockchain: B) -> napi::Result + fn with_blockchain(blockchain: B) -> Self where B: SyncBlockchain, { let blockchain: Box> = Box::new(blockchain); - let blockchain = AsyncBlockchain::new(blockchain) - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; - Ok(Self { - inner: Arc::new(blockchain), - }) + Self { + inner: Arc::new(RwLock::new(blockchain)), + } } // #[napi] diff --git a/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs b/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs index 44837a8abe..83b7b0e6fc 100644 --- a/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs +++ b/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs @@ -1,8 +1,11 @@ -use std::sync::mpsc::{channel, Sender}; +use std::{ + fmt::Debug, + sync::mpsc::{channel, Sender}, +}; use napi::Status; use rethnet_eth::{B256, U256}; -use rethnet_evm::BlockHash; +use rethnet_evm::BlockHashRef; use crate::threadsafe_function::{ThreadsafeFunction, ThreadsafeFunctionCallMode}; @@ -15,10 +18,10 @@ pub struct JsBlockchain { pub(super) get_block_hash_fn: ThreadsafeFunction, } -impl BlockHash for JsBlockchain { +impl BlockHashRef for JsBlockchain { type Error = napi::Error; - fn block_hash(&mut self, block_number: U256) -> Result { + fn block_hash(&self, block_number: U256) -> Result { let (sender, receiver) = channel(); let status = self.get_block_hash_fn.call( @@ -33,3 +36,9 @@ impl BlockHash for JsBlockchain { receiver.recv().unwrap() } } + +impl Debug for JsBlockchain { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + f.debug_struct("JsBlockchain").finish() + } +} diff --git a/crates/rethnet_evm_napi/src/context.rs b/crates/rethnet_evm_napi/src/context.rs new file mode 100644 index 0000000000..13073efcd5 --- 
/dev/null +++ b/crates/rethnet_evm_napi/src/context.rs @@ -0,0 +1,79 @@ +use std::{io, sync::Arc}; + +use napi::{ + tokio::runtime::{Builder, Runtime}, + Status, +}; +use napi_derive::napi; +use tracing_subscriber::{prelude::*, EnvFilter, Registry}; + +#[napi] +#[derive(Debug)] +pub struct RethnetContext { + inner: Arc, +} + +impl RethnetContext { + /// Provides immutable access to the inner implementation. + pub(crate) fn as_inner(&self) -> &Arc { + &self.inner + } +} + +#[napi] +impl RethnetContext { + /// Creates a new [`RethnetContext`] instance. Should only be called once! + #[napi(constructor)] + pub fn new() -> napi::Result { + let context = + Context::new().map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + Ok(Self { + inner: Arc::new(context), + }) + } +} + +#[derive(Debug)] +pub struct Context { + runtime: Arc, + #[cfg(feature = "tracing")] + _tracing_write_guard: tracing_flame::FlushGuard>, +} + +impl Context { + /// Creates a new [`Context`] instance. Should only be called once! + pub fn new() -> io::Result { + let fmt_layer = tracing_subscriber::fmt::layer() + .with_file(true) + .with_line_number(true) + .with_thread_ids(true) + .with_target(false) + .with_level(true) + .with_filter(EnvFilter::from_default_env()); + + #[cfg(feature = "tracing")] + let (flame_layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded").unwrap(); + + let subscriber = Registry::default().with(fmt_layer); + + #[cfg(feature = "tracing")] + let subscriber = subscriber.with(flame_layer); + + tracing::subscriber::set_global_default(subscriber) + .expect("Could not set global default tracing subscriber"); + + let runtime = Builder::new_multi_thread().build()?; + + Ok(Self { + runtime: Arc::new(runtime), + #[cfg(feature = "tracing")] + _tracing_write_guard: guard, + }) + } + + /// Retrieves the context's runtime. + pub fn runtime(&self) -> &Runtime { + &self.runtime + } +} diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index be2f80bc41..fbf13c1ce7 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -7,6 +7,7 @@ mod block; mod blockchain; mod cast; mod config; +mod context; mod log; mod logger; mod receipt; diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 4e601a3f0f..d6d0b04045 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -8,12 +8,13 @@ use std::{ use napi::{ bindgen_prelude::{BigInt, Buffer, ObjectFinalize}, + tokio::sync::RwLock, Env, JsFunction, JsObject, NapiRaw, Status, }; use napi_derive::napi; use rethnet_eth::{signature::private_key_to_address, Address, Bytes, B256, U256}; use rethnet_evm::{ - state::{AccountModifierFn, AsyncState, HybridState, StateError, StateHistory, SyncState}, + state::{AccountModifierFn, HybridState, StateError, StateHistory, SyncState}, AccountInfo, Bytecode, HashMap, KECCAK_EMPTY, }; use secp256k1::Secp256k1; @@ -21,6 +22,7 @@ use secp256k1::Secp256k1; use crate::{ account::Account, cast::TryCast, + context::{Context, RethnetContext}, logger::enable_logging, sync::{await_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, @@ -59,7 +61,8 @@ pub struct SnapshotId { #[napi(custom_finalize)] #[derive(Debug)] pub struct StateManager { - pub(super) state: Arc>, + pub(super) state: Arc>>>, + context: Arc, } #[napi] @@ -67,8 +70,8 @@ impl StateManager { /// Constructs a [`StateManager`] with an empty state. 
#[napi(constructor)] #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - pub fn new(mut env: Env) -> napi::Result { - Self::with_accounts(&mut env, HashMap::default()) + pub fn new(mut env: Env, context: &RethnetContext) -> napi::Result { + Self::with_accounts(&mut env, context, HashMap::default()) } /// Constructs a [`StateManager`] with the provided accounts present in the genesis state. @@ -76,13 +79,14 @@ impl StateManager { #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn with_genesis_accounts( mut env: Env, + context: &RethnetContext, accounts: Vec, ) -> napi::Result { - let context = Secp256k1::signing_only(); + let signer = Secp256k1::signing_only(); let genesis_accounts = accounts .into_iter() .map(|account| { - let address = private_key_to_address(&context, &account.private_key) + let address = private_key_to_address(&signer, &account.private_key) .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string()))?; TryCast::::try_cast(account.balance).map(|balance| { let account_info = AccountInfo { @@ -95,12 +99,13 @@ impl StateManager { }) .collect::>>()?; - Self::with_accounts(&mut env, genesis_accounts) + Self::with_accounts(&mut env, context, genesis_accounts) } #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn with_accounts( env: &mut Env, + context: &RethnetContext, mut accounts: HashMap, ) -> napi::Result { // Mimic precompiles activation @@ -114,24 +119,25 @@ impl StateManager { state.checkpoint().unwrap(); - Self::with_state(env, state) + Self::with_state(env, context, state) } #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn with_state(env: &mut Env, state: S) -> napi::Result + fn with_state(env: &mut Env, context: &RethnetContext, state: S) -> napi::Result where S: SyncState, { enable_logging(); let state: Box> = Box::new(state); - let state = AsyncState::new(state) - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; + + env.adjust_external_memory(STATE_MEMORY_SIZE)?; env.adjust_external_memory(STATE_MEMORY_SIZE)?; Ok(Self { - state: Arc::new(state), + state: Arc::new(RwLock::new(state)), + context: context.as_inner().clone(), }) } @@ -139,21 +145,31 @@ impl StateManager { #[napi] #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn checkpoint(&self) -> napi::Result<()> { - self.state - .checkpoint() + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.checkpoint() + }) .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; - - Ok(()) + .unwrap() + .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } /// Reverts to the previous checkpoint, created using [`checkpoint`]. 
#[napi] #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn revert(&self) -> napi::Result<()> { - self.state - .revert() + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.revert() + }) .await + .unwrap() .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } @@ -163,24 +179,28 @@ impl StateManager { pub async fn get_account_by_address(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); - let mut account_info = self - .state - .account_by_address(address) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; - - if let Some(account_info) = &mut account_info { - if account_info.code.is_none() && account_info.code_hash != KECCAK_EMPTY { - account_info.code = Some( - self.state - .code_by_hash(account_info.code_hash) - .await - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?, - ); - } - } + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let state = state.read().await; - Ok(account_info.map(Account::from)) + state.basic(address).and_then(|account_info| { + account_info.map_or(Ok(None), |mut account_info| { + if account_info.code_hash != KECCAK_EMPTY { + account_info.code = Some(state.code_by_hash(account_info.code_hash)?); + } + + Ok(Some(account_info)) + }) + }) + }) + .await + .unwrap() + .map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |account_info| Ok(account_info.map(Account::from)), + ) } /// Retrieves the storage root of the account at the specified address. @@ -189,10 +209,19 @@ impl StateManager { pub async fn get_account_storage_root(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); - self.state.account_storage_root(&address).await.map_or_else( - |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), - |root| Ok(root.map(|root| Buffer::from(root.as_ref()))), - ) + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.account_storage_root(&address) + }) + .await + .unwrap() + .map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |root| Ok(root.map(|root| Buffer::from(root.as_ref()))), + ) } /// Retrieves the storage slot at the specified address and index. @@ -206,9 +235,15 @@ impl StateManager { let address = Address::from_slice(&address); let index: U256 = BigInt::try_cast(index)?; - self.state - .account_storage_slot(address, index) + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let state = state.read().await; + state.storage(address, index) + }) .await + .unwrap() .map_or_else( |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), |value| { @@ -224,10 +259,19 @@ impl StateManager { #[napi] #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn get_state_root(&self) -> napi::Result { - self.state.state_root().await.map_or_else( - |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), - |root| Ok(Buffer::from(root.as_ref())), - ) + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.state_root() + }) + .await + .unwrap() + .map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |root| Ok(Buffer::from(root.as_ref())), + ) } /// Inserts the provided account at the specified address. 
@@ -235,11 +279,17 @@ impl StateManager { #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn insert_account(&self, address: Buffer, account: Account) -> napi::Result<()> { let address = Address::from_slice(&address); - let account: AccountInfo = account.try_cast()?; - - self.state - .insert_account(address, account) + let account_info: AccountInfo = account.try_cast()?; + + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.insert_account(address, account_info) + }) .await + .unwrap() .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } @@ -247,7 +297,16 @@ impl StateManager { #[napi] #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn make_snapshot(&self) -> SnapshotId { - let (state_root, existed) = self.state.make_snapshot().await; + let state = self.state.clone(); + let (state_root, existed) = self + .context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.make_snapshot() + }) + .await + .unwrap(); SnapshotId { state_root: >::as_ref(&state_root).into(), @@ -330,10 +389,11 @@ impl StateManager { )?; let (deferred, promise) = env.create_deferred()?; - let db = self.state.clone(); + let state = self.state.clone(); + self.context.runtime().spawn(async move { + let mut state = state.write().await; - self.state.runtime().spawn(async move { - let result = db + let result = state .modify_account( address, AccountModifierFn::new(Box::new( @@ -359,7 +419,6 @@ impl StateManager { }, )), ) - .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())); deferred.resolve(|_| result); @@ -374,10 +433,19 @@ impl StateManager { pub async fn remove_account(&self, address: Buffer) -> napi::Result> { let address = Address::from_slice(&address); - self.state.remove_account(address).await.map_or_else( - |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), - |account| Ok(account.map(Account::from)), - ) + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.remove_account(address) + }) + .await + .unwrap() + .map_or_else( + |e| Err(napi::Error::new(Status::GenericFailure, e.to_string())), + |account| Ok(account.map(Account::from)), + ) } /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. @@ -386,14 +454,30 @@ impl StateManager { pub async fn remove_snapshot(&self, state_root: Buffer) -> bool { let state_root = B256::from_slice(&state_root); - self.state.remove_snapshot(state_root).await + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.remove_snapshot(&state_root) + }) + .await + .unwrap() } /// Serializes the state using ordering of addresses and storage indices. #[napi] #[cfg_attr(feature = "tracing", tracing::instrument)] pub async fn serialize(&self) -> String { - self.state.serialize().await + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.serialize() + }) + .await + .unwrap() } /// Sets the storage slot at the specified address and index to the provided value. 
@@ -409,9 +493,15 @@ impl StateManager { let index: U256 = BigInt::try_cast(index)?; let value: U256 = BigInt::try_cast(value)?; - self.state - .set_account_storage_slot(address, index, value) + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.set_account_storage_slot(address, index, value) + }) .await + .unwrap() .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } @@ -421,9 +511,15 @@ impl StateManager { pub async fn set_state_root(&self, state_root: Buffer) -> napi::Result<()> { let state_root = B256::from_slice(&state_root); - self.state - .set_state_root(&state_root) + let state = self.state.clone(); + self.context + .runtime() + .spawn(async move { + let mut state = state.write().await; + state.set_state_root(&state_root) + }) .await + .unwrap() .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) } } diff --git a/crates/rethnet_evm_napi/src/tracer.rs b/crates/rethnet_evm_napi/src/tracer.rs index 1de31c2d8a..f7020949f2 100644 --- a/crates/rethnet_evm_napi/src/tracer.rs +++ b/crates/rethnet_evm_napi/src/tracer.rs @@ -2,7 +2,7 @@ mod js_tracer; use napi::Env; use napi_derive::napi; -use rethnet_evm::{state::StateError, AsyncInspector}; +use rethnet_evm::{state::StateError, SyncInspector}; use self::js_tracer::{JsTracer, TracingCallbacks}; @@ -12,7 +12,7 @@ pub struct Tracer { } impl Tracer { - pub fn as_dyn_inspector(&self) -> Box> { + pub fn as_dyn_inspector(&self) -> Box> { self.inner.clone() } } diff --git a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs index 21a3459b21..75974f6356 100644 --- a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs +++ b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs @@ -11,7 +11,7 @@ use napi::{ use napi_derive::napi; use rethnet_eth::{Address, Bytes, U256}; use rethnet_evm::{ - opcode, return_revert, AsyncInspector, Bytecode, Gas, InstructionResult, SuccessOrHalt, + opcode, return_revert, Bytecode, Gas, InstructionResult, SuccessOrHalt, SyncInspector, }; use crate::{ @@ -602,50 +602,54 @@ impl Debug for JsTracer { } } -impl AsyncInspector for JsTracer +impl SyncInspector for JsTracer where BE: Debug + Send + 'static, SE: Debug + Send + 'static, { } -impl rethnet_evm::Inspector for JsTracer +impl rethnet_evm::Inspector for JsTracer where - D: rethnet_evm::Database, - D::Error: Debug, + E: Debug, { fn call( &mut self, - data: &mut rethnet_evm::EVMData<'_, D>, + data: &mut dyn rethnet_evm::EVMData, inputs: &mut rethnet_evm::CallInputs, _is_static: bool, ) -> (InstructionResult, Gas, rethnet_eth::Bytes) { self.validate_before_message(); let code = data - .journaled_state + .journaled_state() .state .get(&inputs.context.code_address) + .cloned() .map(|account| { if let Some(code) = &account.info.code { code.clone() } else { - data.db.code_by_hash(account.info.code_hash).unwrap() + data.database() + .code_by_hash(account.info.code_hash) + .unwrap() } }) .unwrap_or_else(|| { - data.db.basic(inputs.context.code_address).unwrap().map_or( - Bytecode::new(), - |account_info| { + data.database() + .basic(inputs.context.code_address) + .unwrap() + .map_or(Bytecode::new(), |account_info| { account_info.code.unwrap_or_else(|| { - data.db.code_by_hash(account_info.code_hash).unwrap() + data.database() + .code_by_hash(account_info.code_hash) + .unwrap() }) - }, - ) + }) }); self.pending_before = Some(BeforeMessage { - depth: data.journaled_state.depth, + depth: data.journaled_state().depth, to: 
Some(inputs.context.address), data: inputs.input.clone(), value: inputs.context.apparent_value, @@ -658,7 +662,7 @@ where fn call_end( &mut self, - data: &mut rethnet_evm::EVMData<'_, D>, + data: &mut dyn rethnet_evm::EVMData, _inputs: &rethnet_evm::CallInputs, remaining_gas: Gas, ret: InstructionResult, @@ -689,7 +693,7 @@ where reason, gas_used: remaining_gas.spend(), gas_refunded: remaining_gas.refunded() as u64, - logs: data.journaled_state.logs.clone(), + logs: data.journaled_state().logs.clone(), output: rethnet_evm::Output::Call(out.clone()), }, SuccessOrHalt::Revert => rethnet_evm::ExecutionResult::Revert { @@ -700,7 +704,7 @@ where reason, gas_used: remaining_gas.limit(), }, - SuccessOrHalt::Internal => panic!("Internal error: {:?}", safe_ret), + SuccessOrHalt::InternalContinue => panic!("Internal error: {:?}", safe_ret), SuccessOrHalt::FatalExternalError => panic!("Fatal external error"), }; @@ -722,13 +726,13 @@ where fn create( &mut self, - data: &mut rethnet_evm::EVMData<'_, D>, + data: &mut dyn rethnet_evm::EVMData, inputs: &mut rethnet_evm::CreateInputs, ) -> (InstructionResult, Option, Gas, Bytes) { self.validate_before_message(); self.pending_before = Some(BeforeMessage { - depth: data.journaled_state.depth, + depth: data.journaled_state().depth, to: None, data: inputs.init_code.clone(), value: inputs.value, @@ -746,7 +750,7 @@ where fn create_end( &mut self, - data: &mut rethnet_evm::EVMData<'_, D>, + data: &mut dyn rethnet_evm::EVMData, _inputs: &rethnet_evm::CreateInputs, ret: InstructionResult, address: Option, @@ -767,7 +771,7 @@ where reason, gas_used: remaining_gas.spend(), gas_refunded: remaining_gas.refunded() as u64, - logs: data.journaled_state.logs.clone(), + logs: data.journaled_state().logs.clone(), output: rethnet_evm::Output::Create(out.clone(), address), }, SuccessOrHalt::Revert => rethnet_evm::ExecutionResult::Revert { @@ -778,7 +782,7 @@ where reason, gas_used: remaining_gas.limit(), }, - SuccessOrHalt::Internal => panic!("Internal error: {:?}", safe_ret), + SuccessOrHalt::InternalContinue => panic!("Internal error: {:?}", safe_ret), SuccessOrHalt::FatalExternalError => panic!("Fatal external error"), }; @@ -801,7 +805,7 @@ where fn step( &mut self, interp: &mut rethnet_evm::Interpreter, - data: &mut rethnet_evm::EVMData<'_, D>, + data: &mut dyn rethnet_evm::EVMData, _is_static: bool, ) -> InstructionResult { // Skip the step @@ -823,7 +827,7 @@ where let status = self.step_fn.call( StepHandlerCall { - depth: data.journaled_state.depth, + depth: data.journaled_state().depth, pc: interp.program_counter() as u64, opcode: interp.current_opcode(), // return_value: interp.instruction_result, @@ -833,7 +837,7 @@ where // stack: interp.stack().data().clone(), // memory: Bytes::copy_from_slice(interp.memory.data().as_slice()), contract: data - .journaled_state + .journaled_state() .account(interp.contract.address) .info .clone(), @@ -856,7 +860,7 @@ where // fn step_end( // &mut self, // interp: &mut rethnet_evm::Interpreter, - // _data: &mut rethnet_evm::EVMData<'_, D>, + // _data: &mut dyn rethnet_evm::EVMData, // _is_static: bool, // _eval: InstructionResult, // ) -> InstructionResult { diff --git a/crates/rethnet_evm_napi/src/transaction/result.rs b/crates/rethnet_evm_napi/src/transaction/result.rs index b11042cbbb..4e16d918bc 100644 --- a/crates/rethnet_evm_napi/src/transaction/result.rs +++ b/crates/rethnet_evm_napi/src/transaction/result.rs @@ -78,13 +78,14 @@ pub enum ExceptionalHalt { StackOverflow, OutOfOffset, CreateCollision, - OverflowPayment, 
PrecompileError, NonceOverflow, /// Create init code size exceeds limit (runtime). CreateContractSizeLimit, /// Error on created contract that begins with EF CreateContractStartingWithEF, + /// EIP-3860: Limit and meter initcode. Initcode size limit exceeded. + CreateInitcodeSizeLimit, } impl From for ExceptionalHalt { @@ -99,13 +100,20 @@ impl From for ExceptionalHalt { rethnet_evm::Halt::StackOverflow => ExceptionalHalt::StackOverflow, rethnet_evm::Halt::OutOfOffset => ExceptionalHalt::OutOfOffset, rethnet_evm::Halt::CreateCollision => ExceptionalHalt::CreateCollision, - rethnet_evm::Halt::OverflowPayment => ExceptionalHalt::OverflowPayment, rethnet_evm::Halt::PrecompileError => ExceptionalHalt::PrecompileError, rethnet_evm::Halt::NonceOverflow => ExceptionalHalt::NonceOverflow, rethnet_evm::Halt::CreateContractSizeLimit => ExceptionalHalt::CreateContractSizeLimit, rethnet_evm::Halt::CreateContractStartingWithEF => { ExceptionalHalt::CreateContractStartingWithEF } + rethnet_evm::Halt::CreateInitcodeSizeLimit => ExceptionalHalt::CreateInitcodeSizeLimit, + rethnet_evm::Halt::OverflowPayment + | rethnet_evm::Halt::StateChangeDuringStaticCall + | rethnet_evm::Halt::CallNotAllowedInsideStatic + | rethnet_evm::Halt::OutOfFund + | rethnet_evm::Halt::CallTooDeep => { + unreachable!("Internal halts that can be only found inside Inspector") + } } } } diff --git a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts index 993a0d81b4..78e71c79d2 100644 --- a/crates/rethnet_evm_napi/test/evm/RethnetDb.ts +++ b/crates/rethnet_evm_napi/test/evm/RethnetDb.ts @@ -1,96 +1,99 @@ -import { expect } from "chai"; -import { Address } from "@nomicfoundation/ethereumjs-util"; - -import { - Blockchain, - BlockConfig, - Config, - Rethnet, - SpecId, - StateManager, - Transaction, -} from "../.."; - -describe("Rethnet", () => { - const caller = Address.fromString( - "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" - ); - const receiver = Address.fromString( - "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" - ); - - let blockchain: Blockchain; - let stateManager: StateManager; - let rethnet: Rethnet; - - beforeEach(async function () { - blockchain = new Blockchain(async function ( - _blockNumber: bigint - ): Promise { - return Buffer.allocUnsafe(0); - }); - - stateManager = new StateManager(); - - const cfg: Config = { - chainId: BigInt(0), - specId: SpecId.GrayGlacier, - limitContractCodeSize: BigInt(2n) ** BigInt(32n), - disableEip3607: true, - }; - rethnet = new Rethnet(blockchain, stateManager, cfg); - }); - - it("call", async () => { - // Add funds to caller - await stateManager.insertAccount(caller.buf, { - nonce: 0n, - balance: BigInt("0xffffffff"), - }); - - // send some value - const sendValue: Transaction = { - from: caller.buf, - to: receiver.buf, - gasLimit: BigInt(1000000), - value: 100n, - }; - - const block: BlockConfig = { - number: BigInt(1), - timestamp: BigInt(Math.ceil(new Date().getTime() / 1000)), - }; - let sendValueChanges = await rethnet.dryRun(sendValue, block); - - // receiver should have 100 (0x64) wei - expect( - BigInt( - sendValueChanges.state["0x70997970c51812dc3a010c7d01b50e0d17dc79c8"] - .info.balance - ) - ).to.equal(BigInt("0x64")); - - // create a contract - const createContract: Transaction = { - from: caller.buf, - - gasLimit: BigInt(1000000), - - // minimal creation bytecode - input: Buffer.from("3859818153F3", "hex"), - }; - - let createContractChanges = await rethnet.dryRun(createContract, block); - - expect( - 
createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] - ).to.exist; - // check that the code hash is not the null hash (i.e., the address has code) - expect( - createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] - .info.code_hash - ).to.not.equal( - "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" - ); - }); -}); +// import { expect } from "chai"; +// import { Address } from "@nomicfoundation/ethereumjs-util"; + +// import { +// Blockchain, +// BlockConfig, +// Config, +// Rethnet, +// RethnetContext, +// SpecId, +// StateManager, +// Transaction, +// } from "../.."; + +// describe("Rethnet", () => { +// const caller = Address.fromString( +// "0xf39Fd6e51aad88F6F4ce6aB8827279cffFb92266" +// ); +// const receiver = Address.fromString( +// "0x70997970C51812dc3A010C7d01b50e0d17dc79C8" +// ); + +// const context = new RethnetContext(); + +// let blockchain: Blockchain; +// let stateManager: StateManager; +// let rethnet: Rethnet; + +// beforeEach(async function () { +// blockchain = new Blockchain(async function ( +// _blockNumber: bigint +// ): Promise { +// return Buffer.allocUnsafe(0); +// }); + +// stateManager = new StateManager(context); + +// const cfg: Config = { +// chainId: BigInt(0), +// specId: SpecId.GrayGlacier, +// limitContractCodeSize: BigInt(2n) ** BigInt(32n), +// disableEip3607: true, +// }; +// rethnet = new Rethnet(blockchain, stateManager, cfg); +// }); + +// it("call", async () => { +// // Add funds to caller +// await stateManager.insertAccount(caller.buf, { +// nonce: 0n, +// balance: BigInt("0xffffffff"), +// }); + +// // send some value +// const sendValue: Transaction = { +// from: caller.buf, +// to: receiver.buf, +// gasLimit: BigInt(1000000), +// value: 100n, +// }; + +// const block: BlockConfig = { +// number: BigInt(1), +// timestamp: BigInt(Math.ceil(new Date().getTime() / 1000)), +// }; +// let sendValueChanges = await rethnet.dryRun(sendValue, block); + +// // receiver should have 100 (0x64) wei +// expect( +// BigInt( +// sendValueChanges .state["0x70997970c51812dc3a010c7d01b50e0d17dc79c8"] +// .info.balance +// ) +// ).to.equal(BigInt("0x64")); + +// // create a contract +// const createContract: Transaction = { +// from: caller.buf, + +// gasLimit: BigInt(1000000), + +// // minimal creation bytecode +// input: Buffer.from("3859818153F3", "hex"), +// }; + +// let createContractChanges = await rethnet.dryRun(createContract, block); + +// expect( +// createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] +// ).to.exist; +// // check that the code hash is not the null hash (i.e., the address has code) +// expect( +// createContractChanges.state["0x5fbdb2315678afecb367f032d93f642f64180aa3"] +// .info.code_hash +// ).to.not.equal( +// "0xc5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" +// ); +// }); +// }); diff --git a/crates/rethnet_evm_napi/test/evm/StateManager.ts b/crates/rethnet_evm_napi/test/evm/StateManager.ts index fc3be4e425..e65f9e9fa0 100644 --- a/crates/rethnet_evm_napi/test/evm/StateManager.ts +++ b/crates/rethnet_evm_napi/test/evm/StateManager.ts @@ -1,7 +1,7 @@ import { expect } from "chai"; import { Address, KECCAK256_NULL } from "@nomicfoundation/ethereumjs-util"; -import { Account, Bytecode, StateManager } from "../.."; +import { Account, Bytecode, RethnetContext, StateManager } from "../.."; describe("State Manager", () => { const caller = Address.fromString( @@ -11,10 +11,12 @@ describe("State Manager", () => { 
"0x70997970C51812dc3A010C7d01b50e0d17dc79C8" ); + const context = new RethnetContext(); + let stateManager: StateManager; beforeEach(function () { - stateManager = new StateManager(); + stateManager = new StateManager(context); }); // TODO: insertBlock, setAccountCode, setAccountStorageSlot diff --git a/packages/hardhat-core/src/internal/core/providers/http.ts b/packages/hardhat-core/src/internal/core/providers/http.ts index fba1c4e27d..56cb231027 100644 --- a/packages/hardhat-core/src/internal/core/providers/http.ts +++ b/packages/hardhat-core/src/internal/core/providers/http.ts @@ -264,7 +264,7 @@ export class HttpProvider extends EventEmitter implements EIP1193Provider { ): number | undefined { const header = response.headers["retry-after"]; - if (header === undefined || header === null) { + if (header === undefined || header === null || Array.isArray(header)) { return undefined; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts index 1d6eff5329..0392d1ac14 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts @@ -3,20 +3,22 @@ import { bufferToBigInt, toBuffer, } from "@nomicfoundation/ethereumjs-util"; -import { StateManager, Account, Bytecode } from "rethnet-evm"; +import { StateManager, Account, Bytecode, RethnetContext } from "rethnet-evm"; import { GenesisAccount } from "./node-types"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/no-unused-vars */ export class RethnetStateManager { - constructor(private _state: StateManager = new StateManager()) {} + constructor(private _state: StateManager) {} public static withGenesisAccounts( + context: RethnetContext, genesisAccounts: GenesisAccount[] ): RethnetStateManager { return new RethnetStateManager( StateManager.withGenesisAccounts( + context, genesisAccounts.map((account) => { return { privateKey: account.privateKey, diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 2d5bda6907..c383fcc8f9 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -16,6 +16,7 @@ import { TracingMessage, TracingMessageResult, TracingStep, + RethnetContext, } from "rethnet-evm"; import { isForkedNodeConfig, NodeConfig } from "../node-types"; @@ -38,6 +39,8 @@ import { RunTxResult, Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/no-unused-vars */ +const globalContext = new RethnetContext(); + export class RethnetAdapter implements VMAdapter { private _vmTracer: VMTracer; @@ -68,6 +71,7 @@ export class RethnetAdapter implements VMAdapter { config.allowUnlimitedContractSize === true ? 
2n ** 64n - 1n : undefined;
 
     const state = RethnetStateManager.withGenesisAccounts(
+      globalContext,
       config.genesisAccounts
     );
 

From 54b5adf4fa722dba636a43251c028838ccbaa3f0 Mon Sep 17 00:00:00 2001
From: Wodann
Date: Thu, 20 Apr 2023 17:12:22 -0500
Subject: [PATCH 050/406] fix: force tsc build to avoid stale code (#3843)

---
 crates/rethnet_evm_napi/src/state.rs | 2 --
 packages/hardhat-core/package.json   | 4 ++--
 2 files changed, 2 insertions(+), 4 deletions(-)

diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs
index d6d0b04045..9cad0707a5 100644
--- a/crates/rethnet_evm_napi/src/state.rs
+++ b/crates/rethnet_evm_napi/src/state.rs
@@ -133,8 +133,6 @@ impl StateManager {
 
         env.adjust_external_memory(STATE_MEMORY_SIZE)?;
 
-        env.adjust_external_memory(STATE_MEMORY_SIZE)?;
-
         Ok(Self {
             state: Arc::new(RwLock::new(state)),
             context: context.as_inner().clone(),
diff --git a/packages/hardhat-core/package.json b/packages/hardhat-core/package.json
index 4ed94f1c8d..c35f74d6f1 100644
--- a/packages/hardhat-core/package.json
+++ b/packages/hardhat-core/package.json
@@ -34,8 +34,8 @@
     "test:forking": "mocha --recursive \"test/internal/hardhat-network/{helpers,jsonrpc,provider}/**/*.ts\" --exit",
     "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build",
     "prebuild:tracing": "cd ../../crates/rethnet_evm_napi && yarn build:tracing",
-    "build": "tsc --build .",
-    "build:tracing": "tsc --build .",
+    "build": "tsc --build --force --incremental .",
+    "build:tracing": "tsc --build --force --incremental .",
    "prepublishOnly": "yarn build",
     "clean": "rimraf builtin-tasks internal types utils *.d.ts *.map *.js build-test tsconfig.tsbuildinfo test/internal/hardhat-network/provider/.hardhat_node_test_cache"
   },

From 6c8260da8263a9557144a499b644d3d8fd5ead36 Mon Sep 17 00:00:00 2001
From: Wodann
Date: Thu, 20 Apr 2023 17:20:49 -0500
Subject: [PATCH 051/406] improvement: improve tracing (#3844)

---
 crates/rethnet_evm_napi/src/account.rs        |  1 +
 crates/rethnet_evm_napi/src/block.rs          |  3 ++
 crates/rethnet_evm_napi/src/block/builder.rs  |  4 ++
 crates/rethnet_evm_napi/src/blockchain.rs     |  3 --
 .../src/blockchain/js_blockchain.rs           |  1 +
 crates/rethnet_evm_napi/src/config.rs         |  1 +
 crates/rethnet_evm_napi/src/context.rs        | 10 ++++-
 crates/rethnet_evm_napi/src/lib.rs            |  1 -
 crates/rethnet_evm_napi/src/logger.rs         | 39 -------------------
 crates/rethnet_evm_napi/src/state.rs          | 12 ++++--
 .../rethnet_evm_napi/src/tracer/js_tracer.rs  |  5 +++
 crates/rethnet_evm_napi/src/transaction.rs    |  1 +
 .../src/transaction/result.rs                 |  2 +
 13 files changed, 35 insertions(+), 48 deletions(-)
 delete mode 100644 crates/rethnet_evm_napi/src/logger.rs

diff --git a/crates/rethnet_evm_napi/src/account.rs b/crates/rethnet_evm_napi/src/account.rs
index cb74f91462..6f1ab8b691 100644
--- a/crates/rethnet_evm_napi/src/account.rs
+++ b/crates/rethnet_evm_napi/src/account.rs
@@ -50,6 +50,7 @@ impl From for Bytecode {
 }
 
 impl From<AccountInfo> for Account {
+    #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))]
     fn from(account_info: AccountInfo) -> Self {
         let code = if account_info.code_hash == KECCAK_EMPTY {
             None
diff --git a/crates/rethnet_evm_napi/src/block.rs b/crates/rethnet_evm_napi/src/block.rs
index 61d31a0e13..359252a95f 100644
--- a/crates/rethnet_evm_napi/src/block.rs
+++ b/crates/rethnet_evm_napi/src/block.rs
@@ -24,6 +24,7 @@ pub struct BlockConfig {
 impl TryFrom<BlockConfig> for BlockEnv {
     type Error = napi::Error;
 
+    #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))]
     fn try_from(value: BlockConfig) -> std::result::Result<Self, Self::Error> {
         let
default = BlockEnv::default(); @@ -63,6 +64,7 @@ impl TryFrom for BlockEnv { impl TryFrom for rethnet_evm::HeaderData { type Error = napi::Error; + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn try_from(value: BlockConfig) -> std::result::Result { Ok(Self { number: value @@ -113,6 +115,7 @@ pub struct BlockHeader { impl TryFrom for rethnet_eth::block::Header { type Error = napi::Error; + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn try_from(value: BlockHeader) -> Result { Ok(Self { parent_hash: B256::from_slice(&value.parent_hash), diff --git a/crates/rethnet_evm_napi/src/block/builder.rs b/crates/rethnet_evm_napi/src/block/builder.rs index bab132b1b0..b1cb34af8b 100644 --- a/crates/rethnet_evm_napi/src/block/builder.rs +++ b/crates/rethnet_evm_napi/src/block/builder.rs @@ -28,6 +28,7 @@ pub struct BlockBuilder { #[napi] impl BlockBuilder { #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn new( blockchain: &Blockchain, state_manager: &StateManager, @@ -53,6 +54,7 @@ impl BlockBuilder { } #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn add_transaction( &self, transaction: Transaction, @@ -81,6 +83,7 @@ impl BlockBuilder { /// This call consumes the [`BlockBuilder`] object in Rust. Afterwards, you can no longer call /// methods on the JS object. #[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn finalize(&self, rewards: Vec<(Buffer, BigInt)>) -> napi::Result<()> { let mut builder = self.builder.lock().await; if let Some(builder) = builder.take() { @@ -107,6 +110,7 @@ impl BlockBuilder { /// This call consumes the [`BlockBuilder`] object in Rust. Afterwards, you can no longer call /// methods on the JS object. 
#[napi] + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub async fn abort(&self) -> napi::Result<()> { let mut builder = self.builder.lock().await; if let Some(builder) = builder.take() { diff --git a/crates/rethnet_evm_napi/src/blockchain.rs b/crates/rethnet_evm_napi/src/blockchain.rs index dacaf2d353..0bf7c28806 100644 --- a/crates/rethnet_evm_napi/src/blockchain.rs +++ b/crates/rethnet_evm_napi/src/blockchain.rs @@ -8,7 +8,6 @@ use rethnet_eth::B256; use rethnet_evm::blockchain::SyncBlockchain; use crate::{ - logger::enable_logging, sync::{await_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction}, }; @@ -39,8 +38,6 @@ impl Blockchain { #[napi(ts_arg_type = "(blockNumber: bigint) => Promise")] get_block_hash_fn: JsFunction, ) -> napi::Result { - enable_logging(); - let get_block_hash_fn = ThreadsafeFunction::create( env.raw(), unsafe { get_block_hash_fn.raw() }, diff --git a/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs b/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs index 83b7b0e6fc..23832c1273 100644 --- a/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs +++ b/crates/rethnet_evm_napi/src/blockchain/js_blockchain.rs @@ -21,6 +21,7 @@ pub struct JsBlockchain { impl BlockHashRef for JsBlockchain { type Error = napi::Error; + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn block_hash(&self, block_number: U256) -> Result { let (sender, receiver) = channel(); diff --git a/crates/rethnet_evm_napi/src/config.rs b/crates/rethnet_evm_napi/src/config.rs index 16fe3906bb..d7c5dfd4c3 100644 --- a/crates/rethnet_evm_napi/src/config.rs +++ b/crates/rethnet_evm_napi/src/config.rs @@ -94,6 +94,7 @@ pub struct Config { impl TryFrom for CfgEnv { type Error = napi::Error; + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn try_from(value: Config) -> std::result::Result { let default = CfgEnv::default(); let chain_id = value diff --git a/crates/rethnet_evm_napi/src/context.rs b/crates/rethnet_evm_napi/src/context.rs index 13073efcd5..54b3d756fd 100644 --- a/crates/rethnet_evm_napi/src/context.rs +++ b/crates/rethnet_evm_napi/src/context.rs @@ -52,10 +52,16 @@ impl Context { .with_level(true) .with_filter(EnvFilter::from_default_env()); + let subscriber = Registry::default().with(fmt_layer); + #[cfg(feature = "tracing")] - let (flame_layer, guard) = tracing_flame::FlameLayer::with_file("tracing.folded").unwrap(); + let (flame_layer, guard) = { + let (flame_layer, guard) = + tracing_flame::FlameLayer::with_file("tracing.folded").unwrap(); - let subscriber = Registry::default().with(fmt_layer); + let flame_layer = flame_layer.with_empty_samples(false); + (flame_layer, guard) + }; #[cfg(feature = "tracing")] let subscriber = subscriber.with(flame_layer); diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index fbf13c1ce7..789b837f24 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -9,7 +9,6 @@ mod cast; mod config; mod context; mod log; -mod logger; mod receipt; /// Rethnet runtime for executing individual transactions mod runtime; diff --git a/crates/rethnet_evm_napi/src/logger.rs b/crates/rethnet_evm_napi/src/logger.rs deleted file mode 100644 index af5da362c8..0000000000 --- a/crates/rethnet_evm_napi/src/logger.rs +++ /dev/null @@ -1,39 +0,0 @@ -use once_cell::sync::OnceCell; -use tracing_subscriber::{prelude::*, EnvFilter, Registry}; - -struct Logger { - #[cfg(feature = "tracing")] - _guard: 
tracing_flame::FlushGuard>, -} - -unsafe impl Sync for Logger {} - -static LOGGER: OnceCell = OnceCell::new(); - -pub fn enable_logging() { - let _logger = LOGGER.get_or_init(|| { - let fmt_layer = tracing_subscriber::fmt::layer() - .with_file(true) - .with_line_number(true) - .with_thread_ids(true) - .with_target(false) - .with_level(true) - .with_filter(EnvFilter::from_default_env()); - - #[cfg(feature = "tracing")] - let (flame_layer, _guard) = tracing_flame::FlameLayer::with_file("tracing.folded").unwrap(); - - let subscriber = Registry::default().with(fmt_layer); - - #[cfg(feature = "tracing")] - let subscriber = subscriber.with(flame_layer); - - tracing::subscriber::set_global_default(subscriber) - .expect("Could not set global default tracing subscriber"); - - Logger { - #[cfg(feature = "tracing")] - _guard, - } - }); -} diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 9cad0707a5..6a44153e74 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -23,7 +23,6 @@ use crate::{ account::Account, cast::TryCast, context::{Context, RethnetContext}, - logger::enable_logging, sync::{await_promise, handle_error}, threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, }; @@ -127,8 +126,6 @@ impl StateManager { where S: SyncState, { - enable_logging(); - let state: Box> = Box::new(state); env.adjust_external_memory(STATE_MEMORY_SIZE)?; @@ -333,6 +330,15 @@ impl StateManager { unsafe { modify_account_fn.raw() }, 0, |mut ctx: ThreadSafeCallContext| { + #[cfg(feature = "tracing")] + let span = tracing::span!( + tracing::Level::TRACE, + "modify_account_threadsafe_function_call" + ); + + #[cfg(feature = "tracing")] + let _span_guard = span.enter(); + let sender = ctx.value.sender.clone(); let balance = ctx diff --git a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs index 75974f6356..864e88a833 100644 --- a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs +++ b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs @@ -613,6 +613,7 @@ impl rethnet_evm::Inspector for JsTracer where E: Debug, { + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn call( &mut self, data: &mut dyn rethnet_evm::EVMData, @@ -660,6 +661,7 @@ where (InstructionResult::Continue, Gas::new(0), Bytes::default()) } + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn call_end( &mut self, data: &mut dyn rethnet_evm::EVMData, @@ -724,6 +726,7 @@ where (ret, remaining_gas, out) } + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn create( &mut self, data: &mut dyn rethnet_evm::EVMData, @@ -748,6 +751,7 @@ where ) } + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn create_end( &mut self, data: &mut dyn rethnet_evm::EVMData, @@ -802,6 +806,7 @@ where (ret, address, remaining_gas, out) } + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn step( &mut self, interp: &mut rethnet_evm::Interpreter, diff --git a/crates/rethnet_evm_napi/src/transaction.rs b/crates/rethnet_evm_napi/src/transaction.rs index 7ae5005e90..98ad145232 100644 --- a/crates/rethnet_evm_napi/src/transaction.rs +++ b/crates/rethnet_evm_napi/src/transaction.rs @@ -38,6 +38,7 @@ pub struct Transaction { impl TryFrom for rethnet_evm::TxEnv { type Error = napi::Error; + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn try_from(value: Transaction) -> std::result::Result { let caller = if let Some(from) = 
value.from.as_ref() { Address::from_slice(from) diff --git a/crates/rethnet_evm_napi/src/transaction/result.rs b/crates/rethnet_evm_napi/src/transaction/result.rs index 4e16d918bc..b2609c6ce2 100644 --- a/crates/rethnet_evm_napi/src/transaction/result.rs +++ b/crates/rethnet_evm_napi/src/transaction/result.rs @@ -137,6 +137,7 @@ pub struct ExecutionResult { } impl From<(rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)> for ExecutionResult { + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn from((result, trace): (rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)) -> Self { let result = match result { rethnet_evm::ExecutionResult::Success { @@ -198,6 +199,7 @@ impl { type Error = napi::Error; + #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] fn try_from( (result, state, trace): ( rethnet_evm::ExecutionResult, From 6cde3c7a4bf18344adf4d631e9127b88241f07c0 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 17:21:40 -0500 Subject: [PATCH 052/406] fix: re-enable storage root check (#3845) --- .../internal/hardhat-network/provider/vm/dual.ts | 13 ++++++------- .../internal/hardhat-network/provider/vm/rethnet.ts | 12 +++++++----- 2 files changed, 13 insertions(+), 12 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index 3c84c377bd..7c69061710 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -707,13 +707,12 @@ function assertEqualAccounts( } if (!ethereumJSAccount.storageRoot.equals(rethnetAccount.storageRoot)) { - // TODO re-enable - // console.trace( - // `Different storageRoot: ${ethereumJSAccount.storageRoot.toString( - // "hex" - // )} !== ${rethnetAccount.storageRoot.toString("hex")}` - // ); - // throw new Error("Different storageRoot"); + console.trace( + `Different storageRoot: ${ethereumJSAccount.storageRoot.toString( + "hex" + )} !== ${rethnetAccount.storageRoot.toString("hex")}` + ); + throw new Error("Different storageRoot"); } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index c383fcc8f9..8b9dae9e5e 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -149,13 +149,15 @@ export class RethnetAdapter implements VMAdapter { * Get the account info for the given address. */ public async getAccount(address: Address): Promise { - const account = await this._state.getAccount(address); - const storageRoot = await this._state.getAccountStorageRoot(address); + const [accountInfo, storageRoot] = await Promise.all([ + this._state.getAccount(address), + this._state.getAccountStorageRoot(address), + ]); return new Account( - account?.nonce, - account?.balance, + accountInfo?.nonce, + accountInfo?.balance, storageRoot ?? 
undefined, - account?.code?.hash + accountInfo?.code?.hash ); } From bc4785d6a2a526903b37309db2b4dd36daa64751 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 18:44:33 -0500 Subject: [PATCH 053/406] improvement: replace JSTracer with TraceCollector (#3846) --- crates/rethnet_evm/src/block/builder.rs | 17 +- crates/rethnet_evm/src/evm.rs | 38 +- crates/rethnet_evm/src/inspector.rs | 159 ---- crates/rethnet_evm/src/lib.rs | 3 +- crates/rethnet_evm/src/runtime.rs | 24 +- crates/rethnet_evm/src/trace.rs | 329 ++++++- crates/rethnet_evm_napi/src/block/builder.rs | 23 +- crates/rethnet_evm_napi/src/lib.rs | 1 - crates/rethnet_evm_napi/src/log.rs | 31 +- crates/rethnet_evm_napi/src/runtime.rs | 90 +- crates/rethnet_evm_napi/src/trace.rs | 159 +++- crates/rethnet_evm_napi/src/tracer.rs | 28 - .../rethnet_evm_napi/src/tracer/js_tracer.rs | 887 ------------------ .../src/transaction/result.rs | 175 +++- .../hardhat-network/provider/vm/ethereumjs.ts | 4 - .../hardhat-network/provider/vm/rethnet.ts | 68 +- 16 files changed, 703 insertions(+), 1333 deletions(-) delete mode 100644 crates/rethnet_evm/src/inspector.rs delete mode 100644 crates/rethnet_evm_napi/src/tracer.rs delete mode 100644 crates/rethnet_evm_napi/src/tracer/js_tracer.rs diff --git a/crates/rethnet_evm/src/block/builder.rs b/crates/rethnet_evm/src/block/builder.rs index e50e1c0a8b..933afce83a 100644 --- a/crates/rethnet_evm/src/block/builder.rs +++ b/crates/rethnet_evm/src/block/builder.rs @@ -6,7 +6,10 @@ use rethnet_eth::{ }; use revm::{ db::DatabaseComponentError, - primitives::{BlockEnv, CfgEnv, EVMError, ExecutionResult, InvalidTransaction, SpecId, TxEnv}, + primitives::{ + BlockEnv, CfgEnv, EVMError, ExecutionResult, InvalidTransaction, ResultAndState, SpecId, + TxEnv, + }, }; use tokio::sync::RwLock; @@ -14,7 +17,6 @@ use crate::{ blockchain::SyncBlockchain, evm::{build_evm, run_transaction, SyncInspector}, state::{AccountModifierFn, SyncState}, - trace::Trace, HeaderData, }; @@ -113,8 +115,8 @@ where pub async fn add_transaction( &mut self, transaction: TxEnv, - inspector: Option>>, - ) -> Result<(ExecutionResult, Trace), BlockTransactionError> { + inspector: Option<&mut dyn SyncInspector>, + ) -> Result> { // transaction's gas limit cannot be greater than the remaining gas in the block if U256::from(transaction.gas_limit) > self.gas_remaining() { return Err(BlockTransactionError::ExceedsBlockGasLimit); @@ -140,14 +142,17 @@ where let evm = build_evm(&*blockchain, &*state, self.cfg.clone(), transaction, block); - let (result, changes, trace) = run_transaction(evm, inspector)?; + let ResultAndState { + result, + state: changes, + } = run_transaction(evm, inspector)?; state.commit(changes); self.header.gas_used += U256::from(result.gas_used()); // TODO: store receipt - Ok((result, trace)) + Ok(result) } /// Finalizes the block, returning the state root. 
diff --git a/crates/rethnet_evm/src/evm.rs b/crates/rethnet_evm/src/evm.rs index b17c143bc7..1a6fc89524 100644 --- a/crates/rethnet_evm/src/evm.rs +++ b/crates/rethnet_evm/src/evm.rs @@ -2,17 +2,11 @@ use std::fmt::Debug; use revm::{ db::{DatabaseComponentError, DatabaseComponents}, - primitives::{BlockEnv, CfgEnv, EVMError, ExecutionResult, ResultAndState, State, TxEnv}, + primitives::{BlockEnv, CfgEnv, EVMError, ResultAndState, TxEnv}, Inspector, }; -use crate::{ - blockchain::SyncBlockchain, - inspector::DualInspector, - state::SyncState, - trace::{Trace, TraceCollector}, - SyncDatabase, -}; +use crate::{blockchain::SyncBlockchain, state::SyncState, SyncDatabase}; /// Super trait for an inspector of an `AsyncDatabase` that's debuggable. pub trait SyncInspector: Inspector> + Debug + Send @@ -22,6 +16,14 @@ where { } +impl SyncInspector for I +where + I: Inspector> + Debug + Send, + BE: Debug + Send + 'static, + SE: Debug + Send + 'static, +{ +} + /// Creates an evm from the provided database, config, transaction, and block. #[cfg_attr(feature = "tracing", tracing::instrument)] pub fn build_evm<'b, 's, BE, SE>( @@ -50,23 +52,15 @@ where #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn run_transaction( evm: revm::EVM>, - inspector: Option>>, -) -> Result<(ExecutionResult, State, Trace), EVMError>> + inspector: Option<&mut dyn SyncInspector>, +) -> Result>> where BE: Debug + Send + 'static, SE: Debug + Send + 'static, { - let (result, state, tracer) = if let Some(inspector) = inspector { - let mut inspector = DualInspector::new(TraceCollector::default(), inspector); - - let ResultAndState { result, state } = evm.inspect_ref(&mut inspector)?; - (result, state, inspector.into_parts().0) + if let Some(inspector) = inspector { + evm.inspect_ref(inspector) } else { - let mut inspector = TraceCollector::default(); - let ResultAndState { result, state } = evm.inspect_ref(&mut inspector)?; - - (result, state, inspector) - }; - - Ok((result, state, tracer.into_trace())) + evm.transact_ref() + } } diff --git a/crates/rethnet_evm/src/inspector.rs b/crates/rethnet_evm/src/inspector.rs deleted file mode 100644 index 8c7bc40eed..0000000000 --- a/crates/rethnet_evm/src/inspector.rs +++ /dev/null @@ -1,159 +0,0 @@ -use std::marker::PhantomData; - -use revm::Inspector; - -// TODO: Improve this design by introducing a InspectorMut trait - -/// Inspector that allows two inspectors to operate side-by-side. The immutable inspector runs -/// first, followed by the mutable inspector. To ensure both inspectors observe a valid state, you -/// have to ensure that only the mutable inspector modifies state. The returned values are solely -/// determined by the mutable inspector. -pub struct DualInspector -where - A: Inspector, - B: Inspector, -{ - immutable: A, - mutable: B, - phantom: PhantomData, -} - -impl DualInspector -where - A: Inspector, - B: Inspector, -{ - /// Constructs a `DualInspector` from the provided inspectors. - pub fn new(immutable: A, mutable: B) -> Self { - Self { - immutable, - mutable, - phantom: PhantomData, - } - } - - /// Returns the two inspectors wrapped by the `DualInspector`. 
- pub fn into_parts(self) -> (A, B) { - (self.immutable, self.mutable) - } -} - -impl Inspector for DualInspector -where - A: Inspector, - B: Inspector, -{ - fn initialize_interp( - &mut self, - interp: &mut revm::interpreter::Interpreter, - data: &mut dyn revm::EVMData, - is_static: bool, - ) -> revm::interpreter::InstructionResult { - self.immutable.initialize_interp(interp, data, is_static); - self.mutable.initialize_interp(interp, data, is_static) - } - - fn step( - &mut self, - interp: &mut revm::interpreter::Interpreter, - data: &mut dyn revm::EVMData, - is_static: bool, - ) -> revm::interpreter::InstructionResult { - self.immutable.step(interp, data, is_static); - self.mutable.step(interp, data, is_static) - } - - fn log( - &mut self, - evm_data: &mut dyn revm::EVMData, - address: &rethnet_eth::B160, - topics: &[rethnet_eth::B256], - data: &rethnet_eth::Bytes, - ) { - self.immutable.log(evm_data, address, topics, data); - self.mutable.log(evm_data, address, topics, data) - } - - fn step_end( - &mut self, - interp: &mut revm::interpreter::Interpreter, - data: &mut dyn revm::EVMData, - is_static: bool, - eval: revm::interpreter::InstructionResult, - ) -> revm::interpreter::InstructionResult { - self.immutable.step_end(interp, data, is_static, eval); - self.mutable.step_end(interp, data, is_static, eval) - } - - fn call( - &mut self, - data: &mut dyn revm::EVMData, - inputs: &mut revm::interpreter::CallInputs, - is_static: bool, - ) -> ( - revm::interpreter::InstructionResult, - revm::interpreter::Gas, - rethnet_eth::Bytes, - ) { - self.immutable.call(data, inputs, is_static); - self.mutable.call(data, inputs, is_static) - } - - fn call_end( - &mut self, - data: &mut dyn revm::EVMData, - inputs: &revm::interpreter::CallInputs, - remaining_gas: revm::interpreter::Gas, - ret: revm::interpreter::InstructionResult, - out: rethnet_eth::Bytes, - is_static: bool, - ) -> ( - revm::interpreter::InstructionResult, - revm::interpreter::Gas, - rethnet_eth::Bytes, - ) { - self.immutable - .call_end(data, inputs, remaining_gas, ret, out.clone(), is_static); - self.mutable - .call_end(data, inputs, remaining_gas, ret, out, is_static) - } - - fn create( - &mut self, - data: &mut dyn revm::EVMData, - inputs: &mut revm::interpreter::CreateInputs, - ) -> ( - revm::interpreter::InstructionResult, - Option, - revm::interpreter::Gas, - rethnet_eth::Bytes, - ) { - self.immutable.create(data, inputs); - self.mutable.create(data, inputs) - } - - fn create_end( - &mut self, - data: &mut dyn revm::EVMData, - inputs: &revm::interpreter::CreateInputs, - ret: revm::interpreter::InstructionResult, - address: Option, - remaining_gas: revm::interpreter::Gas, - out: rethnet_eth::Bytes, - ) -> ( - revm::interpreter::InstructionResult, - Option, - revm::interpreter::Gas, - rethnet_eth::Bytes, - ) { - self.immutable - .create_end(data, inputs, ret, address, remaining_gas, out.clone()); - self.mutable - .create_end(data, inputs, ret, address, remaining_gas, out) - } - - fn selfdestruct(&mut self, contract: rethnet_eth::B160, target: rethnet_eth::B160) { - self.immutable.selfdestruct(contract, target); - self.mutable.selfdestruct(contract, target); - } -} diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs index 514eba5ff6..4e872bc18a 100644 --- a/crates/rethnet_evm/src/lib.rs +++ b/crates/rethnet_evm/src/lib.rs @@ -16,7 +16,7 @@ pub use revm::{ InstructionResult, Interpreter, OPCODE_JUMPMAP, }, primitives::*, - EVMData, Inspector, + Inspector, }; pub use crate::{ @@ -37,7 +37,6 @@ pub mod trace; mod 
block; pub(crate) mod evm; -mod inspector; pub(crate) mod random; mod runtime; mod transaction; diff --git a/crates/rethnet_evm/src/runtime.rs b/crates/rethnet_evm/src/runtime.rs index df273e8e62..79866bf8da 100644 --- a/crates/rethnet_evm/src/runtime.rs +++ b/crates/rethnet_evm/src/runtime.rs @@ -2,7 +2,7 @@ use std::{fmt::Debug, sync::Arc}; use revm::{ db::DatabaseComponents, - primitives::{BlockEnv, CfgEnv, ExecutionResult, SpecId, TxEnv}, + primitives::{BlockEnv, CfgEnv, ExecutionResult, ResultAndState, SpecId, TxEnv}, }; use tokio::sync::RwLock; @@ -10,9 +10,7 @@ use crate::{ blockchain::SyncBlockchain, evm::{build_evm, run_transaction, SyncInspector}, state::SyncState, - trace::Trace, transaction::TransactionError, - State, }; /// Asynchronous implementation of the Database super-trait @@ -55,8 +53,8 @@ where &self, transaction: TxEnv, block: BlockEnv, - inspector: Option>>, - ) -> Result<(ExecutionResult, State, Trace), TransactionError> { + inspector: Option<&mut dyn SyncInspector>, + ) -> Result> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); } @@ -75,8 +73,8 @@ where &self, transaction: TxEnv, block: BlockEnv, - inspector: Option>>, - ) -> Result<(ExecutionResult, State, Trace), TransactionError> { + inspector: Option<&mut dyn SyncInspector>, + ) -> Result> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); } @@ -98,8 +96,8 @@ where &self, transaction: TxEnv, block: BlockEnv, - inspector: Option>>, - ) -> Result<(ExecutionResult, Trace), TransactionError> { + inspector: Option<&mut dyn SyncInspector>, + ) -> Result> { if self.cfg.spec_id > SpecId::MERGE && block.prevrandao.is_none() { return Err(TransactionError::MissingPrevrandao); } @@ -109,11 +107,13 @@ where let evm = build_evm(&*blockchain, &*state, self.cfg.clone(), transaction, block); - let (result, changes, trace) = - run_transaction(evm, inspector).map_err(TransactionError::from)?; + let ResultAndState { + result, + state: changes, + } = run_transaction(evm, inspector).map_err(TransactionError::from)?; state.commit(changes); - Ok((result, trace)) + Ok(result) } } diff --git a/crates/rethnet_evm/src/trace.rs b/crates/rethnet_evm/src/trace.rs index ca5b8403f1..241d490c81 100644 --- a/crates/rethnet_evm/src/trace.rs +++ b/crates/rethnet_evm/src/trace.rs @@ -1,59 +1,109 @@ -use rethnet_eth::Bytes; +use std::fmt::Debug; + +use rethnet_eth::{Address, Bytes, U256}; use revm::{ - interpreter::{opcode, Gas, InstructionResult, Interpreter}, + interpreter::{ + instruction_result::SuccessOrHalt, opcode, return_revert, CallInputs, CreateInputs, Gas, + InstructionResult, Interpreter, + }, + primitives::{AccountInfo, Bytecode, ExecutionResult, Output}, EVMData, Inspector, }; +/// Stack tracing message +#[derive(Debug)] +pub enum TraceMessage { + /// Event that occurs before a call or create message. + Before(BeforeMessage), + /// Event that occurs every step of a call or create message. + Step(Step), + /// Event that occurs after a call or create message. + After(ExecutionResult), +} + +/// Temporary before message type for handling traces +#[derive(Debug, Clone)] +pub struct BeforeMessage { + /// Call depth + pub depth: usize, + /// Callee + pub to: Option
, + /// Input data + pub data: Bytes, + /// Value + pub value: U256, + /// Code address + pub code_address: Option
, + /// Bytecode + pub code: Option, +} + /// A trace for an EVM call. -#[derive(Default)] +#[derive(Debug, Default)] pub struct Trace { - /// The individual steps of the call - pub steps: Vec, + // /// The individual steps of the call + // pub steps: Vec, + /// Messages + pub messages: Vec, /// The return value of the call pub return_value: Bytes, - gas: Option, } /// A single EVM step. +#[derive(Debug)] pub struct Step { - /// The executed op code - pub opcode: u8, - /// The amount of gas that was used by the step - pub gas_cost: u64, - /// The amount of gas that was refunded by the step - pub gas_refunded: i64, - /// The exit code of the step - pub exit_code: InstructionResult, + /// The program counter + pub pc: u64, + /// The call depth + pub depth: u64, + // /// The executed op code + // pub opcode: u8, + // /// The amount of gas that was used by the step + // pub gas_cost: u64, + // /// The amount of gas that was refunded by the step + // pub gas_refunded: i64, + // /// The contract being executed + // pub contract: AccountInfo, + // /// The address of the contract + // pub contract_address: Address, } impl Trace { - /// Adds a VM step to the trace. - pub fn add_step(&mut self, opcode: u8, gas: &Gas, exit_code: InstructionResult) { - let step = if let Some(old_gas) = self.gas.replace(*gas) { - Step { - opcode, - gas_cost: gas.spend() - old_gas.spend(), - gas_refunded: gas.refunded() - old_gas.refunded(), - exit_code, - } - } else { - Step { - opcode, - gas_cost: gas.spend(), - gas_refunded: gas.refunded(), - exit_code, - } - }; + /// Adds a before message + pub fn add_before(&mut self, message: BeforeMessage) { + self.messages.push(TraceMessage::Before(message)); + } - self.steps.push(step); + /// Adds a result message + pub fn add_after(&mut self, result: ExecutionResult) { + self.messages.push(TraceMessage::After(result)); + } + + /// Adds a VM step to the trace. + pub fn add_step( + &mut self, + depth: u64, + pc: usize, + _opcode: u8, + _gas: &Gas, + _contract: &AccountInfo, + _contract_address: &Address, + ) { + self.messages.push(TraceMessage::Step(Step { + pc: pc as u64, + depth, + // opcode, + // contract: contract.clone(), + // contract_address: *contract_address, + })); } } /// Object that gathers trace information during EVM execution and can be turned into a trace upon completion. 
-#[derive(Default)] +#[derive(Debug, Default)] pub struct TraceCollector { trace: Trace, - opcode_stack: Vec, + pending_before: Option, } impl TraceCollector { @@ -61,38 +111,213 @@ impl TraceCollector { pub fn into_trace(self) -> Trace { self.trace } + + fn validate_before_message(&mut self) { + if let Some(message) = self.pending_before.take() { + self.trace.add_before(message); + } + } } -impl Inspector for TraceCollector { - fn step( +impl Inspector for TraceCollector +where + E: Debug, +{ + fn call( &mut self, - interp: &mut Interpreter, - _data: &mut dyn EVMData, + data: &mut dyn EVMData, + inputs: &mut CallInputs, _is_static: bool, - ) -> InstructionResult { - self.opcode_stack.push(interp.current_opcode()); + ) -> (InstructionResult, Gas, rethnet_eth::Bytes) { + self.validate_before_message(); - InstructionResult::Continue + let code = data + .journaled_state() + .state + .get(&inputs.contract) + .map(|account| account.info.clone()) + .map(|mut account_info| { + if let Some(code) = account_info.code.take() { + code + } else { + data.database() + .code_by_hash(account_info.code_hash) + .unwrap() + } + }) + .unwrap_or_else(|| { + data.database().basic(inputs.contract).unwrap().map_or( + // If an invalid contract address was provided, return empty code + Bytecode::new(), + |account_info| { + account_info.code.unwrap_or_else(|| { + data.database() + .code_by_hash(account_info.code_hash) + .unwrap() + }) + }, + ) + }); + + self.pending_before = Some(BeforeMessage { + depth: data.journaled_state().depth, + to: Some(inputs.context.address), + data: inputs.input.clone(), + value: inputs.context.apparent_value, + code_address: Some(inputs.context.code_address), + code: Some(code), + }); + + (InstructionResult::Continue, Gas::new(0), Bytes::default()) } - fn step_end( + fn call_end( + &mut self, + data: &mut dyn EVMData, + _inputs: &CallInputs, + remaining_gas: Gas, + ret: InstructionResult, + out: Bytes, + _is_static: bool, + ) -> (InstructionResult, Gas, Bytes) { + match ret { + return_revert!() if self.pending_before.is_some() => { + self.pending_before = None; + return (ret, remaining_gas, out); + } + _ => (), + } + + self.validate_before_message(); + + let safe_ret = if ret == InstructionResult::CallTooDeep + || ret == InstructionResult::OutOfFund + || ret == InstructionResult::StateChangeDuringStaticCall + { + InstructionResult::Revert + } else { + ret + }; + + let result = match safe_ret.into() { + SuccessOrHalt::Success(reason) => ExecutionResult::Success { + reason, + gas_used: remaining_gas.spend(), + gas_refunded: remaining_gas.refunded() as u64, + logs: data.journaled_state().logs.clone(), + output: Output::Call(out.clone()), + }, + SuccessOrHalt::Revert => ExecutionResult::Revert { + gas_used: remaining_gas.spend(), + output: out.clone(), + }, + SuccessOrHalt::Halt(reason) => ExecutionResult::Halt { + reason, + gas_used: remaining_gas.limit(), + }, + SuccessOrHalt::InternalContinue => panic!("Internal error: {:?}", safe_ret), + SuccessOrHalt::FatalExternalError => panic!("Fatal external error"), + }; + + self.trace.add_after(result); + + (ret, remaining_gas, out) + } + + fn create( + &mut self, + data: &mut dyn EVMData, + inputs: &mut CreateInputs, + ) -> (InstructionResult, Option, Gas, Bytes) { + self.validate_before_message(); + + self.pending_before = Some(BeforeMessage { + depth: data.journaled_state().depth, + to: None, + data: inputs.init_code.clone(), + value: inputs.value, + code_address: None, + code: None, + }); + + ( + InstructionResult::Continue, + None, + 
Gas::new(0), + Bytes::default(), + ) + } + + fn create_end( + &mut self, + data: &mut dyn EVMData, + _inputs: &CreateInputs, + ret: InstructionResult, + address: Option, + remaining_gas: Gas, + out: Bytes, + ) -> (InstructionResult, Option, Gas, Bytes) { + self.validate_before_message(); + + let safe_ret = + if ret == InstructionResult::CallTooDeep || ret == InstructionResult::OutOfFund { + InstructionResult::Revert + } else { + ret + }; + + let result = match safe_ret.into() { + SuccessOrHalt::Success(reason) => ExecutionResult::Success { + reason, + gas_used: remaining_gas.spend(), + gas_refunded: remaining_gas.refunded() as u64, + logs: data.journaled_state().logs.clone(), + output: Output::Create(out.clone(), address), + }, + SuccessOrHalt::Revert => ExecutionResult::Revert { + gas_used: remaining_gas.spend(), + output: out.clone(), + }, + SuccessOrHalt::Halt(reason) => ExecutionResult::Halt { + reason, + gas_used: remaining_gas.limit(), + }, + SuccessOrHalt::InternalContinue => panic!("Internal error: {:?}", safe_ret), + SuccessOrHalt::FatalExternalError => panic!("Fatal external error"), + }; + + self.trace.add_after(result); + + (ret, address, remaining_gas, out) + } + + fn step( &mut self, interp: &mut Interpreter, - _data: &mut dyn revm::EVMData, + data: &mut dyn EVMData, _is_static: bool, - exit_code: InstructionResult, ) -> InstructionResult { - let opcode = self - .opcode_stack - .pop() - .expect("There must always be an opcode when ending a step"); + // Skip the step + let skip_step = self.pending_before.as_ref().map_or(false, |message| { + message.code.is_some() && interp.current_opcode() == opcode::STOP + }); - self.trace.add_step(opcode, interp.gas(), exit_code); + self.validate_before_message(); - if opcode == opcode::RETURN || opcode == opcode::REVERT { - self.trace.return_value = interp.return_value(); + if !skip_step { + self.trace.add_step( + data.journaled_state().depth(), + interp.program_counter(), + interp.current_opcode(), + interp.gas(), + &data + .journaled_state() + .account(interp.contract().address) + .info, + &interp.contract().address, + ); } - exit_code + InstructionResult::Continue } } diff --git a/crates/rethnet_evm_napi/src/block/builder.rs b/crates/rethnet_evm_napi/src/block/builder.rs index b1cb34af8b..9980b556e7 100644 --- a/crates/rethnet_evm_napi/src/block/builder.rs +++ b/crates/rethnet_evm_napi/src/block/builder.rs @@ -7,15 +7,16 @@ use napi::{ }; use napi_derive::napi; use rethnet_eth::{block::Header, Address, U256}; -use rethnet_evm::{state::StateError, CfgEnv, HeaderData, TxEnv}; +use rethnet_evm::{ + state::StateError, trace::TraceCollector, CfgEnv, HeaderData, SyncInspector, TxEnv, +}; use crate::{ blockchain::Blockchain, cast::TryCast, config::Config, state::StateManager, - tracer::Tracer, - transaction::{result::ExecutionResult, Transaction}, + transaction::{result::TransactionResult, Transaction}, }; use super::{BlockConfig, BlockHeader}; @@ -58,20 +59,28 @@ impl BlockBuilder { pub async fn add_transaction( &self, transaction: Transaction, - tracer: Option<&Tracer>, - ) -> napi::Result { + with_trace: bool, + ) -> napi::Result { let mut builder = self.builder.lock().await; if let Some(builder) = builder.as_mut() { let transaction = TxEnv::try_from(transaction)?; - let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); + let mut tracer = TraceCollector::default(); + let inspector: Option<&mut dyn SyncInspector> = + if with_trace { Some(&mut tracer) } else { None }; let result = builder .add_transaction(transaction, inspector) 
.await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; - Ok(result.into()) + let trace = if with_trace { + Some(tracer.into_trace()) + } else { + None + }; + + Ok(TransactionResult::new(result, None, trace)) } else { Err(napi::Error::new( Status::InvalidArg, diff --git a/crates/rethnet_evm_napi/src/lib.rs b/crates/rethnet_evm_napi/src/lib.rs index 789b837f24..15d428b912 100644 --- a/crates/rethnet_evm_napi/src/lib.rs +++ b/crates/rethnet_evm_napi/src/lib.rs @@ -16,5 +16,4 @@ mod state; mod sync; mod threadsafe_function; mod trace; -mod tracer; mod transaction; diff --git a/crates/rethnet_evm_napi/src/log.rs b/crates/rethnet_evm_napi/src/log.rs index 321c2743b1..fa0de90f1b 100644 --- a/crates/rethnet_evm_napi/src/log.rs +++ b/crates/rethnet_evm_napi/src/log.rs @@ -1,4 +1,6 @@ -use napi::bindgen_prelude::Buffer; +use std::mem; + +use napi::{bindgen_prelude::Buffer, Env, JsBuffer, JsBufferValue}; use napi_derive::napi; /// Ethereum log. @@ -6,21 +8,34 @@ use napi_derive::napi; pub struct Log { pub address: Buffer, pub topics: Vec, - pub data: Buffer, + pub data: JsBuffer, } -impl From for Log { - fn from(log: rethnet_evm::Log) -> Self { +impl Log { + pub fn new(env: &Env, log: &rethnet_evm::Log) -> napi::Result { let topics = log .topics - .into_iter() + .iter() .map(|topic| Buffer::from(topic.as_bytes())) .collect(); - Self { + let data = log.data.clone(); + let data = unsafe { + env.create_buffer_with_borrowed_data( + data.as_ptr(), + data.len(), + data, + |data: rethnet_eth::Bytes, _env| { + mem::drop(data); + }, + ) + } + .map(JsBufferValue::into_raw)?; + + Ok(Self { address: Buffer::from(log.address.as_bytes()), topics, - data: Buffer::from(log.data.as_ref()), - } + data, + }) } } diff --git a/crates/rethnet_evm_napi/src/runtime.rs b/crates/rethnet_evm_napi/src/runtime.rs index 472ae5614e..c940a4eddb 100644 --- a/crates/rethnet_evm_napi/src/runtime.rs +++ b/crates/rethnet_evm_napi/src/runtime.rs @@ -1,7 +1,8 @@ use napi::Status; use napi_derive::napi; use rethnet_evm::{ - state::StateError, BlockEnv, CfgEnv, InvalidTransaction, TransactionError, TxEnv, + state::StateError, trace::TraceCollector, BlockEnv, CfgEnv, InvalidTransaction, ResultAndState, + SyncInspector, TransactionError, TxEnv, }; use crate::{ @@ -9,8 +10,7 @@ use crate::{ blockchain::Blockchain, config::Config, state::StateManager, - tracer::Tracer, - transaction::{result::ExecutionResult, Transaction}, + transaction::{result::TransactionResult, Transaction}, }; /// The Rethnet runtime, which can execute individual transactions. @@ -48,20 +48,28 @@ impl Rethnet { &self, transaction: Transaction, block: BlockConfig, - tracer: Option<&Tracer>, - ) -> napi::Result { + with_trace: bool, + ) -> napi::Result { let transaction = TxEnv::try_from(transaction)?; let block = BlockEnv::try_from(block)?; - let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); + let mut tracer = TraceCollector::default(); + let inspector: Option<&mut dyn SyncInspector> = + if with_trace { Some(&mut tracer) } else { None }; - let (result, _state, trace) = self + let ResultAndState { result, state } = self .runtime .dry_run(transaction, block, inspector) .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; - Ok(ExecutionResult::from((result, trace))) + let trace = if with_trace { + Some(tracer.into_trace()) + } else { + None + }; + + Ok(TransactionResult::new(result, Some(state), trace)) } /// Executes the provided transaction without changing state, ignoring validation checks in the process. 
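For context, a minimal TypeScript sketch of how a consumer of the generated N-API bindings might drive the new boolean tracing flag and walk the returned trace. The dispatch on "pc" / "executionResult" mirrors the rethnet.ts changes later in this patch; the exact generated typings and the shapes of the transaction and block arguments are assumptions, not part of the recorded change.

// Hedged sketch, assuming the generated bindings expose `dryRun`, and that
// `TransactionResult` surfaces `result` and `trace` as getter properties.
import { Rethnet } from "rethnet-evm";

async function dryRunWithTrace(rethnet: Rethnet, tx: any, blockConfig: any) {
  // Passing `true` replaces the old `Tracer` argument and enables trace collection.
  const txResult = await rethnet.dryRun(tx, blockConfig, true);

  for (const item of txResult.trace ?? []) {
    if ("pc" in item) {
      // TracingStep: a single EVM step (program counter + call depth).
    } else if ("executionResult" in item) {
      // TracingMessageResult: emitted after a call/create message completes.
    } else {
      // TracingMessage: emitted before a call/create message starts.
    }
  }

  return txResult.result;
}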
@@ -71,20 +79,28 @@ impl Rethnet { &self, transaction: Transaction, block: BlockConfig, - tracer: Option<&Tracer>, - ) -> napi::Result { + with_trace: bool, + ) -> napi::Result { let transaction = TxEnv::try_from(transaction)?; let block = BlockEnv::try_from(block)?; - let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); + let mut tracer = TraceCollector::default(); + let inspector: Option<&mut dyn SyncInspector> = + if with_trace { Some(&mut tracer) } else { None }; - let (result, _state, trace) = self + let ResultAndState { result, state } = self .runtime .guaranteed_dry_run(transaction, block, inspector) .await .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string()))?; - Ok(ExecutionResult::from((result, trace))) + let trace = if with_trace { + Some(tracer.into_trace()) + } else { + None + }; + + Ok(TransactionResult::new(result, Some(state), trace)) } /// Executes the provided transaction, changing state in the process. @@ -94,27 +110,37 @@ impl Rethnet { &self, transaction: Transaction, block: BlockConfig, - tracer: Option<&Tracer>, - ) -> napi::Result { + with_trace: bool, + ) -> napi::Result { let transaction = TxEnv::try_from(transaction)?; let block = BlockEnv::try_from(block)?; - let inspector = tracer.map(|tracer| tracer.as_dyn_inspector()); - - Ok(ExecutionResult::from(self - .runtime - .run(transaction, block, inspector) - .await - .map_err(|e| { - napi::Error::new( - Status::GenericFailure, - match e { - TransactionError::InvalidTransaction( - InvalidTransaction::LackOfFundForGasLimit { gas_limit, balance }, - ) => format!("sender doesn't have enough funds to send tx. The max upfront cost is: {} and the sender's account only has: {}", gas_limit, balance), - e => e.to_string(), - }, - ) - })?)) + let mut tracer = TraceCollector::default(); + let inspector: Option<&mut dyn SyncInspector> = + if with_trace { Some(&mut tracer) } else { None }; + + let result = self + .runtime + .run(transaction, block, inspector) + .await + .map_err(|e| { + napi::Error::new( + Status::GenericFailure, + match e { + TransactionError::InvalidTransaction( + InvalidTransaction::LackOfFundForGasLimit { gas_limit, balance }, + ) => format!("sender doesn't have enough funds to send tx. The max upfront cost is: {} and the sender's account only has: {}", gas_limit, balance), + e => e.to_string(), + }, + ) + })?; + + let trace = if with_trace { + Some(tracer.into_trace()) + } else { + None + }; + + Ok(TransactionResult::new(result, None, trace)) } } diff --git a/crates/rethnet_evm_napi/src/trace.rs b/crates/rethnet_evm_napi/src/trace.rs index b8efe9a618..6f087789f2 100644 --- a/crates/rethnet_evm_napi/src/trace.rs +++ b/crates/rethnet_evm_napi/src/trace.rs @@ -1,39 +1,152 @@ -use napi::bindgen_prelude::{BigInt, Buffer}; +use std::mem; + +use napi::{ + bindgen_prelude::{BigInt, Buffer}, + Env, JsBuffer, JsBufferValue, +}; use napi_derive::napi; +use rethnet_evm::trace::BeforeMessage; + +use crate::transaction::result::ExecutionResult; #[napi(object)] -pub struct Trace { - pub steps: Vec, - pub return_value: Buffer, -} +pub struct TracingMessage { + /// Recipient address. None if it is a Create message. 
+ #[napi(readonly)] + pub to: Option, -impl From for Trace { - fn from(value: rethnet_evm::trace::Trace) -> Self { - let steps = value.steps.into_iter().map(From::from).collect(); - let return_value = Buffer::from(value.return_value.as_ref()); + /// Depth of the message + #[napi(readonly)] + pub depth: u8, - Self { - steps, - return_value, + /// Input data of the message + #[napi(readonly)] + pub data: JsBuffer, + + /// Value sent in the message + #[napi(readonly)] + pub value: BigInt, + + /// Address of the code that is being executed. Can be different from `to` if a delegate call + /// is being done. + #[napi(readonly)] + pub code_address: Option, + + /// Code of the contract that is being executed. + #[napi(readonly)] + pub code: Option, +} + +impl TracingMessage { + pub fn new(env: &Env, message: &BeforeMessage) -> napi::Result { + let data = message.data.clone(); + let data = unsafe { + env.create_buffer_with_borrowed_data( + data.as_ptr(), + data.len(), + data, + |data: rethnet_eth::Bytes, _env| { + mem::drop(data); + }, + ) } + .map(JsBufferValue::into_raw)?; + + let code = message.code.as_ref().map_or(Ok(None), |code| { + let code = code.original_bytes(); + + unsafe { + env.create_buffer_with_borrowed_data( + code.as_ptr(), + code.len(), + code, + |code: rethnet_eth::Bytes, _env| { + mem::drop(code); + }, + ) + } + .map(JsBufferValue::into_raw) + .map(Some) + })?; + + Ok(TracingMessage { + to: message.to.map(|to| Buffer::from(to.to_vec())), + depth: message.depth as u8, + data, + value: BigInt { + sign_bit: false, + words: message.value.into_limbs().to_vec(), + }, + code_address: message + .code_address + .map(|address| Buffer::from(address.to_vec())), + code, + }) } } #[napi(object)] -pub struct Step { - pub opcode: u8, - pub gas_cost: BigInt, - pub gas_refunded: i64, - pub exit_code: u8, +pub struct TracingStep { + /// Call depth + #[napi(readonly)] + pub depth: u8, + /// The program counter + #[napi(readonly)] + pub pc: BigInt, + // /// The executed op code + // #[napi(readonly)] + // pub opcode: String, + // /// The return value of the step + // #[napi(readonly)] + // pub return_value: u8, + // /// The amount of gas that was used by the step + // #[napi(readonly)] + // pub gas_cost: BigInt, + // /// The amount of gas that was refunded by the step + // #[napi(readonly)] + // pub gas_refunded: BigInt, + // /// The amount of gas left + // #[napi(readonly)] + // pub gas_left: BigInt, + // /// The stack + // #[napi(readonly)] + // pub stack: Vec, + // /// The memory + // #[napi(readonly)] + // pub memory: Buffer, + // /// The contract being executed + // #[napi(readonly)] + // pub contract: Account, + // /// The address of the contract + // #[napi(readonly)] + // pub contract_address: Buffer, + // /// The address of the code being executed + // #[napi(readonly)] + // pub code_address: Buffer, } -impl From for Step { - fn from(value: rethnet_evm::trace::Step) -> Self { +impl TracingStep { + pub fn new(step: &rethnet_evm::trace::Step) -> Self { Self { - opcode: value.opcode, - gas_cost: BigInt::from(value.gas_cost), - gas_refunded: value.gas_refunded, - exit_code: value.exit_code as u8, + depth: step.depth as u8, + pc: BigInt::from(step.pc), + // opcode: OPCODE_JUMPMAP[usize::from(step.opcode)] + // .unwrap_or("") + // .to_string(), + // gas_cost: BigInt::from(0u64), + // gas_refunded: BigInt::from(0u64), + // gas_left: BigInt::from(0u64), + // stack: Vec::new(), + // memory: Buffer::from(Vec::new()), + // contract: Account::from(step.contract), + // contract_address: 
Buffer::from(step.contract_address.to_vec()), } } } + +#[napi(object)] +pub struct TracingMessageResult { + /// Execution result + #[napi(readonly)] + pub execution_result: ExecutionResult, +} diff --git a/crates/rethnet_evm_napi/src/tracer.rs b/crates/rethnet_evm_napi/src/tracer.rs deleted file mode 100644 index f7020949f2..0000000000 --- a/crates/rethnet_evm_napi/src/tracer.rs +++ /dev/null @@ -1,28 +0,0 @@ -mod js_tracer; - -use napi::Env; -use napi_derive::napi; -use rethnet_evm::{state::StateError, SyncInspector}; - -use self::js_tracer::{JsTracer, TracingCallbacks}; - -#[napi] -pub struct Tracer { - inner: Box, -} - -impl Tracer { - pub fn as_dyn_inspector(&self) -> Box> { - self.inner.clone() - } -} - -#[napi] -impl Tracer { - #[napi(constructor)] - pub fn new(env: Env, callbacks: TracingCallbacks) -> napi::Result { - JsTracer::new(&env, callbacks).map(|inner| Self { - inner: Box::new(inner), - }) - } -} diff --git a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs b/crates/rethnet_evm_napi/src/tracer/js_tracer.rs deleted file mode 100644 index 864e88a833..0000000000 --- a/crates/rethnet_evm_napi/src/tracer/js_tracer.rs +++ /dev/null @@ -1,887 +0,0 @@ -use std::{ - fmt::Debug, - mem, - sync::mpsc::{channel, Sender}, -}; - -use napi::{ - bindgen_prelude::{BigInt, Buffer}, - Env, JsBufferValue, JsFunction, JsNumber, JsUndefined, NapiRaw, Status, -}; -use napi_derive::napi; -use rethnet_eth::{Address, Bytes, U256}; -use rethnet_evm::{ - opcode, return_revert, Bytecode, Gas, InstructionResult, SuccessOrHalt, SyncInspector, -}; - -use crate::{ - sync::{await_void_promise, handle_error}, - threadsafe_function::{ThreadSafeCallContext, ThreadsafeFunction, ThreadsafeFunctionCallMode}, - transaction::result::{ExceptionalHalt, ExecutionResult}, -}; - -#[napi(object)] -pub struct TracingMessage { - /// Recipient address. None if it is a Create message. - #[napi(readonly)] - pub to: Option, - - /// Depth of the message - #[napi(readonly)] - pub depth: u8, - - /// Input data of the message - #[napi(readonly)] - pub data: Buffer, - - /// Value sent in the message - #[napi(readonly)] - pub value: BigInt, - - /// Address of the code that is being executed. Can be different from `to` if a delegate call - /// is being done. - #[napi(readonly)] - pub code_address: Option, - - /// Code of the contract that is being executed. 
- #[napi(readonly)] - pub code: Option, -} - -#[napi(object)] -pub struct TracingStep { - /// Call depth - #[napi(readonly)] - pub depth: JsNumber, - /// The program counter - #[napi(readonly)] - pub pc: BigInt, - // /// The executed op code - // #[napi(readonly)] - // pub opcode: String, - // /// The return value of the step - // #[napi(readonly)] - // pub return_value: u8, - // /// The amount of gas that was used by the step - // #[napi(readonly)] - // pub gas_cost: BigInt, - // /// The amount of gas that was refunded by the step - // #[napi(readonly)] - // pub gas_refunded: BigInt, - // /// The amount of gas left - // #[napi(readonly)] - // pub gas_left: BigInt, - // /// The stack - // #[napi(readonly)] - // pub stack: Vec, - // /// The memory - // #[napi(readonly)] - // pub memory: Buffer, - // /// The contract being executed - // #[napi(readonly)] - // pub contract: Account, - // /// The address of the contract - // #[napi(readonly)] - // pub contract_address: Buffer, - // /// The address of the code being executed - // #[napi(readonly)] - // pub code_address: Buffer, -} - -#[napi(object)] -pub struct TracingMessageResult { - /// Execution result - #[napi(readonly)] - pub execution_result: ExecutionResult, -} - -#[napi(object)] -pub struct TracingCallbacks { - #[napi(ts_type = "(message: TracingMessage, next: any) => Promise")] - pub before_message: JsFunction, - #[napi(ts_type = "(step: TracingStep, next: any) => Promise")] - pub step: JsFunction, - #[napi(ts_type = "(result: TracingMessageResult, next: any) => Promise")] - pub after_message: JsFunction, -} - -#[derive(Clone)] -struct BeforeMessage { - pub depth: usize, - pub to: Option
, - pub data: Bytes, - pub value: U256, - pub code_address: Option
, - pub code: Option, -} - -struct BeforeMessageHandlerCall { - message: BeforeMessage, - sender: Sender>, -} - -pub struct StepHandlerCall { - /// Call depth - pub depth: usize, - /// The program counter - pub pc: u64, - /// The executed op code - pub opcode: u8, - // /// The return value of the step - // pub return_value: InstructionResult, - // /// The amount of gas that was used by the step - // pub gas_cost: u64, - // /// The amount of gas that was refunded by the step - // pub gas_refunded: i64, - // /// The amount of gas left - // pub gas_left: u64, - // /// The stack - // pub stack: Vec, - // /// The memory - // pub memory: Bytes, - /// The contract being executed - pub contract: rethnet_evm::AccountInfo, - /// The address of the contract - pub contract_address: Address, - // /// The address of the code being executed - // pub code_address: Address, - pub sender: Sender>, -} - -pub struct AfterMessageHandlerCall { - pub result: rethnet_evm::ExecutionResult, - pub sender: Sender>, -} - -#[derive(Clone)] -pub struct JsTracer { - before_message_fn: ThreadsafeFunction, - step_fn: ThreadsafeFunction, - after_message_fn: ThreadsafeFunction, - pending_before: Option, -} - -impl JsTracer { - /// Constructs a `JsTracer` from `TracingCallbacks`. - pub fn new(env: &Env, callbacks: TracingCallbacks) -> napi::Result { - let before_message_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.before_message.raw() }, - 0, - |mut ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - - let mut tracing_message = ctx.env.create_object()?; - - ctx.env - .create_int64(ctx.value.message.depth as i64) - .and_then(|depth| tracing_message.set_named_property("depth", depth))?; - - ctx.value - .message - .to - .as_ref() - .map_or_else( - || ctx.env.get_undefined().map(JsUndefined::into_unknown), - |to| { - ctx.env - .create_buffer_copy(to) - .map(JsBufferValue::into_unknown) - }, - ) - .and_then(|to| tracing_message.set_named_property("to", to))?; - - let data = ctx.value.message.data; - - ctx.env - .adjust_external_memory(data.len() as i64) - .expect("Failed to adjust external memory"); - - unsafe { - ctx.env.create_buffer_with_borrowed_data( - data.as_ptr(), - data.len(), - data, - |data: Bytes, mut env| { - env.adjust_external_memory(-(data.len() as i64)) - .expect("Failed to adjust external memory"); - - mem::forget(data); - }, - ) - } - .and_then(|data| tracing_message.set_named_property("data", data.into_raw()))?; - - ctx.env - .create_bigint_from_words(false, ctx.value.message.value.as_limbs().to_vec()) - .and_then(|value| tracing_message.set_named_property("value", value))?; - - ctx.value - .message - .code_address - .as_ref() - .map_or_else( - || ctx.env.get_undefined().map(JsUndefined::into_unknown), - |address| { - ctx.env - .create_buffer_copy(address) - .map(JsBufferValue::into_unknown) - }, - ) - .and_then(|code_address| { - tracing_message.set_named_property("codeAddress", code_address) - })?; - - if let Some(code) = &ctx.value.message.code { - let code = code.original_bytes(); - ctx.env - .adjust_external_memory(code.len() as i64) - .expect("Failed to adjust external memory"); - - unsafe { - ctx.env.create_buffer_with_borrowed_data( - code.as_ptr(), - code.len(), - code, - |code: Bytes, mut env| { - env.adjust_external_memory(-(code.len() as i64)) - .expect("Failed to adjust external memory"); - - mem::forget(code); - }, - ) - } - .map(JsBufferValue::into_unknown) - } else { - ctx.env.get_undefined().map(JsUndefined::into_unknown) - } - .and_then(|code_address| 
{ - tracing_message.set_named_property("code", code_address) - })?; - - let next = ctx.env.create_object()?; - - let promise = ctx.callback.call(None, &[tracing_message, next])?; - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - - handle_error(sender, result) - }, - )?; - - let step_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.step.raw() }, - 0, - |ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - - let mut tracing_step = ctx.env.create_object()?; - - ctx.env - .create_int64(ctx.value.depth as i64) - .and_then(|depth| tracing_step.set_named_property("depth", depth))?; - - ctx.env - .create_bigint_from_u64(ctx.value.pc) - .and_then(|pc| tracing_step.set_named_property("pc", pc))?; - - // ctx.env - // .create_string(OPCODE_JUMPMAP[usize::from(ctx.value.opcode)].unwrap_or("")) - // .and_then(|opcode| tracing_step.set_named_property("opcode", opcode))?; - - // ctx.env - // .create_uint32((ctx.value.return_value as u8).into()) - // .and_then(|return_value| { - // tracing_step.set_named_property("returnValue", return_value) - // })?; - - // ctx.env - // .create_bigint_from_u64(ctx.value.gas_cost) - // .and_then(|gas_cost| tracing_step.set_named_property("gasCost", gas_cost))?; - - // ctx.env - // .create_bigint_from_i64(ctx.value.gas_refunded) - // .and_then(|gas_refunded| { - // tracing_step.set_named_property("gasRefunded", gas_refunded) - // })?; - - // ctx.env - // .create_bigint_from_u64(ctx.value.gas_left) - // .and_then(|gas_left| tracing_step.set_named_property("gasLeft", gas_left))?; - - // let mut stack = - // ctx.env - // .create_array(u32::try_from(ctx.value.stack.len()).map_err(|e| { - // napi::Error::new(Status::GenericFailure, e.to_string()) - // })?)?; - - // for value in ctx.value.stack { - // ctx.env - // .create_bigint_from_words(false, value.as_limbs().to_vec()) - // .and_then(|value| stack.insert(value))?; - // } - - // stack - // .coerce_to_object() - // .and_then(|stack| tracing_step.set_named_property("stack", stack))?; - - // ctx.env - // .create_buffer_copy(&ctx.value.memory) - // .and_then(|memory| { - // tracing_step.set_named_property("memory", memory.into_raw()) - // })?; - - // let mut contract = ctx.env.create_object()?; - - // ctx.env - // .create_bigint_from_words(false, ctx.value.contract.balance.as_limbs().to_vec()) - // .and_then(|balance| contract.set_named_property("balance", balance))?; - - // let nonce = ctx.env.create_bigint_from_u64(ctx.value.contract.nonce)?; - // contract.set_named_property("nonce", nonce)?; - - // ctx.env - // .create_buffer_copy(ctx.value.contract.code_hash) - // .and_then(|code_hash| { - // contract.set_named_property("codeHash", code_hash.into_unknown()) - // })?; - - // ctx.value - // .contract - // .code - // .as_ref() - // .map_or_else( - // || ctx.env.get_undefined().map(JsUndefined::into_unknown), - // |code| { - // ctx.env - // .create_buffer_copy(&code.bytes()[..code.len()]) - // .map(|code| code.into_unknown()) - // }, - // ) - // .and_then(|code| contract.set_named_property("code", code))?; - - // tracing_step.set_named_property("contract", contract)?; - - // let contract_address = &ctx.value.contract_address; - // ctx.env - // .create_buffer_copy(contract_address) - // .and_then(|contract_address| { - // tracing_step - // .set_named_property("contractAddress", contract_address.into_unknown()) - // })?; - - let next = ctx.env.create_object()?; - - let promise = ctx.callback.call(None, &[tracing_step, next])?; - let result = await_void_promise(ctx.env, 
promise, ctx.value.sender); - - handle_error(sender, result) - }, - )?; - - let after_message_fn = ThreadsafeFunction::create( - env.raw(), - unsafe { callbacks.after_message.raw() }, - 0, - |mut ctx: ThreadSafeCallContext| { - let sender = ctx.value.sender.clone(); - - let mut tracing_message_result = ctx.env.create_object()?; - - let mut result = ctx.env.create_object()?; - - let gas_used = match ctx.value.result { - rethnet_evm::ExecutionResult::Success { - reason, - gas_used, - gas_refunded, - logs, - output, - } => { - ctx.env - .create_uint32(reason as u32) - .and_then(|reason| result.set_named_property("reason", reason))?; - - ctx.env - .create_bigint_from_u64(gas_refunded) - .and_then(|gas_refunded| { - result.set_named_property("gasRefunded", gas_refunded) - })?; - - u32::try_from(logs.len()) - .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())) - .and_then(|num_logs| ctx.env.create_array(num_logs)) - .and_then(|mut logs_object| { - for log in logs { - let mut log_object = ctx.env.create_object()?; - - ctx.env.create_buffer_copy(log.address).and_then( - |address| { - log_object - .set_named_property("address", address.into_raw()) - }, - )?; - - u32::try_from(log.topics.len()) - .map_err(|e| { - napi::Error::new(Status::GenericFailure, e.to_string()) - }) - .and_then(|num_topics| ctx.env.create_array(num_topics)) - .and_then(|mut topics| { - for topic in log.topics { - ctx.env.create_buffer_copy(topic).and_then( - |topic| topics.insert(topic.into_raw()), - )? - } - - topics.coerce_to_object() - }) - .and_then(|topics| { - log_object.set_named_property("topics", topics) - })?; - - ctx.env - .adjust_external_memory(log.data.len() as i64) - .expect("Failed to adjust external memory"); - - unsafe { - ctx.env.create_buffer_with_borrowed_data( - log.data.as_ptr(), - log.data.len(), - log.data, - |data: Bytes, mut env| { - env.adjust_external_memory(-(data.len() as i64)) - .expect("Failed to adjust external memory"); - - mem::forget(data); - }, - ) - } - .and_then(|data| { - log_object.set_named_property("data", data.into_raw()) - })?; - - logs_object.insert(log_object)?; - } - - logs_object.coerce_to_object() - }) - .and_then(|logs| result.set_named_property("logs", logs))?; - - let (output, address) = match output { - rethnet_evm::Output::Call(output) => (output, None), - rethnet_evm::Output::Create(output, address) => (output, address), - }; - - let mut transaction_output = ctx.env.create_object()?; - - ctx.env - .adjust_external_memory(output.len() as i64) - .expect("Failed to adjust external memory"); - - unsafe { - ctx.env.create_buffer_with_borrowed_data( - output.as_ptr(), - output.len(), - output, - |output: Bytes, mut env| { - env.adjust_external_memory(-(output.len() as i64)) - .expect("Failed to adjust external memory"); - - mem::forget(output); - }, - ) - } - .map(JsBufferValue::into_unknown) - .and_then(|output| { - transaction_output.set_named_property("returnValue", output) - })?; - - address - .map_or_else( - || ctx.env.get_undefined().map(JsUndefined::into_unknown), - |address| { - ctx.env - .create_buffer_copy(address) - .map(JsBufferValue::into_unknown) - }, - ) - .and_then(|address| { - transaction_output.set_named_property("address", address) - })?; - - result.set_named_property("output", transaction_output)?; - - gas_used - } - rethnet_evm::ExecutionResult::Revert { gas_used, output } => { - ctx.env - .adjust_external_memory(output.len() as i64) - .expect("Failed to adjust external memory"); - - unsafe { - ctx.env.create_buffer_with_borrowed_data( 
- output.as_ptr(), - output.len(), - output, - |output: Bytes, mut env| { - env.adjust_external_memory(-(output.len() as i64)) - .expect("Failed to adjust external memory"); - - mem::forget(output); - }, - ) - } - .map(JsBufferValue::into_unknown) - .and_then(|output| result.set_named_property("output", output))?; - - gas_used - } - rethnet_evm::ExecutionResult::Halt { reason, gas_used } => { - let halt = ExceptionalHalt::from(reason); - ctx.env - .create_uint32(halt as u32) - .and_then(|reason| result.set_named_property("reason", reason))?; - - gas_used - } - }; - - ctx.env - .create_bigint_from_u64(gas_used) - .and_then(|gas_used| result.set_named_property("gasUsed", gas_used))?; - - let mut execution_result = ctx.env.create_object()?; - - execution_result.set_named_property("result", result)?; - - ctx.env - .create_object() - .and_then(|trace| execution_result.set_named_property("trace", trace))?; - - tracing_message_result.set_named_property("executionResult", execution_result)?; - - let next = ctx.env.create_object()?; - - let promise = ctx.callback.call(None, &[tracing_message_result, next])?; - let result = await_void_promise(ctx.env, promise, ctx.value.sender); - - handle_error(sender, result) - }, - )?; - - Ok(Self { - before_message_fn, - step_fn, - after_message_fn, - pending_before: None, - }) - } - - fn validate_before_message(&mut self) { - if let Some(message) = self.pending_before.take() { - let (sender, receiver) = channel(); - - let status = self.before_message_fn.call( - BeforeMessageHandlerCall { message, sender }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver - .recv() - .unwrap() - .expect("Failed call to BeforeMessageHandler"); - } - } -} - -impl Debug for JsTracer { - fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("JsTracer").finish() - } -} - -impl SyncInspector for JsTracer -where - BE: Debug + Send + 'static, - SE: Debug + Send + 'static, -{ -} - -impl rethnet_evm::Inspector for JsTracer -where - E: Debug, -{ - #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn call( - &mut self, - data: &mut dyn rethnet_evm::EVMData, - inputs: &mut rethnet_evm::CallInputs, - _is_static: bool, - ) -> (InstructionResult, Gas, rethnet_eth::Bytes) { - self.validate_before_message(); - - let code = data - .journaled_state() - .state - .get(&inputs.context.code_address) - .cloned() - .map(|account| { - if let Some(code) = &account.info.code { - code.clone() - } else { - data.database() - .code_by_hash(account.info.code_hash) - .unwrap() - } - }) - .unwrap_or_else(|| { - data.database() - .basic(inputs.context.code_address) - .unwrap() - .map_or(Bytecode::new(), |account_info| { - account_info.code.unwrap_or_else(|| { - data.database() - .code_by_hash(account_info.code_hash) - .unwrap() - }) - }) - }); - - self.pending_before = Some(BeforeMessage { - depth: data.journaled_state().depth, - to: Some(inputs.context.address), - data: inputs.input.clone(), - value: inputs.context.apparent_value, - code_address: Some(inputs.context.code_address), - code: Some(code), - }); - - (InstructionResult::Continue, Gas::new(0), Bytes::default()) - } - - #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn call_end( - &mut self, - data: &mut dyn rethnet_evm::EVMData, - _inputs: &rethnet_evm::CallInputs, - remaining_gas: Gas, - ret: InstructionResult, - out: Bytes, - _is_static: bool, - ) -> (InstructionResult, Gas, Bytes) { - match ret { - return_revert!() if 
self.pending_before.is_some() => { - self.pending_before = None; - return (ret, remaining_gas, out); - } - _ => (), - } - - self.validate_before_message(); - - let safe_ret = if ret == InstructionResult::CallTooDeep - || ret == InstructionResult::OutOfFund - || ret == InstructionResult::StateChangeDuringStaticCall - { - InstructionResult::Revert - } else { - ret - }; - - let result = match safe_ret.into() { - SuccessOrHalt::Success(reason) => rethnet_evm::ExecutionResult::Success { - reason, - gas_used: remaining_gas.spend(), - gas_refunded: remaining_gas.refunded() as u64, - logs: data.journaled_state().logs.clone(), - output: rethnet_evm::Output::Call(out.clone()), - }, - SuccessOrHalt::Revert => rethnet_evm::ExecutionResult::Revert { - gas_used: remaining_gas.spend(), - output: out.clone(), - }, - SuccessOrHalt::Halt(reason) => rethnet_evm::ExecutionResult::Halt { - reason, - gas_used: remaining_gas.limit(), - }, - SuccessOrHalt::InternalContinue => panic!("Internal error: {:?}", safe_ret), - SuccessOrHalt::FatalExternalError => panic!("Fatal external error"), - }; - - let (sender, receiver) = channel(); - - let status = self.after_message_fn.call( - AfterMessageHandlerCall { result, sender }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver - .recv() - .unwrap() - .expect("Failed call to BeforeMessageHandler"); - - (ret, remaining_gas, out) - } - - #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn create( - &mut self, - data: &mut dyn rethnet_evm::EVMData, - inputs: &mut rethnet_evm::CreateInputs, - ) -> (InstructionResult, Option, Gas, Bytes) { - self.validate_before_message(); - - self.pending_before = Some(BeforeMessage { - depth: data.journaled_state().depth, - to: None, - data: inputs.init_code.clone(), - value: inputs.value, - code_address: None, - code: None, - }); - - ( - InstructionResult::Continue, - None, - Gas::new(0), - Bytes::default(), - ) - } - - #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn create_end( - &mut self, - data: &mut dyn rethnet_evm::EVMData, - _inputs: &rethnet_evm::CreateInputs, - ret: InstructionResult, - address: Option, - remaining_gas: Gas, - out: Bytes, - ) -> (InstructionResult, Option, Gas, Bytes) { - self.validate_before_message(); - - let safe_ret = - if ret == InstructionResult::CallTooDeep || ret == InstructionResult::OutOfFund { - InstructionResult::Revert - } else { - ret - }; - - let result = match safe_ret.into() { - SuccessOrHalt::Success(reason) => rethnet_evm::ExecutionResult::Success { - reason, - gas_used: remaining_gas.spend(), - gas_refunded: remaining_gas.refunded() as u64, - logs: data.journaled_state().logs.clone(), - output: rethnet_evm::Output::Create(out.clone(), address), - }, - SuccessOrHalt::Revert => rethnet_evm::ExecutionResult::Revert { - gas_used: remaining_gas.spend(), - output: out.clone(), - }, - SuccessOrHalt::Halt(reason) => rethnet_evm::ExecutionResult::Halt { - reason, - gas_used: remaining_gas.limit(), - }, - SuccessOrHalt::InternalContinue => panic!("Internal error: {:?}", safe_ret), - SuccessOrHalt::FatalExternalError => panic!("Fatal external error"), - }; - - let (sender, receiver) = channel(); - - let status = self.after_message_fn.call( - AfterMessageHandlerCall { result, sender }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver - .recv() - .unwrap() - .expect("Failed call to BeforeMessageHandler"); - - (ret, address, remaining_gas, out) - } - - #[cfg_attr(feature = "tracing", 
tracing::instrument(skip_all))] - fn step( - &mut self, - interp: &mut rethnet_evm::Interpreter, - data: &mut dyn rethnet_evm::EVMData, - _is_static: bool, - ) -> InstructionResult { - // Skip the step - let skip_step = self.pending_before.as_ref().map_or(false, |message| { - message.code.is_some() && interp.current_opcode() == opcode::STOP - }); - - self.validate_before_message(); - - if !skip_step { - // self.pre_steps.push(StepData { - // depth: data.journaled_state.depth, - // pc: interp.program_counter() as u64, - // opcode: interp.current_opcode(), - // gas: *interp.gas(), - // }); - - let (sender, receiver) = channel(); - - let status = self.step_fn.call( - StepHandlerCall { - depth: data.journaled_state().depth, - pc: interp.program_counter() as u64, - opcode: interp.current_opcode(), - // return_value: interp.instruction_result, - // gas_cost: post_step_gas.spend() - pre_step_gas.spend(), - // gas_refunded: post_step_gas.refunded() - pre_step_gas.refunded(), - // gas_left: interp.gas().remaining(), - // stack: interp.stack().data().clone(), - // memory: Bytes::copy_from_slice(interp.memory.data().as_slice()), - contract: data - .journaled_state() - .account(interp.contract.address) - .info - .clone(), - contract_address: interp.contract().address, - sender, - }, - ThreadsafeFunctionCallMode::Blocking, - ); - assert_eq!(status, Status::Ok); - - receiver - .recv() - .unwrap() - .expect("Failed call to BeforeMessageHandler"); - } - - InstructionResult::Continue - } - - // fn step_end( - // &mut self, - // interp: &mut rethnet_evm::Interpreter, - // _data: &mut dyn rethnet_evm::EVMData, - // _is_static: bool, - // _eval: InstructionResult, - // ) -> InstructionResult { - // // TODO: temporary fix - // let StepData { - // depth, - // pc, - // opcode, - // gas: pre_step_gas, - // } = self - // .pre_steps - // .pop() - // .expect("At least one pre-step should exist"); - - // let post_step_gas = interp.gas(); - - // InstructionResult::Continue - // } -} diff --git a/crates/rethnet_evm_napi/src/transaction/result.rs b/crates/rethnet_evm_napi/src/transaction/result.rs index b2609c6ce2..34e1d0e188 100644 --- a/crates/rethnet_evm_napi/src/transaction/result.rs +++ b/crates/rethnet_evm_napi/src/transaction/result.rs @@ -1,10 +1,15 @@ +use std::mem; + use napi::{ bindgen_prelude::{BigInt, Buffer, Either3, FromNapiValue, ToNapiValue}, - Either, + Either, Env, JsBuffer, JsBufferValue, }; use napi_derive::napi; -use crate::{log::Log, trace::Trace}; +use crate::{ + log::Log, + trace::{TracingMessage, TracingMessageResult, TracingStep}, +}; /// The possible reasons for successful termination of the EVM. #[napi] @@ -30,13 +35,13 @@ impl From for SuccessReason { #[napi(object)] pub struct CallOutput { /// Return value - pub return_value: Buffer, + pub return_value: JsBuffer, } #[napi(object)] pub struct CreateOutput { /// Return value - pub return_value: Buffer, + pub return_value: JsBuffer, /// Optionally, a 160-bit address pub address: Option, } @@ -62,7 +67,7 @@ pub struct RevertResult { /// The amount of gas used pub gas_used: BigInt, /// The transaction output - pub output: Buffer, + pub output: JsBuffer, } /// Indicates that the EVM has experienced an exceptional halt. 
This causes execution to @@ -132,13 +137,11 @@ pub struct HaltResult { pub struct ExecutionResult { /// The transaction result pub result: Either3, - /// The transaction trace - pub trace: Trace, } -impl From<(rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)> for ExecutionResult { +impl ExecutionResult { #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn from((result, trace): (rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)) -> Self { + pub fn new(env: &Env, result: &rethnet_evm::ExecutionResult) -> napi::Result { let result = match result { rethnet_evm::ExecutionResult::Success { reason, @@ -147,69 +150,141 @@ impl From<(rethnet_evm::ExecutionResult, rethnet_evm::trace::Trace)> for Executi logs, output, } => { - let logs = logs.into_iter().map(Log::from).collect(); + let logs = logs + .iter() + .map(|log| Log::new(env, log)) + .collect::>()?; Either3::A(SuccessResult { - reason: reason.into(), - gas_used: BigInt::from(gas_used), - gas_refunded: BigInt::from(gas_refunded), + reason: SuccessReason::from(*reason), + gas_used: BigInt::from(*gas_used), + gas_refunded: BigInt::from(*gas_refunded), logs, output: match output { - rethnet_evm::Output::Call(return_value) => Either::A(CallOutput { - return_value: Buffer::from(return_value.as_ref()), - }), + rethnet_evm::Output::Call(return_value) => { + let return_value = return_value.clone(); + Either::A(CallOutput { + return_value: unsafe { + env.create_buffer_with_borrowed_data( + return_value.as_ptr(), + return_value.len(), + return_value, + |return_value: rethnet_eth::Bytes, _env| { + mem::drop(return_value); + }, + ) + } + .map(JsBufferValue::into_raw)?, + }) + } rethnet_evm::Output::Create(return_value, address) => { + let return_value = return_value.clone(); + Either::B(CreateOutput { - return_value: Buffer::from(return_value.as_ref()), + return_value: unsafe { + env.create_buffer_with_borrowed_data( + return_value.as_ptr(), + return_value.len(), + return_value, + |return_value: rethnet_eth::Bytes, _env| { + mem::drop(return_value); + }, + ) + } + .map(JsBufferValue::into_raw)?, address: address.map(|address| Buffer::from(address.as_bytes())), }) } }, }) } - rethnet_evm::ExecutionResult::Revert { gas_used, output } => Either3::B(RevertResult { - gas_used: BigInt::from(gas_used), - output: Buffer::from(output.as_ref()), - }), + rethnet_evm::ExecutionResult::Revert { gas_used, output } => { + let output = output.clone(); + Either3::B(RevertResult { + gas_used: BigInt::from(*gas_used), + output: unsafe { + env.create_buffer_with_borrowed_data( + output.as_ptr(), + output.len(), + output, + |output: rethnet_eth::Bytes, _env| { + mem::drop(output); + }, + ) + } + .map(JsBufferValue::into_raw)?, + }) + } rethnet_evm::ExecutionResult::Halt { reason, gas_used } => Either3::C(HaltResult { - reason: reason.into(), - gas_used: BigInt::from(gas_used), + reason: ExceptionalHalt::from(*reason), + gas_used: BigInt::from(*gas_used), }), }; - Self { - result, - trace: trace.into(), - } + Ok(Self { result }) } } -#[napi(object)] +#[napi] pub struct TransactionResult { - pub exec_result: ExecutionResult, - pub state: serde_json::Value, + inner: rethnet_evm::ExecutionResult, + state: Option, + trace: Option, } -impl - TryFrom<( - rethnet_evm::ExecutionResult, - rethnet_evm::State, - rethnet_evm::trace::Trace, - )> for TransactionResult -{ - type Error = napi::Error; +impl TransactionResult { + /// Constructs a new [`TransactionResult`] instance. 
+ pub fn new( + result: rethnet_evm::ExecutionResult, + state: Option, + trace: Option, + ) -> Self { + Self { + inner: result, + state, + trace, + } + } +} - #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn try_from( - (result, state, trace): ( - rethnet_evm::ExecutionResult, - rethnet_evm::State, - rethnet_evm::trace::Trace, - ), - ) -> std::result::Result { - let exec_result = (result, trace).into(); - let state = serde_json::to_value(state)?; - - Ok(Self { exec_result, state }) +#[napi] +impl TransactionResult { + #[napi(getter)] + pub fn result(&self, env: Env) -> napi::Result { + ExecutionResult::new(&env, &self.inner) + } + + #[napi(getter)] + pub fn state(&self) -> napi::Result> { + serde_json::to_value(&self.state) + .map(Some) + .map_err(From::from) + } + + #[napi(getter)] + pub fn trace( + &self, + env: Env, + ) -> napi::Result>>> { + self.trace.as_ref().map_or(Ok(None), |trace| { + trace + .messages + .iter() + .map(|message| match message { + rethnet_evm::trace::TraceMessage::Before(message) => { + TracingMessage::new(&env, message).map(Either3::A) + } + rethnet_evm::trace::TraceMessage::Step(step) => { + Ok(Either3::B(TracingStep::new(step))) + } + rethnet_evm::trace::TraceMessage::After(result) => { + ExecutionResult::new(&env, result).map(|execution_result| { + Either3::C(TracingMessageResult { execution_result }) + }) + } + }) + .collect::>() + .map(Some) + }) } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index 916ea5aea1..c340dc831f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -752,10 +752,6 @@ export class EthereumJSAdapter implements VMAdapter { await this._vmTracer.addAfterMessage({ executionResult: { result: executionResult, - trace: { - steps: [], - returnValue: result.execResult.returnValue, - }, }, }); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 8b9dae9e5e..7cfc8af104 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -12,10 +12,6 @@ import { Blockchain, Bytecode, Rethnet, - Tracer, - TracingMessage, - TracingMessageResult, - TracingStep, RethnetContext, } from "rethnet-evm"; @@ -111,12 +107,6 @@ export class RethnetAdapter implements VMAdapter { blockContext.header.mixHash ); - const tracer = new Tracer({ - beforeMessage: this._beforeMessageHandler.bind(this), - step: this._stepHandler.bind(this), - afterMessage: this._afterMessageHandler.bind(this), - }); - const rethnetResult = await this._rethnet.guaranteedDryRun( rethnetTx, { @@ -129,15 +119,26 @@ export class RethnetAdapter implements VMAdapter { difficulty, prevrandao: prevRandao, }, - tracer + true ); + const trace = rethnetResult.trace!; + for (const traceItem of trace) { + if ("pc" in traceItem) { + await this._vmTracer.addStep(traceItem); + } else if ("executionResult" in traceItem) { + await this._vmTracer.addAfterMessage(traceItem); + } else { + await this._vmTracer.addBeforeMessage(traceItem); + } + } + try { const result = rethnetResultToRunTxResult( - rethnetResult, + rethnetResult.result, blockContext.header.gasUsed ); - return [result, rethnetResult.trace]; + return [result, trace]; } 
catch (e) { // console.log("Rethnet trace"); // console.log(rethnetResult.execResult.trace); @@ -307,24 +308,29 @@ export class RethnetAdapter implements VMAdapter { block.header.mixHash ); - const tracer = new Tracer({ - beforeMessage: this._beforeMessageHandler.bind(this), - step: this._stepHandler.bind(this), - afterMessage: this._afterMessageHandler.bind(this), - }); - const rethnetResult = await this._rethnet.run( rethnetTx, ethereumjsHeaderDataToRethnet(block.header, difficulty, prevRandao), - tracer + true ); + const trace = rethnetResult.trace!; + for (const traceItem of trace) { + if ("pc" in traceItem) { + await this._vmTracer.addStep(traceItem); + } else if ("executionResult" in traceItem) { + await this._vmTracer.addAfterMessage(traceItem); + } else { + await this._vmTracer.addBeforeMessage(traceItem); + } + } + try { const result = rethnetResultToRunTxResult( - rethnetResult, + rethnetResult.result, block.header.gasUsed ); - return [result, rethnetResult.trace]; + return [result, trace]; } catch (e) { // console.log("Rethnet trace"); // console.log(rethnetResult.trace); @@ -456,22 +462,4 @@ export class RethnetAdapter implements VMAdapter { return undefined; } - - private async _beforeMessageHandler( - message: TracingMessage, - next: any - ): Promise { - await this._vmTracer.addBeforeMessage(message); - } - - private async _stepHandler(step: TracingStep, _next: any): Promise { - await this._vmTracer.addStep(step); - } - - private async _afterMessageHandler( - result: TracingMessageResult, - _next: any - ): Promise { - await this._vmTracer.addAfterMessage(result); - } } From 5b0d475032fbc81967705acc4a80d785bb6f3d3a Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 18:54:00 -0500 Subject: [PATCH 054/406] improvement: maintain reference count of snapshots (#3847) --- crates/rethnet_evm/src/collections.rs | 3 + .../rethnet_evm/src/collections/shared_map.rs | 143 ++++++++++++++++++ crates/rethnet_evm/src/lib.rs | 1 + crates/rethnet_evm/src/state.rs | 1 - crates/rethnet_evm/src/state/contract.rs | 118 --------------- crates/rethnet_evm/src/state/history.rs | 4 +- crates/rethnet_evm/src/state/hybrid.rs | 27 ++-- crates/rethnet_evm/src/state/layered.rs | 26 ++-- .../rethnet_evm/src/state/layered/changes.rs | 59 ++++++-- crates/rethnet_evm/src/state/trie.rs | 28 +++- crates/rethnet_evm_napi/src/state.rs | 20 +-- .../hardhat-network/provider/RethnetState.ts | 7 +- .../internal/hardhat-network/provider/node.ts | 8 +- .../hardhat-network/provider/vm/dual.ts | 19 +-- .../hardhat-network/provider/vm/ethereumjs.ts | 7 +- .../hardhat-network/provider/vm/rethnet.ts | 4 +- .../hardhat-network/provider/vm/vm-adapter.ts | 4 +- 17 files changed, 269 insertions(+), 210 deletions(-) create mode 100644 crates/rethnet_evm/src/collections.rs create mode 100644 crates/rethnet_evm/src/collections/shared_map.rs delete mode 100644 crates/rethnet_evm/src/state/contract.rs diff --git a/crates/rethnet_evm/src/collections.rs b/crates/rethnet_evm/src/collections.rs new file mode 100644 index 0000000000..677b30370c --- /dev/null +++ b/crates/rethnet_evm/src/collections.rs @@ -0,0 +1,3 @@ +mod shared_map; + +pub use shared_map::{SharedMap, SharedMapEntry}; diff --git a/crates/rethnet_evm/src/collections/shared_map.rs b/crates/rethnet_evm/src/collections/shared_map.rs new file mode 100644 index 0000000000..a21874a183 --- /dev/null +++ b/crates/rethnet_evm/src/collections/shared_map.rs @@ -0,0 +1,143 @@ +use std::hash::Hash; + +use hashbrown::HashMap; + +#[derive(Clone, Debug)] +pub struct 
SharedMapEntry { + value: T, + occurences: usize, +} + +impl SharedMapEntry { + /// Creates a new [`SharedMapEntry`] for the provided value. + pub fn new(value: T) -> Self { + Self { + value, + occurences: 1, + } + } + + /// Creates a new [`SharedMapEntry`] for the provided value and with the number of specified occurences. + pub(crate) fn with_occurences(value: T, occurences: usize) -> Self { + Self { value, occurences } + } + + /// Retrieves the number of occurences that exist of the entry. + pub fn occurences(&self) -> usize { + self.occurences + } + + /// Retrieves the value of the entry. + pub fn value(&self) -> &T { + &self.value + } + + /// Increments the number of occurences. + pub fn increment(&mut self) { + self.occurences += 1; + } + + /// Decrements the number of occurences. If no occurences are left, the [`SharedMapEntry`] + /// is consumed. + pub fn decrement(mut self) -> Option { + self.occurences -= 1; + + if !DELETE_UNUSED_ENTRY || self.occurences > 0 { + Some(self) + } else { + None + } + } +} + +#[derive(Clone, Debug)] +pub struct SharedMap { + entries: HashMap>, +} + +impl SharedMap +where + K: Eq + Hash, +{ + /// Inserts new value or, if it already exists, increments the number of occurences of + /// the corresponding entry. + pub fn insert(&mut self, key: K, value: V) { + self.entries + .entry(key) + .and_modify(|entry| entry.increment()) + .or_insert_with(|| SharedMapEntry::new(value)); + } + + /// Inserts new value or, if it already exists, increments the number of occurences of + /// the corresponding entry. + pub fn insert_with(&mut self, key: K, constructor: F) + where + F: FnOnce() -> V, + { + self.entries + .entry(key) + .and_modify(|entry| entry.increment()) + .or_insert_with(|| SharedMapEntry::new(constructor())); + } +} + +impl SharedMap +where + K: Clone + Eq + Hash, +{ + /// Decremenents the number of occurences of the value corresponding to the provided key, + /// if it exists, and removes unused entry. + pub fn remove(&mut self, key: &K) { + self.entries + .entry(key.clone()) + .and_replace_entry_with(|_key, entry| entry.decrement()); + } +} + +impl SharedMap { + /// Returns an iterator over its entries. + pub fn iter(&self) -> impl Iterator)> { + self.entries.iter() + } + + /// Returns a mutable reference to the underlaying collection. + pub(crate) fn as_inner_mut( + &mut self, + ) -> &mut HashMap> { + &mut self.entries + } +} + +impl SharedMap +where + K: Eq + Hash, +{ + /// Retrieves the entry corresponding to the provided key. + pub fn get(&self, key: &K) -> Option<&V> { + self.entries.get(key).map(|entry| &entry.value) + } +} + +impl SharedMap +where + K: Eq + Hash, +{ + /// Retrieves the entry corresponding to the provided key. 
+ pub fn get(&self, key: &K) -> Option<&V> { + self.entries.get(key).and_then(|entry| { + if entry.occurences > 0 { + Some(&entry.value) + } else { + None + } + }) + } +} + +impl Default for SharedMap { + fn default() -> Self { + Self { + entries: Default::default(), + } + } +} diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs index 4e872bc18a..f8dde9c800 100644 --- a/crates/rethnet_evm/src/lib.rs +++ b/crates/rethnet_evm/src/lib.rs @@ -36,6 +36,7 @@ pub mod state; pub mod trace; mod block; +pub(crate) mod collections; pub(crate) mod evm; pub(crate) mod random; mod runtime; diff --git a/crates/rethnet_evm/src/state.rs b/crates/rethnet_evm/src/state.rs index 4eb3a03ca5..ecfd4f0e44 100644 --- a/crates/rethnet_evm/src/state.rs +++ b/crates/rethnet_evm/src/state.rs @@ -1,5 +1,4 @@ mod account; -mod contract; mod debug; mod history; mod hybrid; diff --git a/crates/rethnet_evm/src/state/contract.rs b/crates/rethnet_evm/src/state/contract.rs deleted file mode 100644 index a136648a72..0000000000 --- a/crates/rethnet_evm/src/state/contract.rs +++ /dev/null @@ -1,118 +0,0 @@ -use hashbrown::HashMap; -use rethnet_eth::{account::KECCAK_EMPTY, B256}; -use revm::primitives::Bytecode; - -use super::{layered::LayeredChanges, RethnetLayer}; - -#[derive(Clone, Debug)] -struct ContractEntry { - code: Bytecode, - occurences: usize, -} - -impl ContractEntry { - pub fn new(code: Bytecode) -> Self { - Self { - code, - occurences: 1, - } - } - - /// Increments the number of occurences. - pub fn increment(&mut self) { - self.occurences += 1; - } - - /// Decrements the number of occurences. If no occurences are left, the [`ContractEntry`] - /// is consumed. - pub fn decrement(mut self) -> Option { - self.occurences -= 1; - - if !DELETE_UNUSED_CODE || self.occurences > 0 { - Some(self) - } else { - None - } - } -} - -#[derive(Clone, Debug)] -pub struct ContractStorage { - contracts: HashMap>, -} - -impl ContractStorage { - /// Inserts new code or, if it already exists, increments the number of occurences of - /// the code. - pub fn insert_code(&mut self, code: Bytecode) { - self.contracts - .entry(code.hash()) - .and_modify(|entry| entry.increment()) - .or_insert_with(|| ContractEntry::new(code)); - } - - /// Decremenents the number of occurences of the code corresponding to the provided code hash, - /// if it exists, and removes unused code. - pub fn remove_code(&mut self, code_hash: &B256) { - self.contracts - .entry(*code_hash) - .and_replace_entry_with(|_code, entry| entry.decrement()); - } -} - -impl ContractStorage { - /// Retrieves the contract code corresponding to the provided hash. - pub fn get(&self, code_hash: &B256) -> Option<&Bytecode> { - self.contracts.get(code_hash).map(|entry| &entry.code) - } -} - -impl ContractStorage { - /// Retrieves the contract code corresponding to the provided hash. 
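The `SharedMap` introduced above is a reference-counted map: inserting an existing key bumps the entry's occurrence count, and removing it only drops the entry once the count reaches zero (the two `get` impls are presumably specializations over the const flag controlling whether unused entries are deleted; the angle-bracketed generic parameters are not visible in this listing). A minimal, self-contained sketch of those semantics, using illustrative names rather than the real API:

// Sketch only: mirrors the insert/remove semantics of SharedMap/SharedMapEntry.
use std::collections::HashMap;
use std::hash::Hash;

struct Entry<V> {
    value: V,
    occurrences: usize,
}

struct RefCountedMap<K, V> {
    entries: HashMap<K, Entry<V>>,
}

impl<K: Eq + Hash, V> RefCountedMap<K, V> {
    fn new() -> Self {
        Self { entries: HashMap::new() }
    }

    // Repeated inserts of the same key increment the occurrence count.
    fn insert(&mut self, key: K, value: V) {
        self.entries
            .entry(key)
            .and_modify(|entry| entry.occurrences += 1)
            .or_insert(Entry { value, occurrences: 1 });
    }

    // Each remove decrements the count; the entry is dropped on the last one.
    fn remove(&mut self, key: &K) {
        let unused = match self.entries.get_mut(key) {
            Some(entry) => {
                entry.occurrences -= 1;
                entry.occurrences == 0
            }
            None => return,
        };
        if unused {
            self.entries.remove(key);
        }
    }

    fn get(&self, key: &K) -> Option<&V> {
        self.entries.get(key).map(|entry| &entry.value)
    }
}

fn main() {
    let mut map = RefCountedMap::new();
    map.insert("code-hash", "bytecode"); // first reference
    map.insert("code-hash", "bytecode"); // second reference to the same key

    map.remove(&"code-hash"); // one reference released; the entry survives
    assert!(map.get(&"code-hash").is_some());

    map.remove(&"code-hash"); // last reference released; the entry is gone
    assert!(map.get(&"code-hash").is_none());
}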
- pub fn get(&self, code_hash: &B256) -> Option<&Bytecode> { - self.contracts.get(code_hash).and_then(|entry| { - if entry.occurences > 0 { - Some(&entry.code) - } else { - None - } - }) - } -} - -impl Default for ContractStorage { - fn default() -> Self { - let mut contracts = HashMap::new(); - contracts.insert(KECCAK_EMPTY, ContractEntry::new(Bytecode::new())); - - Self { contracts } - } -} - -impl From<&LayeredChanges> for ContractStorage { - fn from(changes: &LayeredChanges) -> Self { - let mut storage = Self::default(); - - changes.iter().for_each(|layer| { - layer - .contracts() - .contracts - .iter() - .for_each(|(code_hash, entry)| { - if entry.occurences > 0 { - storage.contracts.insert( - *code_hash, - ContractEntry { - code: entry.code.clone(), - occurences: entry.occurences, - }, - ); - } else { - storage.contracts.remove(code_hash); - } - }) - }); - - storage - } -} diff --git a/crates/rethnet_evm/src/state/history.rs b/crates/rethnet_evm/src/state/history.rs index 55590aa1b0..86b7a4558c 100644 --- a/crates/rethnet_evm/src/state/history.rs +++ b/crates/rethnet_evm/src/state/history.rs @@ -18,8 +18,8 @@ pub trait StateHistory { /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier and whether /// that snapshot already existed. - fn make_snapshot(&mut self) -> (B256, bool); + fn make_snapshot(&mut self) -> B256; /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. - fn remove_snapshot(&mut self, state_root: &B256) -> bool; + fn remove_snapshot(&mut self, state_root: &B256); } diff --git a/crates/rethnet_evm/src/state/hybrid.rs b/crates/rethnet_evm/src/state/hybrid.rs index 53530210d3..489e7db2f8 100644 --- a/crates/rethnet_evm/src/state/hybrid.rs +++ b/crates/rethnet_evm/src/state/hybrid.rs @@ -8,6 +8,8 @@ use revm::{ DatabaseCommit, }; +use crate::collections::SharedMap; + use super::{ history::StateHistory, layered::LayeredChanges, @@ -26,7 +28,7 @@ struct Snapshot { pub struct HybridState { trie: TrieState, changes: LayeredChanges, - snapshots: HashMap>, + snapshots: SharedMap, true>, } impl>> HybridState { @@ -39,7 +41,7 @@ impl>> HybridState { Self { trie: latest_state, changes: LayeredChanges::with_layer(layer), - snapshots: HashMap::new(), + snapshots: SharedMap::default(), } } } @@ -186,13 +188,10 @@ impl StateHistory for HybridState { type Error = StateError; #[cfg_attr(feature = "tracing", tracing::instrument)] - fn make_snapshot(&mut self) -> (B256, bool) { + fn make_snapshot(&mut self) -> B256 { let state_root = self.state_root().unwrap(); - let mut exists = true; - self.snapshots.entry(state_root).or_insert_with(|| { - exists = false; - + self.snapshots.insert_with(state_root, || { let mut changes = self.changes.clone(); changes.last_layer_mut().set_state_root(state_root); @@ -202,12 +201,12 @@ impl StateHistory for HybridState { } }); - (state_root, exists) + state_root } #[cfg_attr(feature = "tracing", tracing::instrument)] - fn remove_snapshot(&mut self, state_root: &B256) -> bool { - self.snapshots.remove(state_root).is_some() + fn remove_snapshot(&mut self, state_root: &B256) { + self.snapshots.remove(state_root) } #[cfg_attr(feature = "tracing", tracing::instrument)] @@ -221,10 +220,12 @@ impl StateHistory for HybridState { if let Some(Snapshot { changes, trie: latest_state, - }) = self.snapshots.remove(state_root) + }) = self.snapshots.get(state_root) { - self.trie = latest_state; - self.changes = changes; + self.trie = 
latest_state.clone(); + self.changes = changes.clone(); + + self.snapshots.remove(state_root); return Ok(()); } diff --git a/crates/rethnet_evm/src/state/layered.rs b/crates/rethnet_evm/src/state/layered.rs index b84e966ba4..ce7fb1677c 100644 --- a/crates/rethnet_evm/src/state/layered.rs +++ b/crates/rethnet_evm/src/state/layered.rs @@ -16,14 +16,15 @@ use revm::{ DatabaseCommit, }; +use crate::collections::SharedMap; + use super::{history::StateHistory, AccountModifierFn, StateDebug, StateError}; /// A state consisting of layers. #[derive(Debug, Default)] pub struct LayeredState { changes: LayeredChanges, - /// Snapshots - snapshots: HashMap>, + snapshots: SharedMap, true>, } impl>> LayeredState { @@ -34,7 +35,7 @@ impl>> LayeredState { Self { changes: LayeredChanges::with_layer(layer), - snapshots: HashMap::new(), + snapshots: SharedMap::default(), } } } @@ -197,25 +198,22 @@ impl StateHistory for LayeredState { type Error = StateError; #[cfg_attr(feature = "tracing", tracing::instrument)] - fn make_snapshot(&mut self) -> (B256, bool) { + fn make_snapshot(&mut self) -> B256 { let state_root = self.state_root().unwrap(); - let mut exists = true; - self.snapshots.entry(state_root).or_insert_with(|| { - exists = false; - + self.snapshots.insert_with(state_root, || { let mut snapshot = self.changes.clone(); snapshot.last_layer_mut().set_state_root(state_root); snapshot }); - (state_root, exists) + state_root } #[cfg_attr(feature = "tracing", tracing::instrument)] - fn remove_snapshot(&mut self, state_root: &B256) -> bool { - self.snapshots.remove(state_root).is_some() + fn remove_snapshot(&mut self, state_root: &B256) { + self.snapshots.remove(state_root) } #[cfg_attr(feature = "tracing", tracing::instrument)] @@ -226,8 +224,10 @@ impl StateHistory for LayeredState { self.changes.last_layer_mut().set_state_root(state_root); } - if let Some(snapshot) = self.snapshots.remove(state_root) { - self.changes = snapshot; + if let Some(snapshot) = self.snapshots.get(state_root) { + self.changes = snapshot.clone(); + + self.snapshots.remove(state_root); return Ok(()); } diff --git a/crates/rethnet_evm/src/state/layered/changes.rs b/crates/rethnet_evm/src/state/layered/changes.rs index 8d02e80b17..9bc3ac5297 100644 --- a/crates/rethnet_evm/src/state/layered/changes.rs +++ b/crates/rethnet_evm/src/state/layered/changes.rs @@ -6,7 +6,10 @@ use hasher::HasherKeccak; use rethnet_eth::{account::KECCAK_EMPTY, state::storage_root, Address, B256, U256}; use revm::primitives::{Account, AccountInfo, Bytecode}; -use crate::state::{account::RethnetAccount, contract::ContractStorage}; +use crate::{ + collections::{SharedMap, SharedMapEntry}, + state::account::RethnetAccount, +}; #[derive(Clone, Debug)] pub struct LayeredChanges { @@ -78,12 +81,12 @@ impl Default for LayeredChanges { } /// A layer with information needed for [`Rethnet`]. -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] pub struct RethnetLayer { /// Accounts, where the Option signals deletion. 
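With the snapshot maps above switched to `SharedMap`, `make_snapshot` no longer needs to report whether a snapshot already existed: taking the same snapshot twice just bumps the shared entry's reference count, and each call is balanced by a `remove_snapshot` (restoring also releases a reference, as the `self.snapshots.remove(state_root)` calls above show). A minimal sketch, assuming the same construction the tests and benches in this series use:

// Sketch only: exercises the reference-counted snapshots added in this patch.
use rethnet_evm::state::{LayeredState, RethnetLayer, StateError, SyncState};

fn main() {
    let mut layered = LayeredState::<RethnetLayer>::default();
    let state: &mut dyn SyncState<StateError> = &mut layered;

    // Snapshotting an unchanged state twice yields the same state root and
    // increments the snapshot's reference count instead of storing a copy.
    let root_a = state.make_snapshot();
    let root_b = state.make_snapshot();
    assert_eq!(root_a, root_b);

    // Each make_snapshot is matched by a remove_snapshot; the stored snapshot
    // is only dropped once the last reference is released.
    state.remove_snapshot(&root_a);
    state.remove_snapshot(&root_b);
}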
accounts: HashMap>, /// Code hash -> Address - contracts: ContractStorage, + contracts: SharedMap, /// Cached state root state_root: Option, } @@ -95,7 +98,7 @@ impl RethnetLayer { } /// Retrieves the contract storage - pub fn contracts(&self) -> &ContractStorage { + pub fn contracts(&self) -> &SharedMap { &self.contracts } @@ -115,6 +118,19 @@ impl RethnetLayer { } } +impl Default for RethnetLayer { + fn default() -> Self { + let mut contracts = SharedMap::default(); + contracts.insert(KECCAK_EMPTY, Bytecode::new()); + + Self { + accounts: HashMap::default(), + contracts, + state_root: None, + } + } +} + impl From> for RethnetLayer { fn from(accounts: HashMap) -> Self { let mut accounts: HashMap> = accounts @@ -122,7 +138,7 @@ impl From> for RethnetLayer { .map(|(address, account_info)| (address, Some(account_info.into()))) .collect(); - let mut contracts = ContractStorage::default(); + let mut contracts = SharedMap::default(); accounts .values_mut() @@ -133,10 +149,12 @@ impl From> for RethnetLayer { }) .for_each(|code| { if code.hash() != KECCAK_EMPTY { - contracts.insert_code(code); + contracts.insert(code.hash(), code); } }); + contracts.insert(KECCAK_EMPTY, Bytecode::new()); + Self { accounts, contracts, @@ -247,7 +265,7 @@ impl LayeredChanges { let code_hash = account.info.code_hash; - self.last_layer_mut().contracts.remove_code(&code_hash); + self.last_layer_mut().contracts.remove(&code_hash); } // Insert `None` to signal that the account was deleted @@ -319,11 +337,34 @@ impl LayeredChanges { /// Inserts the provided bytecode using its hash, potentially overwriting an existing value. pub fn insert_code(&mut self, code: Bytecode) { - self.last_layer_mut().contracts.insert_code(code); + self.last_layer_mut().contracts.insert(code.hash(), code); } /// Removes the code corresponding to the provided hash, if it exists. pub fn remove_code(&mut self, code_hash: &B256) { - self.last_layer_mut().contracts.remove_code(code_hash); + self.last_layer_mut().contracts.remove(code_hash); + } +} + +impl From<&LayeredChanges> for SharedMap { + fn from(changes: &LayeredChanges) -> Self { + let mut storage = Self::default(); + + changes.iter().for_each(|layer| { + layer.contracts().iter().for_each(|(code_hash, entry)| { + if entry.occurences() > 0 { + storage.as_inner_mut().insert( + *code_hash, + SharedMapEntry::with_occurences(entry.value().clone(), entry.occurences()), + ); + } else { + storage.as_inner_mut().remove(code_hash); + } + }) + }); + + storage.insert(KECCAK_EMPTY, Bytecode::new()); + + storage } } diff --git a/crates/rethnet_evm/src/state/trie.rs b/crates/rethnet_evm/src/state/trie.rs index bb29da80e5..58a6ca2571 100644 --- a/crates/rethnet_evm/src/state/trie.rs +++ b/crates/rethnet_evm/src/state/trie.rs @@ -1,5 +1,7 @@ mod account; +use crate::collections::SharedMap; + pub use self::account::AccountTrie; use hashbrown::HashMap; @@ -14,15 +16,13 @@ use revm::{ DatabaseCommit, }; -use super::{ - contract::ContractStorage, layered::LayeredChanges, RethnetLayer, StateDebug, StateError, -}; +use super::{layered::LayeredChanges, RethnetLayer, StateDebug, StateError}; /// An implementation of revm's state that uses a trie. -#[derive(Clone, Debug, Default)] +#[derive(Clone, Debug)] pub struct TrieState { accounts: AccountTrie, - contracts: ContractStorage, + contracts: SharedMap, } impl TrieState { @@ -37,13 +37,25 @@ impl TrieState { /// Inserts the provided bytecode using its hash, potentially overwriting an existing value. 
pub fn insert_code(&mut self, code: Bytecode) { - self.contracts.insert_code(code); + self.contracts.insert(code.hash(), code); } /// Removes the code corresponding to the provided hash, if it exists. pub fn remove_code(&mut self, code_hash: &B256) { if *code_hash != KECCAK_EMPTY { - self.contracts.remove_code(code_hash); + self.contracts.remove(code_hash); + } + } +} + +impl Default for TrieState { + fn default() -> Self { + let mut contracts = SharedMap::default(); + contracts.insert(KECCAK_EMPTY, Bytecode::new()); + + Self { + accounts: AccountTrie::default(), + contracts, } } } @@ -224,7 +236,7 @@ impl From<&LayeredChanges> for TrieState { }) })); - let contracts = ContractStorage::from(changes); + let contracts = SharedMap::from(changes); Self { accounts, diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 6a44153e74..00a1b6c4eb 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -47,15 +47,6 @@ pub struct GenesisAccount { pub balance: BigInt, } -/// An identifier for a snapshot of the state -#[napi(object)] -pub struct SnapshotId { - /// Snapshot's state root - pub state_root: Buffer, - /// Whether the snapshot already existed. - pub existed: bool, -} - /// The Rethnet state #[napi(custom_finalize)] #[derive(Debug)] @@ -291,9 +282,9 @@ impl StateManager { /// Makes a snapshot of the database that's retained until [`removeSnapshot`] is called. Returns the snapshot's identifier. #[napi] #[cfg_attr(feature = "tracing", tracing::instrument)] - pub async fn make_snapshot(&self) -> SnapshotId { + pub async fn make_snapshot(&self) -> Buffer { let state = self.state.clone(); - let (state_root, existed) = self + let state_root = self .context .runtime() .spawn(async move { @@ -303,10 +294,7 @@ impl StateManager { .await .unwrap(); - SnapshotId { - state_root: >::as_ref(&state_root).into(), - existed, - } + Buffer::from(state_root.as_ref()) } /// Modifies the account with the provided address using the specified modifier function. @@ -455,7 +443,7 @@ impl StateManager { /// Removes the snapshot corresponding to the specified state root, if it exists. Returns whether a snapshot was removed. 
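Note that the new `Default` implementations above (for `RethnetLayer` and `TrieState`) pre-seed the contracts map with the empty bytecode under `KECCAK_EMPTY`, so code lookups for accounts without code keep resolving on a fresh state. A small sketch of that invariant, assuming the exports used elsewhere in this series:

// Sketch only: the Default impls above insert Bytecode::new() under
// KECCAK_EMPTY, so even a freshly created state can serve code_by_hash for
// codeless accounts.
use rethnet_eth::account::KECCAK_EMPTY;
use rethnet_evm::state::{LayeredState, RethnetLayer, StateError, SyncState};
use revm::primitives::Bytecode;

fn main() {
    let layered = LayeredState::<RethnetLayer>::default();
    let state: &dyn SyncState<StateError> = &layered;

    let empty_code = state.code_by_hash(KECCAK_EMPTY).unwrap();
    assert_eq!(empty_code, Bytecode::new());
}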
#[napi] #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - pub async fn remove_snapshot(&self, state_root: Buffer) -> bool { + pub async fn remove_snapshot(&self, state_root: Buffer) { let state_root = B256::from_slice(&state_root); let state = self.state.clone(); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts index 0392d1ac14..1e148ee8ea 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts @@ -60,12 +60,11 @@ export class RethnetStateManager { await this._state.removeAccount(address.buf); } - public async makeSnapshot(): Promise<[Buffer, boolean]> { - const snapshot = await this._state.makeSnapshot(); - return [snapshot.stateRoot, snapshot.existed]; + public async makeSnapshot(): Promise { + return this._state.makeSnapshot(); } - public async removeSnapshot(stateRoot: Buffer): Promise { + public async removeSnapshot(stateRoot: Buffer): Promise { return this._state.removeSnapshot(stateRoot); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 36f72d7d62..46d3622b1b 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -973,7 +973,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu id, date: new Date(), latestBlock: await this.getLatestBlock(), - stateRoot: (await this._vm.makeSnapshot())[0], + stateRoot: await this._vm.makeSnapshot(), txPoolSnapshotId: this._txPool.snapshot(), blockTimeOffsetSeconds: this.getTimeIncrement(), nextBlockTimestamp: this.getNextBlockTimestamp(), @@ -2074,16 +2074,12 @@ Hardhat Network's forking functionality only works with blocks from at least spu ); } - const [snapshot, existed] = await this._vm.makeSnapshot(); + const snapshot = await this._vm.makeSnapshot(); await this._setBlockContext(block); try { return await action(); } finally { await this._vm.restoreContext(snapshot); - - if (!existed) { - await this._vm.removeSnapshot(snapshot); - } } } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index 7c69061710..a50b715aba 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -300,10 +300,9 @@ export class DualModeAdapter implements VMAdapter { return this._ethereumJSAdapter.revertBlock(); } - public async makeSnapshot(): Promise<[Buffer, boolean]> { - const [ethereumJSRoot, _] = await this._ethereumJSAdapter.makeSnapshot(); - const [rethnetRoot, rethnetExisted] = - await this._rethnetAdapter.makeSnapshot(); + public async makeSnapshot(): Promise { + const ethereumJSRoot = await this._ethereumJSAdapter.makeSnapshot(); + const rethnetRoot = await this._rethnetAdapter.makeSnapshot(); if (!ethereumJSRoot.equals(rethnetRoot)) { console.trace( @@ -315,16 +314,12 @@ export class DualModeAdapter implements VMAdapter { throw new Error("Different snapshot state root"); } - return [rethnetRoot, rethnetExisted]; + return rethnetRoot; } - public async removeSnapshot(stateRoot: Buffer): Promise { - const _ethereumJSSuccess = await this._ethereumJSAdapter.removeSnapshot( 
- stateRoot - ); - const rethnetSuccess = await this._rethnetAdapter.removeSnapshot(stateRoot); - - return rethnetSuccess; + public async removeSnapshot(stateRoot: Buffer): Promise { + await this._ethereumJSAdapter.removeSnapshot(stateRoot); + await this._rethnetAdapter.removeSnapshot(stateRoot); } public getLastTrace(): { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index c340dc831f..b95602560d 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -510,13 +510,12 @@ export class EthereumJSAdapter implements VMAdapter { this._blockStartStateRoot = undefined; } - public async makeSnapshot(): Promise<[Buffer, boolean]> { - return [await this.getStateRoot(), false]; + public async makeSnapshot(): Promise { + return this.getStateRoot(); } - public async removeSnapshot(_stateRoot: Buffer): Promise { + public async removeSnapshot(_stateRoot: Buffer): Promise { // No way of deleting snapshot - return true; } public getLastTrace(): { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 7cfc8af104..7b1f147cca 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -401,11 +401,11 @@ export class RethnetAdapter implements VMAdapter { throw new Error("traceTransaction not implemented for Rethnet"); } - public async makeSnapshot(): Promise<[Buffer, boolean]> { + public async makeSnapshot(): Promise { return this._state.makeSnapshot(); } - public async removeSnapshot(stateRoot: Buffer): Promise { + public async removeSnapshot(stateRoot: Buffer): Promise { return this._state.removeSnapshot(stateRoot); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts index 0e1085725a..159fd92715 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/vm-adapter.ts @@ -82,8 +82,8 @@ export interface VMAdapter { ): Promise; // methods for snapshotting - makeSnapshot(): Promise<[Buffer, boolean]>; - removeSnapshot(stateRoot: Buffer): Promise; + makeSnapshot(): Promise; + removeSnapshot(stateRoot: Buffer): Promise; // for debugging purposes printState(): Promise; From 5feac5d823815854b3ef4ac5255c1281f2a1c233 Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 18:59:14 -0500 Subject: [PATCH 055/406] improvement: correctly annotate immutable trait functions (#3848) --- crates/rethnet_evm/src/state/debug.rs | 6 +++--- crates/rethnet_evm/src/state/hybrid.rs | 6 +++--- crates/rethnet_evm/src/state/layered.rs | 6 +++--- crates/rethnet_evm/src/state/trie.rs | 6 +++--- crates/rethnet_evm_napi/src/state.rs | 6 +++--- 5 files changed, 15 insertions(+), 15 deletions(-) diff --git a/crates/rethnet_evm/src/state/debug.rs b/crates/rethnet_evm/src/state/debug.rs index bc97960d49..1ed6093eee 100644 --- a/crates/rethnet_evm/src/state/debug.rs +++ b/crates/rethnet_evm/src/state/debug.rs @@ -43,7 +43,7 @@ pub trait StateDebug { type Error; /// Retrieves the storage root of the account at the specified address. 
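The hunks that follow flip the read-only accessors (`account_storage_root`, `serialize`, `state_root`) from `&mut self` to `&self`; the payoff appears in the N-API bindings further down, where `state.write().await` becomes `state.read().await`, so queries can share the lock instead of serializing behind exclusive access. A minimal, self-contained sketch of that difference, assuming a tokio `RwLock` like the one the bindings use:

// Sketch only: with &self accessors, callers can use a shared read lock.
use tokio::sync::RwLock;

struct DemoState {
    root: u64,
}

impl DemoState {
    // Read-only accessor: &self is enough, so a read lock suffices.
    fn state_root(&self) -> u64 {
        self.root
    }
}

#[tokio::main]
async fn main() {
    let state = RwLock::new(DemoState { root: 42 });

    // Both lookups take read locks and may run concurrently.
    let (a, b) = tokio::join!(
        async { state.read().await.state_root() },
        async { state.read().await.state_root() },
    );
    assert_eq!(a, b);
}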
- fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error>; + fn account_storage_root(&self, address: &Address) -> Result, Self::Error>; /// Inserts the provided account at the specified address. fn insert_account( @@ -64,7 +64,7 @@ pub trait StateDebug { fn remove_account(&mut self, address: Address) -> Result, Self::Error>; /// Serializes the state using ordering of addresses and storage indices. - fn serialize(&mut self) -> String; + fn serialize(&self) -> String; /// Sets the storage slot at the specified address and index to the provided value. fn set_account_storage_slot( @@ -75,5 +75,5 @@ pub trait StateDebug { ) -> Result<(), Self::Error>; /// Retrieves the storage root of the database. - fn state_root(&mut self) -> Result; + fn state_root(&self) -> Result; } diff --git a/crates/rethnet_evm/src/state/hybrid.rs b/crates/rethnet_evm/src/state/hybrid.rs index 489e7db2f8..62dda5ece3 100644 --- a/crates/rethnet_evm/src/state/hybrid.rs +++ b/crates/rethnet_evm/src/state/hybrid.rs @@ -77,7 +77,7 @@ impl StateDebug for HybridState { type Error = StateError; #[cfg_attr(feature = "tracing", tracing::instrument)] - fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + fn account_storage_root(&self, address: &Address) -> Result, Self::Error> { self.trie.account_storage_root(address) } @@ -157,7 +157,7 @@ impl StateDebug for HybridState { } #[cfg_attr(feature = "tracing", tracing::instrument)] - fn serialize(&mut self) -> String { + fn serialize(&self) -> String { self.trie.serialize() } @@ -179,7 +179,7 @@ impl StateDebug for HybridState { } #[cfg_attr(feature = "tracing", tracing::instrument)] - fn state_root(&mut self) -> Result { + fn state_root(&self) -> Result { self.trie.state_root() } } diff --git a/crates/rethnet_evm/src/state/layered.rs b/crates/rethnet_evm/src/state/layered.rs index ce7fb1677c..3c899a7517 100644 --- a/crates/rethnet_evm/src/state/layered.rs +++ b/crates/rethnet_evm/src/state/layered.rs @@ -81,7 +81,7 @@ impl StateDebug for LayeredState { type Error = StateError; #[cfg_attr(feature = "tracing", tracing::instrument)] - fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + fn account_storage_root(&self, address: &Address) -> Result, Self::Error> { Ok(self .changes .account(address) @@ -149,7 +149,7 @@ impl StateDebug for LayeredState { } #[cfg_attr(feature = "tracing", tracing::instrument)] - fn serialize(&mut self) -> String { + fn serialize(&self) -> String { self.changes.serialize() } @@ -169,7 +169,7 @@ impl StateDebug for LayeredState { } #[cfg_attr(feature = "tracing", tracing::instrument)] - fn state_root(&mut self) -> Result { + fn state_root(&self) -> Result { let mut state = HashMap::new(); self.changes diff --git a/crates/rethnet_evm/src/state/trie.rs b/crates/rethnet_evm/src/state/trie.rs index 58a6ca2571..d6dc481fff 100644 --- a/crates/rethnet_evm/src/state/trie.rs +++ b/crates/rethnet_evm/src/state/trie.rs @@ -113,7 +113,7 @@ impl DatabaseCommit for TrieState { impl StateDebug for TrieState { type Error = StateError; - fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { + fn account_storage_root(&self, address: &Address) -> Result, Self::Error> { Ok(self.accounts.storage_root(address)) } @@ -199,7 +199,7 @@ impl StateDebug for TrieState { })) } - fn serialize(&mut self) -> String { + fn serialize(&self) -> String { self.accounts.serialize() } @@ -215,7 +215,7 @@ impl StateDebug for TrieState { Ok(()) } - fn state_root(&mut self) -> Result { + fn 
state_root(&self) -> Result { Ok(self.accounts.state_root()) } } diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 00a1b6c4eb..204c51eac4 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -199,7 +199,7 @@ impl StateManager { self.context .runtime() .spawn(async move { - let mut state = state.write().await; + let state = state.read().await; state.account_storage_root(&address) }) .await @@ -249,7 +249,7 @@ impl StateManager { self.context .runtime() .spawn(async move { - let mut state = state.write().await; + let state = state.read().await; state.state_root() }) .await @@ -465,7 +465,7 @@ impl StateManager { self.context .runtime() .spawn(async move { - let mut state = state.write().await; + let state = state.read().await; state.serialize() }) .await From cc5ee2c9799ac2f67716f00d3f3b3f42bb29bdca Mon Sep 17 00:00:00 2001 From: Wodann Date: Thu, 20 Apr 2023 19:02:13 -0500 Subject: [PATCH 056/406] improvement: more validation in `DualModeAdapter` (#3849) Co-authored-by: F. Eugene Aumson --- .../hardhat-network/provider/vm/dual.ts | 152 ++++++++++++------ .../hardhat-network/provider/vm/rethnet.ts | 2 +- 2 files changed, 101 insertions(+), 53 deletions(-) diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index a50b715aba..ceef24339b 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -28,11 +28,7 @@ import { RunTxResult, Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/restrict-template-expressions */ -function _printEthereumJSTrace(trace: any) { - console.log(JSON.stringify(trace, null, 2)); -} - -function _printRethnetTrace(trace: any) { +function printTrace(trace: any) { console.log( JSON.stringify( trace, @@ -255,22 +251,21 @@ export class DualModeAdapter implements VMAdapter { tx: TypedTransaction, block: Block ): Promise<[RunTxResult, Trace]> { - const ethereumJSResultPromise = this._ethereumJSAdapter.runTxInBlock( - tx, - block - ); - - const rethnetResultPromise = this._rethnetAdapter.runTxInBlock(tx, block); - const [ - [ethereumJSResult, ethereumJSTrace], - [rethnetResult, _rethnetTrace], - ] = await Promise.all([ethereumJSResultPromise, rethnetResultPromise]); + [ethereumJSResult, ethereumJSDebugTrace], + [rethnetResult, _rethnetDebugTrace], + ] = await Promise.all([ + this._ethereumJSAdapter.runTxInBlock(tx, block), + this._rethnetAdapter.runTxInBlock(tx, block), + ]); try { assertEqualRunTxResults(ethereumJSResult, rethnetResult); - return [ethereumJSResult, ethereumJSTrace]; + // Validate trace + const _trace = this.getLastTrace(); + + return [ethereumJSResult, ethereumJSDebugTrace]; } catch (e) { // if the results didn't match, print the traces // console.log("EthereumJS trace"); @@ -298,6 +293,8 @@ export class DualModeAdapter implements VMAdapter { public async revertBlock(): Promise { await this._rethnetAdapter.revertBlock(); return this._ethereumJSAdapter.revertBlock(); + + const _stateRoot = this.getStateRoot(); } public async makeSnapshot(): Promise { @@ -344,7 +341,16 @@ export class DualModeAdapter implements VMAdapter { ); } - assertEqualTraces(ethereumJSTrace, rethnetTrace); + const differences = traceDifferences(ethereumJSTrace, rethnetTrace); + if 
(differences.length > 0) { + console.trace(`Different traces: ${differences}`); + console.log("EthereumJS trace:"); + printTrace(ethereumJSTrace); + console.log(); + console.log("Rethnet trace:"); + printTrace(rethnetTrace); + throw new Error(`Different traces: ${differences}`); + } } if (ethereumJSError === undefined) { @@ -549,18 +555,20 @@ function assertEqualRunTxResults( ethereumJSResult: RunTxResult, rethnetResult: RunTxResult ) { + const differences: string[] = []; + if (ethereumJSResult.exit.kind !== rethnetResult.exit.kind) { console.trace( `Different exceptionError.error: ${ethereumJSResult.exit.kind} !== ${rethnetResult.exit.kind}` ); - throw new Error("Different exceptionError.error"); + differences.push("exceptionError.error"); } if (ethereumJSResult.gasUsed !== rethnetResult.gasUsed) { console.trace( `Different totalGasSpent: ${ethereumJSResult.gasUsed} !== ${rethnetResult.gasUsed}` ); - throw new Error("Different totalGasSpent"); + differences.push("totalGasSpent"); } const exitCode = ethereumJSResult.exit.kind; @@ -578,7 +586,7 @@ function assertEqualRunTxResults( "hex" )} !== ${rethnetResult.returnValue.toString("hex")}` ); - throw new Error("Different returnValue"); + differences.push("returnValue"); } // } @@ -586,7 +594,7 @@ function assertEqualRunTxResults( console.trace( `Different bloom: ${ethereumJSResult.bloom} !== ${rethnetResult.bloom}` ); - throw new Error("Different bloom"); + differences.push("bloom"); } if ( @@ -597,7 +605,7 @@ function assertEqualRunTxResults( console.trace( `Different receipt bitvector: ${ethereumJSResult.receipt.bitvector} !== ${rethnetResult.receipt.bitvector}` ); - throw new Error("Different receipt bitvector"); + differences.push("receipt.bitvector"); } if ( @@ -607,7 +615,7 @@ function assertEqualRunTxResults( console.trace( `Different receipt cumulativeBlockGasUsed: ${ethereumJSResult.receipt.cumulativeBlockGasUsed} !== ${rethnetResult.receipt.cumulativeBlockGasUsed}` ); - throw new Error("Different receipt cumulativeBlockGasUsed"); + differences.push("receipt.cumulativeBlockGasUsed"); } assertEqualLogs(ethereumJSResult.receipt.logs, rethnetResult.receipt.logs); @@ -626,17 +634,23 @@ function assertEqualRunTxResults( console.trace( `Different createdAddress: ${ethereumJSResult.createdAddress?.toString()} !== ${rethnetResult.createdAddress?.toString()}` ); - throw new Error("Different createdAddress"); + differences.push("createdAddress"); } } + + if (differences.length !== 0) { + throw new Error(`Different result fields: ${differences}`); + } } function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { + const differences: string[] = []; + if (ethereumJSLogs.length !== rethnetLogs.length) { console.trace( `Different logs length: ${ethereumJSLogs.length} !== ${rethnetLogs.length}` ); - throw new Error("Different logs length"); + differences.push("length"); } for (let logIdx = 0; logIdx < ethereumJSLogs.length; ++logIdx) { @@ -644,7 +658,7 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { console.trace( `Different log[${logIdx}] address: ${ethereumJSLogs[logIdx][0]} !== ${rethnetLogs[logIdx][0]}` ); - throw new Error("Different log address"); + differences.push("address"); } const ethereumJSTopics = ethereumJSLogs[logIdx][1]; @@ -653,7 +667,7 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { console.trace( `Different log[${logIdx}] topics length: ${ethereumJSTopics.length} !== ${rethnetTopics.length}` ); - throw new Error("Different log topics length"); + differences.push("topics 
length"); } for (let topicIdx = 0; topicIdx < ethereumJSTopics.length; ++topicIdx) { @@ -661,7 +675,7 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { console.trace( `Different log[${logIdx}] topic[${topicIdx}]: ${ethereumJSTopics[topicIdx]} !== ${rethnetTopics[topicIdx]}` ); - throw new Error("Different log topic"); + differences.push("topic"); } } @@ -669,9 +683,13 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { console.trace( `Different log[${logIdx}] data: ${ethereumJSLogs[logIdx][2]} !== ${rethnetLogs[logIdx][2]}` ); - throw new Error("Different log data"); + differences.push("data"); } } + + if (differences.length !== 0) { + throw new Error(`Different log fields: ${differences}`); + } } function assertEqualAccounts( @@ -679,26 +697,28 @@ function assertEqualAccounts( ethereumJSAccount: Account, rethnetAccount: Account ) { + const differences: string[] = []; + if (ethereumJSAccount.balance !== rethnetAccount.balance) { console.trace(`Account: ${address}`); console.trace( `Different balance: ${ethereumJSAccount.balance} !== ${rethnetAccount.balance}` ); - throw new Error("Different balance"); + differences.push("balance"); } if (!ethereumJSAccount.codeHash.equals(rethnetAccount.codeHash)) { console.trace( `Different codeHash: ${ethereumJSAccount.codeHash} !== ${rethnetAccount.codeHash}` ); - throw new Error("Different codeHash"); + differences.push("codeHash"); } if (ethereumJSAccount.nonce !== rethnetAccount.nonce) { console.trace( `Different nonce: ${ethereumJSAccount.nonce} !== ${rethnetAccount.nonce}` ); - throw new Error("Different nonce"); + differences.push("nonce"); } if (!ethereumJSAccount.storageRoot.equals(rethnetAccount.storageRoot)) { @@ -711,39 +731,46 @@ function assertEqualAccounts( } } -function assertEqualTraces( +function traceDifferences( ethereumJSTrace: MessageTrace, rethnetTrace: MessageTrace -) { +): string[] { + const differences: string[] = []; + // both traces are defined if (ethereumJSTrace.depth !== rethnetTrace.depth) { - throw new Error( + console.log( `Different depth: ${ethereumJSTrace.depth} !== ${rethnetTrace.depth}` ); + differences.push("depth"); } if (ethereumJSTrace.exit.kind !== rethnetTrace.exit.kind) { - throw new Error( + console.log( `Different exit: ${ethereumJSTrace.exit.kind} !== ${rethnetTrace.exit.kind}` ); + differences.push("exit"); } if (ethereumJSTrace.gasUsed !== rethnetTrace.gasUsed) { - throw new Error( + console.log( `Different gasUsed: ${ethereumJSTrace.gasUsed} !== ${rethnetTrace.gasUsed}` ); + differences.push("gasUsed"); } if (!ethereumJSTrace.returnData.equals(rethnetTrace.returnData)) { - throw new Error( + console.log( `Different returnData: ${ethereumJSTrace.returnData} !== ${rethnetTrace.returnData}` ); + differences.push("returnData"); } if (ethereumJSTrace.value !== rethnetTrace.value) { - throw new Error( + console.log( `Different value: ${ethereumJSTrace.value} !== ${rethnetTrace.value}` ); + differences.push("value"); } if (isPrecompileTrace(ethereumJSTrace)) { @@ -755,44 +782,48 @@ function assertEqualTraces( // Both traces are precompile traces if (ethereumJSTrace.precompile !== rethnetTrace.precompile) { - throw new Error( + console.log( `Different precompile: ${ethereumJSTrace.precompile} !== ${rethnetTrace.precompile}` ); + differences.push("precompile"); } if (!ethereumJSTrace.calldata.equals(rethnetTrace.calldata)) { - throw new Error( + console.log( `Different calldata: ${ethereumJSTrace.calldata} !== ${rethnetTrace.calldata}` ); + 
differences.push("calldata"); } } else { if (isPrecompileTrace(rethnetTrace)) { throw new Error( - `ethereumJSTrace is a precompiled trace but ethereumJSTrace is not` + `rethnetTrace is a precompiled trace but ethereumJSTrace is not` ); } // Both traces are NOT precompile traces if (!ethereumJSTrace.code.equals(rethnetTrace.code)) { - console.log("ethereumjs:", ethereumJSTrace); - console.log("rethnet:", rethnetTrace); - throw new Error( + console.log( `Different code: ${ethereumJSTrace.code.toString( "hex" )} !== ${rethnetTrace.code.toString("hex")}` ); + differences.push("code"); } if (ethereumJSTrace.steps.length !== rethnetTrace.steps.length) { - throw new Error( + console.log( `Different steps length: ${ethereumJSTrace.steps.length} !== ${rethnetTrace.steps.length}` ); + differences.push("steps.length"); } for (let stepIdx = 0; stepIdx < ethereumJSTrace.steps.length; stepIdx++) { const ethereumJSStep = ethereumJSTrace.steps[stepIdx]; const rethnetStep = rethnetTrace.steps[stepIdx]; + const stepDifferences: string[] = []; + if (isEvmStep(ethereumJSStep)) { // if (stepIdx >= rethnetTrace.steps.length) { // console.log("code:", ethereumJSTrace.code); @@ -809,9 +840,10 @@ function assertEqualTraces( } if (ethereumJSStep.pc !== rethnetStep.pc) { - throw new Error( - `Different step[${stepIdx}]: ${ethereumJSStep.pc} !== ${rethnetStep.pc}` + console.log( + `Different step[${stepIdx}] pc: ${ethereumJSStep.pc} !== ${rethnetStep.pc}` ); + stepDifferences.push("pc"); } } else { if (isEvmStep(rethnetStep)) { @@ -820,15 +852,27 @@ function assertEqualTraces( ); } - assertEqualTraces(ethereumJSStep, rethnetStep); + const messageDifferences = traceDifferences( + ethereumJSStep, + rethnetStep + ); + + if (messageDifferences.length > 0) { + stepDifferences.push(`message: ${messageDifferences}`); + } + } + + if (stepDifferences.length > 0) { + differences.push(`step[${stepIdx}]: ${stepDifferences}`); } } if (ethereumJSTrace.bytecode === undefined) { if (rethnetTrace.bytecode !== undefined) { - throw new Error( + console.log( "ethereumJSTrace.bytecode is undefined but rethnetTrace.bytecode is defined" ); + differences.push("bytecode"); } } else { if (rethnetTrace.bytecode === undefined) { @@ -839,16 +883,20 @@ function assertEqualTraces( // Both traces contain bytecode if (!ethereumJSTrace.bytecode.equals(rethnetTrace.bytecode)) { - throw new Error( + console.log( `Different bytecode: ${ethereumJSTrace.bytecode} !== ${rethnetTrace.bytecode}` ); + differences.push("bytecode"); } } if (ethereumJSTrace.numberOfSubtraces !== rethnetTrace.numberOfSubtraces) { - throw new Error( + console.log( `Different numberOfSubtraces: ${ethereumJSTrace.numberOfSubtraces} !== ${rethnetTrace.numberOfSubtraces}` ); + differences.push("numberOfSubtraces"); } } + + return differences; } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index 7b1f147cca..a9aa89aeaa 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -330,7 +330,7 @@ export class RethnetAdapter implements VMAdapter { rethnetResult.result, block.header.gasUsed ); - return [result, trace]; + return [result, this._vmTracer.getLastTopLevelMessageTrace()]; } catch (e) { // console.log("Rethnet trace"); // console.log(rethnetResult.trace); From eb24c51bc268f8d80b970e2a9da1d7819a35ccdd Mon Sep 17 00:00:00 2001 From: "F. 
Eugene Aumson" Date: Tue, 18 Apr 2023 19:43:07 -0400 Subject: [PATCH 057/406] feat: assorted synthetic benchmarks for `SyncState` (#3868) Co-authored-by: Wodann --- crates/rethnet_evm/Cargo.toml | 8 ++ crates/rethnet_evm/benches/state.rs | 119 ++++++++++++++++++++++++ crates/rethnet_evm/src/state/hybrid.rs | 4 +- crates/rethnet_evm/src/state/layered.rs | 2 +- 4 files changed, 130 insertions(+), 3 deletions(-) create mode 100644 crates/rethnet_evm/benches/state.rs diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 422d7213b8..905d0364c4 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -1,3 +1,6 @@ +[lib] +bench = false + [package] name = "rethnet_evm" version = "0.1.0-dev" @@ -21,7 +24,12 @@ tokio = { version = "1.21.2", default-features = false, features = ["rt-multi-th tracing = { version = "0.1.37", features = ["attributes", "std"], optional = true } [dev-dependencies] +criterion = { version = "0.4.0", default-features = false, features = ["cargo_bench_support", "html_reports", "plotters"] } test-with = { version = "0.9.1", default-features = false } [features] tracing = ["dep:tracing"] + +[[bench]] +name = "state" +harness = false diff --git a/crates/rethnet_evm/benches/state.rs b/crates/rethnet_evm/benches/state.rs new file mode 100644 index 0000000000..83d85140bd --- /dev/null +++ b/crates/rethnet_evm/benches/state.rs @@ -0,0 +1,119 @@ +use std::{clone::Clone, str::FromStr}; + +use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; +use rethnet_eth::Address; +use rethnet_evm::state::{HybridState, LayeredState, RethnetLayer, StateError, SyncState}; +use revm::{db::StateRef, primitives::AccountInfo}; + +#[derive(Default)] +struct RethnetStates { + layered: LayeredState, + hybrid: HybridState, +} + +impl RethnetStates { + fn fill(&mut self, number_of_accounts: u64, number_of_accounts_per_checkpoint: u64) { + let mut states: [&mut dyn SyncState; 2] = [&mut self.layered, &mut self.hybrid]; + for state in states.iter_mut() { + let number_of_checkpoints = number_of_accounts / number_of_accounts_per_checkpoint; + for _ in 0..=number_of_checkpoints { + for i in 1..=number_of_accounts_per_checkpoint { + state + .insert_account(Address::from_low_u64_ne(i), AccountInfo::default()) + .unwrap(); + } + state.checkpoint().unwrap(); + } + } + } + + /// Returns a set of factories, each member of which produces a clone of one of the state objects in this struct. 
+ fn make_clone_factories( + &self, + ) -> Vec<( + &'static str, // label of the type of state produced by this factory + Box Box> + '_>, + )> { + vec![ + ("LayeredState", Box::new(|| Box::new(self.layered.clone()))), + ("HybridState", Box::new(|| Box::new(self.hybrid.clone()))), + ] + } +} + +const NUM_SCALES: usize = 4; +const CHECKPOINT_SCALES: [u64; NUM_SCALES] = [1, 5, 10, 20]; +const MAX_CHECKPOINT_SCALE: u64 = CHECKPOINT_SCALES[NUM_SCALES - 1]; +const ADDRESS_SCALES: [u64; 4] = [ + MAX_CHECKPOINT_SCALE * 5, + MAX_CHECKPOINT_SCALE * 25, + MAX_CHECKPOINT_SCALE * 50, + MAX_CHECKPOINT_SCALE * 100, +]; + +fn bench_sync_state_method(c: &mut Criterion, method_name: &str, mut method_invocation: R) +where + R: FnMut(Box>, u64) -> O, +{ + let mut group = c.benchmark_group(method_name); + for accounts_per_checkpoint in CHECKPOINT_SCALES.iter() { + for number_of_accounts in ADDRESS_SCALES.iter() { + let mut rethnet_states = RethnetStates::default(); + rethnet_states.fill(*number_of_accounts, *accounts_per_checkpoint); + + for (label, state_factory) in rethnet_states.make_clone_factories().into_iter() { + group.bench_with_input( + BenchmarkId::new( + format!( + "{},{} account(s) per checkpoint", + label, *accounts_per_checkpoint + ), + *number_of_accounts, + ), + number_of_accounts, + |b, number_of_accounts| { + b.iter_batched( + || state_factory(), + |state| method_invocation(state, *number_of_accounts), + BatchSize::SmallInput, + ); + }, + ); + } + } + } +} + +fn bench_insert_account(c: &mut Criterion) { + bench_sync_state_method( + c, + "StateDebug::insert_account()", + |mut state, number_of_accounts| { + state.insert_account( + Address::from_low_u64_ne(number_of_accounts), + AccountInfo::default(), + ) + }, + ); +} + +fn bench_checkpoint(c: &mut Criterion) { + bench_sync_state_method( + c, + "StateHistory::checkpoint()", + |mut state, _number_of_accounts| state.checkpoint(), + ); +} + +fn bench_basic(c: &mut Criterion) { + bench_sync_state_method(c, "StateRef::basic()", |state, number_of_accounts| { + for i in number_of_accounts..=1 { + state + .basic(Address::from_str(&format!("0x{:0>40x}", i)).unwrap()) + .unwrap(); + } + }); +} + +criterion_group!(benches, bench_insert_account, bench_checkpoint, bench_basic); +criterion_main!(benches); diff --git a/crates/rethnet_evm/src/state/hybrid.rs b/crates/rethnet_evm/src/state/hybrid.rs index 62dda5ece3..179bd21343 100644 --- a/crates/rethnet_evm/src/state/hybrid.rs +++ b/crates/rethnet_evm/src/state/hybrid.rs @@ -17,14 +17,14 @@ use super::{ AccountModifierFn, RethnetLayer, StateDebug, StateError, }; -#[derive(Debug)] +#[derive(Clone, Debug)] struct Snapshot { pub changes: LayeredChanges, pub trie: TrieState, } /// A state consisting of layers. -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default)] pub struct HybridState { trie: TrieState, changes: LayeredChanges, diff --git a/crates/rethnet_evm/src/state/layered.rs b/crates/rethnet_evm/src/state/layered.rs index 3c899a7517..0780bac197 100644 --- a/crates/rethnet_evm/src/state/layered.rs +++ b/crates/rethnet_evm/src/state/layered.rs @@ -21,7 +21,7 @@ use crate::collections::SharedMap; use super::{history::StateHistory, AccountModifierFn, StateDebug, StateError}; /// A state consisting of layers. -#[derive(Debug, Default)] +#[derive(Clone, Debug, Default)] pub struct LayeredState { changes: LayeredChanges, snapshots: SharedMap, true>, From b7e7634276ec8fb31b50ab8d61575206a7e16e64 Mon Sep 17 00:00:00 2001 From: "F. 
Eugene Aumson" Date: Wed, 26 Apr 2023 18:28:31 -0400 Subject: [PATCH 058/406] fix: bug in benchmark of `StateRef::basic()` (#3886) --- crates/rethnet_evm/benches/state.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/rethnet_evm/benches/state.rs b/crates/rethnet_evm/benches/state.rs index 83d85140bd..3e7e71afda 100644 --- a/crates/rethnet_evm/benches/state.rs +++ b/crates/rethnet_evm/benches/state.rs @@ -107,7 +107,7 @@ fn bench_checkpoint(c: &mut Criterion) { fn bench_basic(c: &mut Criterion) { bench_sync_state_method(c, "StateRef::basic()", |state, number_of_accounts| { - for i in number_of_accounts..=1 { + for i in (1..=number_of_accounts).rev() { state .basic(Address::from_str(&format!("0x{:0>40x}", i)).unwrap()) .unwrap(); From bd570cab655236a5fe6da1fdc34f46ed925ac184 Mon Sep 17 00:00:00 2001 From: "F. Eugene Aumson" Date: Tue, 2 May 2023 23:42:03 -0400 Subject: [PATCH 059/406] fix: store code when inserting account in `LayeredState` (#3887) Co-authored-by: Wodann --- crates/rethnet_evm/src/state/hybrid.rs | 2 +- crates/rethnet_evm/src/state/layered.rs | 23 ++++++++++++++++++- .../rethnet_evm/src/state/layered/changes.rs | 8 +++++++ 3 files changed, 31 insertions(+), 2 deletions(-) diff --git a/crates/rethnet_evm/src/state/hybrid.rs b/crates/rethnet_evm/src/state/hybrid.rs index 179bd21343..3e0fba4aa6 100644 --- a/crates/rethnet_evm/src/state/hybrid.rs +++ b/crates/rethnet_evm/src/state/hybrid.rs @@ -88,7 +88,7 @@ impl StateDebug for HybridState { account_info: AccountInfo, ) -> Result<(), Self::Error> { self.trie.insert_account(address, account_info.clone())?; - self.changes.account_or_insert_mut(&address).info = account_info; + self.changes.insert_account(&address, account_info); Ok(()) } diff --git a/crates/rethnet_evm/src/state/layered.rs b/crates/rethnet_evm/src/state/layered.rs index 0780bac197..2ae97667e3 100644 --- a/crates/rethnet_evm/src/state/layered.rs +++ b/crates/rethnet_evm/src/state/layered.rs @@ -94,7 +94,7 @@ impl StateDebug for LayeredState { address: Address, account_info: AccountInfo, ) -> Result<(), Self::Error> { - self.changes.account_or_insert_mut(&address).info = account_info; + self.changes.insert_account(&address, account_info); Ok(()) } @@ -275,3 +275,24 @@ impl StateHistory for LayeredState { } } } + +#[cfg(test)] +mod tests { + use super::*; + + use rethnet_eth::Bytes; + + #[test] + fn code_by_hash_success() { + let mut state = LayeredState::::default(); + let inserted_bytecode = Bytecode::new_raw(Bytes::from("0x11")); + state + .insert_account( + Address::from_low_u64_ne(1234), + AccountInfo::new(U256::ZERO, 0, inserted_bytecode.clone()), + ) + .unwrap(); + let retrieved_bytecode = state.code_by_hash(inserted_bytecode.hash()).unwrap(); + assert_eq!(retrieved_bytecode, inserted_bytecode); + } +} diff --git a/crates/rethnet_evm/src/state/layered/changes.rs b/crates/rethnet_evm/src/state/layered/changes.rs index 9bc3ac5297..ffb58bacd4 100644 --- a/crates/rethnet_evm/src/state/layered/changes.rs +++ b/crates/rethnet_evm/src/state/layered/changes.rs @@ -344,6 +344,14 @@ impl LayeredChanges { pub fn remove_code(&mut self, code_hash: &B256) { self.last_layer_mut().contracts.remove(code_hash); } + + pub fn insert_account(&mut self, address: &Address, mut account_info: AccountInfo) { + if let Some(code) = account_info.code.take() { + self.insert_code(code); + } + + self.account_or_insert_mut(address).info = account_info; + } } impl From<&LayeredChanges> for SharedMap { From baaebb251d47f9e900525b24a3f4469c81b1fd68 Mon Sep 17 00:00:00 
2001 From: Wodann Date: Wed, 3 May 2023 11:49:59 -0500 Subject: [PATCH 060/406] chore(ci): combine hardhat-core CI workflows --- .github/workflows/hardhat-core-ci.yml | 282 ++++++++++++++++-- .../workflows/hardhat-network-forking-ci.yml | 60 ---- .../workflows/hardhat-network-tracing-ci.yml | 55 ---- .github/workflows/rethnet-ci.yml | 166 ----------- 4 files changed, 256 insertions(+), 307 deletions(-) delete mode 100644 .github/workflows/hardhat-network-forking-ci.yml delete mode 100644 .github/workflows/hardhat-network-tracing-ci.yml delete mode 100644 .github/workflows/rethnet-ci.yml diff --git a/.github/workflows/hardhat-core-ci.yml b/.github/workflows/hardhat-core-ci.yml index a2b3870eae..6cb1819fff 100644 --- a/.github/workflows/hardhat-core-ci.yml +++ b/.github/workflows/hardhat-core-ci.yml @@ -2,18 +2,32 @@ name: hardhat-core CI on: push: - branches: [$default-branch] + branches: + - $default-branch + - "rethnet/main" paths: + - ".github/workflows/hardhat-core-ci.yml" + - "rust-toolchain" + - "Cargo.toml" + - "config/**" + - "crates/**" - "packages/hardhat-core/**" - "packages/hardhat-common/**" - - "config/**" pull_request: branches: - "**" paths: + - ".github/workflows/hardhat-core-ci.yml" + - "rust-toolchain" + - "Cargo.toml" + - "config/**" + - "crates/**" - "packages/hardhat-core/**" - "packages/hardhat-common/**" - - "config/**" + workflow_dispatch: + +env: + RUSTFLAGS: -Dwarnings defaults: run: @@ -24,64 +38,280 @@ concurrency: cancel-in-progress: true jobs: - test_on_windows: - name: Test hardhat-core on Windows with Node 18 - runs-on: windows-latest + check-rethnet: + name: Check Rethnet + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 + - uses: actions/checkout@v3 + + - name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + + - uses: Swatinem/rust-cache@v2 + + - name: Cargo check + uses: actions-rs/cargo@v1 with: - node-version: 18 + command: check + args: --workspace --all-features --all-targets + + test-core: + name: Test core (${{ matrix.os }}, Node ${{ matrix.node }}, ${{ matrix.vm }}) + runs-on: ${{ matrix.os }} + needs: check-rethnet + strategy: + fail-fast: false + matrix: + node: [18] + # disable until actions/virtual-environments#4896 is fixed + # os: ["macos-latest", "ubuntu-latest", "windows-latest"] + os: ["ubuntu-latest", "windows-latest"] + vm: ["dual", "rethnet"] + steps: + - uses: actions/checkout@v3 + + - name: Install Node + uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node }} cache: yarn - - name: Install + + - name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + + - uses: Swatinem/rust-cache@v2 + + - name: Install package run: yarn --frozen-lockfile + - name: Build run: yarn build + - name: Run tests env: + HARDHAT_EXPERIMENTAL_VM_MODE: ${{ matrix.vm }} DO_NOT_SET_THIS_ENV_VAR____IS_HARDHAT_CI: true FORCE_COLOR: 3 run: yarn test:except-tracing - test_on_macos: - name: Test hardhat-core on MacOS with Node 18 - runs-on: macos-latest - # disable until actions/virtual-environments#4896 is fixed - if: ${{ false }} + test-fork: + name: Test forking (${{ matrix.os }}, Node ${{ matrix.node }}, ${{ matrix.vm }}) + runs-on: ${{ matrix.os }} + needs: check-rethnet + strategy: + fail-fast: false + matrix: + node: [18] + # disable until actions/virtual-environments#4896 is fixed + # os: ["macos-latest", "ubuntu-latest", "windows-latest"] + os: ["ubuntu-latest", "windows-latest"] + vm: ["dual", 
"rethnet"] steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 + - uses: actions/checkout@v3 + + - name: Install Node + uses: actions/setup-node@v2 with: - node-version: 18 + node-version: ${{ matrix.node }} cache: yarn - - name: Install + + - name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + + - uses: Swatinem/rust-cache@v2 + + - name: Install package run: yarn --frozen-lockfile + + - name: Cache network requests + uses: actions/cache@v2 + with: + path: | + packages/hardhat-core/test/internal/hardhat-network/provider/.hardhat_node_test_cache + key: hardhat-network-forking-tests-${{ hashFiles('yarn.lock') }}-${{ hashFiles('packages/hardhat-core/test/internal/hardhat-network/provider/node.ts') }} + - name: Build run: yarn build + - name: Run tests env: + INFURA_URL: ${{ secrets.INFURA_URL }} + ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} + HARDHAT_EXPERIMENTAL_VM_MODE: ${{ matrix.vm }} DO_NOT_SET_THIS_ENV_VAR____IS_HARDHAT_CI: true FORCE_COLOR: 3 run: yarn test:except-tracing - test_on_linux: - name: Test hardhat-core on Ubuntu with Node ${{ matrix.node }} - runs-on: ubuntu-latest + test-tracing: + name: Test stack tracing (${{ matrix.os }}, Node ${{ matrix.node }}, ${{ matrix.vm }}) + runs-on: ${{ matrix.os }} + needs: check-rethnet strategy: + fail-fast: false matrix: node: [18] + # disable until actions/virtual-environments#4896 is fixed + # os: ["macos-latest", "ubuntu-latest", "windows-latest"] + os: ["ubuntu-latest", "windows-latest"] + vm: ["dual", "rethnet"] steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 + - uses: actions/checkout@v3 + + - name: Install Node + uses: actions/setup-node@v2 with: node-version: ${{ matrix.node }} cache: yarn - - name: Install + + - name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + + - uses: Swatinem/rust-cache@v2 + + - name: Install package run: yarn --frozen-lockfile + + - name: Cache stack trace artifacts + uses: actions/cache@v2 + with: + path: | + packages/hardhat-core/test/internal/hardhat-network/stack-traces/test-files/artifacts + key: hardhat-network-stack-traces-tests-${{ hashFiles('packages/hardhat-core/test/internal/hardhat-network/stack-traces/test-files/**/*.sol') }}-${{ hashFiles('packages/hardhat-core/test/internal/hardhat-network/stack-traces/test-files/**/test.json') }}-${{ hashFiles('packages/hardhat-core/test/internal/hardhat-network/stack-traces/**/*.ts') }} + - name: Build run: yarn build + - name: Run tests env: + HARDHAT_EXPERIMENTAL_VM_MODE: ${{ matrix.vm }} + NODE_OPTIONS: "--max-old-space-size=4096" DO_NOT_SET_THIS_ENV_VAR____IS_HARDHAT_CI: true FORCE_COLOR: 3 - run: yarn test:except-tracing + run: yarn test:tracing + + test-rethnet-js: + name: Test Rethnet bindings (${{ matrix.os }}, Node ${{ matrix.node }}) + runs-on: ${{ matrix.os }} + needs: check-rethnet + strategy: + fail-fast: false + matrix: + node: [18] + os: ["macos-latest", "ubuntu-latest", "windows-latest"] + include: + - RUSTFLAGS: "-Dwarnings" + - os: "windows-latest" + RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" + defaults: + run: + working-directory: crates/rethnet_evm_napi + steps: + - uses: actions/checkout@v3 + + - uses: actions/setup-node@v2 + with: + node-version: ${{ matrix.node }} + cache: yarn + + - name: Install node dependencies + run: yarn --frozen-lockfile + + - name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + components: rustfmt + + - uses: 
Swatinem/rust-cache@v2 + + - name: Build + run: yarn build + + - name: Test + run: yarn test + + test-rethnet-rs: + name: Test Rethnet (${{ matrix.os }}) + runs-on: ${{ matrix.os }} + needs: check-rethnet + strategy: + fail-fast: false + matrix: + os: ["ubuntu-latest", "macos-latest"] + include: + - RUSTFLAGS: "-Dwarnings" + # disable until https://github.com/napi-rs/napi-rs/issues/1405 is resolved + # - os: "windows-latest" + # RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" + steps: + - uses: actions/checkout@v3 + + - name: Install Rust (stable) + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + + - uses: Swatinem/rust-cache@v2 + + - name: Doctests + uses: actions-rs/cargo@v1 + env: + RUSTFLAGS: ${{ matrix.RUSTFLAGS }} + with: + command: test + args: --doc --workspace --all-features + + - name: Install latest nextest release + uses: taiki-e/install-action@nextest + + - name: Test with latest nextest release + uses: actions-rs/cargo@v1 + env: + RUSTFLAGS: ${{ matrix.RUSTFLAGS }} + CARGO_INCREMENTAL: ${{ matrix.CARGO_INCREMENTAL }} + ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} + with: + command: nextest + args: run --workspace --all-features + + rethnet-style: + name: Check Rethnet Style + runs-on: ubuntu-latest + needs: check-rethnet + steps: + - uses: actions/checkout@v3 + + - name: Install stable toolchain + uses: actions-rs/toolchain@v1 + with: + profile: minimal + override: true + components: clippy, rustfmt + + - uses: Swatinem/rust-cache@v2 + + - name: Run cargo fmt + uses: actions-rs/cargo@v1 + with: + command: fmt + args: --all --check + + - name: Run cargo clippy + uses: actions-rs/clippy-check@v1 + with: + token: ${{ secrets.GITHUB_TOKEN }} + args: --workspace --all-features diff --git a/.github/workflows/hardhat-network-forking-ci.yml b/.github/workflows/hardhat-network-forking-ci.yml deleted file mode 100644 index 17f377fe9a..0000000000 --- a/.github/workflows/hardhat-network-forking-ci.yml +++ /dev/null @@ -1,60 +0,0 @@ -name: Hardhat Network Forking Functionality CI - -on: - push: - branches: [$default-branch] - paths: - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - pull_request: - branches: - - "**" - paths: - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - -defaults: - run: - working-directory: packages/hardhat-core - -concurrency: - group: ${{github.workflow}}-${{github.ref}} - cancel-in-progress: true - -jobs: - # We should run this tests with multiple configurations - # but somehow the requests to Alchemy take much longer when doing that. - # As a temporary workaround, we run them with a single config. - # - # Once we properly refactor Hardhat Network's tests we should - # add more configurations (different OS and node versions). - # - # We should also understand what's going on before blindly refactoring them. 
- test_fork: - name: Test Hardhat Network's forking functionality - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: 18 - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Cache network requests - uses: actions/cache@v2 - with: - path: | - packages/hardhat-core/test/internal/hardhat-network/provider/.hardhat_node_test_cache - key: hardhat-network-forking-tests-${{ hashFiles('yarn.lock') }}-${{ hashFiles('packages/hardhat-core/test/internal/hardhat-network/provider/node.ts') }} - - name: Run tests - env: - INFURA_URL: ${{ secrets.INFURA_URL }} - ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} - DO_NOT_SET_THIS_ENV_VAR____IS_HARDHAT_CI: true - FORCE_COLOR: 3 - run: | - yarn build - yarn test:forking diff --git a/.github/workflows/hardhat-network-tracing-ci.yml b/.github/workflows/hardhat-network-tracing-ci.yml deleted file mode 100644 index 87549528ec..0000000000 --- a/.github/workflows/hardhat-network-tracing-ci.yml +++ /dev/null @@ -1,55 +0,0 @@ -name: Hardhat Network Tracing Capabilities CI - -on: - push: - branches: [$default-branch] - paths: - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - pull_request: - branches: - - "**" - paths: - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - workflow_dispatch: - -defaults: - run: - working-directory: packages/hardhat-core - -concurrency: - group: ${{github.workflow}}-${{github.ref}} - cancel-in-progress: true - -jobs: - test_tracing: - name: Test tracing capabilities on Ubuntu with Node ${{ matrix.node }} - runs-on: ubuntu-latest - strategy: - matrix: - node: [18] - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: ${{ matrix.node }} - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Build - run: yarn build - - name: Cache artifacts - uses: actions/cache@v2 - with: - path: | - packages/hardhat-core/test/internal/hardhat-network/stack-traces/test-files/artifacts - key: hardhat-network-stack-traces-tests-${{ hashFiles('packages/hardhat-core/test/internal/hardhat-network/stack-traces/test-files/**/*.sol') }}-${{ hashFiles('packages/hardhat-core/test/internal/hardhat-network/stack-traces/test-files/**/test.json') }}-${{ hashFiles('packages/hardhat-core/test/internal/hardhat-network/stack-traces/**/*.ts') }} - - name: Run tests - env: - DO_NOT_SET_THIS_ENV_VAR____IS_HARDHAT_CI: true - FORCE_COLOR: 3 - NODE_OPTIONS: "--max-old-space-size=4096" - run: yarn test:tracing diff --git a/.github/workflows/rethnet-ci.yml b/.github/workflows/rethnet-ci.yml deleted file mode 100644 index c4bf5a7eb0..0000000000 --- a/.github/workflows/rethnet-ci.yml +++ /dev/null @@ -1,166 +0,0 @@ -name: rethnet CI - -on: - push: - branches: - - $default-branch - - "rethnet/main" - paths: - - ".github/workflows/rethnet-ci.yml" - - "config/**" - - "crates/**" - - "Cargo.toml" - - "rust-toolchain" - pull_request: - branches: ["**"] - paths: - - ".github/workflows/rethnet-ci.yml" - - "config/**" - - "crates/**" - - "Cargo.toml" - - "rust-toolchain" - -env: - RUSTFLAGS: -Dwarnings - -concurrency: - group: ${{ github.workflow }}-${{ github.ref }} - cancel-in-progress: true - -jobs: - check: - name: Check - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v3 - - - name: Install Rust (stable) - uses: actions-rs/toolchain@v1 - with: - profile: minimal - override: true - - - uses: Swatinem/rust-cache@v2 - - - name: Cargo check - uses: actions-rs/cargo@v1 - with: - command: 
check - args: --workspace --all-features --all-targets - - test-js: - name: Test Node.js - runs-on: ${{ matrix.os }} - needs: check - strategy: - fail-fast: false - matrix: - os: ["ubuntu-latest", "windows-latest", "macOS-latest"] - include: - - RUSTFLAGS: "-Dwarnings" - - os: "windows-latest" - RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" - defaults: - run: - working-directory: crates/rethnet_evm_napi - steps: - - uses: actions/checkout@v3 - - - uses: actions/setup-node@v2 - with: - node-version: 18 - cache: yarn - - - name: Install node dependencies - run: yarn --frozen-lockfile - - - name: Install Rust (stable) - uses: actions-rs/toolchain@v1 - with: - profile: minimal - override: true - components: rustfmt - - - uses: Swatinem/rust-cache@v2 - - - name: Build - run: yarn build - - - name: Test - run: yarn test - - test-rs: - name: Test Rust - runs-on: ${{ matrix.os }} - needs: check - strategy: - fail-fast: false - matrix: - # "windows-latest" removed from the following lines pending https://github.com/napi-rs/napi-rs/issues/1405 - os: ["ubuntu-latest", "macOS-latest"] - include: - - RUSTFLAGS: "-Dwarnings" - #- os: "windows-latest" - # RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" - steps: - - uses: actions/checkout@v3 - - - name: Install Rust (stable) - uses: actions-rs/toolchain@v1 - with: - profile: minimal - override: true - components: rustfmt - - - uses: Swatinem/rust-cache@v2 - - - name: Doctests - uses: actions-rs/cargo@v1 - env: - RUSTFLAGS: ${{ matrix.RUSTFLAGS }} - with: - command: test - args: --doc --workspace --all-features - - - name: Install latest nextest release - uses: taiki-e/install-action@nextest - - - name: Test with latest nextest release - uses: actions-rs/cargo@v1 - env: - RUSTFLAGS: ${{ matrix.RUSTFLAGS }} - CARGO_INCREMENTAL: ${{ matrix.CARGO_INCREMENTAL }} - ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} - with: - command: nextest - args: run --workspace --all-features - - style: - name: Check Style - runs-on: ubuntu-latest - needs: check - steps: - - uses: actions/checkout@v3 - with: - submodules: true - - - name: Install stable toolchain - uses: actions-rs/toolchain@v1 - with: - profile: minimal - override: true - components: clippy, rustfmt - - - uses: Swatinem/rust-cache@v2 - - - name: Run cargo fmt - uses: actions-rs/cargo@v1 - with: - command: fmt - args: --all --check - - - name: Run cargo clippy - uses: actions-rs/clippy-check@v1 - with: - token: ${{ secrets.GITHUB_TOKEN }} - args: --workspace --all-features From aebf527aa98512536c961b9c25c21cbd0f177d36 Mon Sep 17 00:00:00 2001 From: "F. 
Eugene Aumson" Date: Wed, 26 Apr 2023 17:46:17 -0400 Subject: [PATCH 061/406] feat: `StateRef::code_by_hash` worst-case benchmark (#3881) --- crates/rethnet_evm/benches/state.rs | 46 +++++++++++++++++++++++++---- 1 file changed, 40 insertions(+), 6 deletions(-) diff --git a/crates/rethnet_evm/benches/state.rs b/crates/rethnet_evm/benches/state.rs index 3e7e71afda..8aca6d4e29 100644 --- a/crates/rethnet_evm/benches/state.rs +++ b/crates/rethnet_evm/benches/state.rs @@ -1,9 +1,12 @@ use std::{clone::Clone, str::FromStr}; use criterion::{criterion_group, criterion_main, BatchSize, BenchmarkId, Criterion}; -use rethnet_eth::Address; +use rethnet_eth::{Address, Bytes, U256}; use rethnet_evm::state::{HybridState, LayeredState, RethnetLayer, StateError, SyncState}; -use revm::{db::StateRef, primitives::AccountInfo}; +use revm::{ + db::StateRef, + primitives::{AccountInfo, Bytecode}, +}; #[derive(Default)] struct RethnetStates { @@ -16,10 +19,20 @@ impl RethnetStates { let mut states: [&mut dyn SyncState; 2] = [&mut self.layered, &mut self.hybrid]; for state in states.iter_mut() { let number_of_checkpoints = number_of_accounts / number_of_accounts_per_checkpoint; - for _ in 0..=number_of_checkpoints { - for i in 1..=number_of_accounts_per_checkpoint { + for checkpoint_number in 0..number_of_checkpoints { + for account_number in 1..=number_of_accounts_per_checkpoint { + let account_number = + (checkpoint_number * number_of_accounts_per_checkpoint) + account_number; + let address = Address::from_low_u64_ne(account_number); state - .insert_account(Address::from_low_u64_ne(i), AccountInfo::default()) + .insert_account( + address, + AccountInfo::new( + U256::from(account_number), + account_number, + Bytecode::new_raw(Bytes::copy_from_slice(address.as_bytes())), + ), + ) .unwrap(); } state.checkpoint().unwrap(); @@ -115,5 +128,26 @@ fn bench_basic(c: &mut Criterion) { }); } -criterion_group!(benches, bench_insert_account, bench_checkpoint, bench_basic); +fn bench_code_by_hash(c: &mut Criterion) { + bench_sync_state_method(c, "StateRef::code_by_hash", |state, number_of_accounts| { + for i in (1..=number_of_accounts).rev() { + state + .code_by_hash( + Bytecode::new_raw(Bytes::copy_from_slice( + Address::from_low_u64_ne(i).as_bytes(), + )) + .hash(), + ) + .unwrap(); + } + }); +} + +criterion_group!( + benches, + bench_insert_account, + bench_checkpoint, + bench_basic, + bench_code_by_hash +); criterion_main!(benches); From 4ce4c4f1bf09dfc8652e3c9b0feb7a06678589d8 Mon Sep 17 00:00:00 2001 From: "F. 
Eugene Aumson" Date: Thu, 4 May 2023 00:47:59 -0400 Subject: [PATCH 062/406] feat: `StateRef::storage` worst-case benchmark (#3896) --- crates/rethnet_evm/benches/state.rs | 20 +++++++++++++++++++- 1 file changed, 19 insertions(+), 1 deletion(-) diff --git a/crates/rethnet_evm/benches/state.rs b/crates/rethnet_evm/benches/state.rs index 8aca6d4e29..4990b50d38 100644 --- a/crates/rethnet_evm/benches/state.rs +++ b/crates/rethnet_evm/benches/state.rs @@ -34,6 +34,13 @@ impl RethnetStates { ), ) .unwrap(); + state + .set_account_storage_slot( + address, + U256::from(account_number), + U256::from(account_number), + ) + .unwrap(); } state.checkpoint().unwrap(); } @@ -143,11 +150,22 @@ fn bench_code_by_hash(c: &mut Criterion) { }); } +fn bench_storage(c: &mut Criterion) { + bench_sync_state_method(c, "StateRef::storage", |state, number_of_accounts| { + for i in (1..=number_of_accounts).rev() { + state + .storage(Address::from_low_u64_ne(i), U256::from(i)) + .unwrap(); + } + }); +} + criterion_group!( benches, bench_insert_account, bench_checkpoint, bench_basic, - bench_code_by_hash + bench_code_by_hash, + bench_storage, ); criterion_main!(benches); From 43e8572551d1b1b702e5a961e2e478d45c7a1bff Mon Sep 17 00:00:00 2001 From: "F. Eugene Aumson" Date: Thu, 4 May 2023 14:51:23 -0400 Subject: [PATCH 063/406] feat: implement fork state management (#3612) Co-authored-by: Franco Victorio Co-authored-by: Wodann --- .gitignore | 3 + crates/rethnet_eth/Cargo.toml | 6 +- crates/rethnet_eth/src/lib.rs | 4 +- crates/rethnet_eth/src/remote.rs | 808 ++---------- crates/rethnet_eth/src/remote/client.rs | 1159 +++++++++++++++++ crates/rethnet_eth/src/remote/eth.rs | 22 +- crates/rethnet_eth/src/remote/jsonrpc.rs | 78 +- crates/rethnet_eth/src/remote/withdrawal.rs | 41 + crates/rethnet_evm/Cargo.toml | 2 +- crates/rethnet_evm/src/block/builder.rs | 12 +- crates/rethnet_evm/src/lib.rs | 1 + crates/rethnet_evm/src/random.rs | 2 +- crates/rethnet_evm/src/state.rs | 18 +- crates/rethnet_evm/src/state/debug.rs | 6 +- crates/rethnet_evm/src/state/fork.rs | 549 +++++--- crates/rethnet_evm/src/state/history.rs | 10 +- crates/rethnet_evm/src/state/hybrid.rs | 113 +- crates/rethnet_evm/src/state/layered.rs | 67 +- .../rethnet_evm/src/state/layered/changes.rs | 216 ++- crates/rethnet_evm/src/state/remote.rs | 126 +- crates/rethnet_evm/src/state/remote/cached.rs | 110 ++ crates/rethnet_evm/src/state/trie.rs | 14 +- crates/rethnet_evm/src/state/trie/account.rs | 153 ++- crates/rethnet_evm/src/transaction.rs | 2 +- crates/rethnet_evm_napi/Cargo.toml | 2 +- crates/rethnet_evm_napi/src/context.rs | 28 +- crates/rethnet_evm_napi/src/state.rs | 120 +- .../src/transaction/result.rs | 1 - .../rethnet_evm_napi/test/evm/StateManager.ts | 141 +- .../hardhat-network/provider/RethnetState.ts | 40 +- .../provider/fork/ForkStateManager.ts | 13 +- .../internal/hardhat-network/provider/node.ts | 9 +- .../provider/utils/makeForkClient.ts | 65 +- .../hardhat-network/provider/utils/random.ts | 4 + .../provider/vm/block-builder.ts | 4 + .../hardhat-network/provider/vm/dual.ts | 111 +- .../hardhat-network/provider/vm/ethereumjs.ts | 24 +- .../hardhat-network/provider/vm/exit.ts | 12 + .../hardhat-network/provider/vm/rethnet.ts | 55 +- .../hardhat-network/stack-traces/opcodes.ts | 4 + .../hardhat-network/helpers/assertions.ts | 6 +- .../internal/hardhat-network/provider/logs.ts | 10 +- .../provider/modules/eth/hardforks.ts | 2 +- .../provider/utils/assertEqualBlocks.ts | 27 +- .../provider/utils/runFullBlock.ts | 69 +- rust-toolchain | 2 +- 46 
files changed, 2839 insertions(+), 1432 deletions(-) create mode 100644 crates/rethnet_eth/src/remote/client.rs create mode 100644 crates/rethnet_eth/src/remote/withdrawal.rs create mode 100644 crates/rethnet_evm/src/state/remote/cached.rs diff --git a/.gitignore b/.gitignore index 8cd8dd3872..ffff2571db 100644 --- a/.gitignore +++ b/.gitignore @@ -114,3 +114,6 @@ Cargo.lock # VSCode settings .vscode/ + +# Rethnet remote node cache +remote_node_cache/ diff --git a/crates/rethnet_eth/Cargo.toml b/crates/rethnet_eth/Cargo.toml index 814d0aef3c..51b6da90fb 100644 --- a/crates/rethnet_eth/Cargo.toml +++ b/crates/rethnet_eth/Cargo.toml @@ -10,6 +10,7 @@ hash-db = { version = "0.15.2", default-features = false } hash256-std-hasher = { version = "0.15.2", default-features = false } hashbrown = { version = "0.13", default-features = false, features = ["ahash"] } hex = { version = "0.4.3", default-features = false, features = ["alloc"] } +itertools = { version = "0.10.5", default-features = false, features = ["use_alloc"] } open-fastrlp = { version = "0.1.2", default-features = false, features = ["derive"], optional = true } primitive-types = { version = "0.11.1", default-features = false, features = ["rlp"] } reqwest = { version = "0.11", features = ["blocking", "json"] } @@ -25,11 +26,12 @@ thiserror = { version = "1.0.37", default-features = false } triehash = { version = "0.8.4", default-features = false } [dev-dependencies] -test-with = { version = "0.9.1", default-features = false } +mockito = { version = "1.0.2", default-features = false } tokio = { version = "1.23.0", features = ["macros"] } [features] default = ["std"] # fastrlp = ["dep:open-fastrlp", "ruint/fastrlp"] Broken due to lack of support for fastrlp in primitive-types serde = ["dep:serde", "bytes/serde", "ethbloom/serialize", "hashbrown/serde", "primitive-types/serde", "revm-primitives/serde", "ruint/serde", "serde_json"] -std = ["bytes/std", "ethbloom/std", "hash256-std-hasher/std", "hash-db/std", "hex/std", "open-fastrlp?/std", "primitive-types/std", "revm-primitives/std", "rlp/std", "secp256k1/std", "serde?/std", "sha3/std", "triehash/std"] +std = ["bytes/std", "ethbloom/std", "hash256-std-hasher/std", "hash-db/std", "hex/std", "itertools/use_std", "open-fastrlp?/std", "primitive-types/std", "revm-primitives/std", "rlp/std", "secp256k1/std", "serde?/std", "sha3/std", "triehash/std"] +test-disable-remote = [] diff --git a/crates/rethnet_eth/src/lib.rs b/crates/rethnet_eth/src/lib.rs index 19e984169e..6adc087d4d 100644 --- a/crates/rethnet_eth/src/lib.rs +++ b/crates/rethnet_eth/src/lib.rs @@ -28,11 +28,9 @@ pub mod utils; pub use bytes::Bytes; pub use ethbloom::Bloom; -pub use revm_primitives::{B160, B256}; +pub use revm_primitives::{Address, B160, B256}; pub use ruint::aliases::{B512, B64, U256, U64}; -/// An Ethereum address -pub type Address = B160; /// A secret key pub type Secret = B256; /// A public key diff --git a/crates/rethnet_eth/src/remote.rs b/crates/rethnet_eth/src/remote.rs index 5995b5a0d4..f87371e2b0 100644 --- a/crates/rethnet_eth/src/remote.rs +++ b/crates/rethnet_eth/src/remote.rs @@ -1,60 +1,16 @@ -use std::sync::atomic::{AtomicU64, Ordering}; - -use revm_primitives::AccountInfo; - -use crate::{Address, Bytes, B256, U256}; - +mod client; mod eth; mod jsonrpc; +mod withdrawal; -/// Specialized error types -#[derive(thiserror::Error, Debug)] -pub enum RpcClientError { - /// The remote node's response did not conform to the expected format - #[error("Response was not of the expected type")] - 
InterpretationError { - /// A more specific message - msg: String, - /// The body of the request that was submitted to elicit the response - request_body: String, - /// The Rust type which was expected to be decoded from the JSON received - expected_type: String, - /// The body of the response given by the remote node - response_text: String, - }, +use std::fmt::Write; - /// The message could not be sent to the remote node - #[error("Failed to send request")] - SendError { - /// The error message - msg: String, - /// The body of the request that was submitted - request_body: String, - }, +use crate::{Address, B256, U256}; - /// The remote node failed to reply with the body of the response - #[error("Failed to get response body")] - ResponseError { - /// The specific error message - msg: String, - /// The body of the request that was submitted - request_body: String, - }, - - /// Some other error from an underlying dependency - #[error(transparent)] - OtherError(#[from] std::io::Error), -} - -/// A client for executing RPC methods on a remote Ethereum node -#[derive(Debug)] -pub struct RpcClient { - url: String, - client: reqwest::Client, - next_id: AtomicU64, -} +pub use client::{RpcClient, RpcClientError}; struct U64(u64); + impl serde::Serialize for U64 { fn serialize(&self, serializer: S) -> Result where @@ -81,6 +37,73 @@ where seq.end() } +/// a custom implementation because the one from ruint includes leading zeroes and the JSON-RPC +/// server implementations reject that. +fn serialize_u256(x: &U256, s: S) -> Result +where + S: serde::Serializer, +{ + let bytes = x.to_be_bytes_vec(); + + // OPT: Allocation free method. + let mut result = String::with_capacity(2 * U256::BYTES + 2); + result.push_str("0x"); + + let mut leading_zeroes = true; + for byte in bytes { + if leading_zeroes { + if byte != 0 { + write!(result, "{byte:x}").unwrap(); + leading_zeroes = false; + } + continue; + } + write!(result, "{byte:02x}").unwrap(); + } + + // 0x0 + if leading_zeroes { + result.push('0'); + } + + s.serialize_str(&result) +} + +/// For specifying a block +#[derive(Clone)] +pub enum BlockSpec { + /// as a block number + Number(U256), + /// as a block tag (eg "latest") + Tag(String), +} + +impl BlockSpec { + /// Constructs a `BlockSpec` for the latest block. 
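(Editor's illustration, not part of the patch.) The quantity encoding produced by `serialize_u256` above drops leading zero bytes, so `U256::from(0x3e8)` serializes as `"0x3e8"` and `U256::ZERO` as `"0x0"`, the compact form JSON-RPC servers accept. A minimal sketch, assuming a hypothetical `Quantity` wrapper and that `serde_json` is available:

// Hypothetical helper used only to exercise `serialize_u256`; not part of the patch.
#[derive(serde::Serialize)]
struct Quantity(#[serde(serialize_with = "serialize_u256")] U256);

fn quantity_encoding_examples() {
    // Leading zero bytes are stripped: 0x00..0003e8 -> "0x3e8".
    assert_eq!(
        serde_json::to_string(&Quantity(U256::from(0x3e8))).unwrap(),
        r#""0x3e8""#
    );
    // Zero is special-cased to "0x0" rather than an empty hex body.
    assert_eq!(
        serde_json::to_string(&Quantity(U256::ZERO)).unwrap(),
        r#""0x0""#
    );
}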
+ pub fn latest() -> Self { + Self::Tag(String::from("latest")) + } +} + +#[derive(serde::Serialize)] +#[serde(untagged)] +enum SerializableBlockSpec { + /// as a block number + #[serde(serialize_with = "serialize_u256")] + Number(U256), + /// as a block tag (eg "latest") + Tag(String), +} + +impl From for SerializableBlockSpec { + fn from(block_spec: BlockSpec) -> SerializableBlockSpec { + match block_spec { + BlockSpec::Number(n) => SerializableBlockSpec::Number(U256::from(n)), + BlockSpec::Tag(s) => SerializableBlockSpec::Tag(s), + } + } +} + #[derive(serde::Serialize)] #[serde(tag = "method", content = "params")] enum MethodInvocation { @@ -89,9 +112,7 @@ enum MethodInvocation { Address, /// position U256, - /// block_number - #[serde(skip_serializing_if = "Option::is_none")] - Option, + SerializableBlockSpec, ), #[serde( rename = "eth_getTransactionByHash", @@ -106,693 +127,30 @@ enum MethodInvocation { #[serde(rename = "eth_getLogs", serialize_with = "single_to_sequence")] Logs(GetLogsInput), #[serde(rename = "eth_getBalance")] - Balance( - Address, - /// block number - #[serde(skip_serializing_if = "Option::is_none")] - Option, - ), + Balance(Address, SerializableBlockSpec), #[serde(rename = "eth_getBlockByHash")] BlockByHash( /// hash B256, - /// include transactions + /// include transaction data bool, ), #[serde(rename = "eth_getBlockByNumber")] - BlockByNumber( - /// block number - U64, - /// include transactions + Block( + SerializableBlockSpec, + /// include transaction data bool, ), #[serde(rename = "eth_getCode")] - Code( - Address, - /// block number - #[serde(skip_serializing_if = "Option::is_none")] - Option, - ), + Code(Address, SerializableBlockSpec), #[serde(rename = "eth_getTransactionCount")] - TxCount( - Address, - /// block number - #[serde(skip_serializing_if = "Option::is_none")] - Option, - ), -} - -struct Response { - text: String, - request_body: String, - request_id: jsonrpc::Id, -} - -struct BatchResponse { - text: String, - request_body: String, - request_ids: Vec, + TxCount(Address, SerializableBlockSpec), } #[derive(serde::Serialize)] #[serde(rename_all = "camelCase")] struct GetLogsInput { - from_block: U64, - to_block: U64, + from_block: SerializableBlockSpec, + to_block: SerializableBlockSpec, address: Address, } - -#[derive(serde::Serialize)] -struct Request<'a> { - version: jsonrpc::Version, - #[serde(flatten)] - method: &'a MethodInvocation, - id: jsonrpc::Id, -} - -impl RpcClient { - fn verify_success(response: Response) -> Result - where - T: for<'a> serde::Deserialize<'a>, - { - let response_text = response.text.clone(); - let success: jsonrpc::Success = serde_json::from_str(&response.text).map_err(|err| { - RpcClientError::InterpretationError { - msg: err.to_string(), - request_body: response.request_body, - expected_type: format!( - "rethnet_eth::remote::jsonrpc::Success<{}>", - std::any::type_name::() - ), - response_text, - } - })?; - - assert_eq!(success.id, response.request_id); - - Ok(success.result) - } - - /// returns response text - async fn send_request_body(&self, request_body: String) -> Result { - use RpcClientError::{ResponseError, SendError}; - self.client - .post(self.url.to_string()) - .body(request_body.to_string()) - .send() - .await - .map_err(|err| SendError { - msg: err.to_string(), - request_body: request_body.to_string(), - })? 
- .text() - .await - .map_err(|err| ResponseError { - msg: err.to_string(), - request_body: request_body.to_string(), - }) - } - - async fn call(&self, input: &MethodInvocation) -> Result - where - T: for<'a> serde::Deserialize<'a>, - { - let id = jsonrpc::Id::Num(self.next_id.fetch_add(1, Ordering::Relaxed)); - let json = serde_json::json!(Request { - version: crate::remote::jsonrpc::Version::V2_0, - id: id.clone(), - method: input, - }) - .to_string(); - - Self::verify_success(Response { - request_id: id, - request_body: json.clone(), - text: self.send_request_body(json).await?, - }) - } - - async fn batch_call( - &self, - inputs: &[MethodInvocation], - ) -> Result { - let (request_strings, request_ids): (Vec, Vec) = inputs - .iter() - .map(|i| { - let id = jsonrpc::Id::Num(self.next_id.fetch_add(1, Ordering::Relaxed)); - let json = serde_json::json!(Request { - version: crate::remote::jsonrpc::Version::V2_0, - id: id.clone(), - method: i, - }) - .to_string(); - (json, id) - }) - .unzip(); - - let request_body = format!("[{}]", request_strings.join(",")); - - let response_text = self.send_request_body(request_body.clone()).await?; - - Ok(BatchResponse { - request_body, - request_ids, - text: response_text, - }) - } - - /// Create a new RpcClient instance, given a remote node URL. - pub fn new(url: &str) -> Self { - RpcClient { - url: url.to_string(), - client: reqwest::Client::new(), - next_id: AtomicU64::new(0), - } - } - - /// eth_getTransactionByHash - pub async fn get_tx_by_hash(&self, tx_hash: &B256) -> Result { - self.call(&MethodInvocation::TxByHash(*tx_hash)).await - } - - /// eth_getTransactionReceipt - pub async fn get_tx_receipt( - &self, - tx_hash: &B256, - ) -> Result { - self.call(&MethodInvocation::TxReceipt(*tx_hash)).await - } - - /// eth_getLogs - pub async fn get_logs( - &self, - from_block: u64, - to_block: u64, - address: &Address, - ) -> Result, RpcClientError> { - self.call(&MethodInvocation::Logs(GetLogsInput { - from_block: U64::from(from_block), - to_block: U64::from(to_block), - address: *address, - })) - .await - } - - /// eth_getBlockByHash - pub async fn get_block_by_hash( - &self, - hash: &B256, - include_transactions: bool, - ) -> Result, RpcClientError> { - self.call(&MethodInvocation::BlockByHash(*hash, include_transactions)) - .await - } - - /// eth_getBlockByNumber - pub async fn get_block_by_number( - &self, - number: u64, - include_transactions: bool, - ) -> Result, RpcClientError> { - self.call(&MethodInvocation::BlockByNumber( - U64::from(number), - include_transactions, - )) - .await - } - - /// eth_getTransactionCount - pub async fn get_transaction_count( - &self, - address: &Address, - block_number: Option, - ) -> Result { - self.call(&MethodInvocation::TxCount( - *address, - block_number.map(U64::from), - )) - .await - } - - /// eth_getStorageAt - pub async fn get_storage_at( - &self, - address: &Address, - position: U256, - block_number: Option, - ) -> Result { - self.call(&MethodInvocation::StorageAt( - *address, - position, - block_number.map(U64::from), - )) - .await - } - - /// Submit a consolidated batch of RPC method invocations in order to obtain the set of data - /// contained in AccountInfo. 
- pub async fn get_account_info( - &self, - address: &Address, - block_number: Option, - ) -> Result { - let inputs = Vec::from([ - MethodInvocation::Balance(*address, block_number.map(U64::from)), - MethodInvocation::Code(*address, block_number.map(U64::from)), - MethodInvocation::TxCount(*address, block_number.map(U64::from)), - ]); - - let response = self.batch_call(&inputs).await?; - - let results: ( - jsonrpc::Success, - jsonrpc::Success, - jsonrpc::Success, - ) = serde_json::from_str(&response.text).map_err(|err| { - RpcClientError::InterpretationError { - msg: err.to_string(), - request_body: response.request_body.clone(), - expected_type: String::from("Array"), - response_text: response.text.clone(), - } - })?; - - assert_eq!(results.0.id, response.request_ids[0]); - assert_eq!(results.1.id, response.request_ids[1]); - assert_eq!(results.2.id, response.request_ids[2]); - - let code = revm_primitives::Bytecode::new_raw(results.1.result); - - Ok(AccountInfo { - balance: results.0.result, - code: Some(code.clone()), - code_hash: code.hash(), - nonce: results.2.result.to(), - }) - } -} - -#[cfg(test)] -mod tests { - use std::str::FromStr; - - use crate::{Address, Bytes, U256}; - - use super::*; - - fn get_alchemy_url() -> String { - std::env::var_os("ALCHEMY_URL") - .expect("ALCHEMY_URL environment variable not defined") - .into_string() - .expect("Couldn't convert OsString into a String") - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_tx_by_hash_success() { - let alchemy_url = get_alchemy_url(); - - let hash = - B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") - .expect("failed to parse hash from string"); - - let tx: eth::Transaction = RpcClient::new(&alchemy_url) - .get_tx_by_hash(&hash) - .await - .expect("failed to get transaction by hash"); - - assert_eq!( - tx.block_hash, - Some( - B256::from_str( - "0x88fadbb673928c61b9ede3694ae0589ac77ae38ec90a24a6e12e83f42f18c7e8" - ) - .expect("couldn't parse data") - ) - ); - assert_eq!( - tx.block_number, - Some(u64::from_str_radix("a74fde", 16).expect("couldn't parse data")) - ); - assert_eq!(tx.hash, hash); - assert_eq!( - tx.from, - Address::from_str("0x7d97fcdb98632a91be79d3122b4eb99c0c4223ee") - .expect("couldn't parse data") - ); - assert_eq!( - tx.gas, - U256::from_str_radix("30d40", 16).expect("couldn't parse data") - ); - assert_eq!( - tx.gas_price, - Some(U256::from_str_radix("1e449a99b8", 16).expect("couldn't parse data")) - ); - assert_eq!( - tx.input, - Bytes::from("0xa9059cbb000000000000000000000000e2c1e729e05f34c07d80083982ccd9154045dcc600000000000000000000000000000000000000000000000000000004a817c800") - ); - assert_eq!( - tx.nonce, - U256::from_str_radix("653b", 16).expect("couldn't parse data") - ); - assert_eq!( - tx.r, - U256::from_str_radix( - "eb56df45bd355e182fba854506bc73737df275af5a323d30f98db13fdf44393a", - 16 - ) - .expect("couldn't parse data") - ); - assert_eq!( - tx.s, - U256::from_str_radix( - "2c6efcd210cdc7b3d3191360f796ca84cab25a52ed8f72efff1652adaabc1c83", - 16 - ) - .expect("couldn't parse data") - ); - assert_eq!( - tx.to, - Some( - Address::from_str("dac17f958d2ee523a2206206994597c13d831ec7") - .expect("couldn't parse data") - ) - ); - assert_eq!( - tx.transaction_index, - Some(u64::from_str_radix("88", 16).expect("couldn't parse data")) - ); - assert_eq!( - tx.v, - u64::from_str_radix("1c", 16).expect("couldn't parse data") - ); - assert_eq!( - tx.value, - U256::from_str_radix("0", 16).expect("couldn't parse data") - ); - } - - 
#[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_tx_by_hash_dns_error() { - let alchemy_url = "https://xxxeth-mainnet.g.alchemy.com"; - - let hash = - B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") - .expect("failed to parse hash from string"); - - let error_string = format!( - "{:?}", - RpcClient::new(alchemy_url) - .get_tx_by_hash(&hash) - .await - .expect_err("should have failed to connect to a garbage domain name") - ); - - assert!(error_string.contains("SendError")); - assert!(error_string.contains("dns error")); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_tx_by_hash_bad_api_key() { - let alchemy_url = "https://eth-mainnet.g.alchemy.com/v2/abcdefg"; - - let hash = - B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") - .expect("failed to parse hash from string"); - - let error_string = format!( - "{:?}", - RpcClient::new(alchemy_url) - .get_tx_by_hash(&hash) - .await - .expect_err("should have failed to interpret response as a Transaction") - ); - - assert!(error_string.contains("InterpretationError")); - assert!(error_string.contains("Success")); - assert!(error_string.contains("Must be authenticated!")); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_tx_receipt_success() { - let alchemy_url = get_alchemy_url(); - - let hash = - B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") - .expect("failed to parse hash from string"); - - let receipt: eth::TransactionReceipt = RpcClient::new(&alchemy_url) - .get_tx_receipt(&hash) - .await - .expect("failed to get transaction by hash"); - - assert_eq!( - receipt.block_hash, - Some( - B256::from_str( - "0x88fadbb673928c61b9ede3694ae0589ac77ae38ec90a24a6e12e83f42f18c7e8" - ) - .expect("couldn't parse data") - ) - ); - assert_eq!( - receipt.block_number, - Some(u64::from_str_radix("a74fde", 16).expect("couldn't parse data")) - ); - assert_eq!(receipt.contract_address, None); - assert_eq!( - receipt.cumulative_gas_used, - U256::from_str_radix("56c81b", 16).expect("couldn't parse data") - ); - assert_eq!( - receipt.effective_gas_price, - Some(U256::from_str_radix("1e449a99b8", 16).expect("couldn't parse data")) - ); - assert_eq!( - receipt.from, - Address::from_str("0x7d97fcdb98632a91be79d3122b4eb99c0c4223ee") - .expect("couldn't parse data") - ); - assert_eq!( - receipt.gas_used, - Some(U256::from_str_radix("a0f9", 16).expect("couldn't parse data")) - ); - assert_eq!(receipt.logs.len(), 1); - assert_eq!(receipt.root, None); - assert_eq!(receipt.status, Some(1)); - assert_eq!( - receipt.to, - Some( - Address::from_str("dac17f958d2ee523a2206206994597c13d831ec7") - .expect("couldn't parse data") - ) - ); - assert_eq!(receipt.transaction_hash, hash); - assert_eq!(receipt.transaction_index, 136); - assert_eq!(receipt.transaction_type, Some(0)); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_tx_receipt_dns_error() { - let alchemy_url = "https://xxxeth-mainnet.g.alchemy.com"; - - let hash = - B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") - .expect("failed to parse hash from string"); - - let error_string = format!( - "{:?}", - RpcClient::new(alchemy_url) - .get_tx_receipt(&hash) - .await - .expect_err("should have failed to connect to a garbage domain name") - ); - - assert!(error_string.contains("SendError")); - assert!(error_string.contains("dns error")); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn 
get_tx_receipt_bad_api_key() { - let alchemy_url = "https://eth-mainnet.g.alchemy.com/v2/abcdefg"; - - let hash = - B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a") - .expect("failed to parse hash from string"); - - let error_string = format!( - "{:?}", - RpcClient::new(alchemy_url) - .get_tx_receipt(&hash) - .await - .expect_err("should have failed to interpret response as a Receipt") - ); - - assert!(error_string.contains("InterpretationError")); - assert!(error_string.contains("Success")); - assert!(error_string.contains("Must be authenticated!")); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_logs_success() { - let alchemy_url = get_alchemy_url(); - let logs = RpcClient::new(&alchemy_url) - .get_logs( - 10496585, - 10496585, - &Address::from_str("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2") - .expect("failed to parse data"), - ) - .await - .expect("failed to get logs"); - assert_eq!(logs.len(), 12); - // TODO: assert more things about the log(s) - // TODO: consider asserting something about the logs bloom - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_block_by_hash_success() { - let alchemy_url = get_alchemy_url(); - - let hash = - B256::from_str("0x71d5e7c8ff9ea737034c16e333a75575a4a94d29482e0c2b88f0a6a8369c1812") - .expect("failed to parse hash from string"); - - let block = RpcClient::new(&alchemy_url) - .get_block_by_hash(&hash, true) - .await - .expect("should have succeeded"); - - assert_eq!(block.hash, Some(hash)); - assert_eq!(block.transactions.len(), 192); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_block_by_number_success() { - let alchemy_url = get_alchemy_url(); - - let block_number = 16222385; - - let block = RpcClient::new(&alchemy_url) - .get_block_by_number(block_number, true) - .await - .expect("should have succeeded"); - - assert_eq!(block.number, Some(block_number)); - assert_eq!(block.transactions.len(), 102); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_storage_at_with_block_success() { - let alchemy_url = get_alchemy_url(); - - let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") - .expect("failed to parse address"); - - let total_supply: U256 = RpcClient::new(&alchemy_url) - .get_storage_at( - &dai_address, - U256::from_str_radix( - "0000000000000000000000000000000000000000000000000000000000000001", - 16, - ) - .expect("failed to parse storage location"), - Some(16220843), - ) - .await - .expect("should have succeeded"); - - assert_eq!( - total_supply, - U256::from_str_radix( - "000000000000000000000000000000000000000010a596ae049e066d4991945c", - 16 - ) - .expect("failed to parse storage location") - ); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_storage_at_success() { - let alchemy_url = get_alchemy_url(); - - let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") - .expect("failed to parse address"); - - let total_supply: U256 = RpcClient::new(&alchemy_url) - .get_storage_at( - &dai_address, - U256::from_str_radix( - "0000000000000000000000000000000000000000000000000000000000000001", - 16, - ) - .expect("failed to parse storage location"), - None, - ) - .await - .expect("should have succeeded"); - - assert!(total_supply > U256::from(0)); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_transaction_count_success() { - let alchemy_url = get_alchemy_url(); - - let dai_address = 
Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") - .expect("failed to parse address"); - - let transaction_count = RpcClient::new(&alchemy_url) - .get_transaction_count(&dai_address, Some(16220843)) - .await - .expect("should have succeeded"); - - assert_eq!(transaction_count, U256::from(1)); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_account_info_with_block_success() { - let alchemy_url = get_alchemy_url(); - - let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") - .expect("failed to parse address"); - - let account_info = RpcClient::new(&alchemy_url) - .get_account_info(&dai_address, Some(16220843)) - .await - .expect("should have succeeded"); - - assert_eq!(account_info.balance, U256::from(0)); - assert_eq!(account_info.nonce, 1); - } - - #[test_with::env(ALCHEMY_URL)] - #[tokio::test] - async fn get_account_info_success() { - let alchemy_url = get_alchemy_url(); - - let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") - .expect("failed to parse address"); - - let account_info = RpcClient::new(&alchemy_url) - .get_account_info(&dai_address, None) - .await - .expect("should have succeeded"); - - assert_eq!(account_info.balance, U256::from(0)); - assert_eq!(account_info.nonce, 1); - } -} diff --git a/crates/rethnet_eth/src/remote/client.rs b/crates/rethnet_eth/src/remote/client.rs new file mode 100644 index 0000000000..602684fd00 --- /dev/null +++ b/crates/rethnet_eth/src/remote/client.rs @@ -0,0 +1,1159 @@ +use std::{ + io, + sync::atomic::{AtomicU64, Ordering}, +}; + +use itertools::Itertools; +use revm_primitives::{AccountInfo, Address, Bytecode, B256, KECCAK_EMPTY, U256}; + +use super::{eth, jsonrpc, BlockSpec, GetLogsInput, MethodInvocation}; + +/// Specialized error types +#[derive(thiserror::Error, Debug)] +pub enum RpcClientError { + /// The message could not be sent to the remote node + #[error(transparent)] + FailedToSend(reqwest::Error), + + /// The remote node failed to reply with the body of the response + #[error("The response text was corrupted: {0}.")] + CorruptedResponse(reqwest::Error), + + /// The server returned an error code. + #[error("The Http server returned error status code: {0}")] + HttpStatus(reqwest::Error), + + /// The JSON-RPC returned an error. + #[error("{error}. 
Request: {request}")] + JsonRpcError { + /// The JSON-RPC error + error: jsonrpc::Error, + /// The request JSON + request: String, + }, + + /// Some other error from an underlying dependency + #[error(transparent)] + OtherError(#[from] io::Error), +} + +#[derive(serde::Serialize)] +struct Request<'a> { + version: jsonrpc::Version, + #[serde(flatten)] + method: &'a MethodInvocation, + id: &'a jsonrpc::Id, +} + +#[derive(Debug)] +struct BatchResponse { + text: String, + request_strings: Vec, +} + +/// A client for executing RPC methods on a remote Ethereum node +#[derive(Debug)] +pub struct RpcClient { + url: String, + client: reqwest::Client, + next_id: AtomicU64, +} + +impl RpcClient { + fn extract_response(response: &str, request_id: &jsonrpc::Id) -> Result + where + T: for<'a> serde::Deserialize<'a>, + { + let response: jsonrpc::Response = + serde_json::from_str(response).unwrap_or_else(|error| { + panic!( + "Response `{response}` failed to parse with expected type `{expected_type}`, due to error: {error}", + expected_type = std::any::type_name::() + ) + }); + + debug_assert_eq!(response.id, *request_id); + + response.data.into_result() + } + + fn hash_string(input: &str) -> String { + use std::hash::Hasher; + let mut hasher = std::collections::hash_map::DefaultHasher::new(); + hasher.write(input.as_bytes()); + hasher.finish().to_string() + } + + fn make_cache_path(&self, request_body: &str) -> std::path::PathBuf { + // TODO: consider using a better path for this directory. currently, for test runs, + // it's going to crates/rethnet_eth/remote_node_cache. shouldn't it be rooted somewhere + // more accessible/convenient? + let directory = format!("remote_node_cache/{}", Self::hash_string(&self.url)); + + // ensure directory exists + std::fs::DirBuilder::new() + .recursive(true) + .create(directory.clone()) + .expect("failed to create on-disk RPC response cache"); + + std::path::Path::new(&directory).join(format!("{}.json", Self::hash_string(request_body))) + } + + fn read_response_from_cache(&self, request_body: &str) -> Option { + if let Ok(mut file) = std::fs::File::open(self.make_cache_path(request_body)) { + let mut response = String::new(); + if io::Read::read_to_string(&mut file, &mut response).is_ok() { + Some(response) + } else { + None + } + } else { + None + } + } + + fn write_response_to_cache(&self, request_body: &str, response: &str) { + std::fs::write(self.make_cache_path(request_body), response.as_bytes()) + .expect("failed to write to on-disk RPC response cache") + } + + /// returns response text + async fn send_request_body(&self, request_body: &str) -> Result { + if let Some(cached_response) = self.read_response_from_cache(request_body) { + Ok(cached_response) + } else { + let response = self + .client + .post(self.url.to_string()) + .body(request_body.to_owned()) + .send() + .await + .map_err(RpcClientError::FailedToSend)? + .error_for_status() + .map_err(RpcClientError::HttpStatus)? 
+ .text() + .await + .map_err(RpcClientError::CorruptedResponse)?; + + self.write_response_to_cache(request_body, &response); + Ok(response) + } + } + + async fn call(&self, input: &MethodInvocation) -> Result + where + T: for<'a> serde::Deserialize<'a>, + { + let request_id = jsonrpc::Id::Num(self.next_id.fetch_add(1, Ordering::Relaxed)); + let request = serde_json::json!(Request { + version: crate::remote::jsonrpc::Version::V2_0, + id: &request_id, + method: input, + }) + .to_string(); + + self.send_request_body(&request).await.and_then(|response| { + Self::extract_response(&response, &request_id) + .map_err(|error| RpcClientError::JsonRpcError { error, request }) + }) + } + + async fn batch_call( + &self, + inputs: &[MethodInvocation], + ) -> Result { + let request_strings: Vec = inputs + .iter() + .map(|i| { + let request_id = self.next_id.fetch_add(1, Ordering::Relaxed); + serde_json::json!(Request { + version: crate::remote::jsonrpc::Version::V2_0, + id: &jsonrpc::Id::Num(request_id), + method: i, + }) + .to_string() + }) + .collect(); + + let request_body = format!("[{}]", request_strings.join(",")); + + self.send_request_body(&request_body) + .await + .map(|response| BatchResponse { + text: response, + request_strings, + }) + } + + /// Create a new RpcClient instance, given a remote node URL. + pub fn new(url: &str) -> Self { + RpcClient { + url: url.to_string(), + client: reqwest::Client::new(), + next_id: AtomicU64::new(0), + } + } + + /// Submit a consolidated batch of RPC method invocations in order to obtain the set of data + /// contained in AccountInfo. + pub async fn get_account_info( + &self, + address: &Address, + block: BlockSpec, + ) -> Result { + let inputs = Vec::from([ + MethodInvocation::Balance(*address, block.clone().into()), + MethodInvocation::TxCount(*address, block.clone().into()), + MethodInvocation::Code(*address, block.into()), + ]); + + let response = self.batch_call(&inputs).await?; + + type BatchResult = ( + jsonrpc::Response, + jsonrpc::Response, + jsonrpc::Response, + ); + + let responses: Vec = serde_json::from_str(&response.text) + .unwrap_or_else(|error| { + panic!("Batch response `{response:?}` failed to parse due to error: {error}") + }); + + let response_ids: Vec = responses + .iter() + .map(|value| { + value + .get("id") + .expect("Response must have ID") + .as_u64() + .expect("Response ID must be a `u64`") + }) + .collect(); + + let (balance_response, nonce_response, code_response) = responses + .into_iter() + .zip(response_ids.into_iter()) + .sorted_by(|(_, id1), (_, id2)| id1.cmp(id2)) + .map(|(response, _)| response) + .tuples() + .next() + .unwrap_or_else(|| { + panic!( + "Batch response must contain 3 elements. Response: {}", + response.text.clone(), + ) + }); + + let (balance_request, nonce_request, code_request) = response + .request_strings + .into_iter() + .tuples() + .next() + .expect("request strings must contain 3 elements"); + + let balance = serde_json::from_value::>(balance_response) + .map_err(|err| { + panic!( + "Failed to deserialize balance due to error: {:?}. Response: {}", + err, + response.text.clone() + ) + }) + .and_then(|response| { + response + .data + .into_result() + .map_err(|error| RpcClientError::JsonRpcError { + error, + request: balance_request, + }) + })?; + + let nonce = serde_json::from_value::>(nonce_response) + .map_err(|err| { + panic!( + "Failed to deserialize nonce due to error: {:?}. 
Response: {}", + err, + response.text.clone() + ) + }) + .and_then(|response| { + response.data.into_result().map_or_else( + |error| { + Err(RpcClientError::JsonRpcError { + error, + request: nonce_request, + }) + }, + |nonce| Ok(nonce.to()), + ) + })?; + + let code = + serde_json::from_value::>(code_response) + .map_err(|err| { + panic!( + "Failed to deserialize code due to error: {:?}. Response: {}", + err, + response.text.clone(), + ) + }) + .and_then(|response| { + response.data.into_result().map_or_else( + |error| { + Err(RpcClientError::JsonRpcError { + error, + request: code_request, + }) + }, + |bytes| { + Ok(if bytes.inner.is_empty() { + None + } else { + Some(Bytecode::new_raw(bytes.inner)) + }) + }, + ) + })?; + + Ok(AccountInfo { + balance, + code_hash: code.as_ref().map_or(KECCAK_EMPTY, Bytecode::hash), + code, + nonce, + }) + } + + /// Calls `eth_getBlockByHash` and returns the transaction's hash. + pub async fn get_block_by_hash( + &self, + hash: &B256, + ) -> Result>, RpcClientError> { + self.call(&MethodInvocation::BlockByHash(*hash, false)) + .await + } + + /// Calls `eth_getBlockByHash` and returns the transaction's data. + pub async fn get_block_by_hash_with_transaction_data( + &self, + hash: &B256, + ) -> Result>, RpcClientError> { + self.call(&MethodInvocation::BlockByHash(*hash, true)).await + } + + /// Calls `eth_getBlockByNumber` and returns the transaction's hash. + pub async fn get_block_by_number( + &self, + spec: BlockSpec, + ) -> Result, RpcClientError> { + self.call(&MethodInvocation::Block(spec.into(), false)) + .await + } + + /// Calls `eth_getBlockByNumber` and returns the transaction's data. + pub async fn get_block_by_number_with_transaction_data( + &self, + spec: BlockSpec, + ) -> Result, RpcClientError> { + self.call(&MethodInvocation::Block(spec.into(), true)).await + } + + /// eth_getLogs + pub async fn get_logs( + &self, + from_block: BlockSpec, + to_block: BlockSpec, + address: &Address, + ) -> Result, RpcClientError> { + self.call(&MethodInvocation::Logs(GetLogsInput { + from_block: from_block.into(), + to_block: to_block.into(), + address: *address, + })) + .await + } + + /// eth_getTransactionByHash + pub async fn get_transaction_by_hash( + &self, + tx_hash: &B256, + ) -> Result, RpcClientError> { + self.call(&MethodInvocation::TxByHash(*tx_hash)).await + } + + /// eth_getTransactionCount + pub async fn get_transaction_count( + &self, + address: &Address, + block: BlockSpec, + ) -> Result { + self.call(&MethodInvocation::TxCount(*address, block.into())) + .await + } + + /// eth_getTransactionReceipt + pub async fn get_transaction_receipt( + &self, + tx_hash: &B256, + ) -> Result, RpcClientError> { + self.call(&MethodInvocation::TxReceipt(*tx_hash)).await + } + + /// eth_getStorageAt + pub async fn get_storage_at( + &self, + address: &Address, + position: U256, + block: BlockSpec, + ) -> Result { + self.call(&MethodInvocation::StorageAt( + *address, + position, + block.into(), + )) + .await + } +} + +#[cfg(test)] +mod tests { + use reqwest::StatusCode; + + use super::*; + + use std::str::FromStr; + + #[tokio::test] + async fn send_request_body_500_status() { + const STATUS_CODE: u16 = 500; + + let mut server = mockito::Server::new_async().await; + + let mock = server + .mock("POST", "/") + .with_status(STATUS_CODE.into()) + .with_header("content-type", "text/plain") + .create_async() + .await; + + let hash = + B256::from_str("0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933022222") + .expect("failed to parse hash from string"); + + 
let error = RpcClient::new(&server.url()) + .call::>(&MethodInvocation::TxByHash(hash)) + .await + .expect_err("should have failed to interpret response as a Transaction"); + + if let RpcClientError::HttpStatus(error) = error { + assert_eq!( + error.status(), + Some(StatusCode::from_u16(STATUS_CODE).unwrap()) + ); + } else { + unreachable!("Invalid error"); + } + + mock.assert_async().await; + } + + #[cfg(not(feature = "test-disable-remote"))] + mod alchemy { + use crate::Bytes; + + use super::*; + + fn get_alchemy_url() -> String { + match std::env::var_os("ALCHEMY_URL") + .expect("ALCHEMY_URL environment variable not defined") + .into_string() + .expect("Couldn't convert OsString into a String") + { + url if url.is_empty() => panic!("ALCHEMY_URL environment variable is empty"), + url => url, + } + } + + #[tokio::test] + async fn call_bad_api_key() { + let alchemy_url = "https://eth-mainnet.g.alchemy.com/v2/abcdefg"; + + let hash = B256::from_str( + "0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933022222", + ) + .expect("failed to parse hash from string"); + + let error = RpcClient::new(alchemy_url) + .call::>(&MethodInvocation::TxByHash(hash)) + .await + .expect_err("should have failed to interpret response as a Transaction"); + + if let RpcClientError::JsonRpcError { error, .. } = error { + assert_eq!(error.code, -32000); + assert_eq!(error.message, "Must be authenticated!"); + assert!(error.data.is_none()); + } else { + unreachable!("Invalid error"); + } + } + + #[tokio::test] + async fn call_failed_to_send_error() { + let alchemy_url = "https://xxxeth-mainnet.g.alchemy.com/"; + + let hash = B256::from_str( + "0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933051111", + ) + .expect("failed to parse hash from string"); + + let error = RpcClient::new(alchemy_url) + .call::>(&MethodInvocation::TxByHash(hash)) + .await + .expect_err("should have failed to connect due to a garbage domain name"); + + if let RpcClientError::FailedToSend(error) = error { + assert!(error.to_string().contains(&format!("error sending request for url ({alchemy_url}): error trying to connect: dns error: "))); + } else { + unreachable!("Invalid error"); + } + } + + #[tokio::test] + async fn get_account_info_contract() { + let alchemy_url = get_alchemy_url(); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let account_info = RpcClient::new(&alchemy_url) + .get_account_info(&dai_address, BlockSpec::Number(U256::from(16220843))) + .await + .expect("should have succeeded"); + + assert_eq!(account_info.balance, U256::ZERO); + assert_eq!(account_info.nonce, 1); + assert_ne!(account_info.code_hash, KECCAK_EMPTY); + assert!(account_info.code.is_some()) + } + + #[tokio::test] + async fn get_account_info_empty_account() { + let alchemy_url = get_alchemy_url(); + + let empty_address = Address::from_str("0xffffffffffffffffffffffffffffffffffffffff") + .expect("failed to parse address"); + + let account_info = RpcClient::new(&alchemy_url) + .get_account_info(&empty_address, BlockSpec::Number(U256::from(1))) + .await + .expect("should have succeeded"); + + assert_eq!(account_info.balance, U256::ZERO); + assert_eq!(account_info.nonce, 0); + assert_eq!(account_info.code_hash, KECCAK_EMPTY); + assert!(account_info.code.is_none()) + } + + #[tokio::test] + async fn get_account_info_future_block() { + let alchemy_url = get_alchemy_url(); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + 
.expect("failed to parse address"); + + let error = RpcClient::new(&alchemy_url) + .get_account_info(&dai_address, BlockSpec::Number(U256::MAX)) + .await + .expect_err("should have failed"); + + if let RpcClientError::JsonRpcError { error, .. } = error { + assert_eq!(error.code, -32000); + assert_eq!(error.message, "header for hash not found"); + assert!(error.data.is_none()); + } else { + unreachable!("Invalid error"); + } + } + + #[tokio::test] + async fn get_account_info_latest_contract() { + let alchemy_url = get_alchemy_url(); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let account_info = RpcClient::new(&alchemy_url) + .get_account_info(&dai_address, BlockSpec::Tag("latest".to_string())) + .await + .expect("should have succeeded"); + + assert_ne!(account_info.code_hash, KECCAK_EMPTY); + assert!(account_info.code.is_some()); + } + + #[tokio::test] + async fn get_block_by_hash_some() { + let alchemy_url = get_alchemy_url(); + + let hash = B256::from_str( + "0x71d5e7c8ff9ea737034c16e333a75575a4a94d29482e0c2b88f0a6a8369c1812", + ) + .expect("failed to parse hash from string"); + + let block = RpcClient::new(&alchemy_url) + .get_block_by_hash(&hash) + .await + .expect("should have succeeded"); + + assert!(block.is_some()); + let block = block.unwrap(); + + assert_eq!(block.hash, Some(hash)); + assert_eq!(block.transactions.len(), 192); + } + + #[tokio::test] + async fn get_block_by_hash_none() { + let alchemy_url = get_alchemy_url(); + + let hash = B256::from_str( + "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + ) + .expect("failed to parse hash from string"); + + let block = RpcClient::new(&alchemy_url) + .get_block_by_hash(&hash) + .await + .expect("should have succeeded"); + + assert!(block.is_none()); + } + + #[tokio::test] + async fn get_block_by_hash_with_transaction_data_some() { + let alchemy_url = get_alchemy_url(); + + let hash = B256::from_str( + "0x71d5e7c8ff9ea737034c16e333a75575a4a94d29482e0c2b88f0a6a8369c1812", + ) + .expect("failed to parse hash from string"); + + let block = RpcClient::new(&alchemy_url) + .get_block_by_hash_with_transaction_data(&hash) + .await + .expect("should have succeeded"); + + assert!(block.is_some()); + let block = block.unwrap(); + + assert_eq!(block.hash, Some(hash)); + assert_eq!(block.transactions.len(), 192); + } + + #[tokio::test] + async fn get_block_by_hash_with_transaction_data_none() { + let alchemy_url = get_alchemy_url(); + + let hash = B256::from_str( + "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + ) + .expect("failed to parse hash from string"); + + let block = RpcClient::new(&alchemy_url) + .get_block_by_hash_with_transaction_data(&hash) + .await + .expect("should have succeeded"); + + assert!(block.is_none()); + } + + #[tokio::test] + async fn get_block_by_number_some() { + let alchemy_url = get_alchemy_url(); + + let block_number = U256::from(16222385); + + let block = RpcClient::new(&alchemy_url) + .get_block_by_number(BlockSpec::Number(block_number)) + .await + .expect("should have succeeded"); + + assert_eq!(block.number, Some(block_number)); + assert_eq!(block.transactions.len(), 102); + } + + #[tokio::test] + async fn get_block_by_number_none() { + let alchemy_url = get_alchemy_url(); + + let block_number = U256::MAX; + + let error = RpcClient::new(&alchemy_url) + .get_block_by_number(BlockSpec::Number(block_number)) + .await + .expect_err("should have failed to retrieve non-existent block 
number"); + + if let RpcClientError::JsonRpcError { error, .. } = error { + assert_eq!(error.code, -32602); + assert_eq!( + error.message, + "invalid 1st argument: block_number value was not valid block tag or block number" + ); + assert!(error.data.is_none()); + } else { + unreachable!("Invalid error"); + } + } + + #[tokio::test] + async fn get_block_by_number_with_transaction_data_some() { + let alchemy_url = get_alchemy_url(); + + let block_number = U256::from(16222385); + + let block = RpcClient::new(&alchemy_url) + .get_block_by_number(BlockSpec::Number(block_number)) + .await + .expect("should have succeeded"); + + assert_eq!(block.number, Some(block_number)); + assert_eq!(block.transactions.len(), 102); + } + + #[tokio::test] + async fn get_block_by_number_with_transaction_data_none() { + let alchemy_url = get_alchemy_url(); + + let block_number = U256::MAX; + + let error = RpcClient::new(&alchemy_url) + .get_block_by_number(BlockSpec::Number(block_number)) + .await + .expect_err("should have failed to retrieve non-existent block number"); + + if let RpcClientError::JsonRpcError { error, .. } = error { + assert_eq!(error.code, -32602); + assert_eq!( + error.message, + "invalid 1st argument: block_number value was not valid block tag or block number" + ); + assert!(error.data.is_none()); + } else { + unreachable!("Invalid error"); + } + } + + #[tokio::test] + async fn get_latest_block() { + let alchemy_url = get_alchemy_url(); + + let _block = RpcClient::new(&alchemy_url) + .get_block_by_number(BlockSpec::latest()) + .await + .expect("should have succeeded"); + } + + #[tokio::test] + async fn get_logs_some() { + let alchemy_url = get_alchemy_url(); + let logs = RpcClient::new(&alchemy_url) + .get_logs( + BlockSpec::Number(U256::from(10496585)), + BlockSpec::Number(U256::from(10496585)), + &Address::from_str("0xc02aaa39b223fe8d0a0e5c4f27ead9083c756cc2") + .expect("failed to parse data"), + ) + .await + .expect("failed to get logs"); + + assert_eq!(logs.len(), 12); + // TODO: assert more things about the log(s) + // TODO: consider asserting something about the logs bloom + } + + #[tokio::test] + async fn get_logs_future_from_block() { + let alchemy_url = get_alchemy_url(); + let error = RpcClient::new(&alchemy_url) + .get_logs( + BlockSpec::Number(U256::MAX), + BlockSpec::Number(U256::MAX), + &Address::from_str("0xffffffffffffffffffffffffffffffffffffffff") + .expect("failed to parse data"), + ) + .await + .expect_err("should have failed to get logs"); + + if let RpcClientError::JsonRpcError { error, .. } = error { + assert_eq!(error.code, -32602); + assert_eq!(error.message, "invalid 1st argument: filter 'fromBlock': value was not valid block tag or block number"); + assert!(error.data.is_none()); + } else { + unreachable!("Invalid error"); + } + } + + #[tokio::test] + async fn get_logs_future_to_block() { + let alchemy_url = get_alchemy_url(); + let error = RpcClient::new(&alchemy_url) + .get_logs( + BlockSpec::Number(U256::from(10496585)), + BlockSpec::Number(U256::MAX), + &Address::from_str("0xffffffffffffffffffffffffffffffffffffffff") + .expect("failed to parse data"), + ) + .await + .expect_err("should have failed to get logs"); + + if let RpcClientError::JsonRpcError { error, .. 
} = error { + assert_eq!(error.code, -32602); + assert_eq!(error.message, "invalid 1st argument: filter 'toBlock': value was not valid block tag or block number"); + assert!(error.data.is_none()); + } else { + unreachable!("Invalid error"); + } + } + + #[tokio::test] + async fn get_logs_missing_address() { + let alchemy_url = get_alchemy_url(); + let logs = RpcClient::new(&alchemy_url) + .get_logs( + BlockSpec::Number(U256::from(10496585)), + BlockSpec::Number(U256::from(10496585)), + &Address::from_str("0xffffffffffffffffffffffffffffffffffffffff") + .expect("failed to parse data"), + ) + .await + .expect("failed to get logs"); + + assert_eq!(logs.len(), 0); + } + + #[tokio::test] + async fn get_transaction_by_hash_some() { + let alchemy_url = get_alchemy_url(); + + let hash = B256::from_str( + "0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a", + ) + .expect("failed to parse hash from string"); + + let tx = RpcClient::new(&alchemy_url) + .get_transaction_by_hash(&hash) + .await + .expect("failed to get transaction by hash"); + + assert!(tx.is_some()); + let tx = tx.unwrap(); + + assert_eq!( + tx.block_hash, + Some( + B256::from_str( + "0x88fadbb673928c61b9ede3694ae0589ac77ae38ec90a24a6e12e83f42f18c7e8" + ) + .expect("couldn't parse data") + ) + ); + assert_eq!( + tx.block_number, + Some(U256::from_str_radix("a74fde", 16).expect("couldn't parse data")) + ); + assert_eq!(tx.hash, hash); + assert_eq!( + tx.from, + Address::from_str("0x7d97fcdb98632a91be79d3122b4eb99c0c4223ee") + .expect("couldn't parse data") + ); + assert_eq!( + tx.gas, + U256::from_str_radix("30d40", 16).expect("couldn't parse data") + ); + assert_eq!( + tx.gas_price, + Some(U256::from_str_radix("1e449a99b8", 16).expect("couldn't parse data")) + ); + assert_eq!( + tx.input, + Bytes::from("0xa9059cbb000000000000000000000000e2c1e729e05f34c07d80083982ccd9154045dcc600000000000000000000000000000000000000000000000000000004a817c800") + ); + assert_eq!( + tx.nonce, + U256::from_str_radix("653b", 16).expect("couldn't parse data") + ); + assert_eq!( + tx.r, + U256::from_str_radix( + "eb56df45bd355e182fba854506bc73737df275af5a323d30f98db13fdf44393a", + 16 + ) + .expect("couldn't parse data") + ); + assert_eq!( + tx.s, + U256::from_str_radix( + "2c6efcd210cdc7b3d3191360f796ca84cab25a52ed8f72efff1652adaabc1c83", + 16 + ) + .expect("couldn't parse data") + ); + assert_eq!( + tx.to, + Some( + Address::from_str("dac17f958d2ee523a2206206994597c13d831ec7") + .expect("couldn't parse data") + ) + ); + assert_eq!( + tx.transaction_index, + Some(u64::from_str_radix("88", 16).expect("couldn't parse data")) + ); + assert_eq!( + tx.v, + u64::from_str_radix("1c", 16).expect("couldn't parse data") + ); + assert_eq!( + tx.value, + U256::from_str_radix("0", 16).expect("couldn't parse data") + ); + } + + #[tokio::test] + async fn get_transaction_by_hash_none() { + let alchemy_url = get_alchemy_url(); + + let hash = B256::from_str( + "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + ) + .expect("failed to parse hash from string"); + + let tx = RpcClient::new(&alchemy_url) + .get_transaction_by_hash(&hash) + .await + .expect("failed to get transaction by hash"); + + assert!(tx.is_none()); + } + + #[tokio::test] + async fn get_transaction_count_some() { + let alchemy_url = get_alchemy_url(); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let transaction_count = RpcClient::new(&alchemy_url) + .get_transaction_count(&dai_address, 
BlockSpec::Number(U256::from(16220843))) + .await + .expect("should have succeeded"); + + assert_eq!(transaction_count, U256::from(1)); + } + + #[tokio::test] + async fn get_transaction_count_none() { + let alchemy_url = get_alchemy_url(); + + let missing_address = Address::from_str("0xffffffffffffffffffffffffffffffffffffffff") + .expect("failed to parse address"); + + let transaction_count = RpcClient::new(&alchemy_url) + .get_transaction_count(&missing_address, BlockSpec::Number(U256::from(16220843))) + .await + .expect("should have succeeded"); + + assert_eq!(transaction_count, U256::ZERO); + } + + #[tokio::test] + async fn get_transaction_count_future_block() { + let alchemy_url = get_alchemy_url(); + + let missing_address = Address::from_str("0xffffffffffffffffffffffffffffffffffffffff") + .expect("failed to parse address"); + + let error = RpcClient::new(&alchemy_url) + .get_transaction_count(&missing_address, BlockSpec::Number(U256::MAX)) + .await + .expect_err("should have failed"); + + if let RpcClientError::JsonRpcError { error, .. } = error { + assert_eq!(error.code, -32000); + assert_eq!(error.message, "header for hash not found"); + assert!(error.data.is_none()); + } else { + unreachable!("Invalid error"); + } + } + + #[tokio::test] + async fn get_transaction_receipt_some() { + let alchemy_url = get_alchemy_url(); + + let hash = B256::from_str( + "0xc008e9f9bb92057dd0035496fbf4fb54f66b4b18b370928e46d6603933054d5a", + ) + .expect("failed to parse hash from string"); + + let receipt = RpcClient::new(&alchemy_url) + .get_transaction_receipt(&hash) + .await + .expect("failed to get transaction by hash"); + + assert!(receipt.is_some()); + let receipt = receipt.unwrap(); + + assert_eq!( + receipt.block_hash, + Some( + B256::from_str( + "0x88fadbb673928c61b9ede3694ae0589ac77ae38ec90a24a6e12e83f42f18c7e8" + ) + .expect("couldn't parse data") + ) + ); + assert_eq!( + receipt.block_number, + Some(U256::from_str_radix("a74fde", 16).expect("couldn't parse data")) + ); + assert_eq!(receipt.contract_address, None); + assert_eq!( + receipt.cumulative_gas_used, + U256::from_str_radix("56c81b", 16).expect("couldn't parse data") + ); + assert_eq!( + receipt.effective_gas_price, + Some(U256::from_str_radix("1e449a99b8", 16).expect("couldn't parse data")) + ); + assert_eq!( + receipt.from, + Address::from_str("0x7d97fcdb98632a91be79d3122b4eb99c0c4223ee") + .expect("couldn't parse data") + ); + assert_eq!( + receipt.gas_used, + Some(U256::from_str_radix("a0f9", 16).expect("couldn't parse data")) + ); + assert_eq!(receipt.logs.len(), 1); + assert_eq!(receipt.root, None); + assert_eq!(receipt.status, Some(1)); + assert_eq!( + receipt.to, + Some( + Address::from_str("dac17f958d2ee523a2206206994597c13d831ec7") + .expect("couldn't parse data") + ) + ); + assert_eq!(receipt.transaction_hash, hash); + assert_eq!(receipt.transaction_index, 136); + assert_eq!(receipt.transaction_type, Some(0)); + } + + #[tokio::test] + async fn get_transaction_receipt_none() { + let alchemy_url = get_alchemy_url(); + + let hash = B256::from_str( + "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", + ) + .expect("failed to parse hash from string"); + + let receipt = RpcClient::new(&alchemy_url) + .get_transaction_receipt(&hash) + .await + .expect("failed to get transaction receipt"); + + assert!(receipt.is_none()); + } + + #[tokio::test] + async fn get_storage_at_some() { + let alchemy_url = get_alchemy_url(); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + 
.expect("failed to parse address"); + + let total_supply = RpcClient::new(&alchemy_url) + .get_storage_at( + &dai_address, + U256::from(1), + BlockSpec::Number(U256::from(16220843)), + ) + .await + .expect("should have succeeded"); + + assert_eq!( + total_supply, + U256::from_str_radix( + "000000000000000000000000000000000000000010a596ae049e066d4991945c", + 16 + ) + .expect("failed to parse storage location") + ); + } + + #[tokio::test] + async fn get_storage_at_none() { + let alchemy_url = get_alchemy_url(); + + let missing_address = Address::from_str("0xffffffffffffffffffffffffffffffffffffffff") + .expect("failed to parse address"); + + let value = RpcClient::new(&alchemy_url) + .get_storage_at( + &missing_address, + U256::from(1), + BlockSpec::Number(U256::from(1)), + ) + .await + .expect("should have succeeded"); + + assert_eq!(value, U256::ZERO); + } + + #[tokio::test] + async fn get_storage_at_latest() { + let alchemy_url = get_alchemy_url(); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let _total_supply = RpcClient::new(&alchemy_url) + .get_storage_at( + &dai_address, + U256::from_str_radix( + "0000000000000000000000000000000000000000000000000000000000000001", + 16, + ) + .expect("failed to parse storage location"), + BlockSpec::latest(), + ) + .await + .expect("should have succeeded"); + } + + #[tokio::test] + async fn get_storage_at_future_block() { + let alchemy_url = get_alchemy_url(); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let error = RpcClient::new(&alchemy_url) + .get_storage_at(&dai_address, U256::from(1), BlockSpec::Number(U256::MAX)) + .await + .expect_err("should have failed"); + + if let RpcClientError::JsonRpcError { error, .. } = error { + assert_eq!(error.code, -32000); + assert_eq!(error.message, "header for hash not found"); + assert!(error.data.is_none()); + } else { + unreachable!("Invalid error"); + } + } + } +} diff --git a/crates/rethnet_eth/src/remote/eth.rs b/crates/rethnet_eth/src/remote/eth.rs index d6a170e8ac..0d3a728268 100644 --- a/crates/rethnet_eth/src/remote/eth.rs +++ b/crates/rethnet_eth/src/remote/eth.rs @@ -10,6 +10,8 @@ use std::fmt::Debug; use crate::{Address, Bloom, Bytes, B256, U256}; +use super::withdrawal::Withdrawal; + #[derive(Clone, Debug, PartialEq, Eq, Default, serde::Deserialize, serde::Serialize)] #[serde(deny_unknown_fields)] #[serde(rename_all = "camelCase")] @@ -26,8 +28,7 @@ pub struct Transaction { pub hash: B256, pub nonce: U256, pub block_hash: Option, - #[serde(deserialize_with = "optional_u64_from_hex")] - pub block_number: Option, + pub block_number: Option, #[serde(deserialize_with = "optional_u64_from_hex")] pub transaction_index: Option, pub from: Address, @@ -84,11 +85,8 @@ pub struct Log { pub data: Bytes, #[serde(skip_serializing_if = "Option::is_none")] pub block_hash: Option, - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "optional_u64_from_hex" - )] - pub block_number: Option, + #[serde(skip_serializing_if = "Option::is_none")] + pub block_number: Option, #[serde(skip_serializing_if = "Option::is_none")] pub transaction_hash: Option, #[serde( @@ -111,8 +109,7 @@ pub struct Log { #[serde(rename_all = "camelCase")] pub struct TransactionReceipt { pub block_hash: Option, - #[serde(deserialize_with = "optional_u64_from_hex")] - pub block_number: Option, + pub block_number: Option, pub contract_address: Option
, pub cumulative_gas_used: U256, #[serde(default, skip_serializing_if = "Option::is_none")] @@ -152,8 +149,7 @@ where pub state_root: B256, pub transactions_root: B256, pub receipts_root: B256, - #[serde(deserialize_with = "optional_u64_from_hex")] - pub number: Option<u64>, + pub number: Option<U256>, pub gas_used: U256, pub gas_limit: U256, pub extra_data: Bytes, @@ -174,6 +170,10 @@ where pub nonce: Option, pub base_fee_per_gas: Option<U256>, pub miner: Address, + #[serde(default)] + pub withdrawals: Vec<Withdrawal>, + #[serde(default)] + pub withdrawals_root: B256, } fn deserialize_null_default<'de, D, T>(deserializer: D) -> Result<T, D::Error> diff --git a/crates/rethnet_eth/src/remote/jsonrpc.rs b/crates/rethnet_eth/src/remote/jsonrpc.rs index afb9c7a58d..d5800663e4 100644 --- a/crates/rethnet_eth/src/remote/jsonrpc.rs +++ b/crates/rethnet_eth/src/remote/jsonrpc.rs @@ -6,18 +6,46 @@ use serde::{Deserialize, Serialize}; -/// Represents JSON-RPC 2.0 success response. +/// Represents a JSON-RPC error. +#[derive(thiserror::Error, Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +#[error("The response reported error `{code}`: `{message}`. (optional data: {data:?})")] +pub struct Error { + pub code: i16, + pub message: String, + pub data: Option<serde_json::Value>, +} + +/// Represents a JSON-RPC 2.0 response. #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] -#[serde(deny_unknown_fields)] -pub struct Success<T> { +pub struct Response<T> { /// A String specifying the version of the JSON-RPC protocol. pub jsonrpc: Version, - /// Successful execution result. - pub result: T, + // /// Correlation id. /// /// It **MUST** be the same as the value of the id member in the Request Object. pub id: Id, + /// Response data. + #[serde(flatten)] + pub data: ResponseData<T>, +} + +/// Represents JSON-RPC 2.0 success response. +#[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] +#[serde(untagged)] +pub enum ResponseData<T> { + Error { error: Error }, + Success { result: T }, +} + +impl<T> ResponseData<T> { + /// Returns a [`Result`] where `Success` is mapped to `Ok` and `Error` to `Err`. + pub fn into_result(self) -> Result<T, Error> { + match self { + ResponseData::Success { result } => Ok(result), + ResponseData::Error { error } => Err(error), + } + } } /// Represents JSON-RPC request/response id.
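
(Editor's aside, not part of the patch: a minimal sketch of how the flattened `Response`/`ResponseData` pair above is meant to be consumed, matching the `JsonRpcError` assertions in the client tests earlier in this patch. The import path, the availability of `serde_json`, and a numeric request id are assumptions.)

    use rethnet_eth::remote::jsonrpc::Response;

    fn example() -> Result<(), serde_json::Error> {
        // A success payload flattens into `ResponseData::Success`.
        let ok: Response<u64> =
            serde_json::from_str(r#"{"jsonrpc":"2.0","id":1,"result":7}"#)?;
        assert_eq!(ok.data.into_result(), Ok(7));

        // An error payload flattens into `ResponseData::Error` and surfaces as `Err`.
        let err: Response<u64> = serde_json::from_str(
            r#"{"jsonrpc":"2.0","id":1,"error":{"code":-32000,"message":"header for hash not found","data":null}}"#,
        )?;
        assert!(err.data.into_result().is_err());
        Ok(())
    }
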
@@ -65,6 +93,7 @@ impl<'a> Deserialize<'a> for Version { } struct VersionVisitor; + impl<'a> serde::de::Visitor<'a> for VersionVisitor { type Value = Version; @@ -84,3 +113,42 @@ impl<'a> serde::de::Visitor<'a> for VersionVisitor { } } } + +pub struct ZeroXPrefixedBytes { + pub inner: bytes::Bytes, +} + +impl<'a> Deserialize<'a> for ZeroXPrefixedBytes { + fn deserialize(deserializer: D) -> Result + where + D: serde::Deserializer<'a>, + { + deserializer.deserialize_identifier(ZeroXPrefixedBytesVisitor) + } +} + +struct ZeroXPrefixedBytesVisitor; +impl<'a> serde::de::Visitor<'a> for ZeroXPrefixedBytesVisitor { + type Value = ZeroXPrefixedBytes; + + fn expecting(&self, formatter: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { + formatter.write_str("a 0x-prefixed string of hex digits") + } + + fn visit_str(self, value: &str) -> Result + where + E: serde::de::Error, + { + if &value[0..1] == "0x" { + Err(serde::de::Error::custom( + "string does not have a '0x' prefix", + )) + } else { + Ok(ZeroXPrefixedBytes { + inner: bytes::Bytes::from( + hex::decode(&value[2..]).expect("failed to decode hex string"), + ), + }) + } + } +} diff --git a/crates/rethnet_eth/src/remote/withdrawal.rs b/crates/rethnet_eth/src/remote/withdrawal.rs new file mode 100644 index 0000000000..6d0ae04a61 --- /dev/null +++ b/crates/rethnet_eth/src/remote/withdrawal.rs @@ -0,0 +1,41 @@ +use revm_primitives::{Address, U256}; +use ruint::aliases::U128; + +/// Ethereum withdrawal +#[derive(Clone, Debug, PartialEq, Eq)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] +pub struct Withdrawal { + /// The index of withdrawal + pub index: U128, + /// The index of the validator that generated the withdrawal + pub validator_index: U128, + /// The recipient address for withdrawal value + pub address: Address, + /// The value contained in withdrawal + pub amount: U256, +} + +impl rlp::Decodable for Withdrawal { + fn decode(rlp: &rlp::Rlp) -> Result { + Ok(Self { + index: rlp.val_at(0)?, + validator_index: rlp.val_at(1)?, + address: { + let address = rlp.val_at::(2)?.to_be_bytes(); + Address::from(address) + }, + amount: rlp.val_at(3)?, + }) + } +} + +impl rlp::Encodable for Withdrawal { + fn rlp_append(&self, s: &mut rlp::RlpStream) { + s.begin_list(4); + s.append(&self.index); + s.append(&self.validator_index); + s.append(&ruint::aliases::B160::from_be_bytes(self.address.0)); + s.append(&self.amount); + } +} diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index 905d0364c4..ac16acad14 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -25,9 +25,9 @@ tracing = { version = "0.1.37", features = ["attributes", "std"], optional = tru [dev-dependencies] criterion = { version = "0.4.0", default-features = false, features = ["cargo_bench_support", "html_reports", "plotters"] } -test-with = { version = "0.9.1", default-features = false } [features] +test-disable-remote = [] tracing = ["dep:tracing"] [[bench]] diff --git a/crates/rethnet_evm/src/block/builder.rs b/crates/rethnet_evm/src/block/builder.rs index 933afce83a..f9b19f2539 100644 --- a/crates/rethnet_evm/src/block/builder.rs +++ b/crates/rethnet_evm/src/block/builder.rs @@ -7,8 +7,8 @@ use rethnet_eth::{ use revm::{ db::DatabaseComponentError, primitives::{ - BlockEnv, CfgEnv, EVMError, ExecutionResult, InvalidTransaction, ResultAndState, SpecId, - TxEnv, + AccountInfo, BlockEnv, CfgEnv, EVMError, ExecutionResult, InvalidTransaction, 
+ ResultAndState, SpecId, TxEnv, }, }; use tokio::sync::RwLock; @@ -26,7 +26,7 @@ pub enum BlockTransactionError { BlockHash(BE), #[error("Transaction has a higher gas limit than the remaining gas in the block")] ExceedsBlockGasLimit, - #[error("Invalid transaction")] + #[error("Invalid transaction: {0:?}")] InvalidTransaction(InvalidTransaction), #[error(transparent)] State(SE), @@ -165,6 +165,12 @@ where AccountModifierFn::new(Box::new(move |balance, _nonce, _code| { *balance += reward; })), + &|| { + Ok(AccountInfo { + code: None, + ..AccountInfo::default() + }) + }, )?; } diff --git a/crates/rethnet_evm/src/lib.rs b/crates/rethnet_evm/src/lib.rs index f8dde9c800..e31a05be85 100644 --- a/crates/rethnet_evm/src/lib.rs +++ b/crates/rethnet_evm/src/lib.rs @@ -22,6 +22,7 @@ pub use revm::{ pub use crate::{ block::{BlockBuilder, HeaderData}, evm::SyncInspector, + random::RandomHashGenerator, runtime::{Rethnet, SyncDatabase}, transaction::{PendingTransaction, TransactionError}, }; diff --git a/crates/rethnet_evm/src/random.rs b/crates/rethnet_evm/src/random.rs index f63290a330..ebc85e9b19 100644 --- a/crates/rethnet_evm/src/random.rs +++ b/crates/rethnet_evm/src/random.rs @@ -19,7 +19,7 @@ impl RandomHashGenerator { } /// Returns the next hash, generated the future next hash, and caches it. - pub fn next(&mut self) -> B256 { + pub fn next_value(&mut self) -> B256 { let mut next_value = keccak256(self.next_value.as_bytes()); std::mem::swap(&mut self.next_value, &mut next_value); diff --git a/crates/rethnet_evm/src/state.rs b/crates/rethnet_evm/src/state.rs index ecfd4f0e44..d344769bab 100644 --- a/crates/rethnet_evm/src/state.rs +++ b/crates/rethnet_evm/src/state.rs @@ -1,5 +1,6 @@ mod account; mod debug; +mod fork; mod history; mod hybrid; mod layered; @@ -8,15 +9,16 @@ mod trie; use std::fmt::Debug; -use rethnet_eth::B256; +use rethnet_eth::{remote::RpcClientError, B256}; use revm::{db::StateRef, DatabaseCommit}; pub use self::{ debug::{AccountModifierFn, StateDebug}, + fork::ForkState, history::StateHistory, hybrid::HybridState, layered::{LayeredState, RethnetLayer}, - remote::RemoteDatabase, + remote::RemoteState, }; /// Combinatorial error for the database API @@ -29,8 +31,16 @@ pub enum StateError { #[error("Contract with code hash `{0}` does not exist.")] InvalidCodeHash(B256), /// Specified state root does not exist - #[error("State root `{0}` does not exist.")] - InvalidStateRoot(B256), + #[error("State root `{state_root:?}` does not exist (fork: {is_fork}).")] + InvalidStateRoot { + /// Requested state root + state_root: B256, + /// Whether the state root was intended for a fork + is_fork: bool, + }, + /// Error from the underlying RPC client + #[error(transparent)] + Remote(#[from] RpcClientError), } /// Trait that meets all requirements for a synchronous database that can be used by [`AsyncDatabase`]. diff --git a/crates/rethnet_evm/src/state/debug.rs b/crates/rethnet_evm/src/state/debug.rs index 1ed6093eee..b27ae7075a 100644 --- a/crates/rethnet_evm/src/state/debug.rs +++ b/crates/rethnet_evm/src/state/debug.rs @@ -52,12 +52,14 @@ pub trait StateDebug { account_info: AccountInfo, ) -> Result<(), Self::Error>; - /// Modifies the account at the specified address using the provided function. If the address - /// points to an empty account, that will be modified instead. + /// Modifies the account at the specified address using the provided function. If no account + /// exists for the specified address, an account will be generated using the `default_account_fn` + /// and modified. 
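+    ///
+    /// (Editor's sketch, not part of the original patch.) Callers that have no
+    /// better default pass a closure building an empty account, mirroring the
+    /// block-reward payout in `block/builder.rs` earlier in this patch:
+    ///
+    /// ```ignore
+    /// state.modify_account(
+    ///     beneficiary,
+    ///     AccountModifierFn::new(Box::new(move |balance, _nonce, _code| *balance += reward)),
+    ///     &|| Ok(AccountInfo { code: None, ..AccountInfo::default() }),
+    /// )?;
+    /// ```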
fn modify_account( &mut self, address: Address, modifier: AccountModifierFn, + default_account_fn: &dyn Fn() -> Result, ) -> Result<(), Self::Error>; /// Removes and returns the account at the specified address, if it exists. diff --git a/crates/rethnet_evm/src/state/fork.rs b/crates/rethnet_evm/src/state/fork.rs index 404695c277..04faf8c778 100644 --- a/crates/rethnet_evm/src/state/fork.rs +++ b/crates/rethnet_evm/src/state/fork.rs @@ -1,298 +1,457 @@ -use hashbrown::HashMap; -use revm::{db::DatabaseRef, Account, AccountInfo, Bytecode}; +use std::sync::Arc; -use rethnet_eth::{Address, B256, U256}; +use hashbrown::{HashMap, HashSet}; +use parking_lot::{Mutex, RwLock, RwLockUpgradableReadGuard}; +use rethnet_eth::{ + remote::{BlockSpec, RpcClient}, + Address, B256, U256, +}; +use revm::{ + db::{State, StateRef}, + primitives::{Account, AccountInfo, Bytecode}, + DatabaseCommit, +}; +use tokio::runtime::Runtime; -use crate::db::{ - layered_db::{LayeredDatabase, RethnetLayer}, - remote::{RemoteDatabase, RemoteDatabaseError}, +use crate::random::RandomHashGenerator; + +use super::{ + remote::CachedRemoteState, HybridState, RemoteState, RethnetLayer, StateDebug, StateError, + StateHistory, }; /// A database integrating the state from a remote node and the state from a local layered /// database. -pub struct ForkDatabase { - layered_db: LayeredDatabase, - remote_db: RemoteDatabase, - account_info_cache: HashMap, - code_by_hash_cache: HashMap, - storage_cache: HashMap<(Address, U256), U256>, - fork_block_number: u64, - fork_block_state_root_cache: Option, +#[derive(Debug)] +pub struct ForkState { + local_state: HybridState, + remote_state: Arc>, + removed_storage_slots: HashSet<(Address, U256)>, + fork_block_number: U256, + /// client-facing state root (pseudorandomly generated) mapped to internal (layered_state) state root + state_root_to_state: RwLock>, + /// A pair of the generated state root and local state root + current_state: RwLock<(B256, B256)>, + initial_state_root: B256, + hash_generator: Arc>, } -/// An error emitted by ForkDatabase -#[derive(thiserror::Error, Debug)] -pub enum ForkDatabaseError { - /// An error from the underlying RemoteDatabase - #[error(transparent)] - RemoteDatabase(#[from] RemoteDatabaseError), - - /// An error from the underlying LayeredDatabase - #[error(transparent)] - LayeredDatabase(#[from] as revm::Database>::Error), - - /// Code hash not found in cache of remote database - #[error("Cache of remote database does not contain contract with code hash: {0}.")] - NoSuchCodeHash(B256), - - /// Some other error from an underlying dependency - #[error(transparent)] - OtherError(#[from] std::io::Error), -} - -impl ForkDatabase { - /// instantiate a new ForkDatabase - pub fn new(url: &str, fork_block_number: u64) -> Self { - let remote_db = RemoteDatabase::new(url); - - let layered_db = LayeredDatabase::::default(); +impl ForkState { + /// instantiate a new ForkState + pub fn new( + runtime: Arc, + hash_generator: Arc>, + url: &str, + fork_block_number: U256, + mut accounts: HashMap, + ) -> Self { + let rpc_client = RpcClient::new(url); + + let remote_state = RemoteState::new(runtime.clone(), url, fork_block_number); + + accounts.iter_mut().for_each(|(address, mut account_info)| { + let nonce = runtime + .block_on( + rpc_client.get_transaction_count(address, BlockSpec::Number(fork_block_number)), + ) + .expect("failed to retrieve remote account info for local account initialization"); + + account_info.nonce = nonce.to(); + }); + + let mut local_state = 
HybridState::with_accounts(accounts); + local_state.checkpoint().unwrap(); + + let generated_state_root = hash_generator.lock().next_value(); + let mut state_root_to_state = HashMap::new(); + let local_root = local_state.state_root().unwrap(); + state_root_to_state.insert(generated_state_root, local_root); Self { - layered_db, - remote_db, - account_info_cache: HashMap::new(), - code_by_hash_cache: HashMap::new(), - storage_cache: HashMap::new(), + local_state, + remote_state: Arc::new(Mutex::new(CachedRemoteState::new(remote_state))), + removed_storage_slots: HashSet::new(), fork_block_number, - fork_block_state_root_cache: None, + state_root_to_state: RwLock::new(state_root_to_state), + current_state: RwLock::new((generated_state_root, local_root)), + initial_state_root: generated_state_root, + hash_generator, } } -} -impl revm::Database for ForkDatabase { - type Error = ForkDatabaseError; - - fn basic(&mut self, address: Address) -> Result, Self::Error> { - if let Some(layered) = self - .layered_db - .basic(address) - .map_err(ForkDatabaseError::LayeredDatabase)? - { - Ok(Some(layered)) - } else if let Some(cached) = self.account_info_cache.get(&address) { - Ok(Some(cached.clone())) - } else if let Some(remote) = self - .remote_db - .basic(address) - .map_err(ForkDatabaseError::RemoteDatabase)? - { - self.account_info_cache.insert(address, remote.clone()); + fn update_removed_storage_slots(&mut self) { + self.removed_storage_slots.clear(); + + self.local_state + .changes() + .rev() + .flat_map(|layer| layer.accounts()) + .for_each(|(address, account)| { + // We never need to remove zero entries as a "removed" entry means that the lookup for + // a value in the hybrid state succeeded. + if let Some(account) = account { + account.storage.iter().for_each(|(index, value)| { + if *value == U256::ZERO { + self.removed_storage_slots.insert((*address, *index)); + } + }); + } + }) + } +} - if remote.code.is_some() { - self.code_by_hash_cache - .insert(remote.code_hash, remote.code.clone().unwrap()); - } +impl StateRef for ForkState { + type Error = StateError; - Ok(Some(remote)) + fn basic(&self, address: Address) -> Result, Self::Error> { + if let Some(local) = self.local_state.basic(address)? { + Ok(Some(local)) } else { - Ok(None) + self.remote_state.lock().basic(address) } } - fn code_by_hash(&mut self, code_hash: B256) -> Result { - if let Ok(layered) = self.layered_db.code_by_hash(code_hash) { + fn code_by_hash(&self, code_hash: B256) -> Result { + if let Ok(layered) = self.local_state.code_by_hash(code_hash) { Ok(layered) - } else if let Some(cached) = self.code_by_hash_cache.get(&code_hash) { - Ok(cached.clone()) } else { - // remote_db doesn't support code_by_hash, so there's no delegation to it here. 
- Err(ForkDatabaseError::NoSuchCodeHash(code_hash)) + self.remote_state.lock().code_by_hash(code_hash) } } - fn storage(&mut self, address: Address, index: U256) -> Result { - let layered = self - .layered_db - .storage(address, index) - .map_err(ForkDatabaseError::LayeredDatabase)?; - - if layered != U256::from(0) { - Ok(layered) - } else if let Some(cached) = self.storage_cache.get(&(address, index)) { - Ok(*cached) + fn storage(&self, address: Address, index: U256) -> Result { + let local = self.local_state.storage(address, index)?; + if local != U256::ZERO || self.removed_storage_slots.contains(&(address, index)) { + Ok(local) } else { - let remote = self - .remote_db - .storage(address, index) - .map_err(ForkDatabaseError::RemoteDatabase)?; - - self.storage_cache.insert((address, index), remote); - - Ok(remote) + self.remote_state.lock().storage(address, index) } } } -impl revm::DatabaseCommit for ForkDatabase { +impl DatabaseCommit for ForkState { fn commit(&mut self, changes: HashMap) { - self.layered_db.commit(changes) + changes.iter().for_each(|(address, account)| { + account.storage.iter().for_each(|(index, value)| { + // We never need to remove zero entries as a "removed" entry means that the lookup for + // a value in the hybrid state succeeded. + if value.present_value() == U256::ZERO { + self.removed_storage_slots.insert((*address, *index)); + } + }); + }); + + self.local_state.commit(changes) } } -impl crate::DatabaseDebug for ForkDatabase { - type Error = ForkDatabaseError; +impl StateDebug for ForkState { + type Error = StateError; - fn account_storage_root(&mut self, address: &Address) -> Result, Self::Error> { - self.layered_db - .account_storage_root(address) - .map_err(ForkDatabaseError::LayeredDatabase) + fn account_storage_root(&self, address: &Address) -> Result, Self::Error> { + self.local_state.account_storage_root(address) } - /// Inserts an account with the specified address. fn insert_account( &mut self, address: Address, account_info: AccountInfo, ) -> Result<(), Self::Error> { - self.layered_db - .insert_account(address, account_info) - .map_err(ForkDatabaseError::LayeredDatabase) + self.local_state.insert_account(address, account_info) } - /// Modifies the account at the specified address using the provided function. fn modify_account( &mut self, address: Address, - modifier: crate::debug::ModifierFn, + modifier: crate::state::AccountModifierFn, + default_account_fn: &dyn Fn() -> Result, ) -> Result<(), Self::Error> { - use revm::Database; // for basic() - - if (self - .layered_db - .basic(address) - .map_err(ForkDatabaseError::LayeredDatabase)?) - .is_none() - { - let account_info = if let Some(cached) = self.account_info_cache.get(&address) { - Some(cached.clone()) - } else if let Some(remote) = self - .remote_db - .basic(address) - .map_err(ForkDatabaseError::RemoteDatabase)? - { - self.account_info_cache.insert(address, remote.clone()); - - if remote.code.is_some() { - self.code_by_hash_cache - .insert(remote.code_hash, remote.code.clone().unwrap()); - } - - Some(remote) - } else { - None - }; - if let Some(account_info) = account_info { - self.layered_db.insert_account(address, account_info)? - } - } - self.layered_db - .modify_account(address, modifier) - .map_err(ForkDatabaseError::LayeredDatabase) + #[allow(clippy::redundant_closure)] + self.local_state.modify_account(address, modifier, &|| { + self.remote_state + .lock() + .basic(address)? 
+ .map_or_else(|| default_account_fn(), Result::Ok) + }) } - /// Removes and returns the account at the specified address, if it exists. fn remove_account(&mut self, address: Address) -> Result, Self::Error> { - crate::DatabaseDebug::remove_account(&mut self.layered_db, address) - .map_err(ForkDatabaseError::LayeredDatabase) + self.local_state.remove_account(address) + } + + fn serialize(&self) -> String { + // TODO: Do we want to print history? + self.local_state.serialize() } - /// Sets the storage slot at the specified address and index to the provided value. fn set_account_storage_slot( &mut self, address: Address, index: U256, value: U256, ) -> Result<(), Self::Error> { - self.layered_db + // We never need to remove zero entries as a "removed" entry means that the lookup for + // a value in the hybrid state succeeded. + if value == U256::ZERO { + self.removed_storage_slots.insert((address, index)); + } + + self.local_state .set_account_storage_slot(address, index, value) - .map_err(ForkDatabaseError::LayeredDatabase) } - /// Reverts the state to match the specified state root. - fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { - self.layered_db - .set_state_root(state_root) - .map_err(ForkDatabaseError::LayeredDatabase) + fn state_root(&self) -> Result { + let local_root = self.local_state.state_root().unwrap(); + + let current_state = self.current_state.upgradable_read(); + let state_root_to_state = self.state_root_to_state.upgradable_read(); + + Ok(if local_root != current_state.1 { + let next_state_root = self.hash_generator.lock().next_value(); + + let mut state_root_to_state = RwLockUpgradableReadGuard::upgrade(state_root_to_state); + state_root_to_state.insert(next_state_root, local_root); + + *RwLockUpgradableReadGuard::upgrade(current_state) = (next_state_root, local_root); + + next_state_root + } else { + current_state.0 + }) } +} + +impl StateHistory for ForkState { + type Error = StateError; + + fn set_block_context( + &mut self, + state_root: &B256, + block_number: Option, + ) -> Result<(), Self::Error> { + if let Some(block_number) = block_number { + if block_number < self.fork_block_number { + self.remote_state.lock().set_block_number(&block_number); + + let local_root = self + .state_root_to_state + .get_mut() + .get(&self.initial_state_root) + .unwrap(); - /// Retrieves the storage root of the database. - fn state_root(&mut self) -> Result { - if self.layered_db.iter().next().is_some() { - Ok(self - .layered_db - .state_root() - .map_err(ForkDatabaseError::LayeredDatabase)?) 
- } else if let Some(cached) = self.fork_block_state_root_cache { - Ok(cached) + self.local_state.set_block_context(local_root, None)?; + + *self.current_state.get_mut() = (self.initial_state_root, *local_root); + } else { + let state_root_to_state = self.state_root_to_state.get_mut(); + let local_root = state_root_to_state.get(state_root).or_else(|| { + if block_number == self.fork_block_number { + state_root_to_state.get(&self.initial_state_root) + } else { + None + } + }); + + if let Some(local_root) = local_root { + self.local_state + .set_block_context(local_root, Some(block_number))?; + + let block_number = block_number.min(self.fork_block_number); + self.remote_state.lock().set_block_number(&block_number); + + *self.current_state.get_mut() = (*state_root, *local_root); + } else { + return Err(Self::Error::InvalidStateRoot { + state_root: *state_root, + is_fork: true, + }); + } + } + } else if let Some(local_root) = self.state_root_to_state.get_mut().get(state_root) { + self.local_state.set_block_context(local_root, None)?; + self.remote_state + .lock() + .set_block_number(&self.fork_block_number); + + *self.current_state.get_mut() = (*state_root, *local_root); } else { - self.fork_block_state_root_cache = Some( - self.remote_db - .state_root(self.fork_block_number) - .map_err(ForkDatabaseError::RemoteDatabase) - .map_err(|e| anyhow::anyhow!(e))?, - ); - Ok(self.fork_block_state_root_cache.unwrap()) + return Err(Self::Error::InvalidStateRoot { + state_root: *state_root, + is_fork: true, + }); } + + self.update_removed_storage_slots(); + + Ok(()) } - /// Creates a checkpoint that can be reverted to using [`revert`]. fn checkpoint(&mut self) -> Result<(), Self::Error> { - self.layered_db - .checkpoint() - .map_err(ForkDatabaseError::LayeredDatabase) + // Ensure a potential state root is generated + self.state_root()?; + + self.local_state.checkpoint() } - /// Reverts to the previous checkpoint, created using [`checkpoint`]. fn revert(&mut self) -> Result<(), Self::Error> { - self.layered_db - .revert() - .map_err(ForkDatabaseError::LayeredDatabase) + self.local_state.revert() } - /// Makes a snapshot of the database that's retained until [`remove_snapshot`] is called. Returns the snapshot's identifier. fn make_snapshot(&mut self) -> B256 { - self.layered_db.make_snapshot() + self.local_state.make_snapshot(); + + self.state_root().expect("should have been able to generate a new state root after triggering a snapshot in the underlying state") } - /// Removes the snapshot corresponding to the specified id, if it exists. Returns whether a snapshot was removed. 
- fn remove_snapshot(&mut self, state_root: &B256) -> bool { - self.layered_db.remove_snapshot(state_root) + fn remove_snapshot(&mut self, state_root: &B256) { + self.local_state.remove_snapshot(state_root); } } -#[cfg(test)] +#[cfg(all(test, not(feature = "test-disable-remote")))] mod tests { - use super::*; - use std::str::FromStr; + use tokio::runtime::Builder; + + use super::*; + fn get_alchemy_url() -> Result { - Ok(std::env::var_os("ALCHEMY_URL") + match std::env::var_os("ALCHEMY_URL") .expect("ALCHEMY_URL environment variable not defined") .into_string() - .expect("couldn't convert OsString into a String")) + .expect("couldn't convert OsString into a String") + { + url if url.is_empty() => panic!("ALCHEMY_URL environment variable is empty"), + url => Ok(url), + } } - #[test_with::env(ALCHEMY_URL)] #[test] fn basic_success() { - let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") - .expect("failed to parse address"); - let mut fork_db = ForkDatabase::new( + let runtime = Arc::new( + Builder::new_multi_thread() + .enable_io() + .enable_time() + .build() + .expect("failed to construct async runtime"), + ); + + let hash_generator = Arc::new(Mutex::new(RandomHashGenerator::with_seed("seed"))); + + let fork_state = ForkState::new( + runtime, + hash_generator, &get_alchemy_url().expect("failed to get alchemy url"), - 16220843, + U256::from(16220843), + HashMap::default(), ); - let account_info = - revm::Database::basic(&mut fork_db, dai_address).expect("should have succeeded"); + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + let account_info = fork_state + .basic(dai_address) + .expect("should have succeeded"); assert!(account_info.is_some()); + let account_info = account_info.unwrap(); assert_eq!(account_info.balance, U256::from(0)); assert_eq!(account_info.nonce, 1); assert_eq!( account_info.code_hash, - B256::from_str("0x74280a6e975486b18c8a65edee16b3b7a2f4c24398a094648552810549cbf864") + B256::from_str("0x4e36f96ee1667a663dfaac57c4d185a0e369a3a217e0079d49620f34f85d1ac7") .expect("failed to parse") ); } + + #[test] + fn set_block_context_with_zeroed_storage_slots() { + let runtime = Arc::new( + Builder::new_multi_thread() + .enable_io() + .enable_time() + .build() + .expect("failed to construct async runtime"), + ); + + let hash_generator = Arc::new(Mutex::new(RandomHashGenerator::with_seed("seed"))); + + let mut fork_state = ForkState::new( + runtime, + hash_generator, + &get_alchemy_url().expect("failed to get alchemy url"), + U256::from(16220843), + HashMap::default(), + ); + + let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") + .expect("failed to parse address"); + + const STORAGE_SLOT_INDEX: u64 = 1; + const DUMMY_STORAGE_VALUE: u64 = 1000; + + let storage_slot_index = U256::from(STORAGE_SLOT_INDEX); + let dummy_storage_value = U256::from(DUMMY_STORAGE_VALUE); + let initial_state_root = fork_state.initial_state_root; + + let remote_value = + U256::from_str("0x000000000000000000000000000000000000000010a596ae049e066d4991945c") + .unwrap(); + + // Validate remote storage slot value + assert_eq!( + fork_state.storage(dai_address, storage_slot_index).unwrap(), + remote_value + ); + + // Set storage slot to zero + fork_state + .set_account_storage_slot(dai_address, storage_slot_index, U256::ZERO) + .unwrap(); + + // Validate storage slot equals zero + let fork_storage_slot = fork_state.storage(dai_address, storage_slot_index).unwrap(); + 
assert_eq!(fork_storage_slot, U256::ZERO); + + // Retrieve the state root that we want to revert to later on + let zeroed_state_root = fork_state.state_root().unwrap(); + + // Create layers with modified storage slot values that will be reverted + fork_state.checkpoint().unwrap(); + + fork_state + .set_account_storage_slot(dai_address, storage_slot_index, dummy_storage_value) + .unwrap(); + + fork_state.checkpoint().unwrap(); + + let dummy_storage_state_root = fork_state.make_snapshot(); + + // Validate storage slot equals zero after reverting to zeroed storage slot state + fork_state + .set_block_context(&zeroed_state_root, None) + .unwrap(); + + let fork_storage_slot = fork_state.storage(dai_address, storage_slot_index).unwrap(); + assert_eq!(fork_storage_slot, U256::ZERO); + + // Validate remote storage slot value after reverting to initial state + fork_state + .set_block_context(&initial_state_root, None) + .unwrap(); + + assert_eq!( + fork_state.storage(dai_address, storage_slot_index).unwrap(), + remote_value + ); + + // Validate that the dummy value is returned after fast-forward to that state + fork_state + .set_block_context(&dummy_storage_state_root, None) + .unwrap(); + + let fork_storage_slot = fork_state.storage(dai_address, storage_slot_index).unwrap(); + assert_eq!(fork_storage_slot, dummy_storage_value); + } } diff --git a/crates/rethnet_evm/src/state/history.rs b/crates/rethnet_evm/src/state/history.rs index 86b7a4558c..1ab9c01089 100644 --- a/crates/rethnet_evm/src/state/history.rs +++ b/crates/rethnet_evm/src/state/history.rs @@ -1,5 +1,5 @@ use auto_impl::auto_impl; -use rethnet_eth::B256; +use rethnet_eth::{B256, U256}; /// A trait for debug operation on a database. #[auto_impl(Box)] @@ -7,8 +7,12 @@ pub trait StateHistory { /// The database's error type. type Error; - /// Reverts the state to match the specified state root. - fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error>; + /// Reverts the state to match the specified block. + fn set_block_context( + &mut self, + state_root: &B256, + block_number: Option, + ) -> Result<(), Self::Error>; /// Creates a checkpoint that can be reverted to using [`revert`]. fn checkpoint(&mut self) -> Result<(), Self::Error>; diff --git a/crates/rethnet_evm/src/state/hybrid.rs b/crates/rethnet_evm/src/state/hybrid.rs index 3e0fba4aa6..c0494974c8 100644 --- a/crates/rethnet_evm/src/state/hybrid.rs +++ b/crates/rethnet_evm/src/state/hybrid.rs @@ -17,6 +17,14 @@ use super::{ AccountModifierFn, RethnetLayer, StateDebug, StateError, }; +#[derive(Clone, Debug)] +struct RevertedLayers { + /// The parent layer's state root + pub parent_state_root: B256, + /// The reverted layers + pub stack: Vec, +} + #[derive(Clone, Debug)] struct Snapshot { pub changes: LayeredChanges, @@ -28,6 +36,7 @@ struct Snapshot { pub struct HybridState { trie: TrieState, changes: LayeredChanges, + reverted_layers: Option>, snapshots: SharedMap, true>, } @@ -41,9 +50,15 @@ impl>> HybridState { Self { trie: latest_state, changes: LayeredChanges::with_layer(layer), + reverted_layers: None, snapshots: SharedMap::default(), } } + + /// Returns the changes that allow reconstructing the state. 
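+    ///
+    /// (Editor's sketch, not part of the original patch.) This is how `ForkState`
+    /// rebuilds its set of zeroed storage slots:
+    ///
+    /// ```ignore
+    /// for (address, account) in state.changes().rev().flat_map(|layer| layer.accounts()) {
+    ///     // inspect each layer's per-account changes
+    /// }
+    /// ```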
+ pub fn changes(&self) -> &LayeredChanges { + &self.changes + } } impl StateRef for HybridState { @@ -93,18 +108,15 @@ impl StateDebug for HybridState { Ok(()) } - #[cfg_attr(feature = "tracing", tracing::instrument)] + #[cfg_attr(feature = "tracing", tracing::instrument(skip(default_account_fn)))] fn modify_account( &mut self, address: Address, modifier: AccountModifierFn, + default_account_fn: &dyn Fn() -> Result, ) -> Result<(), Self::Error> { - let mut account_info = self.trie.basic(address)?.map_or_else( - || AccountInfo { - code: None, - ..AccountInfo::default() - }, - |mut account_info| { + let mut account_info = match self.trie.basic(address)? { + Some(mut account_info) => { // Fill the bytecode if account_info.code_hash != KECCAK_EMPTY { account_info.code = Some( @@ -115,8 +127,9 @@ impl StateDebug for HybridState { } account_info - }, - ); + } + None => default_account_fn()?, + }; let old_code_hash = account_info.code_hash; @@ -142,7 +155,14 @@ impl StateDebug for HybridState { } self.trie.insert_account(address, account_info.clone())?; - self.changes.account_or_insert_mut(&address).info = account_info; + self.changes + .account_or_insert_mut(&address, &|| { + Ok(AccountInfo { + code: None, + ..AccountInfo::default() + }) + }) + .info = account_info; Ok(()) } @@ -169,11 +189,8 @@ impl StateDebug for HybridState { value: U256, ) -> Result<(), Self::Error> { self.trie.set_account_storage_slot(address, index, value)?; - self.changes - .account_or_insert_mut(&address) - .storage - .insert(index, value); + .set_account_storage_slot(&address, &index, value); Ok(()) } @@ -210,7 +227,11 @@ impl StateHistory for HybridState { } #[cfg_attr(feature = "tracing", tracing::instrument)] - fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { + fn set_block_context( + &mut self, + state_root: &B256, + _block_number: Option, + ) -> Result<(), Self::Error> { // Ensure the last layer has a state root if !self.changes.last_layer_mut().has_state_root() { let state_root = self.state_root()?; @@ -222,6 +243,15 @@ impl StateHistory for HybridState { trie: latest_state, }) = self.snapshots.get(state_root) { + // Retain all layers except the first + let stack = self.changes.revert_to_layer(0); + let parent_state_root = self.changes.last_layer_mut().state_root().cloned().unwrap(); + + self.reverted_layers = Some(RevertedLayers { + parent_state_root, + stack, + }); + self.trie = latest_state.clone(); self.changes = changes.clone(); @@ -230,6 +260,37 @@ impl StateHistory for HybridState { return Ok(()); } + // Check whether the state root is contained in the previously reverted layers + let reinstated_layers = self.reverted_layers.take().and_then(|mut reverted_layers| { + let layer_id = reverted_layers + .stack + .iter() + .rev() + .enumerate() + .find_map(|(layer_id, layer)| { + if *layer.state_root().unwrap() == *state_root { + Some(layer_id) + } else { + None + } + }) + .map(|inverted_layer_id| reverted_layers.stack.len() - inverted_layer_id - 1); + + if let Some(layer_id) = layer_id { + reverted_layers.stack.truncate(layer_id + 1); + + Some(reverted_layers) + } else { + None + } + }); + + let state_root = reinstated_layers + .as_ref() + .map_or(state_root, |reinstated_layers| { + &reinstated_layers.parent_state_root + }); + let inverted_layer_id = self .changes .iter() @@ -245,12 +306,30 @@ impl StateHistory for HybridState { if let Some(layer_id) = inverted_layer_id { let layer_id = self.changes.last_layer_id() - layer_id; - self.changes.revert_to_layer(layer_id); + let 
reverted_layers = self.changes.revert_to_layer(layer_id); + let parent_state_root = self.changes.last_layer_mut().state_root().cloned().unwrap(); + + if let Some(mut reinstated_layers) = reinstated_layers { + self.changes.append(&mut reinstated_layers.stack); + } + + self.reverted_layers = if reverted_layers.is_empty() { + None + } else { + Some(RevertedLayers { + parent_state_root, + stack: reverted_layers, + }) + }; + self.trie = TrieState::from(&self.changes); Ok(()) } else { - Err(StateError::InvalidStateRoot(*state_root)) + Err(StateError::InvalidStateRoot { + state_root: *state_root, + is_fork: false, + }) } } diff --git a/crates/rethnet_evm/src/state/layered.rs b/crates/rethnet_evm/src/state/layered.rs index 2ae97667e3..6f96468f76 100644 --- a/crates/rethnet_evm/src/state/layered.rs +++ b/crates/rethnet_evm/src/state/layered.rs @@ -5,11 +5,7 @@ pub use changes::{LayeredChanges, RethnetLayer}; use std::fmt::Debug; use hashbrown::HashMap; -use rethnet_eth::{ - account::BasicAccount, - state::{state_root, storage_root}, - Address, B256, U256, -}; +use rethnet_eth::{Address, B256, U256}; use revm::{ db::StateRef, primitives::{Account, AccountInfo, Bytecode, KECCAK_EMPTY}, @@ -24,6 +20,7 @@ use super::{history::StateHistory, AccountModifierFn, StateDebug, StateError}; #[derive(Clone, Debug, Default)] pub struct LayeredState { changes: LayeredChanges, + /// Snapshots snapshots: SharedMap, true>, } @@ -82,10 +79,7 @@ impl StateDebug for LayeredState { #[cfg_attr(feature = "tracing", tracing::instrument)] fn account_storage_root(&self, address: &Address) -> Result, Self::Error> { - Ok(self - .changes - .account(address) - .map(|account| storage_root(&account.storage))) + Ok(self.changes.storage_root(address)) } #[cfg_attr(feature = "tracing", tracing::instrument)] @@ -99,13 +93,18 @@ impl StateDebug for LayeredState { Ok(()) } - #[cfg_attr(feature = "tracing", tracing::instrument)] + #[cfg_attr(feature = "tracing", tracing::instrument(skip(default_account_fn)))] fn modify_account( &mut self, address: Address, modifier: AccountModifierFn, + default_account_fn: &dyn Fn() -> Result, ) -> Result<(), Self::Error> { - let mut account_info = self.changes.account_or_insert_mut(&address).info.clone(); + let mut account_info = self + .changes + .account_or_insert_mut(&address, default_account_fn) + .info + .clone(); // Fill the bytecode if account_info.code_hash != KECCAK_EMPTY { @@ -138,7 +137,14 @@ impl StateDebug for LayeredState { self.changes.remove_code(&old_code_hash); } - self.changes.account_or_insert_mut(&address).info = account_info; + self.changes + .account_or_insert_mut(&address, &|| { + Ok(AccountInfo { + code: None, + ..AccountInfo::default() + }) + }) + .info = account_info; Ok(()) } @@ -161,36 +167,14 @@ impl StateDebug for LayeredState { value: U256, ) -> Result<(), Self::Error> { self.changes - .account_or_insert_mut(&address) - .storage - .insert(index, value); + .set_account_storage_slot(&address, &index, value); Ok(()) } #[cfg_attr(feature = "tracing", tracing::instrument)] fn state_root(&self) -> Result { - let mut state = HashMap::new(); - - self.changes - .iter() - .flat_map(|layer| layer.accounts()) - .for_each(|(address, account)| { - state - .entry(*address) - .or_insert(account.as_ref().map(|account| BasicAccount { - nonce: account.info.nonce, - balance: account.info.balance, - storage_root: storage_root(&account.storage), - code_hash: account.info.code_hash, - })); - }); - - let state = state - .iter() - .filter_map(|(address, account)| account.as_ref().map(|account| 
(address, account))); - - Ok(state_root(state)) + Ok(self.changes.state_root()) } } @@ -217,7 +201,11 @@ impl StateHistory for LayeredState { } #[cfg_attr(feature = "tracing", tracing::instrument)] - fn set_state_root(&mut self, state_root: &B256) -> Result<(), Self::Error> { + fn set_block_context( + &mut self, + state_root: &B256, + _block_number: Option, + ) -> Result<(), Self::Error> { // Ensure the last layer has a state root if !self.changes.last_layer_mut().has_state_root() { let state_root = self.state_root()?; @@ -250,7 +238,10 @@ impl StateHistory for LayeredState { Ok(()) } else { - Err(StateError::InvalidStateRoot(*state_root)) + Err(StateError::InvalidStateRoot { + state_root: *state_root, + is_fork: false, + }) } } diff --git a/crates/rethnet_evm/src/state/layered/changes.rs b/crates/rethnet_evm/src/state/layered/changes.rs index ffb58bacd4..303417ca0d 100644 --- a/crates/rethnet_evm/src/state/layered/changes.rs +++ b/crates/rethnet_evm/src/state/layered/changes.rs @@ -3,12 +3,16 @@ use std::{collections::BTreeMap, fmt::Debug}; use cita_trie::Hasher; use hashbrown::HashMap; use hasher::HasherKeccak; -use rethnet_eth::{account::KECCAK_EMPTY, state::storage_root, Address, B256, U256}; +use rethnet_eth::{ + account::{BasicAccount, KECCAK_EMPTY}, + state::{state_root, storage_root, Storage}, + Address, B256, U256, +}; use revm::primitives::{Account, AccountInfo, Bytecode}; use crate::{ collections::{SharedMap, SharedMapEntry}, - state::account::RethnetAccount, + state::{account::RethnetAccount, StateError}, }; #[derive(Clone, Debug)] @@ -42,6 +46,11 @@ impl LayeredChanges { pub fn rev(&self) -> impl Iterator { self.stack.iter() } + + /// Appends the provided layers. + pub fn append(&mut self, layers: &mut Vec) { + self.stack.append(layers); + } } impl LayeredChanges { @@ -57,9 +66,9 @@ impl LayeredChanges { /// Reverts to the layer with specified `layer_id`, removing all /// layers above it. #[cfg_attr(feature = "tracing", tracing::instrument)] - pub fn revert_to_layer(&mut self, layer_id: usize) { + pub fn revert_to_layer(&mut self, layer_id: usize) -> Vec { assert!(layer_id < self.stack.len(), "Invalid layer id."); - self.stack.truncate(layer_id + 1); + self.stack.split_off(layer_id + 1) } } @@ -93,8 +102,10 @@ pub struct RethnetLayer { impl RethnetLayer { /// Retrieves an iterator over all accounts. - pub fn accounts(&self) -> impl Iterator)> { - self.accounts.iter() + pub fn accounts(&self) -> impl Iterator)> { + self.accounts + .iter() + .map(|(address, account)| (address, account.as_ref())) } /// Retrieves the contract storage @@ -174,8 +185,12 @@ impl LayeredChanges { /// Retrieves a mutable reference to the account corresponding to the address, if it exists. /// Otherwise, inserts a new account. 
- #[cfg_attr(feature = "tracing", tracing::instrument)] - pub fn account_or_insert_mut(&mut self, address: &Address) -> &mut RethnetAccount { + #[cfg_attr(feature = "tracing", tracing::instrument(skip(default_account_fn)))] + pub fn account_or_insert_mut( + &mut self, + address: &Address, + default_account_fn: &dyn Fn() -> Result, + ) -> &mut RethnetAccount { // WORKAROUND: https://blog.rust-lang.org/2022/08/05/nll-by-default.html if self.last_layer_mut().accounts.contains_key(address) { let was_deleted = self @@ -196,7 +211,11 @@ impl LayeredChanges { } } - let account = self.account(address).cloned().unwrap_or_default(); + let account = self.account(address).cloned().unwrap_or_else(|| { + default_account_fn() + .expect("Default account construction is not allowed to fail") + .into() + }); self.last_layer_mut() .accounts @@ -214,19 +233,19 @@ impl LayeredChanges { // Removes account only if it exists, so safe to use for empty, touched accounts self.remove_account(address); } else { - let old_account = self.account_or_insert_mut(address); + let old_account = self.account_or_insert_mut(address, &|| { + Ok(AccountInfo { + code: None, + ..AccountInfo::default() + }) + }); if account.storage_cleared { old_account.storage.clear(); } account.storage.iter().for_each(|(index, value)| { - let value = value.present_value(); - if value == U256::ZERO { - old_account.storage.remove(index); - } else { - old_account.storage.insert(*index, value); - } + old_account.storage.insert(*index, value.present_value()); }); let mut account_info = account.info.clone(); @@ -280,7 +299,26 @@ impl LayeredChanges { /// Serializes the state using ordering of addresses and storage indices. #[cfg_attr(feature = "tracing", tracing::instrument)] pub fn serialize(&self) -> String { - let mut state = BTreeMap::new(); + let mut state = HashMap::new(); + + self.rev() + .flat_map(|layer| layer.accounts()) + .for_each(|(address, account)| { + if let Some(new_account) = account { + state + .entry(*address) + .and_modify(|account: &mut RethnetAccount| { + account.info = new_account.info.clone(); + + new_account.storage.iter().for_each(|(index, value)| { + account.storage.insert(*index, *value); + }); + }) + .or_insert_with(|| new_account.clone()); + } else { + state.remove(address); + } + }); #[derive(serde::Serialize)] struct StateAccount { @@ -296,43 +334,119 @@ impl LayeredChanges { pub storage_root: B256, } - self.iter() + let state: BTreeMap<_, _> = state + .into_iter() + .map(|(address, mut account)| { + account.storage.retain(|_index, value| *value != U256::ZERO); + + let storage_root = storage_root(&account.storage); + + // Sort entries + let storage: BTreeMap = account + .storage + .iter() + .map(|(index, value)| { + let hashed_index = HasherKeccak::new().digest(&index.to_be_bytes::<32>()); + + (B256::from_slice(&hashed_index), *value) + }) + .collect(); + + let account = StateAccount { + balance: account.info.balance, + nonce: account.info.nonce, + code_hash: account.info.code_hash, + storage_root, + storage, + }; + + (address, account) + }) + .collect(); + + serde_json::to_string_pretty(&state).unwrap() + } + + /// Sets the storage slot at the specified address and index to the provided value. 
+ #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn set_account_storage_slot(&mut self, address: &Address, index: &U256, value: U256) { + self.account_or_insert_mut(address, &|| { + Ok(AccountInfo { + code: None, + ..AccountInfo::default() + }) + }) + .storage + .insert(*index, value); + } + + /// Retrieves the trie's state root. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn state_root(&self) -> B256 { + let mut state = HashMap::new(); + + self.rev() .flat_map(|layer| layer.accounts()) .for_each(|(address, account)| { - state.entry(*address).or_insert_with(|| { - account.as_ref().map(|account| { - let storage_root = storage_root(&account.storage); - - // Sort entries - let storage: BTreeMap = account - .storage - .iter() - .map(|(index, value)| { - let hashed_index = - HasherKeccak::new().digest(&index.to_be_bytes::<32>()); - - (B256::from_slice(&hashed_index), *value) - }) - .collect(); - - StateAccount { - balance: account.info.balance, - nonce: account.info.nonce, - code_hash: account.info.code_hash, - storage_root, - storage, - } - }) - }); + if let Some(new_account) = account { + state + .entry(*address) + .and_modify(|account: &mut RethnetAccount| { + account.info = new_account.info.clone(); + + new_account.storage.iter().for_each(|(index, value)| { + account.storage.insert(*index, *value); + }); + }) + .or_insert_with(|| new_account.clone()); + } else { + state.remove(address); + } }); - // Remove deleted entries - let state: BTreeMap<_, _> = state + let state: HashMap<_, _> = state .into_iter() - .filter_map(|(address, account)| account.map(|account| (address, account))) + .map(|(address, account)| { + let account = BasicAccount { + nonce: account.info.nonce, + balance: account.info.balance, + storage_root: storage_root(&account.storage), + code_hash: account.info.code_hash, + }; + (address, account) + }) .collect(); - serde_json::to_string_pretty(&state).unwrap() + state_root(&state) + } + + /// Retrieves the storage root of the account at the specified address. + #[cfg_attr(feature = "tracing", tracing::instrument)] + pub fn storage_root(&self, address: &Address) -> Option { + let mut exists = false; + let mut storage = Storage::default(); + + self.rev() + .flat_map(|layer| layer.accounts.get(address)) + .for_each(|account| { + if let Some(account) = account { + account.storage.iter().for_each(|(index, value)| { + storage.insert(*index, *value); + }); + + exists = true; + } else { + storage.clear(); + } + }); + + if exists { + storage.retain(|_index, value| *value != U256::ZERO); + + Some(storage_root(&storage)) + } else { + None + } } /// Inserts the provided bytecode using its hash, potentially overwriting an existing value. 
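
(Editor's aside, not part of the patch: a short sketch of the calling convention established by the new `account_or_insert_mut` and `set_account_storage_slot` helpers above. `LayeredChanges<RethnetLayer>` and `RethnetAccount` are crate-internal, so the snippet assumes it lives inside `rethnet_evm`; the function name is illustrative.)

    use rethnet_eth::{Address, U256};
    use revm::primitives::AccountInfo;

    fn credit_account(changes: &mut LayeredChanges<RethnetLayer>, address: Address) {
        // A missing account is materialized through the caller-supplied default.
        let account = changes.account_or_insert_mut(&address, &|| {
            Ok(AccountInfo {
                code: None,
                ..AccountInfo::default()
            })
        });
        account.info.balance += U256::from(1);

        // Zero values are now recorded as explicit entries in the layer rather than
        // deletions; they are filtered out again when storage roots are computed.
        changes.set_account_storage_slot(&address, &U256::from(1), U256::ZERO);
    }
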
@@ -350,7 +464,13 @@ impl LayeredChanges { self.insert_code(code); } - self.account_or_insert_mut(address).info = account_info; + self.account_or_insert_mut(address, &|| { + Ok(AccountInfo { + code: None, + ..AccountInfo::default() + }) + }) + .info = account_info; } } diff --git a/crates/rethnet_evm/src/state/remote.rs b/crates/rethnet_evm/src/state/remote.rs index b8da9b5dba..cf6b461407 100644 --- a/crates/rethnet_evm/src/state/remote.rs +++ b/crates/rethnet_evm/src/state/remote.rs @@ -1,89 +1,114 @@ +mod cached; + +use std::sync::Arc; + use revm::{ db::StateRef, primitives::{AccountInfo, Bytecode}, }; -use tokio::runtime::{Builder, Runtime}; +use tokio::runtime::Runtime; use rethnet_eth::{ - remote::{RpcClient, RpcClientError}, + remote::{BlockSpec, RpcClient}, Address, B256, U256, }; -/// An revm database backed by a remote Ethereum node -#[derive(Debug)] -pub struct RemoteDatabase { - client: RpcClient, - runtime: Runtime, -} +use super::StateError; -/// Errors that might be returned from RemoteDatabase -#[derive(thiserror::Error, Debug)] -pub enum RemoteDatabaseError { - #[error(transparent)] - RpcError(#[from] RpcClientError), +pub use cached::CachedRemoteState; - /// Some other error from an underlying dependency - #[error(transparent)] - OtherError(#[from] std::io::Error), +/// A state backed by a remote Ethereum node +#[derive(Debug)] +pub struct RemoteState { + client: RpcClient, + runtime: Arc, + block_number: U256, } -impl RemoteDatabase { - /// Construct a new RemoteDatabse given the URL of a remote Ethereum node. - #[cfg_attr(feature = "tracing", tracing::instrument)] - pub fn new(url: &str) -> Self { +impl RemoteState { + /// Constructs a new RemoteState given the URL of a remote Ethereum node and a + /// block number from which data will be pulled. + pub fn new(runtime: Arc, url: &str, block_number: U256) -> Self { Self { client: RpcClient::new(url), - runtime: Builder::new_multi_thread() - .enable_io() - .enable_time() - .build() - .expect("failed to construct async runtime"), + runtime, + block_number, } } + /// Retrieves the current block number + pub fn block_number(&self) -> &U256 { + &self.block_number + } + + /// Sets the block number used for calls to the remote Ethereum node. + pub fn set_block_number(&mut self, block_number: &U256) { + self.block_number = *block_number; + } + /// Retrieve the state root of the given block - pub fn state_root(&self, block_number: u64) -> Result { - Ok(self - .runtime - .block_on(self.client.get_block_by_number(block_number, false))? - .state_root) + pub fn state_root(&self, block_number: U256) -> Result { + Ok(tokio::task::block_in_place(move || { + self.runtime.block_on( + self.client + .get_block_by_number(BlockSpec::Number(block_number)), + ) + })?
+ .state_root) } } -impl StateRef for RemoteDatabase { - type Error = RemoteDatabaseError; +impl StateRef for RemoteState { + type Error = StateError; #[cfg_attr(feature = "tracing", tracing::instrument)] fn basic(&self, address: Address) -> Result, Self::Error> { - Ok(Some( + Ok(Some(tokio::task::block_in_place(move || { self.runtime - .block_on(self.client.get_account_info(&address, None)) - .map_err(RemoteDatabaseError::RpcError)?, - )) + .block_on( + self.client + .get_account_info(&address, BlockSpec::Number(self.block_number)), + ) + .map_err(StateError::Remote) + })?)) } - /// unimplemented - fn code_by_hash(&self, _code_hash: B256) -> Result { - unimplemented!(); + fn code_by_hash(&self, code_hash: B256) -> Result { + Err(StateError::InvalidCodeHash(code_hash)) } #[cfg_attr(feature = "tracing", tracing::instrument)] fn storage(&self, address: Address, index: U256) -> Result { - self.runtime - .block_on(self.client.get_storage_at(&address, index, None)) - .map_err(RemoteDatabaseError::RpcError) + tokio::task::block_in_place(move || { + self.runtime + .block_on(self.client.get_storage_at( + &address, + index, + BlockSpec::Number(self.block_number), + )) + .map_err(StateError::Remote) + }) } } -#[cfg(test)] +#[cfg(all(test, not(feature = "test-disable-remote")))] mod tests { - use super::*; - use std::str::FromStr; - #[test_with::env(ALCHEMY_URL)] + use tokio::runtime::Builder; + + use super::*; + #[test] fn basic_success() { + let runtime = Arc::new( + Builder::new_multi_thread() + .enable_io() + .enable_time() + .build() + .expect("failed to construct async runtime"), + ); + let alchemy_url = std::env::var_os("ALCHEMY_URL") .expect("ALCHEMY_URL environment variable not defined") .into_string() @@ -92,10 +117,11 @@ mod tests { let dai_address = Address::from_str("0x6b175474e89094c44da98b954eedeac495271d0f") .expect("failed to parse address"); - let account_info: AccountInfo = RemoteDatabase::new(&alchemy_url) - .basic(dai_address) - .expect("should succeed") - .unwrap(); + let account_info: AccountInfo = + RemoteState::new(runtime, &alchemy_url, U256::from(16643427)) + .basic(dai_address) + .expect("should succeed") + .unwrap(); assert_eq!(account_info.balance, U256::from(0)); assert_eq!(account_info.nonce, 1); diff --git a/crates/rethnet_evm/src/state/remote/cached.rs b/crates/rethnet_evm/src/state/remote/cached.rs new file mode 100644 index 0000000000..b1cf4b89fa --- /dev/null +++ b/crates/rethnet_evm/src/state/remote/cached.rs @@ -0,0 +1,110 @@ +use hashbrown::{hash_map::Entry, HashMap}; +use rethnet_eth::{Address, B256, U256}; +use revm::{ + db::{State, StateRef}, + primitives::{AccountInfo, Bytecode}, +}; + +use crate::state::{account::RethnetAccount, StateError}; + +use super::RemoteState; + +/// A cached version of [`RemoteState`]. +#[derive(Debug)] +pub struct CachedRemoteState { + remote: RemoteState, + /// Mapping of block numbers to cached accounts + account_cache: HashMap>, + /// Mapping of block numbers to cached code + code_cache: HashMap>, +} + +impl CachedRemoteState { + /// Constructs a new [`CachedRemoteState`]. + pub fn new(remote: RemoteState) -> Self { + Self { + remote, + account_cache: HashMap::new(), + code_cache: HashMap::new(), + } + } + + /// Sets the block number used for calls to the remote Ethereum node. 
+ pub fn set_block_number(&mut self, block_number: &U256) { + self.remote.set_block_number(block_number); + } +} + +impl State for CachedRemoteState { + type Error = StateError; + + fn basic(&mut self, address: Address) -> Result, Self::Error> { + let block_accounts = self + .account_cache + .entry(*self.remote.block_number()) + .or_default(); + + if let Some(account) = block_accounts.get(&address) { + return Ok(Some(account.info.clone())); + } + + if let Some(mut account_info) = self.remote.basic(address)? { + // Split code and store separately + if let Some(code) = account_info.code.take() { + let block_code = self + .code_cache + .entry(*self.remote.block_number()) + .or_default(); + + block_code.entry(code.hash()).or_insert(code); + } + + block_accounts.insert(address, account_info.clone().into()); + + return Ok(Some(account_info)); + } + + Ok(None) + } + + fn code_by_hash(&mut self, code_hash: B256) -> Result { + let block_code = self + .code_cache + .entry(*self.remote.block_number()) + .or_default(); + + block_code + .get(&code_hash) + .cloned() + .ok_or(StateError::InvalidCodeHash(code_hash)) + } + + fn storage(&mut self, address: Address, index: U256) -> Result { + let block_accounts = self + .account_cache + .entry(*self.remote.block_number()) + .or_default(); + + Ok(match block_accounts.entry(address) { + Entry::Occupied(mut account_entry) => { + match account_entry.get_mut().storage.entry(index) { + Entry::Occupied(entry) => *entry.get(), + Entry::Vacant(entry) => *entry.insert(self.remote.storage(address, index)?), + } + } + Entry::Vacant(account_entry) => { + // account needs to be loaded for us to access slots. + let mut account = self + .remote + .basic(address)? + .map_or_else(RethnetAccount::default, RethnetAccount::from); + + let value = self.remote.storage(address, index)?; + account.storage.insert(index, value); + + account_entry.insert(account); + value + } + }) + } +} diff --git a/crates/rethnet_evm/src/state/trie.rs b/crates/rethnet_evm/src/state/trie.rs index d6dc481fff..83fcc44524 100644 --- a/crates/rethnet_evm/src/state/trie.rs +++ b/crates/rethnet_evm/src/state/trie.rs @@ -137,13 +137,10 @@ impl StateDebug for TrieState { &mut self, address: Address, modifier: super::AccountModifierFn, + default_account_fn: &dyn Fn() -> Result, ) -> Result<(), Self::Error> { - let mut account_info = self.accounts.account(&address).map_or_else( - || AccountInfo { - code: None, - ..AccountInfo::default() - }, - |account| { + let mut account_info = match self.accounts.account(&address) { + Some(account) => { let mut account_info = AccountInfo::from(account); // Fill the bytecode @@ -155,8 +152,9 @@ impl StateDebug for TrieState { } account_info - }, - ); + } + None => default_account_fn()?, + }; let old_code_hash = account_info.code_hash; diff --git a/crates/rethnet_evm/src/state/trie/account.rs b/crates/rethnet_evm/src/state/trie/account.rs index 3c2e89253e..061fed6af3 100644 --- a/crates/rethnet_evm/src/state/trie/account.rs +++ b/crates/rethnet_evm/src/state/trie/account.rs @@ -62,33 +62,45 @@ impl AccountTrie { C: IntoIterator>, { let state_trie_db = Arc::new(MemoryDB::new(true)); - let hasher = Arc::new(HasherKeccak::new()); let mut storage_trie_dbs = HashMap::new(); let state_root = { - let mut state_trie = Trie::new(state_trie_db.clone(), hasher.clone()); + let mut state_trie = Trie::new(state_trie_db.clone(), Arc::new(HasherKeccak::new())); layers.into_iter().for_each(|layer| { layer.into_iter().for_each(|(address, change)| { if let Some((mut account, storage)) = change { 
- let storage_trie_db = Arc::new(MemoryDB::new(true)); + let (storage_trie_db, storage_root) = + storage_trie_dbs.entry(*address).or_insert_with(|| { + let storage_trie_db = Arc::new(MemoryDB::new(true)); + let storage_root = { + let mut storage_trie = Trie::new( + storage_trie_db.clone(), + Arc::new(HasherKeccak::new()), + ); + B256::from_slice(&storage_trie.root().unwrap()) + }; + + (storage_trie_db, storage_root) + }); - let storage_root = { - let mut storage_trie = - Trie::new(storage_trie_db.clone(), hasher.clone()); + { + let mut storage_trie = Trie::from( + storage_trie_db.clone(), + Arc::new(HasherKeccak::new()), + storage_root.as_bytes(), + ) + .expect("Invalid storage root"); - storage.iter().for_each(|(index, value): (&U256, &U256)| { + storage.iter().for_each(|(index, value)| { Self::set_account_storage_slot_in(index, value, &mut storage_trie); }); - B256::from_slice(&storage_trie.root().unwrap()) + *storage_root = B256::from_slice(&storage_trie.root().unwrap()); }; - // Overwrites any existing storage in the process, as we receive the complete storage every change - storage_trie_dbs.insert(*address, (storage_trie_db, storage_root)); - - account.storage_root = storage_root; + account.storage_root = *storage_root; let hashed_address = HasherKeccak::new().digest(address.as_bytes()); state_trie @@ -486,7 +498,7 @@ impl Default for AccountTrie { mod tests { use rethnet_eth::{ account::KECCAK_EMPTY, - state::{state_root, Storage}, + state::{state_root, storage_root, Storage}, trie::KECCAK_NULL_RLP, }; @@ -575,14 +587,20 @@ mod tests { #[test] fn from_changes_one_layer() { const DUMMY_ADDRESS: [u8; 20] = [1u8; 20]; + const DUMMY_STORAGE_SLOT_INDEX: u64 = 100; + const DUMMY_STORAGE_SLOT_VALUE: u64 = 100; let expected_address = Address::from(DUMMY_ADDRESS); - let expected_storage = Storage::new(); + let expected_index = U256::from(DUMMY_STORAGE_SLOT_INDEX); + let expected_storage_value = U256::from(DUMMY_STORAGE_SLOT_VALUE); + + let mut expected_storage = Storage::new(); + expected_storage.insert(expected_index, expected_storage_value); let expected_account = BasicAccount { nonce: 1, balance: U256::from(100u32), - storage_root: KECCAK_NULL_RLP, + storage_root: storage_root(expected_storage.iter()), code_hash: KECCAK_EMPTY, }; @@ -592,67 +610,106 @@ mod tests { )]]; let state = AccountTrie::from_changes(changes); - let state_trie = Trie::from( - state.state_trie_db.clone(), - Arc::new(HasherKeccak::new()), - state.state_root.as_bytes(), - ) - .expect("Invalid state root"); - - let account = state_trie - .get(&HasherKeccak::new().digest(expected_address.as_bytes())) - .unwrap() - .expect("Account must exist"); - - let account: BasicAccount = rlp::decode(&account).expect("Failed to decode account"); + let account = state.account(&expected_address); + assert_eq!(account, Some(expected_account)); - assert_eq!(account, expected_account); + let storage_value = state.account_storage_slot(&expected_address, &expected_index); + assert_eq!(storage_value, Some(expected_storage_value)); } #[test] fn from_changes_two_layers() { const DUMMY_ADDRESS: [u8; 20] = [1u8; 20]; + const DUMMY_STORAGE_SLOT_INDEX: u64 = 100; + const DUMMY_STORAGE_SLOT_VALUE1: u64 = 50; + const DUMMY_STORAGE_SLOT_VALUE2: u64 = 100; let expected_address = Address::from(DUMMY_ADDRESS); - let expected_storage = Storage::new(); + let expected_index = U256::from(DUMMY_STORAGE_SLOT_INDEX); + let expected_storage_value = U256::from(DUMMY_STORAGE_SLOT_VALUE2); - let account_layer1 = BasicAccount { + let mut storage_layer1 = 
Storage::new(); + storage_layer1.insert(expected_index, U256::from(DUMMY_STORAGE_SLOT_VALUE1)); + + let init_account = BasicAccount { nonce: 1, balance: U256::from(100u32), - storage_root: KECCAK_NULL_RLP, + storage_root: storage_root(storage_layer1.iter()), code_hash: KECCAK_EMPTY, }; - let account_layer2 = BasicAccount { + let mut storage_layer2 = Storage::new(); + storage_layer2.insert(expected_index, expected_storage_value); + + let expected_account = BasicAccount { nonce: 2, balance: U256::from(200u32), - storage_root: KECCAK_NULL_RLP, + storage_root: storage_root(storage_layer2.iter()), code_hash: KECCAK_EMPTY, }; let changes: Vec>> = vec![ - vec![(&expected_address, Some((account_layer1, &expected_storage)))], + vec![(&expected_address, Some((init_account, &storage_layer1)))], vec![( &expected_address, - Some((account_layer2.clone(), &expected_storage)), + Some((expected_account.clone(), &storage_layer2)), )], ]; let state = AccountTrie::from_changes(changes); - let state_trie = Trie::from( - state.state_trie_db.clone(), - Arc::new(HasherKeccak::new()), - state.state_root.as_bytes(), - ) - .expect("Invalid state root"); + let account = state.account(&expected_address); + assert_eq!(account, Some(expected_account)); - let account = state_trie - .get(&HasherKeccak::new().digest(expected_address.as_bytes())) - .unwrap() - .expect("Account must exist"); + let storage_value = state.account_storage_slot(&expected_address, &expected_index); + assert_eq!(storage_value, Some(expected_storage_value)); + } + + #[test] + fn from_changes_remove_zeroed_storage_slot() { + const DUMMY_ADDRESS: [u8; 20] = [1u8; 20]; + const DUMMY_STORAGE_SLOT_INDEX: u64 = 100; + const DUMMY_STORAGE_SLOT_VALUE: u64 = 100; + + let expected_address = Address::from(DUMMY_ADDRESS); + let expected_index = U256::from(DUMMY_STORAGE_SLOT_INDEX); + + let mut storage_layer1 = Storage::new(); + storage_layer1.insert(expected_index, U256::from(DUMMY_STORAGE_SLOT_VALUE)); + + let init_account = BasicAccount { + nonce: 1, + balance: U256::from(100u32), + storage_root: storage_root(storage_layer1.iter()), + code_hash: KECCAK_EMPTY, + }; + + let mut storage_layer2 = Storage::new(); + storage_layer2.insert(U256::from(100), U256::ZERO); + + let expected_account = BasicAccount { + nonce: 2, + balance: U256::from(200u32), + storage_root: storage_root( + storage_layer2 + .iter() + .filter(|(_index, value)| **value != U256::ZERO), + ), + code_hash: KECCAK_EMPTY, + }; + + let changes: Vec>> = vec![ + vec![(&expected_address, Some((init_account, &storage_layer1)))], + vec![( + &expected_address, + Some((expected_account.clone(), &storage_layer2)), + )], + ]; + let state = AccountTrie::from_changes(changes); - let account: BasicAccount = rlp::decode(&account).expect("Failed to decode account"); + let account = state.account(&expected_address); + assert_eq!(account, Some(expected_account)); - assert_eq!(account, account_layer2); + let storage_value = state.account_storage_slot(&expected_address, &expected_index); + assert_eq!(storage_value, None); } } diff --git a/crates/rethnet_evm/src/transaction.rs b/crates/rethnet_evm/src/transaction.rs index f0a936fc15..9357c4f625 100644 --- a/crates/rethnet_evm/src/transaction.rs +++ b/crates/rethnet_evm/src/transaction.rs @@ -22,7 +22,7 @@ pub enum TransactionError { #[error(transparent)] BlockHash(BE), /// Corrupt transaction data - #[error("Invalid transaction")] + #[error("Invalid transaction: {0:?}")] InvalidTransaction(InvalidTransaction), /// The transaction is expected to have a prevrandao, as 
the executor's config is on a post-merge hardfork. #[error("Post-merge transaction is missing prevrandao")] diff --git a/crates/rethnet_evm_napi/Cargo.toml b/crates/rethnet_evm_napi/Cargo.toml index e05efaf300..a15cdaa065 100644 --- a/crates/rethnet_evm_napi/Cargo.toml +++ b/crates/rethnet_evm_napi/Cargo.toml @@ -11,7 +11,6 @@ crossbeam-channel = { version = "0.5.6", default-features = false } # when napi is pinned, be sure to pin napi-derive to the same version napi = { version = "2.12.4", default-features = false, features = ["async", "error_anyhow", "napi8", "serde-json"] } napi-derive = "2.12.3" -once_cell = "1.15.0" rethnet_evm = { version = "0.1.0-dev", path = "../rethnet_evm" } rethnet_eth = { version = "0.1.0-dev", path = "../rethnet_eth" } secp256k1 = { version = "0.24.0", default-features = false, features = ["alloc"] } @@ -19,6 +18,7 @@ serde_json = { version = "1.0.85", default-features = false, features = ["alloc" tracing = { version = "0.1.37", default-features = false, features = ["std"] } tracing-flame = { version = "0.2.0", default-features = false, features = ["smallvec"] } tracing-subscriber = { version = "0.3.16", default-features = false, features = ["ansi", "env-filter", "fmt", "parking_lot", "smallvec", "std"] } +parking_lot = { version = "0.12.1", default-features = false } [build-dependencies] napi-build = "2.0.1" diff --git a/crates/rethnet_evm_napi/src/context.rs b/crates/rethnet_evm_napi/src/context.rs index 54b3d756fd..da47dcafbf 100644 --- a/crates/rethnet_evm_napi/src/context.rs +++ b/crates/rethnet_evm_napi/src/context.rs @@ -1,10 +1,14 @@ use std::{io, sync::Arc}; use napi::{ + bindgen_prelude::Buffer, tokio::runtime::{Builder, Runtime}, Status, }; use napi_derive::napi; +use parking_lot::Mutex; +use rethnet_eth::B256; +use rethnet_evm::RandomHashGenerator; use tracing_subscriber::{prelude::*, EnvFilter, Registry}; #[napi] @@ -32,11 +36,20 @@ impl RethnetContext { inner: Arc::new(context), }) } + + /// Overwrites the next value generated with the provided seed. + #[napi] + pub fn set_hash_generator_seed(&self, seed: Buffer) { + let seed = B256::from_slice(&seed); + + self.inner.hash_generator.lock().set_next(seed); + } } #[derive(Debug)] pub struct Context { runtime: Arc, + hash_generator: Arc>, #[cfg(feature = "tracing")] _tracing_write_guard: tracing_flame::FlushGuard>, } @@ -69,17 +82,28 @@ impl Context { tracing::subscriber::set_global_default(subscriber) .expect("Could not set global default tracing subscriber"); - let runtime = Builder::new_multi_thread().build()?; + let runtime = Builder::new_multi_thread() + .enable_io() + .enable_time() + .build()?; + + let hash_generator = Arc::new(Mutex::new(RandomHashGenerator::with_seed("seed"))); Ok(Self { runtime: Arc::new(runtime), + hash_generator, #[cfg(feature = "tracing")] _tracing_write_guard: guard, }) } + /// Retrieves the context's hash generator. + pub fn hash_generator(&self) -> &Arc> { + &self.hash_generator + } + /// Retrieves the context's runtime. 
- pub fn runtime(&self) -> &Runtime { + pub fn runtime(&self) -> &Arc { &self.runtime } } diff --git a/crates/rethnet_evm_napi/src/state.rs b/crates/rethnet_evm_napi/src/state.rs index 204c51eac4..e6e281870b 100644 --- a/crates/rethnet_evm_napi/src/state.rs +++ b/crates/rethnet_evm_napi/src/state.rs @@ -9,12 +9,12 @@ use std::{ use napi::{ bindgen_prelude::{BigInt, Buffer, ObjectFinalize}, tokio::sync::RwLock, - Env, JsFunction, JsObject, NapiRaw, Status, + Env, JsFunction, JsObject, JsString, NapiRaw, Status, }; use napi_derive::napi; use rethnet_eth::{signature::private_key_to_address, Address, Bytes, B256, U256}; use rethnet_evm::{ - state::{AccountModifierFn, HybridState, StateError, StateHistory, SyncState}, + state::{AccountModifierFn, ForkState, HybridState, StateError, StateHistory, SyncState}, AccountInfo, Bytecode, HashMap, KECCAK_EMPTY, }; use secp256k1::Secp256k1; @@ -47,6 +47,36 @@ pub struct GenesisAccount { pub balance: BigInt, } +#[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] +fn genesis_accounts(accounts: Vec) -> napi::Result> { + let signer = Secp256k1::signing_only(); + + accounts + .into_iter() + .map(|account| { + let address = private_key_to_address(&signer, &account.private_key) + .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string()))?; + TryCast::::try_cast(account.balance).map(|balance| { + let account_info = AccountInfo { + balance, + ..Default::default() + }; + + (address, account_info) + }) + }) + .collect::>>() +} + +// Mimic precompiles activation +fn add_precompiles(accounts: &mut HashMap) { + for idx in 1..=8 { + let mut address = Address::zero(); + address.0[19] = idx; + accounts.insert(address, AccountInfo::default()); + } +} + /// The Rethnet state #[napi(custom_finalize)] #[derive(Debug)] @@ -61,7 +91,13 @@ impl StateManager { #[napi(constructor)] #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn new(mut env: Env, context: &RethnetContext) -> napi::Result { - Self::with_accounts(&mut env, context, HashMap::default()) + let mut accounts = HashMap::new(); + add_precompiles(&mut accounts); + + let mut state = HybridState::with_accounts(accounts); + state.checkpoint().unwrap(); + + Self::with_state(&mut env, context, state) } /// Constructs a [`StateManager`] with the provided accounts present in the genesis state. @@ -72,44 +108,39 @@ impl StateManager { context: &RethnetContext, accounts: Vec, ) -> napi::Result { - let signer = Secp256k1::signing_only(); - let genesis_accounts = accounts - .into_iter() - .map(|account| { - let address = private_key_to_address(&signer, &account.private_key) - .map_err(|e| napi::Error::new(Status::InvalidArg, e.to_string()))?; - TryCast::::try_cast(account.balance).map(|balance| { - let account_info = AccountInfo { - balance, - ..Default::default() - }; - - (address, account_info) - }) - }) - .collect::>>()?; + let mut accounts = genesis_accounts(accounts)?; + add_precompiles(&mut accounts); - Self::with_accounts(&mut env, context, genesis_accounts) + let mut state = HybridState::with_accounts(accounts); + state.checkpoint().unwrap(); + + Self::with_state(&mut env, context, state) } - #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - fn with_accounts( - env: &mut Env, + /// Constructs a [`StateManager`] that uses the remote node and block number as the basis for + /// its state. 
+ #[napi(factory)] + pub fn fork_remote( + mut env: Env, context: &RethnetContext, - mut accounts: HashMap, + remote_node_url: JsString, + fork_block_number: BigInt, + accounts: Vec, ) -> napi::Result { - // Mimic precompiles activation - for idx in 1..=8 { - let mut address = Address::zero(); - address.0[19] = idx; - accounts.insert(address, AccountInfo::default()); - } - - let mut state = HybridState::with_accounts(accounts); - - state.checkpoint().unwrap(); - - Self::with_state(env, context, state) + let fork_block_number: U256 = BigInt::try_cast(fork_block_number)?; + + let accounts = genesis_accounts(accounts)?; + Self::with_state( + &mut env, + context, + ForkState::new( + context.as_inner().runtime().clone(), + context.as_inner().hash_generator().clone(), + remote_node_url.into_utf8()?.as_str()?, + fork_block_number, + accounts, + ), + ) } #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] @@ -410,6 +441,12 @@ impl StateManager { *code = new_account.code; }, )), + &|| { + Ok(AccountInfo { + code: None, + ..AccountInfo::default() + }) + }, ) .map_err(|e| napi::Error::new(Status::GenericFailure, e.to_string())); @@ -500,15 +537,22 @@ impl StateManager { /// Reverts the state to match the specified state root. #[napi] #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] - pub async fn set_state_root(&self, state_root: Buffer) -> napi::Result<()> { + pub async fn set_block_context( + &self, + state_root: Buffer, + block_number: Option, + ) -> napi::Result<()> { let state_root = B256::from_slice(&state_root); + let block_number: Option = block_number.map_or(Ok(None), |number| { + BigInt::try_cast(number).map(Option::Some) + })?; let state = self.state.clone(); self.context .runtime() .spawn(async move { let mut state = state.write().await; - state.set_state_root(&state_root) + state.set_block_context(&state_root, block_number) }) .await .unwrap() diff --git a/crates/rethnet_evm_napi/src/transaction/result.rs b/crates/rethnet_evm_napi/src/transaction/result.rs index 34e1d0e188..7b168f1797 100644 --- a/crates/rethnet_evm_napi/src/transaction/result.rs +++ b/crates/rethnet_evm_napi/src/transaction/result.rs @@ -140,7 +140,6 @@ pub struct ExecutionResult { } impl ExecutionResult { - #[cfg_attr(feature = "tracing", tracing::instrument(skip_all))] pub fn new(env: &Env, result: &rethnet_evm::ExecutionResult) -> napi::Result { let result = match result { rethnet_evm::ExecutionResult::Success { diff --git a/crates/rethnet_evm_napi/test/evm/StateManager.ts b/crates/rethnet_evm_napi/test/evm/StateManager.ts index e65f9e9fa0..149720909b 100644 --- a/crates/rethnet_evm_napi/test/evm/StateManager.ts +++ b/crates/rethnet_evm_napi/test/evm/StateManager.ts @@ -13,75 +13,96 @@ describe("State Manager", () => { const context = new RethnetContext(); - let stateManager: StateManager; + const stateManagers = [ + { name: "default", getStateManager: () => new StateManager(context) }, + ]; - beforeEach(function () { - stateManager = new StateManager(context); - }); - - // TODO: insertBlock, setAccountCode, setAccountStorageSlot - it("getAccountByAddress", async () => { - await stateManager.insertAccount(caller.buf, { - nonce: 0n, - balance: 0n, + const alchemyUrl = process.env.ALCHEMY_URL; + if (alchemyUrl === undefined) { + console.log( + "WARNING: skipping fork tests because the ALCHEMY_URL environment variable is undefined" + ); + } else { + stateManagers.push({ + name: "fork", + getStateManager: () => + StateManager.forkRemote(context, alchemyUrl, BigInt(16220843), []), }); - let 
account = await stateManager.getAccountByAddress(caller.buf); + } - expect(account?.balance).to.equal(0n); - expect(account?.nonce).to.equal(0n); - }); + for (const { name, getStateManager } of stateManagers) { + describe(`With the ${name} StateManager`, () => { + let stateManager: StateManager; - it("setAccountBalance", async () => { - await stateManager.insertAccount(caller.buf, { - nonce: 0n, - balance: 0n, - }); + beforeEach(function () { + stateManager = getStateManager(); + }); - await stateManager.modifyAccount( - caller.buf, - async function ( - _balance: bigint, - nonce: bigint, - code: Bytecode | undefined - ): Promise { - return { - balance: 100n, - nonce, - code, - }; - } - ); + // TODO: insertBlock, setAccountCode, setAccountStorageSlot + it("getAccountByAddress", async () => { + await stateManager.insertAccount(caller.buf, { + nonce: 0n, + balance: 0n, + }); + let account = await stateManager.getAccountByAddress(caller.buf); - let account = await stateManager.getAccountByAddress(caller.buf); + expect(account?.balance).to.equal(0n); + expect(account?.nonce).to.equal(0n); + }); - expect(account?.balance).to.equal(100n); - expect(account?.nonce).to.equal(0n); - }); + it("setAccountBalance", async () => { + await stateManager.insertAccount(caller.buf, { + nonce: 0n, + balance: 0n, + }); - it("setAccountNonce", async () => { - await stateManager.insertAccount(caller.buf, { - nonce: 0n, - balance: 0n, - }); + await stateManager.modifyAccount( + caller.buf, + async function ( + _balance: bigint, + nonce: bigint, + code: Bytecode | undefined + ): Promise { + return { + balance: 100n, + nonce, + code, + }; + } + ); - await stateManager.modifyAccount( - caller.buf, - async function ( - balance: bigint, - nonce: bigint, - code: Bytecode | undefined - ): Promise { - return { - balance, - nonce: 5n, - code, - }; - } - ); + let account = await stateManager.getAccountByAddress(caller.buf); + + expect(account?.balance).to.equal(100n); + expect(account?.nonce).to.equal(0n); + }); + + it("setAccountNonce", async () => { + await stateManager.insertAccount(caller.buf, { + nonce: 0n, + balance: 0n, + }); - let account = await stateManager.getAccountByAddress(caller.buf); + await stateManager.modifyAccount( + caller.buf, + async function ( + balance: bigint, + nonce: bigint, + code: Bytecode | undefined + ): Promise { + return { + balance, + nonce: 5n, + code, + }; + } + ); - expect(account?.balance).to.equal(0n); - expect(account?.nonce).to.equal(5n); - }); + let account = await stateManager.getAccountByAddress(caller.buf); + + expect(account?.balance).to.equal(0n); + expect(account?.nonce).to.equal(5n); + }); + }); + } }); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts index 1e148ee8ea..555cbc045c 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/RethnetState.ts @@ -4,7 +4,8 @@ import { toBuffer, } from "@nomicfoundation/ethereumjs-util"; import { StateManager, Account, Bytecode, RethnetContext } from "rethnet-evm"; -import { GenesisAccount } from "./node-types"; +import { ForkConfig, GenesisAccount } from "./node-types"; +import { makeForkProvider } from "./utils/makeForkClient"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/no-unused-vars */ @@ -29,6 +30,36 @@ export class RethnetStateManager { ); } + public 
static async forkRemote( + context: RethnetContext, + forkConfig: ForkConfig, + genesisAccounts: GenesisAccount[] + ): Promise { + let blockNumber: bigint; + if (forkConfig.blockNumber !== undefined) { + blockNumber = BigInt(forkConfig.blockNumber); + } else { + const { forkBlockNumber } = await makeForkProvider(forkConfig); + blockNumber = forkBlockNumber; + } + + return new RethnetStateManager( + StateManager.forkRemote( + context, + forkConfig.jsonRpcUrl, + blockNumber, + genesisAccounts.map((account) => { + return { + privateKey: account.privateKey, + balance: BigInt(account.balance), + }; + }) + ) + // TODO: consider changing StateManager.withFork() to also support + // passing in (and of course using) forkConfig.httpHeaders. + ); + } + public asInner(): StateManager { return this._state; } @@ -127,8 +158,11 @@ export class RethnetStateManager { return this._state.getStateRoot(); } - public async setStateRoot(stateRoot: Buffer): Promise { - return this._state.setStateRoot(stateRoot); + public async setBlockContext( + stateRoot: Buffer, + blockNumber?: bigint + ): Promise { + return this._state.setBlockContext(stateRoot, blockNumber); } public async serialize(): Promise { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/fork/ForkStateManager.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/fork/ForkStateManager.ts index 1d50a10114..6aa19ff396 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/fork/ForkStateManager.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/fork/ForkStateManager.ts @@ -17,7 +17,7 @@ import { keccak256 } from "../../../util/keccak"; import { JsonRpcClient } from "../../jsonrpc/client"; import { GenesisAccount } from "../node-types"; import { makeAccount } from "../utils/makeAccount"; -import { randomHash } from "../utils/random"; +import { RandomBufferGenerator } from "../utils/random"; import { AccountState, @@ -42,6 +42,17 @@ const notCheckpointedError = (method: string) => const notSupportedError = (method: string) => new Error(`${method} is not supported when forking from remote network`); +const generator = RandomBufferGenerator.create("seed"); +const randomHashBuffer = (): Buffer => { + return generator.next(); +}; +const randomHash = () => { + return bufferToHex(randomHashBuffer()); +}; +export const randomHashSeed = (): Buffer => { + return generator.seed(); +}; + export class ForkStateManager implements StateManager { // temporary, used to print the whole storage // should be removed diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts index 46d3622b1b..251fa0bc20 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/node.ts @@ -1665,6 +1665,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu ? 
"0x0000000000000000" : "0x0000000000000042", timestamp: blockTimestamp, + number: this.getLatestBlockNumber() + 1n, }; if (this.isPostMergeHardfork()) { @@ -1679,6 +1680,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu }); await blockBuilder.startBlock(); + let sealed = false; try { const traces: GatherTracesResult[] = []; @@ -1719,6 +1721,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu const minerReward = this._common.param("pow", "minerReward"); await blockBuilder.addRewards([[coinbase, minerReward]]); const block = await blockBuilder.seal(); + sealed = true; await this._blockchain.putBlock(block); await this._txPool.updatePendingAndQueued( @@ -1738,7 +1741,9 @@ Hardhat Network's forking functionality only works with blocks from at least spu traces, }; } catch (err) { - await blockBuilder.revert(); + if (!sealed) { + await blockBuilder.revert(); + } throw err; } } @@ -2337,7 +2342,7 @@ Hardhat Network's forking functionality only works with blocks from at least spu private async _persistIrregularWorldState(): Promise { this._irregularStatesByBlockNumber.set( this.getLatestBlockNumber(), - await this._vm.getStateRoot() + await this._vm.makeSnapshot() ); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/makeForkClient.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/makeForkClient.ts index 1db38ed2eb..c09a34a61a 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/makeForkClient.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/makeForkClient.ts @@ -23,47 +23,46 @@ import { // anymore, so this really should be revisited. const FORK_HTTP_TIMEOUT = 35000; -export async function makeForkClient( - forkConfig: ForkConfig, - forkCachePath?: string -): Promise<{ - forkClient: JsonRpcClient; +export async function makeForkProvider(forkConfig: ForkConfig): Promise<{ + forkProvider: HttpProvider; + networkId: number; forkBlockNumber: bigint; - forkBlockTimestamp: number; - forkBlockHash: string; + latestBlockNumber: bigint; + maxReorg: bigint; }> { - const provider = new HttpProvider( + const forkProvider = new HttpProvider( forkConfig.jsonRpcUrl, HARDHAT_NETWORK_NAME, forkConfig.httpHeaders, FORK_HTTP_TIMEOUT ); - const networkId = await getNetworkId(provider); + const networkId = await getNetworkId(forkProvider); const actualMaxReorg = getLargestPossibleReorg(networkId); const maxReorg = actualMaxReorg ?? 
FALLBACK_MAX_REORG; - const latestBlock = await getLatestBlockNumber(provider); - const lastSafeBlock = latestBlock - maxReorg; + const latestBlockNumber = await getLatestBlockNumber(forkProvider); + const lastSafeBlockNumber = latestBlockNumber - maxReorg; let forkBlockNumber; if (forkConfig.blockNumber !== undefined) { - if (forkConfig.blockNumber > latestBlock) { + if (forkConfig.blockNumber > latestBlockNumber) { // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error throw new Error( - `Trying to initialize a provider with block ${forkConfig.blockNumber} but the current block is ${latestBlock}` + `Trying to initialize a provider with block ${forkConfig.blockNumber} but the current block is ${latestBlockNumber}` ); } - if (forkConfig.blockNumber > lastSafeBlock) { - const confirmations = latestBlock - BigInt(forkConfig.blockNumber) + 1n; + if (forkConfig.blockNumber > lastSafeBlockNumber) { + const confirmations = + latestBlockNumber - BigInt(forkConfig.blockNumber) + 1n; const requiredConfirmations = maxReorg + 1n; console.warn( chalk.yellow( `You are forking from block ${ forkConfig.blockNumber }, which has less than ${requiredConfirmations} confirmations, and will affect Hardhat Network's performance. -Please use block number ${lastSafeBlock} or wait for the block to get ${ +Please use block number ${lastSafeBlockNumber} or wait for the block to get ${ requiredConfirmations - confirmations } more confirmations.` ) @@ -72,10 +71,36 @@ Please use block number ${lastSafeBlock} or wait for the block to get ${ forkBlockNumber = BigInt(forkConfig.blockNumber); } else { - forkBlockNumber = BigInt(lastSafeBlock); + forkBlockNumber = BigInt(lastSafeBlockNumber); } - const block = await getBlockByNumber(provider, forkBlockNumber); + return { + forkProvider, + networkId, + forkBlockNumber, + latestBlockNumber, + maxReorg, + }; +} + +export async function makeForkClient( + forkConfig: ForkConfig, + forkCachePath?: string +): Promise<{ + forkClient: JsonRpcClient; + forkBlockNumber: bigint; + forkBlockTimestamp: number; + forkBlockHash: string; +}> { + const { + forkProvider, + networkId, + forkBlockNumber, + latestBlockNumber, + maxReorg, + } = await makeForkProvider(forkConfig); + + const block = await getBlockByNumber(forkProvider, forkBlockNumber); const forkBlockTimestamp = rpcQuantityToNumber(block.timestamp) * 1000; @@ -83,9 +108,9 @@ Please use block number ${lastSafeBlock} or wait for the block to get ${ forkConfig.blockNumber !== undefined && forkCachePath !== undefined; const forkClient = new JsonRpcClient( - provider, + forkProvider, networkId, - latestBlock, + latestBlockNumber, maxReorg, cacheToDiskEnabled ? 
forkCachePath : undefined ); diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/random.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/random.ts index f73f00003f..ccbbbb1f18 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/utils/random.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/utils/random.ts @@ -22,6 +22,10 @@ export class RandomBufferGenerator { return valueToReturn; } + public seed(): Buffer { + return this._nextValue; + } + public setNext(nextValue: Buffer) { this._nextValue = Buffer.from(nextValue); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts index dee765b51c..5d552f5b9f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/block-builder.ts @@ -176,6 +176,10 @@ export class BlockBuilder { this._state = "reverted"; } + public getTransactionResults(): RunTxResult[] { + return this._transactionResults; + } + private async _getTransactionsTrie(): Promise { const trie = new Trie(); for (const [i, tx] of this._transactions.entries()) { diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts index ceef24339b..cef0fea735 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/dual.ts @@ -15,24 +15,34 @@ import { isPrecompileTrace, MessageTrace, } from "../../stack-traces/message-trace"; +import { opcodeName } from "../../stack-traces/opcodes"; import { VMTracer } from "../../stack-traces/vm-tracer"; -import { NodeConfig } from "../node-types"; +import { isForkedNodeConfig, NodeConfig } from "../node-types"; import { RpcDebugTraceOutput } from "../output"; +import { randomHashSeed } from "../fork/ForkStateManager"; import { HardhatBlockchainInterface } from "../types/HardhatBlockchainInterface"; import { EthereumJSAdapter } from "./ethereumjs"; import { ExitCode } from "./exit"; -import { RethnetAdapter } from "./rethnet"; +import { globalRethnetContext, RethnetAdapter } from "./rethnet"; import { RunTxResult, Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/restrict-template-expressions */ -function printTrace(trace: any) { +function _printTrace(trace: any) { console.log( JSON.stringify( trace, - (key, value) => (typeof value === "bigint" ? value.toString() : value), + function (key, value) { + if (key === "op") { + return opcodeName(value); + } else if (typeof value === "bigint") { + return value.toString(); + } else { + return value; + } + }, 2 ) ); @@ -59,6 +69,21 @@ export class DualModeAdapter implements VMAdapter { selectHardfork ); + // If the fork node config doesn't specify a block number, then the + // ethereumjs adapter will fetch and use the latest block number. We re-use + // that value here; otherwise, rethnet would also fetch it and we could have + // a race condition if the latest block changed in the meantime. 
+ if ( + isForkedNodeConfig(config) && + config.forkConfig.blockNumber === undefined + ) { + const forkBlockNumber = ethereumJSAdapter.getForkBlockNumber(); + config.forkConfig.blockNumber = parseInt( + forkBlockNumber!.toString(10), + 10 + ); + } + const rethnetAdapter = await RethnetAdapter.create( config, selectHardfork, @@ -99,6 +124,9 @@ export class DualModeAdapter implements VMAdapter { forceBaseFeeZero ); + // Matches EthereumJS' runCall checkpoint call + globalRethnetContext.setHashGeneratorSeed(randomHashSeed()); + const [ [ethereumJSResult, _ethereumJSTrace], [rethnetResult, rethnetTrace], @@ -127,7 +155,7 @@ export class DualModeAdapter implements VMAdapter { console.trace( `Different state root: ${ethereumJSRoot.toString( "hex" - )} !== ${rethnetRoot.toString("hex")}` + )} (ethereumjs) !== ${rethnetRoot.toString("hex")} (rethnet)` ); await this.printState(); throw new Error("Different state root"); @@ -142,7 +170,7 @@ export class DualModeAdapter implements VMAdapter { assertEqualAccounts(address, ethereumJSAccount, rethnetAccount); - return ethereumJSAccount; + return rethnetAccount; } public async getContractStorage( @@ -167,7 +195,7 @@ export class DualModeAdapter implements VMAdapter { console.trace( `Different storage slot: ${bufferToHex( ethereumJSStorageSlot - )} !== ${bufferToHex(rethnetStorageSlot)}` + )} (ethereumjs) !== ${bufferToHex(rethnetStorageSlot)} (rethnet)` ); throw new Error("Different storage slot"); } @@ -187,7 +215,7 @@ export class DualModeAdapter implements VMAdapter { console.trace( `Different contract code: ${ethereumJSCode.toString( "hex" - )} !== ${rethnetCode.toString("hex")}` + )} (ethereumjs) !== ${rethnetCode.toString("hex")} (rethnet)` ); throw new Error("Different contract code"); } @@ -259,13 +287,16 @@ export class DualModeAdapter implements VMAdapter { this._rethnetAdapter.runTxInBlock(tx, block), ]); + // Matches EthereumJS' runCall checkpoint call + globalRethnetContext.setHashGeneratorSeed(randomHashSeed()); + try { assertEqualRunTxResults(ethereumJSResult, rethnetResult); // Validate trace const _trace = this.getLastTrace(); - return [ethereumJSResult, ethereumJSDebugTrace]; + return [rethnetResult, ethereumJSDebugTrace]; } catch (e) { // if the results didn't match, print the traces // console.log("EthereumJS trace"); @@ -282,19 +313,17 @@ export class DualModeAdapter implements VMAdapter { rewards: Array<[Address, bigint]> ): Promise { await this._rethnetAdapter.addBlockRewards(rewards); - return this._ethereumJSAdapter.addBlockRewards(rewards); + await this._ethereumJSAdapter.addBlockRewards(rewards); } public async sealBlock(): Promise { await this._rethnetAdapter.sealBlock(); - return this._ethereumJSAdapter.sealBlock(); + await this._ethereumJSAdapter.sealBlock(); } public async revertBlock(): Promise { await this._rethnetAdapter.revertBlock(); - return this._ethereumJSAdapter.revertBlock(); - - const _stateRoot = this.getStateRoot(); + await this._ethereumJSAdapter.revertBlock(); } public async makeSnapshot(): Promise { @@ -305,7 +334,7 @@ export class DualModeAdapter implements VMAdapter { console.trace( `Different snapshot state root: ${ethereumJSRoot.toString( "hex" - )} !== ${rethnetRoot.toString("hex")}` + )} (ethereumjs) !== ${rethnetRoot.toString("hex")} (rethnet)` ); await this.printState(); throw new Error("Different snapshot state root"); @@ -344,11 +373,11 @@ export class DualModeAdapter implements VMAdapter { const differences = traceDifferences(ethereumJSTrace, rethnetTrace); if (differences.length > 0) { 
console.trace(`Different traces: ${differences}`); - console.log("EthereumJS trace:"); - printTrace(ethereumJSTrace); - console.log(); - console.log("Rethnet trace:"); - printTrace(rethnetTrace); + // console.log("EthereumJS trace:"); + // printTrace(ethereumJSTrace); + // console.log(); + // console.log("Rethnet trace:"); + // printTrace(rethnetTrace); throw new Error(`Different traces: ${differences}`); } } @@ -369,13 +398,13 @@ // both errors are defined if (ethereumJSError.name !== rethnetError.name) { throw new Error( - `Different error name: ${ethereumJSError.name} !== ${rethnetError.name}` + `Different error name: ${ethereumJSError.name} (ethereumjs) !== ${rethnetError.name} (rethnet)` ); } if (ethereumJSError.message !== rethnetError.message) { throw new Error( - `Different error message: ${ethereumJSError.message} !== ${rethnetError.message}` + `Different error message: ${ethereumJSError.message} (ethereumjs) !== ${rethnetError.message} (rethnet)` ); } @@ -395,7 +424,7 @@ // both error stacks are defined if (ethereumJSError.stack !== rethnetError.stack) { throw new Error( - `Different error stack: ${ethereumJSError.stack} !== ${rethnetError.stack}` + `Different error stack: ${ethereumJSError.stack} (ethereumjs) !== ${rethnetError.stack} (rethnet)` ); } } @@ -405,7 +434,7 @@ const rethnetSteps = this._rethnetVMTracer.tracingSteps; if (ethereumJSSteps.length !== rethnetSteps.length) { throw new Error( - `Different number of steps in tracers: ${this._ethereumJSVMTracer.tracingSteps.length} !== ${this._rethnetVMTracer.tracingSteps.length}` + `Different number of steps in tracers: ${this._ethereumJSVMTracer.tracingSteps.length} (ethereumjs) !== ${this._rethnetVMTracer.tracingSteps.length} (rethnet)` ); } @@ -415,14 +444,14 @@ if (ethereumJSStep.depth !== rethnetStep.depth) { console.trace( - `Different steps[${stepIdx}] depth: ${ethereumJSStep.depth} !== ${rethnetStep.depth}` + `Different steps[${stepIdx}] depth: ${ethereumJSStep.depth} (ethereumjs) !== ${rethnetStep.depth} (rethnet)` ); throw new Error("Different step depth"); } if (ethereumJSStep.pc !== rethnetStep.pc) { console.trace( - `Different steps[${stepIdx}] pc: ${ethereumJSStep.pc} !== ${rethnetStep.pc}` + `Different steps[${stepIdx}] pc: ${ethereumJSStep.pc} (ethereumjs) !== ${rethnetStep.pc} (rethnet)` ); throw new Error("Different step pc"); } @@ -559,14 +588,14 @@ function assertEqualRunTxResults( if (ethereumJSResult.exit.kind !== rethnetResult.exit.kind) { console.trace( - `Different exceptionError.error: ${ethereumJSResult.exit.kind} !== ${rethnetResult.exit.kind}` + `Different exceptionError.error: ${ethereumJSResult.exit.kind} (ethereumjs) !== ${rethnetResult.exit.kind} (rethnet)` ); differences.push("exceptionError.error"); } if (ethereumJSResult.gasUsed !== rethnetResult.gasUsed) { console.trace( - `Different totalGasSpent: ${ethereumJSResult.gasUsed} !== ${rethnetResult.gasUsed}` + `Different totalGasSpent: ${ethereumJSResult.gasUsed} (ethereumjs) !== ${rethnetResult.gasUsed} (rethnet)` ); differences.push("totalGasSpent"); } @@ -584,7 +613,9 @@ console.trace( `Different returnValue: ${ethereumJSResult.returnValue.toString( "hex" - )} !== ${rethnetResult.returnValue.toString("hex")}` + )} (ethereumjs) !== ${rethnetResult.returnValue.toString( + "hex" + )}
(rethnet)` ); differences.push("returnValue"); } @@ -592,7 +623,7 @@ function assertEqualRunTxResults( if (!ethereumJSResult.bloom.equals(rethnetResult.bloom)) { console.trace( - `Different bloom: ${ethereumJSResult.bloom} !== ${rethnetResult.bloom}` + `Different bloom: ${ethereumJSResult.bloom} (ethereumjs) !== ${rethnetResult.bloom} (rethnet)` ); differences.push("bloom"); } @@ -603,7 +634,7 @@ function assertEqualRunTxResults( ) ) { console.trace( - `Different receipt bitvector: ${ethereumJSResult.receipt.bitvector} !== ${rethnetResult.receipt.bitvector}` + `Different receipt bitvector: ${ethereumJSResult.receipt.bitvector} (ethereumjs) !== ${rethnetResult.receipt.bitvector} (rethnet)` ); differences.push("receipt.bitvector"); } @@ -613,7 +644,7 @@ function assertEqualRunTxResults( rethnetResult.receipt.cumulativeBlockGasUsed ) { console.trace( - `Different receipt cumulativeBlockGasUsed: ${ethereumJSResult.receipt.cumulativeBlockGasUsed} !== ${rethnetResult.receipt.cumulativeBlockGasUsed}` + `Different receipt cumulativeBlockGasUsed: ${ethereumJSResult.receipt.cumulativeBlockGasUsed} (ethereumjs) !== ${rethnetResult.receipt.cumulativeBlockGasUsed} (rethnet)` ); differences.push("receipt.cumulativeBlockGasUsed"); } @@ -632,7 +663,7 @@ function assertEqualRunTxResults( ) ) { console.trace( - `Different createdAddress: ${ethereumJSResult.createdAddress?.toString()} !== ${rethnetResult.createdAddress?.toString()}` + `Different createdAddress: ${ethereumJSResult.createdAddress?.toString()} (ethereumjs) !== ${rethnetResult.createdAddress?.toString()} (rethnet)` ); differences.push("createdAddress"); } @@ -648,7 +679,7 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { if (ethereumJSLogs.length !== rethnetLogs.length) { console.trace( - `Different logs length: ${ethereumJSLogs.length} !== ${rethnetLogs.length}` + `Different logs length: ${ethereumJSLogs.length} (ethereumjs) !== ${rethnetLogs.length} (rethnet)` ); differences.push("length"); } @@ -656,7 +687,7 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { for (let logIdx = 0; logIdx < ethereumJSLogs.length; ++logIdx) { if (!ethereumJSLogs[logIdx][0].equals(rethnetLogs[logIdx][0])) { console.trace( - `Different log[${logIdx}] address: ${ethereumJSLogs[logIdx][0]} !== ${rethnetLogs[logIdx][0]}` + `Different log[${logIdx}] address: ${ethereumJSLogs[logIdx][0]} (ethereumjs) !== ${rethnetLogs[logIdx][0]} (rethnet)` ); differences.push("address"); } @@ -665,7 +696,7 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { const rethnetTopics = rethnetLogs[logIdx][1]; if (ethereumJSTopics.length !== rethnetTopics.length) { console.trace( - `Different log[${logIdx}] topics length: ${ethereumJSTopics.length} !== ${rethnetTopics.length}` + `Different log[${logIdx}] topics length: ${ethereumJSTopics.length} (ethereumjs) !== ${rethnetTopics.length} (rethnet)` ); differences.push("topics length"); } @@ -673,7 +704,7 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: Log[]) { for (let topicIdx = 0; topicIdx < ethereumJSTopics.length; ++topicIdx) { if (!ethereumJSTopics[topicIdx].equals(rethnetTopics[topicIdx])) { console.trace( - `Different log[${logIdx}] topic[${topicIdx}]: ${ethereumJSTopics[topicIdx]} !== ${rethnetTopics[topicIdx]}` + `Different log[${logIdx}] topic[${topicIdx}]: ${ethereumJSTopics[topicIdx]} (ethereumjs) !== ${rethnetTopics[topicIdx]} (rethnet)` ); differences.push("topic"); } @@ -681,7 +712,7 @@ function assertEqualLogs(ethereumJSLogs: Log[], rethnetLogs: 
Log[]) { if (!ethereumJSLogs[logIdx][2].equals(rethnetLogs[logIdx][2])) { console.trace( - `Different log[${logIdx}] data: ${ethereumJSLogs[logIdx][2]} !== ${rethnetLogs[logIdx][2]}` + `Different log[${logIdx}] data: ${ethereumJSLogs[logIdx][2]} (ethereumjs) !== ${rethnetLogs[logIdx][2]} (rethnet)` ); differences.push("data"); } @@ -702,7 +733,7 @@ function assertEqualAccounts( if (ethereumJSAccount.balance !== rethnetAccount.balance) { console.trace(`Account: ${address}`); console.trace( - `Different balance: ${ethereumJSAccount.balance} !== ${rethnetAccount.balance}` + `Different balance: ${ethereumJSAccount.balance} (ethereumjs) !== ${rethnetAccount.balance} (rethnet)` ); differences.push("balance"); } @@ -806,7 +837,7 @@ function traceDifferences( console.log( `Different code: ${ethereumJSTrace.code.toString( "hex" - )} !== ${rethnetTrace.code.toString("hex")}` + )} (ethereumjs) !== ${rethnetTrace.code.toString("hex")} (rethnet)` ); differences.push("code"); } diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts index b95602560d..b2372a1deb 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/ethereumjs.ts @@ -139,6 +139,10 @@ export class EthereumJSAdapter implements VMAdapter { ); } + public getForkBlockNumber() { + return this._forkBlockNumber; + } + public static async create( common: Common, blockchain: HardhatBlockchainInterface, @@ -550,17 +554,19 @@ export class EthereumJSAdapter implements VMAdapter { .toString("hex") .padStart(64, "0")}`; - const dumpedAccountStorage = await this._stateManager.dumpStorage( - Address.fromString(address) - ); - const accountStorage: Record = {}; - for (const [key, value] of Object.entries(dumpedAccountStorage)) { - accountStorage[`0x${key.padStart(64, "0")}`] = `0x${value.padStart( - 64, - "0" - )}`; + if (this._forkBlockNumber === undefined) { + const dumpedAccountStorage = await this._stateManager.dumpStorage( + Address.fromString(address) + ); + + for (const [key, value] of Object.entries(dumpedAccountStorage)) { + accountStorage[`0x${key.padStart(64, "0")}`] = `0x${value.padStart( + 64, + "0" + )}`; + } } if ( diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts index 01a03b204a..fa68b63ae9 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/exit.ts @@ -8,6 +8,7 @@ export enum ExitCode { OUT_OF_GAS, INTERNAL_ERROR, INVALID_OPCODE, + STACK_UNDERFLOW, CODESIZE_EXCEEDS_MAXIMUM, CREATE_COLLISION, } @@ -32,6 +33,9 @@ export class Exit { case ExceptionalHalt.InvalidFEOpcode: return new Exit(ExitCode.INVALID_OPCODE); + case ExceptionalHalt.StackUnderflow: + return new Exit(ExitCode.STACK_UNDERFLOW); + case ExceptionalHalt.CreateCollision: return new Exit(ExitCode.CREATE_COLLISION); @@ -67,6 +71,10 @@ export class Exit { return new Exit(ExitCode.INVALID_OPCODE); } + if (evmError.error === ERROR.STACK_UNDERFLOW) { + return new Exit(ExitCode.STACK_UNDERFLOW); + } + if (evmError.error === ERROR.CODESIZE_EXCEEDS_MAXIMUM) { return new Exit(ExitCode.CODESIZE_EXCEEDS_MAXIMUM); } @@ -98,6 +106,8 @@ export class Exit { return "Internal error"; case ExitCode.INVALID_OPCODE: return "Invalid opcode"; + case ExitCode.STACK_UNDERFLOW: + return 
"Stack underflow"; case ExitCode.CODESIZE_EXCEEDS_MAXIMUM: return "Codesize exceeds maximum"; case ExitCode.CREATE_COLLISION: @@ -119,6 +129,8 @@ export class Exit { return new EvmError(ERROR.INTERNAL_ERROR); case ExitCode.INVALID_OPCODE: return new EvmError(ERROR.INVALID_OPCODE); + case ExitCode.STACK_UNDERFLOW: + return new EvmError(ERROR.STACK_UNDERFLOW); case ExitCode.CODESIZE_EXCEEDS_MAXIMUM: return new EvmError(ERROR.CODESIZE_EXCEEDS_MAXIMUM); case ExitCode.CREATE_COLLISION: diff --git a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts index a9aa89aeaa..b081caee9f 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/provider/vm/rethnet.ts @@ -5,7 +5,7 @@ import { Address, KECCAK256_NULL, } from "@nomicfoundation/ethereumjs-util"; -import { TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; +import { Capability, TypedTransaction } from "@nomicfoundation/ethereumjs-tx"; import { Account as RethnetAccount, BlockBuilder, @@ -35,7 +35,7 @@ import { RunTxResult, Trace, VMAdapter } from "./vm-adapter"; /* eslint-disable @nomiclabs/hardhat-internal-rules/only-hardhat-error */ /* eslint-disable @typescript-eslint/no-unused-vars */ -const globalContext = new RethnetContext(); +export const globalRethnetContext = new RethnetContext(); export class RethnetAdapter implements VMAdapter { private _vmTracer: VMTracer; @@ -56,20 +56,24 @@ export class RethnetAdapter implements VMAdapter { getBlockHash: (blockNumber: bigint) => Promise, common: Common ): Promise { - if (isForkedNodeConfig(config)) { - // eslint-disable-next-line @nomiclabs/hardhat-internal-rules/only-hardhat-error - throw new Error("Forking is not supported for Rethnet yet"); - } - const blockchain = new Blockchain(getBlockHash); const limitContractCodeSize = config.allowUnlimitedContractSize === true ? 2n ** 64n - 1n : undefined; - const state = RethnetStateManager.withGenesisAccounts( - globalContext, - config.genesisAccounts - ); + let state: RethnetStateManager; + if (isForkedNodeConfig(config)) { + state = await RethnetStateManager.forkRemote( + globalRethnetContext, + config.forkConfig, + config.genesisAccounts + ); + } else { + state = RethnetStateManager.withGenesisAccounts( + globalRethnetContext, + config.genesisAccounts + ); + } const rethnet = new Rethnet(blockchain, state.asInner(), { chainId: BigInt(config.chainId), @@ -96,6 +100,16 @@ export class RethnetAdapter implements VMAdapter { blockContext: Block, forceBaseFeeZero?: boolean ): Promise<[RunTxResult, Trace]> { + if ( + tx.supports(Capability.EIP1559FeeMarket) && + !blockContext._common.hardforkGteHardfork( + this._selectHardfork(blockContext.header.number), + "london" + ) + ) { + throw new Error("Cannot run transaction: EIP 1559 is not activated."); + } + const rethnetTx = ethereumjsTransactionToRethnet(tx); const difficulty = this._getBlockEnvDifficulty( @@ -271,8 +285,9 @@ export class RethnetAdapter implements VMAdapter { block: Block, irregularStateOrUndefined: Buffer | undefined ): Promise { - return this._state.setStateRoot( - irregularStateOrUndefined ?? block.header.stateRoot + return this._state.setBlockContext( + irregularStateOrUndefined ?? block.header.stateRoot, + block.header.number ); } @@ -282,7 +297,7 @@ export class RethnetAdapter implements VMAdapter { * Throw if it can't. 
*/ public async restoreContext(stateRoot: Buffer): Promise { - return this._state.setStateRoot(stateRoot); + return this._state.setBlockContext(stateRoot); } /** @@ -299,6 +314,16 @@ export class RethnetAdapter implements VMAdapter { tx: TypedTransaction, block: Block ): Promise<[RunTxResult, Trace]> { + if ( + tx.supports(Capability.EIP1559FeeMarket) && + !block._common.hardforkGteHardfork( + this._selectHardfork(block.header.number), + "london" + ) + ) { + throw new Error("Cannot run transaction: EIP 1559 is not activated."); + } + const rethnetTx = ethereumjsTransactionToRethnet(tx); const difficulty = this._getBlockEnvDifficulty(block.header.difficulty); @@ -432,7 +457,7 @@ export class RethnetAdapter implements VMAdapter { ): bigint | undefined { const MAX_DIFFICULTY = 2n ** 32n - 1n; if (difficulty !== undefined && difficulty > MAX_DIFFICULTY) { - console.debug( + console.warn( "Difficulty is larger than U256::max:", difficulty.toString(16) ); diff --git a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/opcodes.ts b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/opcodes.ts index f9d4c2b5ee..e0602c17fb 100644 --- a/packages/hardhat-core/src/internal/hardhat-network/stack-traces/opcodes.ts +++ b/packages/hardhat-core/src/internal/hardhat-network/stack-traces/opcodes.ts @@ -306,6 +306,10 @@ export enum Opcode { SELFDESTRUCT = 0xff, } +export function opcodeName(opcode: number): string { + return Opcode[opcode] ?? ``; +} + export function isPush(opcode: Opcode) { return opcode >= Opcode.PUSH1 && opcode <= Opcode.PUSH32; } diff --git a/packages/hardhat-core/test/internal/hardhat-network/helpers/assertions.ts b/packages/hardhat-core/test/internal/hardhat-network/helpers/assertions.ts index bdc96763f6..83e197d28d 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/helpers/assertions.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/helpers/assertions.ts @@ -186,7 +186,11 @@ export async function assertTransactionFailure( } if (message !== undefined) { - assert.include(error.message, message); + assert.include( + error.message, + message, + `"${message}" not found in "${error.message}"` + ); } return; diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/logs.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/logs.ts index a1f82d957e..e1318d2b63 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/logs.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/logs.ts @@ -414,7 +414,7 @@ describe("Provider logs", function () { assert.match(this.logger.lines[8 ], /^ Value:\s+0 ETH$/); assert.match(this.logger.lines[9 ], /^ Gas used:\s+21000 of 21000$/); assert.equal(this.logger.lines[10], ""); - assert.match(this.logger.lines[11], /^ Transaction:\s+\u001b[1m0x[0-9a-f]{64}/); + assert.match(this.logger.lines[11], /^ Transaction:\s+(\u001b\[1m)?0x[0-9a-f]{64}/); assert.match(this.logger.lines[12], /^ From:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[13], /^ To:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[14], /^ Value:\s+0 ETH$/); @@ -458,7 +458,7 @@ describe("Provider logs", function () { assert.equal(this.logger.lines[2 ], ""); assert.match(this.logger.lines[3 ], /^ Block #\d+:\s+0x[0-9a-f]{64}$/); assert.match(this.logger.lines[4 ], /^ Base fee: \d+$/); - assert.match(this.logger.lines[5 ], /^ Transaction:\s+\u001b[1m0x[0-9a-f]{64}/); + assert.match(this.logger.lines[5 ], /^ Transaction:\s+(\u001b\[1m)?0x[0-9a-f]{64}/); assert.match(this.logger.lines[6 ], 
/^ From:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[7 ], /^ To:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[8 ], /^ Value:\s+0 ETH$/); @@ -527,7 +527,7 @@ describe("Provider logs", function () { assert.match(this.logger.lines[22], /^ Value:\s+0 ETH$/); assert.match(this.logger.lines[23], /^ Gas used:\s+21000 of 21000$/); assert.equal(this.logger.lines[24], ""); - assert.match(this.logger.lines[25], /^ Transaction:\s+\u001b[1m0x[0-9a-f]{64}/); + assert.match(this.logger.lines[25], /^ Transaction:\s+(\u001b\[1m)?0x[0-9a-f]{64}/); assert.match(this.logger.lines[26], /^ From:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[27], /^ To:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[28], /^ Value:\s+0 ETH$/); @@ -570,7 +570,7 @@ describe("Provider logs", function () { assert.equal(this.logger.lines[2 ], ""); assert.match(this.logger.lines[3 ], /^ Block #\d+:\s+0x[0-9a-f]{64}$/); assert.match(this.logger.lines[4 ], /^ Base fee: \d+$/); - assert.match(this.logger.lines[5 ], /^ Transaction:\s+\u001b[1m0x[0-9a-f]{64}/); + assert.match(this.logger.lines[5 ], /^ Transaction:\s+(\u001b\[1m)?0x[0-9a-f]{64}/); assert.match(this.logger.lines[6 ], /^ From:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[7 ], /^ To:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[8 ], /^ Value:\s+0 ETH$/); @@ -643,7 +643,7 @@ describe("Provider logs", function () { assert.match(this.logger.lines[ 8], /^ Value:\s+0 ETH$/); assert.match(this.logger.lines[ 9], /^ Gas used:\s+21000 of 21000$/); assert.equal(this.logger.lines[10], ""); - assert.match(this.logger.lines[11], /^ Transaction:\s+\u001b[1m0x[0-9a-f]{64}/); + assert.match(this.logger.lines[11], /^ Transaction:\s+(\u001b\[1m)?0x[0-9a-f]{64}/); assert.match(this.logger.lines[12], /^ Contract call:\s+/); assert.match(this.logger.lines[13], /^ From:\s+0x[0-9a-f]{40}/); assert.match(this.logger.lines[14], /^ To:\s+0x[0-9a-f]{40}/); diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/hardforks.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/hardforks.ts index 2c61ad9cae..22b47c7306 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/hardforks.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/modules/eth/hardforks.ts @@ -706,7 +706,7 @@ describe("Eth module - hardfork dependant tests", function () { [tx] ); - assert.isDefined(receipt.root); + assert.isDefined(receipt.root, "receipt does not have a root"); assert.isUndefined(receipt.status); assert.isUndefined(receipt.type); }); diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/assertEqualBlocks.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/assertEqualBlocks.ts index fc7eb50178..06b3f9fdda 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/assertEqualBlocks.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/assertEqualBlocks.ts @@ -1,20 +1,21 @@ -import type { - AfterBlockEvent, - PostByzantiumTxReceipt, -} from "@nomicfoundation/ethereumjs-vm"; +import type { PostByzantiumTxReceipt } from "@nomicfoundation/ethereumjs-vm"; import { Block } from "@nomicfoundation/ethereumjs-block"; -import { assert } from "chai"; +import { assert, config as chaiConfig } from "chai"; import { bufferToHex } from "@nomicfoundation/ethereumjs-util"; import { numberToRpcQuantity } from "../../../../../src/internal/core/jsonrpc/types/base-types"; import { RpcBlockWithTransactions } from 
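// Illustrative sketch, not part of the patch: the logger assertions above now use
// (\u001b\[1m)? so the transaction hash may, but need not, be preceded by the ANSI "bold"
// escape. The old pattern left the bracket unescaped, so \u001b[1m...] was parsed as the escape
// character followed by a character class and only colorized output matched. The sample inputs
// below are made up for illustration.
const sketchTxLine = /^ +Transaction:\s+(\u001b\[1m)?0x[0-9a-f]{64}/;

console.log(sketchTxLine.test("  Transaction:  \u001b[1m0x" + "ab".repeat(32))); // true (colorized)
console.log(sketchTxLine.test("  Transaction:  0x" + "ab".repeat(32)));          // true (plain)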
"../../../../../src/internal/core/jsonrpc/types/output/block"; import { JsonRpcClient } from "../../../../../src/internal/hardhat-network/jsonrpc/client"; +import { RunTxResult } from "../../../../../src/internal/hardhat-network/provider/vm/vm-adapter"; /* eslint-disable @typescript-eslint/dot-notation */ +// don't turncate actual/expected values in assertion messages +chaiConfig.truncateThreshold = 0; + export async function assertEqualBlocks( block: Block, - afterBlockEvent: AfterBlockEvent, + transactionResults: RunTxResult[], expectedBlock: RpcBlockWithTransactions, forkClient: JsonRpcClient ) { @@ -26,37 +27,35 @@ export async function assertEqualBlocks( for (let i = 0; i < block.transactions.length; i++) { const tx = block.transactions[i]; const txHash = bufferToHex(tx.hash()); + const txResult = transactionResults[i]; const remoteReceipt = (await forkClient["_httpProvider"].request({ method: "eth_getTransactionReceipt", params: [txHash], })) as any; - const localReceipt = afterBlockEvent.receipts[i]; - const evmResult = afterBlockEvent.results[i]; - assert.equal( - bufferToHex(localReceipt.bitvector), + bufferToHex(txResult.receipt.bitvector), remoteReceipt.logsBloom, `Logs bloom of tx index ${i} (${txHash}) should match` ); assert.equal( - numberToRpcQuantity(evmResult.totalGasSpent), + numberToRpcQuantity(txResult.gasUsed), remoteReceipt.gasUsed, `Gas used of tx index ${i} (${txHash}) should match` ); assert.equal( - (localReceipt as PostByzantiumTxReceipt).status, + (txResult.receipt as PostByzantiumTxReceipt).status, remoteReceipt.status, `Status of tx index ${i} (${txHash}) should be the same` ); assert.equal( - evmResult.createdAddress === undefined + txResult.createdAddress === undefined ? undefined - : evmResult.createdAddress.toString(), + : txResult.createdAddress.toString(), remoteReceipt.contractAddress, `Contract address created by tx index ${i} (${txHash}) should be the same` ); diff --git a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts index 50c84697fa..b9b95729e8 100644 --- a/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts +++ b/packages/hardhat-core/test/internal/hardhat-network/provider/utils/runFullBlock.ts @@ -1,8 +1,3 @@ -import type { - AfterBlockEvent, - RunBlockOpts, - VM, -} from "@nomicfoundation/ethereumjs-vm"; import { Block } from "@nomicfoundation/ethereumjs-block"; import { assert } from "chai"; @@ -11,6 +6,7 @@ import { rpcToBlockData } from "../../../../../src/internal/hardhat-network/prov import { makeForkClient } from "../../../../../src/internal/hardhat-network/provider/utils/makeForkClient"; import { HardhatNode } from "../../../../../src/internal/hardhat-network/provider/node"; import { ForkedNodeConfig } from "../../../../../src/internal/hardhat-network/provider/node-types"; +import { BlockBuilder } from "../../../../../src/internal/hardhat-network/provider/vm/block-builder"; import { FORK_TESTS_CACHE_PATH } from "../../helpers/constants"; import { assertEqualBlocks } from "./assertEqualBlocks"; @@ -33,7 +29,7 @@ export async function runFullBlock( const rpcBlock = await forkClient.getBlockByNumber(blockToRun, true); if (rpcBlock === null) { - assert.fail(); + assert.fail(`Block ${blockToRun} doesn't exist`); } const forkedNodeConfig: ForkedNodeConfig = { @@ -54,6 +50,8 @@ export async function runFullBlock( const [common, forkedNode] = await HardhatNode.create(forkedNodeConfig); + const 
parentBlock = await forkedNode.getLatestBlock(); + const block = Block.fromBlockData( rpcToBlockData({ ...rpcBlock, @@ -69,57 +67,22 @@ export async function runFullBlock( } ); - // TODO uncomment and fix this - // forkedNode["_vmTracer"].disableTracing(); - - const afterBlockEvent = await runBlockAndGetAfterBlockEvent( - // TODO remove "as any" and make this work with VMAdapter - forkedNode["_vm"] as any, - { - block, - generate: true, - skipBlockValidation: true, - } - ); - - const modifiedBlock = afterBlockEvent.block; - - // TODO remove "as any" and make this work with VMAdapter - await (forkedNode["_vm"] as any).blockchain.putBlock(modifiedBlock); - await (forkedNode["_vm"] as any).putBlock(modifiedBlock); - await forkedNode["_saveBlockAsSuccessfullyRun"]( - modifiedBlock, - afterBlockEvent as any // TODO remove this as any - ); - - const newBlock = await forkedNode.getBlockByNumber(blockToRun); + const vm = forkedNode["_vm"]; - if (newBlock === undefined) { - assert.fail(); - } + const blockBuilder = new BlockBuilder(vm, common, { + parentBlock, + headerData: block.header, + }); + await blockBuilder.startBlock(); - await assertEqualBlocks(newBlock, afterBlockEvent, rpcBlock, forkClient); -} - -async function runBlockAndGetAfterBlockEvent( - vm: VM, - runBlockOpts: RunBlockOpts -): Promise { - let results: AfterBlockEvent; - - function handler(event: AfterBlockEvent) { - results = event; + for (const tx of block.transactions.values()) { + await blockBuilder.addTransaction(tx); } - try { - vm.events.once("afterBlock", handler); - await vm.runBlock(runBlockOpts); - } finally { - // We need this in case `runBlock` throws before emitting the event. - // Otherwise we'd be leaking the listener until the next call to runBlock. + await blockBuilder.addRewards([]); + const newBlock = await blockBuilder.seal(); - vm.events.removeListener("afterBlock", handler); - } + const transactionResults = blockBuilder.getTransactionResults(); - return results!; + await assertEqualBlocks(newBlock, transactionResults, rpcBlock, forkClient); } diff --git a/rust-toolchain b/rust-toolchain index 5b6cd6b3cd..083b97b96a 100644 --- a/rust-toolchain +++ b/rust-toolchain @@ -1 +1 @@ -1.65 +1.68 From fa2c4cb9f6c772b8e1b7e53aff4b976d3179a697 Mon Sep 17 00:00:00 2001 From: Wodann Date: Wed, 10 May 2023 00:05:53 -0500 Subject: [PATCH 064/406] chore(ci): enable tests for all targets (#3899) --- .github/workflows/hardhat-core-ci.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/hardhat-core-ci.yml b/.github/workflows/hardhat-core-ci.yml index 6cb1819fff..1f96357f3b 100644 --- a/.github/workflows/hardhat-core-ci.yml +++ b/.github/workflows/hardhat-core-ci.yml @@ -286,7 +286,7 @@ jobs: ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} with: command: nextest - args: run --workspace --all-features + args: run --workspace --all-features --all-targets rethnet-style: name: Check Rethnet Style From 05971b2f5f5095402400eef88a5080cc77cf98c7 Mon Sep 17 00:00:00 2001 From: Wodann Date: Wed, 10 May 2023 11:51:09 -0500 Subject: [PATCH 065/406] fix: silently failing Rethnet CI tests (#3913) --- .github/workflows/hardhat-core-ci.yml | 40 ++++++++++++------ crates/rethnet_eth/src/remote/client.rs | 56 +++++++++---------------- crates/rethnet_evm/Cargo.toml | 1 + crates/rethnet_evm/benches/state.rs | 30 +++++++++---- 4 files changed, 69 insertions(+), 58 deletions(-) diff --git a/.github/workflows/hardhat-core-ci.yml b/.github/workflows/hardhat-core-ci.yml index 1f96357f3b..e8654d14ca 100644 --- 
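// Illustrative sketch, not part of the patch: runFullBlock.ts above replaces the old
// afterBlock-event plumbing with an explicit build loop. The method names (startBlock,
// addTransaction, addRewards, seal, getTransactionResults) come from the diff; the interface,
// generic parameters, and the rewards element type below are assumed stand-ins.
interface SketchBlockBuilder<Tx, Blk, Result> {
  startBlock(): Promise<void>;
  addTransaction(tx: Tx): Promise<void>;
  addRewards(rewards: Array<[string, bigint]>): Promise<void>;
  seal(): Promise<Blk>;
  getTransactionResults(): Result[];
}

async function sketchRebuildBlock<Tx, Blk, Result>(
  builder: SketchBlockBuilder<Tx, Blk, Result>,
  transactions: Tx[]
): Promise<{ block: Blk; results: Result[] }> {
  await builder.startBlock();
  for (const tx of transactions) {
    await builder.addTransaction(tx);
  }
  await builder.addRewards([]); // the test passes no rewards, as in the hunk above
  const block = await builder.seal();
  return { block, results: builder.getTransactionResults() };
}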
a/.github/workflows/hardhat-core-ci.yml +++ b/.github/workflows/hardhat-core-ci.yml @@ -253,9 +253,8 @@ jobs: os: ["ubuntu-latest", "macos-latest"] include: - RUSTFLAGS: "-Dwarnings" - # disable until https://github.com/napi-rs/napi-rs/issues/1405 is resolved - # - os: "windows-latest" - # RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" + - os: "windows-latest" + RUSTFLAGS: "-Dwarnings -Ctarget-feature=+crt-static" steps: - uses: actions/checkout@v3 @@ -267,26 +266,41 @@ jobs: - uses: Swatinem/rust-cache@v2 - - name: Doctests + - name: Run cargo test uses: actions-rs/cargo@v1 env: RUSTFLAGS: ${{ matrix.RUSTFLAGS }} + ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} with: command: test - args: --doc --workspace --all-features + args: --workspace --all-targets --features tracing,bench-once - - name: Install latest nextest release - uses: taiki-e/install-action@nextest - - - name: Test with latest nextest release + - name: Doctests uses: actions-rs/cargo@v1 env: RUSTFLAGS: ${{ matrix.RUSTFLAGS }} - CARGO_INCREMENTAL: ${{ matrix.CARGO_INCREMENTAL }} - ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} with: - command: nextest - args: run --workspace --all-features --all-targets + command: test + args: --doc --workspace --features tracing + + # disable until: + # 1) https://github.com/napi-rs/napi-rs/issues/1405 is resolved (Windows-only) + # 2) https://github.com/nextest-rs/nextest/issues/871 (all platforms) + # when re-enabled, remove "Run cargo test" + + # Nextest + # - name: Install latest nextest release + # uses: taiki-e/install-action@nextest + + # - name: Test with latest nextest release + # uses: actions-rs/cargo@v1 + # env: + # RUSTFLAGS: ${{ matrix.RUSTFLAGS }} + # CARGO_INCREMENTAL: ${{ matrix.CARGO_INCREMENTAL }} + # ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} + # with: + # command: nextest + # args: run --workspace --all-features --all-targets rethnet-style: name: Check Rethnet Style diff --git a/crates/rethnet_eth/src/remote/client.rs b/crates/rethnet_eth/src/remote/client.rs index 602684fd00..1beded3c7c 100644 --- a/crates/rethnet_eth/src/remote/client.rs +++ b/crates/rethnet_eth/src/remote/client.rs @@ -452,7 +452,7 @@ mod tests { Some(StatusCode::from_u16(STATUS_CODE).unwrap()) ); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } mock.assert_async().await; @@ -489,12 +489,10 @@ mod tests { .await .expect_err("should have failed to interpret response as a Transaction"); - if let RpcClientError::JsonRpcError { error, .. } = error { - assert_eq!(error.code, -32000); - assert_eq!(error.message, "Must be authenticated!"); - assert!(error.data.is_none()); + if let RpcClientError::HttpStatus(error) = error { + assert_eq!(error.status(), Some(StatusCode::from_u16(401).unwrap())); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } @@ -515,7 +513,7 @@ mod tests { if let RpcClientError::FailedToSend(error) = error { assert!(error.to_string().contains(&format!("error sending request for url ({alchemy_url}): error trying to connect: dns error: "))); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } @@ -572,7 +570,7 @@ mod tests { assert_eq!(error.message, "header for hash not found"); assert!(error.data.is_none()); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } @@ -694,15 +692,10 @@ mod tests { .await .expect_err("should have failed to retrieve non-existent block number"); - if let RpcClientError::JsonRpcError { error, .. 
} = error { - assert_eq!(error.code, -32602); - assert_eq!( - error.message, - "invalid 1st argument: block_number value was not valid block tag or block number" - ); - assert!(error.data.is_none()); + if let RpcClientError::HttpStatus(error) = error { + assert_eq!(error.status(), Some(StatusCode::from_u16(400).unwrap())); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } @@ -732,15 +725,10 @@ mod tests { .await .expect_err("should have failed to retrieve non-existent block number"); - if let RpcClientError::JsonRpcError { error, .. } = error { - assert_eq!(error.code, -32602); - assert_eq!( - error.message, - "invalid 1st argument: block_number value was not valid block tag or block number" - ); - assert!(error.data.is_none()); + if let RpcClientError::HttpStatus(error) = error { + assert_eq!(error.status(), Some(StatusCode::from_u16(400).unwrap())); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } @@ -785,12 +773,10 @@ mod tests { .await .expect_err("should have failed to get logs"); - if let RpcClientError::JsonRpcError { error, .. } = error { - assert_eq!(error.code, -32602); - assert_eq!(error.message, "invalid 1st argument: filter 'fromBlock': value was not valid block tag or block number"); - assert!(error.data.is_none()); + if let RpcClientError::HttpStatus(error) = error { + assert_eq!(error.status(), Some(StatusCode::from_u16(400).unwrap())); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } @@ -807,12 +793,10 @@ mod tests { .await .expect_err("should have failed to get logs"); - if let RpcClientError::JsonRpcError { error, .. } = error { - assert_eq!(error.code, -32602); - assert_eq!(error.message, "invalid 1st argument: filter 'toBlock': value was not valid block tag or block number"); - assert!(error.data.is_none()); + if let RpcClientError::HttpStatus(error) = error { + assert_eq!(error.status(), Some(StatusCode::from_u16(400).unwrap())); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } @@ -985,7 +969,7 @@ mod tests { assert_eq!(error.message, "header for hash not found"); assert!(error.data.is_none()); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } @@ -1152,7 +1136,7 @@ mod tests { assert_eq!(error.message, "header for hash not found"); assert!(error.data.is_none()); } else { - unreachable!("Invalid error"); + unreachable!("Invalid error: {error}"); } } } diff --git a/crates/rethnet_evm/Cargo.toml b/crates/rethnet_evm/Cargo.toml index ac16acad14..7161831a1b 100644 --- a/crates/rethnet_evm/Cargo.toml +++ b/crates/rethnet_evm/Cargo.toml @@ -27,6 +27,7 @@ tracing = { version = "0.1.37", features = ["attributes", "std"], optional = tru criterion = { version = "0.4.0", default-features = false, features = ["cargo_bench_support", "html_reports", "plotters"] } [features] +bench-once = [] # limits the benchmark variants to one test-disable-remote = [] tracing = ["dep:tracing"] diff --git a/crates/rethnet_evm/benches/state.rs b/crates/rethnet_evm/benches/state.rs index 4990b50d38..facc808c5e 100644 --- a/crates/rethnet_evm/benches/state.rs +++ b/crates/rethnet_evm/benches/state.rs @@ -61,15 +61,27 @@ impl RethnetStates { } } -const NUM_SCALES: usize = 4; -const CHECKPOINT_SCALES: [u64; NUM_SCALES] = [1, 5, 10, 20]; -const MAX_CHECKPOINT_SCALE: u64 = CHECKPOINT_SCALES[NUM_SCALES - 1]; -const ADDRESS_SCALES: [u64; 4] = [ - MAX_CHECKPOINT_SCALE * 5, - MAX_CHECKPOINT_SCALE * 25, - 
MAX_CHECKPOINT_SCALE * 50, - MAX_CHECKPOINT_SCALE * 100, -]; +#[cfg(feature = "bench-once")] +mod config { + pub const CHECKPOINT_SCALES: [u64; 1] = [1]; + pub const ADDRESS_SCALES: [u64; 1] = [1]; +} + +#[cfg(not(feature = "bench-once"))] +mod config { + const NUM_SCALES: usize = 4; + pub const CHECKPOINT_SCALES: [u64; NUM_SCALES] = [1, 5, 10, 20]; + + const MAX_CHECKPOINT_SCALE: u64 = CHECKPOINT_SCALES[NUM_SCALES - 1]; + pub const ADDRESS_SCALES: [u64; NUM_SCALES] = [ + MAX_CHECKPOINT_SCALE * 5, + MAX_CHECKPOINT_SCALE * 25, + MAX_CHECKPOINT_SCALE * 50, + MAX_CHECKPOINT_SCALE * 100, + ]; +} + +use config::*; fn bench_sync_state_method(c: &mut Criterion, method_name: &str, mut method_invocation: R) where From b177db2ba79c883b1571504d8bf2ece2cdadebff Mon Sep 17 00:00:00 2001 From: Franco Victorio Date: Wed, 10 May 2023 19:14:20 +0200 Subject: [PATCH 066/406] Merge main into rethnet/main --- .changeset/config.json | 2 +- .changeset/kind-humans-grow.md | 5 - .changeset/lazy-hornets-clap.md | 5 + .changeset/plenty-comics-bow.md | 5 - .changeset/polite-mugs-cry.m | 5 + .changeset/stale-garlics-mate.md | 5 - .github/workflows/e2e-ci.yml | 48 - .github/workflows/hardhat-etherscan-ci.yml | 80 - ...t-ganache-ci.yml => hardhat-verify-ci.yml} | 12 +- .github/workflows/hardhat-waffle-ci.yml | 82 - .../workflows/test-recent-mainnet-block.yml | 2 +- config/eslint/eslintrc.js | 20 +- config/typescript/@types/adm-zip/index.d.ts | 3 - .../@types/ethereumjs-abi/index.d.ts | 1 - config/typescript/@types/solc/index.d.ts | 2 - config/typescript/@types/solhint/index.d.ts | 2 - config/typescript/@types/solpp/index.d.ts | 1 - config/typescript/@types/tabtab/index.d.ts | 1 - config/typescript/tsconfig.json | 1 - docs/package.json | 2 +- docs/public/privacy-policy.html | 1957 ++++- docs/redirects.config.js | 16 +- docs/src/assets/trustedTeamsLogos/logos.ts | 4 +- docs/src/components/DocumentationLayout.tsx | 2 +- docs/src/components/LandingFooter.tsx | 2 +- docs/src/components/Navigation.mocks.json | 8 +- .../components/landingBlocks/HeroBlock.tsx | 2 +- .../docs/migrate-from-waffle.md | 34 +- .../hardhat-chai-matchers/docs/overview.md | 4 +- .../hardhat-chai-matchers/docs/reference.md | 2 +- .../docs/guides/forking-other-networks.md | 98 +- .../docs/metamask-issue/index.md | 4 +- .../hardhat-network/docs/overview/index.md | 2 +- .../hardhat-network/docs/reference/index.md | 41 +- .../docs/advanced/_dirinfo.yaml | 1 + .../docs/advanced/hardhat-and-foundry.md | 6 + .../advanced/hardhat-runtime-environment.md | 2 +- .../hardhat-runner/docs/advanced/using-esm.md | 132 + .../hardhat-runner/docs/config/index.md | 54 +- .../docs/getting-started/index.md | 2 +- .../guides/migrating-from-hardhat-waffle.md | 112 +- .../hardhat-runner/docs/guides/verifying.md | 67 +- .../docs/reference/solidity-support.md | 27 +- .../docs/supporter-guides/_dirinfo.yaml | 5 + .../docs/supporter-guides/oracles.md | 229 + .../docs/troubleshooting/common-problems.md | 6 + .../hardhat-runner/plugins/_dirinfo.yaml | 3 +- .../content/hardhat-runner/plugins/plugins.ts | 129 +- docs/src/content/home.ts | 2 +- docs/src/content/layouts.yaml | 1 + .../content/tutorial/boilerplate-project.md | 6 +- .../creating-a-new-hardhat-project.md | 2 +- .../debugging-with-hardhat-network.md | 2 +- .../tutorial/deploying-to-a-live-network.md | 69 +- docs/src/content/tutorial/final-thoughts.md | 1 + docs/src/content/tutorial/index.md | 2 +- .../src/content/tutorial/testing-contracts.md | 2 +- docs/temp/tabsConfig.json | 4 +- docs/yarn.lock | 28 +- package.json | 16 
+- packages/e2e/.eslintignore | 1 - packages/e2e/.gitignore | 95 - packages/e2e/.mocharc.json | 5 - packages/e2e/.prettierignore | 2 - packages/e2e/README.md | 11 - packages/e2e/package.json | 44 - packages/e2e/run-tests.js | 92 - .../fixture-projects/basic-project/.gitignore | 2 - .../basic-project/contracts/Contract.sol | 4 - .../basic-project/hardhat.config.js | 3 - .../basic-project/package.json | 6 - .../basic-project/scripts/multi-run-test.js | 25 - .../basic-project/test/another-test.js | 3 - .../basic-project/test/simple.js | 3 - .../e2e/test/fixture-projects/empty/.gitkeep | 0 .../javascript-sample-project/package.json | 6 - .../type-error-in-config/.gitignore | 2 - .../type-error-in-config/hardhat.config.ts | 6 - .../type-error-in-config/package.json | 9 - .../type-error-in-script/.gitignore | 2 - .../type-error-in-script/hardhat.config.ts | 3 - .../type-error-in-script/package.json | 9 - .../type-error-in-script/script.ts | 2 - .../type-error-in-test/.gitignore | 2 - .../type-error-in-test/hardhat.config.ts | 3 - .../type-error-in-test/package.json | 9 - .../type-error-in-test/test/test.ts | 6 - .../typescript-sample-project/package.json | 6 - packages/e2e/test/helpers.ts | 48 - packages/e2e/test/index.ts | 315 - packages/e2e/tsconfig.json | 13 - packages/eslint-plugin/CHANGELOG.md | 13 + packages/eslint-plugin/index.js | 12 +- .../eslint-plugin/onlyHardhatErrorRule.js | 51 +- packages/eslint-plugin/package.json | 2 +- packages/hardhat-chai-matchers/CHANGELOG.md | 6 + packages/hardhat-chai-matchers/package.json | 11 +- packages/hardhat-chai-matchers/src/index.ts | 4 +- .../src/internal/checkIfWaffleIsInstalled.ts | 15 - .../src/internal/emit.ts | 11 + .../hardhatWaffleIncompatibilityCheck.ts | 11 + .../reverted/revertedWithCustomError.ts | 11 + packages/hardhat-chai-matchers/test/events.ts | 20 + .../test/reverted/revertedWithCustomError.ts | 20 + packages/hardhat-core/CHANGELOG.md | 51 + packages/hardhat-core/package.json | 37 +- .../sample-projects/javascript-esm/LICENSE.md | 11 + .../sample-projects/javascript-esm/README.md | 13 + .../javascript-esm/contracts/Lock.sol | 34 + .../javascript-esm/hardhat.config.cjs | 6 + .../javascript-esm/scripts/deploy.js | 23 + .../javascript-esm/test/Lock.js | 123 + .../javascript/hardhat.config.js | 2 +- .../javascript/scripts/deploy.js | 9 +- .../typescript/hardhat.config.ts | 2 +- .../typescript/scripts/deploy.ts | 9 +- .../sample-projects/typescript/tsconfig.json | 3 +- .../scripts/test-debug-trace-transaction.ts | 35 +- .../scripts/test-recent-mainnet-block.ts | 7 +- .../hardhat-core/src/builtin-tasks/compile.ts | 18 +- .../hardhat-core/src/builtin-tasks/test.ts | 32 +- .../src/internal/cli/bootstrap.ts | 50 +- packages/hardhat-core/src/internal/cli/cli.ts | 66 +- .../src/internal/cli/project-creation.ts | 135 +- .../hardhat-core/src/internal/constants.ts | 1 + .../src/internal/core/config/config-env.ts | 28 +- .../internal/core/config/config-loading.ts | 42 +- .../internal/core/config/default-config.ts | 2 +- .../src/internal/core/errors-list.ts | 29 +- .../core/jsonrpc/types/output/block.ts | 12 + .../src/internal/core/project-structure.ts | 8 + .../internal/core/providers/construction.ts | 53 +- .../src/internal/core/providers/http.ts | 62 +- .../src/internal/core/runtime-environment.ts | 159 +- .../src/internal/core/tasks/dsl.ts | 34 +- .../internal/core/tasks/task-definitions.ts | 8 +- .../provider/BlockchainBase.ts | 29 +- .../hardhat-network/provider/TxPool.ts | 1 + .../provider/fork/ForkBlockchain.ts | 46 +- 
.../provider/fork/rpcToBlockData.ts | 2 + .../hardhat-network/provider/modules/eth.ts | 43 +- .../hardhat-network/provider/modules/evm.ts | 18 +- .../hardhat-network/provider/node-types.ts | 2 +- .../internal/hardhat-network/provider/node.ts | 54 +- .../hardhat-network/provider/output.ts | 20 + .../hardhat-network/provider/provider.ts | 109 +- .../hardhat-network/provider/return-data.ts | 4 +- .../FakeSenderAccessListEIP2930Transaction.ts | 2 +- .../FakeSenderEIP1559Transaction.ts | 2 +- .../transactions/FakeSenderTransaction.ts | 2 +- .../ReadOnlyValidEIP1559Transaction.ts | 6 +- .../ReadOnlyValidEIP2930Transaction.ts | 6 +- .../transactions/ReadOnlyValidTransaction.ts | 2 +- .../ReadOnlyValidUnknownTypeTransaction.ts | 2 +- .../hardhat-network/provider/utils/bloom.ts | 2 +- .../provider/utils/convertToRethnet.ts | 2 + .../provider/utils/putGenesisBlock.ts | 4 +- .../hardhat-network/provider/vm/ethereumjs.ts | 46 +- .../stack-traces/compiler-to-model.ts | 80 +- .../stack-traces/consoleLogger.ts | 2 +- .../hardhat-network/stack-traces/constants.ts | 2 +- .../hardhat-network/stack-traces/debug.ts | 11 +- .../stack-traces/error-inferrer.ts | 266 +- .../hardhat-network/stack-traces/model.ts | 12 +- .../stack-traces/solidity-errors.ts | 4 +- .../stack-traces/solidity-stack-trace.ts | 4 +- .../stack-traces/solidityTracer.ts | 3 + .../internal/solidity/compiler/downloader.ts | 4 +- .../src/internal/solidity/compiler/index.ts | 2 +- .../src/internal/solidity/resolver.ts | 31 +- .../src/internal/util/abi-helpers.ts | 9 + .../src/internal/util/download.ts | 26 +- .../src/internal/util/hardforks.ts | 2 + .../src/internal/util/packageInfo.ts | 15 + .../hardhat-core/src/internal/util/proxy.ts | 18 + packages/hardhat-core/src/types/artifacts.ts | 11 +- packages/hardhat-core/src/types/config.ts | 2 + packages/hardhat-core/src/types/runtime.ts | 20 +- .../hardhat-core/test/builtin-tasks/run.ts | 88 +- .../hardhat-core/test/builtin-tasks/test.ts | 298 +- .../esm-project-with-scripts/.gitignore | 2 + .../assert-hardhat-arguments.js | 5 + .../esm-project-with-scripts/async-script.js | 7 + .../esm-project-with-scripts/contracts/a.sol | 2 + .../env-var-script.js | 5 + .../failing-script.js | 3 + .../hardhat.config.cjs | 8 + .../esm-project-with-scripts/package.json | 3 + .../esm-project-with-scripts/params-script.js | 10 + .../successful-script.js | 5 + .../hardhat.config.cjs} | 0 .../bail-config-false-overriden/package.json | 3 + .../bail-config-false-overriden/test/test.js | 11 + .../bail/bail-config-false/hardhat.config.cjs | 10 + .../bail/bail-config-false/package.json | 3 + .../bail/bail-config-false/test/test.js | 11 + .../bail/bail-config-true/hardhat.config.cjs} | 0 .../bail/bail-config-true/package.json | 3 + .../bail/bail-config-true/test/test.js | 11 + .../bail/default/hardhat.config.cjs} | 0 .../esm-test-task/bail/default/package.json | 3 + .../esm-test-task/bail/default/test/test.js | 11 + .../bail/with-bail-flag/hardhat.config.cjs} | 0 .../bail/with-bail-flag/package.json | 3 + .../bail/with-bail-flag/test/test.js | 11 + .../failing-tests/hardhat.config.cjs | 9 + .../esm-test-task/failing-tests/package.json | 3 + .../esm-test-task/failing-tests/test/test.js | 13 + .../minimal-config/hardhat.config.cjs | 9 + .../esm-test-task/minimal-config/package.json | 3 + .../esm-test-task/minimal-config/test/test.js | 18 + .../mixed-test-files/hardhat.config.cjs | 9 + .../mixed-test-files/package.json | 3 + .../mixed-test-files/test/test.cjs | 7 + .../mixed-test-files/test/test.js | 7 + 
.../mixed-test-files/test/test.mjs | 7 + .../hardhat.config.cjs} | 0 .../package.json | 3 + .../test/check-parallel.js | 7 + .../parallel-config-false/hardhat.config.cjs | 10 + .../parallel-config-false/package.json | 3 + .../test/check-parallel.js | 7 + .../parallel-config-true/hardhat.config.cjs} | 0 .../parallel-config-true/package.json | 3 + .../test/check-parallel.js | 7 + .../parallel/hardhat.config.cjs | 9 + .../parallel-tests/parallel/package.json | 3 + .../parallel/test/check-parallel.js | 7 + .../parallel-tests/serial/hardhat.config.cjs | 9 + .../parallel-tests/serial/package.json | 3 + .../serial/test/check-parallel.js | 7 + .../run-tests-twice/hardhat.config.cjs | 16 + .../run-tests-twice/package.json | 3 + .../run-tests-twice/test/test.js | 18 + .../esm/cjs-config/contracts/Foo.sol | 4 + .../esm/cjs-config/hardhat.config.cjs | 10 + .../esm/cjs-config/package.json | 3 + .../esm/js-config/hardhat.config.js | 1 + .../esm/js-config/package.json | 3 + .../hardhat.config.js | 2 +- .../hardhat.config.js | 2 +- .../multiple-unsupported-solc.js | 4 +- .../unsupported-new-solc.js | 2 +- .../unsupported-solc-in-override.js | 2 +- .../hardhat.config.js | 10 + .../config-bail-false-overriden/package.json | 1 + .../test/test.js | 0 .../bail/config-bail-false/hardhat.config.js | 10 + .../bail/config-bail-false/package.json | 1 + .../bail/config-bail-false/test/test.js | 11 + .../bail/config-bail-true/hardhat.config.js | 10 + .../bail/config-bail-true/package.json | 1 + .../bail/config-bail-true/test/test.js | 11 + .../default-with-bail-flag/hardhat.config.js | 9 + .../bail/default-with-bail-flag/package.json | 1 + .../bail/default-with-bail-flag/test/test.js | 11 + .../test-task/bail/default/hardhat.config.js | 9 + .../test-task/bail/default/package.json | 1 + .../test-task/bail/default/test/test.js | 11 + .../test-task/failing-tests/package.json | 1 + .../test-task/minimal-config/package.json | 1 + .../mixed-test-files/hardhat.config.js | 9 + .../test-task/mixed-test-files/package.json | 1 + .../test-task/mixed-test-files/test/test.cjs | 7 + .../test-task/mixed-test-files/test/test.js | 7 + .../test-task/mixed-test-files/test/test.mjs | 7 + .../hardhat.config.js | 10 + .../package.json | 1 + .../test/check-parallel.js | 0 .../parallel-config-false/hardhat.config.js | 10 + .../parallel-config-false/package.json | 1 + .../test/check-parallel.js | 7 + .../parallel-config-true/hardhat.config.js | 10 + .../parallel-config-true/package.json | 1 + .../test/check-parallel.js | 7 + .../parallel-tests/parallel/hardhat.config.js | 9 + .../parallel-tests/parallel/package.json | 1 + .../parallel/test/check-parallel.js | 7 + .../parallel-tests/serial/hardhat.config.js | 9 + .../parallel-tests/serial/package.json | 1 + .../serial/test/check-parallel.js | 7 + .../run-tests-twice-mjs/hardhat.config.js | 16 + .../run-tests-twice-mjs/package.json | 1 + .../run-tests-twice-mjs/test/test.js | 18 + .../run-tests-twice-mjs/test/test.mjs | 18 + .../run-tests-twice/hardhat.config.js | 16 + .../test-task/run-tests-twice/package.json | 1 + .../test-task/run-tests-twice/test/test.js | 18 + .../internal/core/config/config-loading.ts | 49 + .../test/internal/core/project-structure.ts | 20 + .../test/internal/core/runtime-environment.ts | 152 +- .../hardhat-network/helpers/contracts.ts | 35 + .../hardhat-network/helpers/providers.ts | 27 +- .../hardhat-network/helpers/transactions.ts | 50 + .../hardhat-network/helpers/useProvider.ts | 59 +- .../hardhat-network/jsonrpc/client.ts | 6 +- .../provider/HardhatBlockchain.ts | 
34 +- .../hardhat-network/provider/baseFeePerGas.ts | 13 +- .../provider/fork/ForkBlockchain.ts | 85 +- .../provider/fork/ForkStateManager.ts | 6 +- .../provider/forked-provider.ts | 6 +- .../provider/forking-different-hardfork.ts | 234 + .../provider/hardhat-network-options.ts | 4 +- .../provider/interval-mining-provider.ts | 4 +- .../hardhat-network/provider/modules/debug.ts | 95 +- .../provider/modules/eth/hardforks.ts | 385 +- .../modules/eth/methods/getBlockByNumber.ts | 2 +- .../eth/methods/getTransactionByHash.ts | 8 +- .../modules/eth/methods/sendRawTransaction.ts | 2 +- .../modules/eth/methods/sendTransaction.ts | 4 +- .../hardhat-network/provider/modules/evm.ts | 40 +- .../provider/modules/hardhat.ts | 22 +- .../internal/hardhat-network/provider/node.ts | 101 +- .../provider/utils/runFullBlock.ts | 11 +- .../stack-traces/compilation.ts | 79 +- .../stack-traces/compilers-list.ts | 187 + .../hardhat-network/stack-traces/execution.ts | 7 +- .../storage-mapping-parameter/test.json | 4 +- .../overloaded-function-params-error/c.sol | 11 + .../test.json | 23 + .../eip170-contract-too-large/test.json | 2 + .../hardhat-network/stack-traces/test.ts | 313 +- .../test/internal/util/download.ts | 76 - .../test/internal/util/scripts-runner.ts | 87 +- packages/hardhat-docker/.eslintrc.js | 7 - packages/hardhat-docker/.gitignore | 95 - packages/hardhat-docker/.prettierignore | 3 - packages/hardhat-docker/CHANGELOG.md | 13 - packages/hardhat-docker/README.md | 57 - packages/hardhat-docker/package.json | 56 - packages/hardhat-docker/src/errors.ts | 84 - packages/hardhat-docker/src/hardhat-docker.ts | 297 - packages/hardhat-docker/src/index.ts | 3 - packages/hardhat-docker/src/streams.ts | 18 - packages/hardhat-docker/src/types.ts | 20 - packages/hardhat-docker/test/tests.ts | 0 packages/hardhat-docker/tsconfig.json | 13 - packages/hardhat-ethers/CHANGELOG.md | 6 + packages/hardhat-ethers/package.json | 10 +- .../hardhat-ethers/src/internal/helpers.ts | 6 +- packages/hardhat-etherscan/.eslintrc.js | 7 - packages/hardhat-etherscan/CHANGELOG.md | 13 + packages/hardhat-etherscan/DEVELOPING.md | 10 - packages/hardhat-etherscan/LICENSE | 21 - packages/hardhat-etherscan/README.md | 216 +- packages/hardhat-etherscan/src/ABIEncoder.ts | 47 - packages/hardhat-etherscan/src/config.ts | 69 - packages/hardhat-etherscan/src/constants.ts | 13 - packages/hardhat-etherscan/src/errors.ts | 19 - .../src/etherscan/EtherscanService.ts | 163 - .../EtherscanVerifyContractRequest.ts | 56 - packages/hardhat-etherscan/src/index.ts | 879 --- .../hardhat-etherscan/src/network/prober.ts | 78 - .../src/resolveEtherscanApiKey.ts | 42 - .../hardhat-etherscan/src/solc/bytecode.ts | 320 - .../hardhat-etherscan/src/solc/libraries.ts | 275 - .../hardhat-etherscan/src/solc/metadata.ts | 123 - .../hardhat-etherscan/src/solc/version.ts | 52 - packages/hardhat-etherscan/src/types.ts | 32 - packages/hardhat-etherscan/src/util.ts | 58 - packages/hardhat-etherscan/test/.eslintrc.js | 11 - .../hardhat-project-defined-config/.gitignore | 2 - .../contracts/TestContract.sol | 12 - .../hardhat.config.js | 10 - .../hardhat-project-goerli-mock/.gitignore | 2 - .../contracts/ReentrancyGuard.sol | 16 - .../contracts/TestContract.sol | 19 - .../contracts/TestContract1.sol | 19 - .../contracts/TestLibrary.sol | 11 - .../contracts/TestParamList.sol | 23 - .../contracts/TestReentrancyGuardImported.sol | 10 - .../contracts/TestReentrancyGuardLocal.sol | 10 - .../contracts/imported/ReentrancyGuard.sol | 32 - .../contracts/libraries/SafeMath.sol | 28 - 
.../hardhat.config.js | 28 - .../hardhat-project-goerli-mock/paramList.js | 8 - .../.gitignore | 2 - .../contracts/TestContract.sol | 12 - .../hardhat.config.js | 13 - .../.gitignore | 2 - .../contracts/TestContract.sol | 12 - .../hardhat.config.js | 7 - .../hardhat-project/.gitignore | 2 - .../contracts/ReentrancyGuard.sol | 16 - .../contracts/TestContract.sol | 19 - .../contracts/TestContract1.sol | 19 - .../hardhat-project/contracts/TestLibrary.sol | 11 - .../contracts/TestParamList.sol | 23 - .../contracts/TestReentrancyGuardImported.sol | 10 - .../contracts/TestReentrancyGuardLocal.sol | 11 - .../hardhat-project/contracts/WithLibs.sol | 49 - .../contracts/imported/ReentrancyGuard.sol | 32 - .../contracts/libraries/SafeMath.sol | 28 - .../hardhat-project/hardhat.config.js | 27 - .../hardhat-project/paramList.js | 8 - packages/hardhat-etherscan/test/helpers.ts | 54 - ...HardhatRuntimeEnvironmentExtensionTests.ts | 44 - .../test/integration/PluginTests.ts | 653 -- .../test/integration/solc/version.ts | 10 - .../hardhat-etherscan/test/unit/ABIEncoder.ts | 278 - .../test/unit/ChainConfig.ts | 22 - .../test/unit/etherscanConfigExtender.ts | 72 - .../test/unit/resolveEtherscanApiKey.ts | 64 - .../test/unit/solc/bytecode.ts | 325 - .../test/unit/solc/metadata.ts | 288 - .../test/unit/solc/version.ts | 82 - packages/hardhat-etherscan/test/unit/util.ts | 43 - packages/hardhat-foundry/CHANGELOG.md | 7 + packages/hardhat-foundry/package.json | 10 +- packages/hardhat-foundry/src/index.ts | 5 +- packages/hardhat-ganache/.eslintignore | 1 - packages/hardhat-ganache/.eslintrc.js | 7 - packages/hardhat-ganache/.gitignore | 95 - packages/hardhat-ganache/.mocharc.json | 5 - packages/hardhat-ganache/.prettierignore | 5 - packages/hardhat-ganache/LICENSE | 21 - packages/hardhat-ganache/README.md | 59 - packages/hardhat-ganache/package.json | 66 - .../hardhat-ganache/src/ganache-options-ti.ts | 54 - .../hardhat-ganache/src/ganache-service.ts | 289 - packages/hardhat-ganache/src/index.ts | 54 - packages/hardhat-ganache/test/.eslintrc.js | 11 - .../hardhat.config.ts | 17 - .../scripts/custom-accounts-sample.js | 43 - .../hardhat-project/.gitignore | 3 - .../contracts/EVMInspector.sol | 15 - .../hardhat-project/hardhat.config.ts | 5 - .../scripts/accounts-sample.js | 23 - .../hardhat-project/scripts/delayed-sample.js | 26 - .../hardhat-project/test/test.js | 19 - packages/hardhat-ganache/test/helpers.ts | 25 - packages/hardhat-ganache/test/index.ts | 105 - packages/hardhat-ganache/tsconfig.json | 12 - packages/hardhat-network-helpers/CHANGELOG.md | 7 + packages/hardhat-network-helpers/package.json | 10 +- .../src/helpers/time/increase.ts | 4 +- .../src/helpers/time/increaseTo.ts | 13 +- .../src/helpers/time/setNextBlockTimestamp.ts | 15 +- packages/hardhat-network-helpers/src/utils.ts | 6 +- .../hardhat.config.js | 8 + .../test/helpers/time/increase.ts | 92 +- .../test/helpers/time/increaseTo.ts | 129 +- .../helpers/time/setNextBlockTimestamp.ts | 102 +- packages/hardhat-shorthand/package.json | 8 +- packages/hardhat-shorthand/src/completion.ts | 3 +- packages/hardhat-solhint/CHANGELOG.md | 6 + packages/hardhat-solhint/package.json | 13 +- packages/hardhat-solhint/src/index.ts | 15 +- packages/hardhat-solhint/test/tests.ts | 8 + packages/hardhat-solpp/package.json | 8 +- packages/hardhat-solpp/src/index.ts | 2 +- packages/hardhat-toolbox/CHANGELOG.md | 8 + packages/hardhat-toolbox/README.md | 35 +- packages/hardhat-toolbox/package.json | 6 +- packages/hardhat-toolbox/src/index.ts | 3 +- 
packages/hardhat-toolbox/tsconfig.json | 2 +- packages/hardhat-truffle4/package.json | 8 +- packages/hardhat-truffle5/.mocharc.json | 2 +- packages/hardhat-truffle5/package.json | 8 +- packages/{e2e => hardhat-verify}/.eslintrc.js | 0 .../.gitignore | 2 + .../.mocharc.json | 1 + .../.prettierignore | 1 + packages/{e2e => hardhat-verify}/LICENSE | 0 packages/hardhat-verify/README.md | 217 + .../package.json | 39 +- .../src/abi-validation-extras.ts} | 45 +- .../src/chain-config.ts} | 259 +- packages/hardhat-verify/src/config.ts | 33 + packages/hardhat-verify/src/errors.ts | 475 ++ packages/hardhat-verify/src/etherscan.ts | 219 + packages/hardhat-verify/src/index.ts | 587 ++ packages/hardhat-verify/src/solc/artifacts.ts | 404 + packages/hardhat-verify/src/solc/bytecode.ts | 213 + packages/hardhat-verify/src/solc/metadata.ts | 85 + packages/hardhat-verify/src/task-names.ts | 12 + .../src/type-extensions.ts | 4 +- packages/hardhat-verify/src/types.ts | 15 + packages/hardhat-verify/src/undici.ts | 38 + packages/hardhat-verify/src/utilities.ts | 210 + .../test/.eslintrc.js | 0 .../hardhat-project/constructor-args.js | 9 + .../contracts/DuplicatedContract.sol} | 5 +- .../contracts/DuplicatedContractCopy.sol} | 5 +- .../contracts/SimpleContract.sol | 10 + .../hardhat-project}/contracts/WithLibs.sol | 0 .../hardhat-project/duplicated-libraries.js | 5 + .../hardhat-project/hardhat.config.js | 31 + .../hardhat-project/invalid-libraries.js | 3 + .../hardhat-project/libraries.js | 4 + .../mismatched-address-libraries.js | 3 + .../missing-undetectable-libraries.js | 3 + .../hardhat-project/not-used-libraries.js | 3 + packages/hardhat-verify/test/helpers.ts | 43 + .../hardhat-verify/test/integration/index.ts | 612 ++ .../test/integration/mocks/etherscan.ts | 52 + packages/hardhat-verify/test/setup.ts | 8 + .../hardhat-verify/test/unit/chain-config.ts | 154 + packages/hardhat-verify/test/unit/config.ts | 153 + .../hardhat-verify/test/unit/etherscan.ts | 56 + packages/hardhat-verify/test/unit/index.ts | 133 + .../unit/mocks/invalid-constructor-args.js | 5 + .../test/unit/mocks/invalid-libraries.js | 1 + .../test/unit/mocks/valid-constructor-args.js | 9 + .../test/unit/mocks/valid-libraries.js | 4 + .../test/unit/solc/artifacts.ts | 195 + .../hardhat-verify/test/unit/solc/metadata.ts | 66 + .../hardhat-verify/test/unit/utilities.ts | 495 ++ .../tsconfig.json | 0 packages/hardhat-vyper/.mocharc.json | 2 +- packages/hardhat-vyper/CHANGELOG.md | 7 + packages/hardhat-vyper/package.json | 10 +- packages/hardhat-vyper/src/constants.ts | 3 - packages/hardhat-vyper/src/downloader.ts | 37 +- packages/hardhat-waffle/.eslintignore | 1 - packages/hardhat-waffle/.eslintrc.js | 7 - packages/hardhat-waffle/.gitignore | 94 - packages/hardhat-waffle/.mocharc.json | 5 - packages/hardhat-waffle/.prettierignore | 5 - packages/hardhat-waffle/CHANGELOG.md | 13 - packages/hardhat-waffle/LICENSE | 21 - packages/hardhat-waffle/README.md | 73 +- packages/hardhat-waffle/package.json | 68 - packages/hardhat-waffle/src/constants.ts | 1 - packages/hardhat-waffle/src/deploy.ts | 34 - packages/hardhat-waffle/src/fixtures.ts | 50 - packages/hardhat-waffle/src/index.ts | 37 - packages/hardhat-waffle/src/link.ts | 13 - packages/hardhat-waffle/src/matchers.ts | 16 - .../hardhat-waffle/src/type-extensions.ts | 31 - packages/hardhat-waffle/src/waffle-chai.ts | 84 - .../src/waffle-provider-adapter.ts | 53 - packages/hardhat-waffle/test/.eslintrc.js | 11 - .../hardhat.config.js | 21 - .../hardhat-project/.gitignore | 2 - 
.../hardhat-project/contracts/Contract.sol | 17 - .../hardhat-project/contracts/Token.sol | 93 - .../hardhat-project/hardhat.config.js | 5 - .../test/test-with-explicit-chai-use.js | 13 - .../hardhat-project/test/tests.js | 155 - packages/hardhat-waffle/test/helpers.ts | 25 - packages/hardhat-waffle/test/index.ts | 105 - packages/hardhat-waffle/tsconfig.json | 15 - packages/hardhat-web3-legacy/package.json | 8 +- packages/hardhat-web3/package.json | 8 +- scripts/check-dependencies.js | 2 +- yarn.lock | 6587 +++++------------ 547 files changed, 14256 insertions(+), 15185 deletions(-) delete mode 100644 .changeset/kind-humans-grow.md create mode 100644 .changeset/lazy-hornets-clap.md delete mode 100644 .changeset/plenty-comics-bow.md create mode 100644 .changeset/polite-mugs-cry.m delete mode 100644 .changeset/stale-garlics-mate.md delete mode 100644 .github/workflows/e2e-ci.yml delete mode 100644 .github/workflows/hardhat-etherscan-ci.yml rename .github/workflows/{hardhat-ganache-ci.yml => hardhat-verify-ci.yml} (85%) delete mode 100644 .github/workflows/hardhat-waffle-ci.yml delete mode 100644 config/typescript/@types/adm-zip/index.d.ts delete mode 100644 config/typescript/@types/ethereumjs-abi/index.d.ts delete mode 100644 config/typescript/@types/solc/index.d.ts delete mode 100644 config/typescript/@types/solhint/index.d.ts delete mode 100644 config/typescript/@types/solpp/index.d.ts delete mode 100644 config/typescript/@types/tabtab/index.d.ts create mode 100644 docs/src/content/hardhat-runner/docs/advanced/using-esm.md create mode 100644 docs/src/content/hardhat-runner/docs/supporter-guides/_dirinfo.yaml create mode 100644 docs/src/content/hardhat-runner/docs/supporter-guides/oracles.md delete mode 100644 packages/e2e/.eslintignore delete mode 100644 packages/e2e/.gitignore delete mode 100644 packages/e2e/.mocharc.json delete mode 100644 packages/e2e/.prettierignore delete mode 100644 packages/e2e/README.md delete mode 100644 packages/e2e/package.json delete mode 100644 packages/e2e/run-tests.js delete mode 100644 packages/e2e/test/fixture-projects/basic-project/.gitignore delete mode 100644 packages/e2e/test/fixture-projects/basic-project/contracts/Contract.sol delete mode 100644 packages/e2e/test/fixture-projects/basic-project/hardhat.config.js delete mode 100644 packages/e2e/test/fixture-projects/basic-project/package.json delete mode 100644 packages/e2e/test/fixture-projects/basic-project/scripts/multi-run-test.js delete mode 100644 packages/e2e/test/fixture-projects/basic-project/test/another-test.js delete mode 100644 packages/e2e/test/fixture-projects/basic-project/test/simple.js delete mode 100644 packages/e2e/test/fixture-projects/empty/.gitkeep delete mode 100644 packages/e2e/test/fixture-projects/javascript-sample-project/package.json delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-config/.gitignore delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-config/hardhat.config.ts delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-config/package.json delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-script/.gitignore delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-script/hardhat.config.ts delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-script/package.json delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-script/script.ts delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-test/.gitignore delete mode 100644 
packages/e2e/test/fixture-projects/type-error-in-test/hardhat.config.ts delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-test/package.json delete mode 100644 packages/e2e/test/fixture-projects/type-error-in-test/test/test.ts delete mode 100644 packages/e2e/test/fixture-projects/typescript-sample-project/package.json delete mode 100644 packages/e2e/test/helpers.ts delete mode 100644 packages/e2e/test/index.ts delete mode 100644 packages/e2e/tsconfig.json create mode 100644 packages/eslint-plugin/CHANGELOG.md delete mode 100644 packages/hardhat-chai-matchers/src/internal/checkIfWaffleIsInstalled.ts create mode 100644 packages/hardhat-chai-matchers/src/internal/hardhatWaffleIncompatibilityCheck.ts create mode 100644 packages/hardhat-core/sample-projects/javascript-esm/LICENSE.md create mode 100644 packages/hardhat-core/sample-projects/javascript-esm/README.md create mode 100644 packages/hardhat-core/sample-projects/javascript-esm/contracts/Lock.sol create mode 100644 packages/hardhat-core/sample-projects/javascript-esm/hardhat.config.cjs create mode 100644 packages/hardhat-core/sample-projects/javascript-esm/scripts/deploy.js create mode 100644 packages/hardhat-core/sample-projects/javascript-esm/test/Lock.js create mode 100644 packages/hardhat-core/src/internal/util/proxy.ts create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/.gitignore create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/assert-hardhat-arguments.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/async-script.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/contracts/a.sol create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/env-var-script.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/failing-script.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/params-script.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-project-with-scripts/successful-script.js rename packages/hardhat-core/test/fixture-projects/{test-task/bail/hardhat.config-bail-false.js => esm-test-task/bail/bail-config-false-overriden/hardhat.config.cjs} (100%) create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/bail-config-false-overriden/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/bail-config-false-overriden/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/bail-config-false/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/bail-config-false/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/bail-config-false/test/test.js rename packages/hardhat-core/test/fixture-projects/{test-task/bail/hardhat.config-bail-true.js => esm-test-task/bail/bail-config-true/hardhat.config.cjs} (100%) create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/bail-config-true/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/bail-config-true/test/test.js rename 
packages/hardhat-core/test/fixture-projects/{test-task/bail/hardhat.config.js => esm-test-task/bail/default/hardhat.config.cjs} (100%) create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/default/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/default/test/test.js rename packages/hardhat-core/test/fixture-projects/{test-task/parallel-tests/hardhat.config.js => esm-test-task/bail/with-bail-flag/hardhat.config.cjs} (100%) create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/with-bail-flag/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/bail/with-bail-flag/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/failing-tests/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/failing-tests/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/failing-tests/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/minimal-config/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/minimal-config/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/minimal-config/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/mixed-test-files/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/mixed-test-files/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/mixed-test-files/test/test.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/mixed-test-files/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/mixed-test-files/test/test.mjs rename packages/hardhat-core/test/fixture-projects/{test-task/parallel-tests/hardhat.config-parallel-false.js => esm-test-task/parallel-tests/parallel-config-false-overriden/hardhat.config.cjs} (100%) create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel-config-false-overriden/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel-config-false-overriden/test/check-parallel.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel-config-false/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel-config-false/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel-config-false/test/check-parallel.js rename packages/hardhat-core/test/fixture-projects/{test-task/parallel-tests/hardhat.config-parallel-true.js => esm-test-task/parallel-tests/parallel-config-true/hardhat.config.cjs} (100%) create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel-config-true/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel-config-true/test/check-parallel.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel/package.json create mode 100644 
packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/parallel/test/check-parallel.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/serial/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/serial/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/parallel-tests/serial/test/check-parallel.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/run-tests-twice/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/run-tests-twice/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm-test-task/run-tests-twice/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm/cjs-config/contracts/Foo.sol create mode 100644 packages/hardhat-core/test/fixture-projects/esm/cjs-config/hardhat.config.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/esm/cjs-config/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/esm/js-config/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/esm/js-config/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/config-bail-false-overriden/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/config-bail-false-overriden/package.json rename packages/hardhat-core/test/fixture-projects/test-task/bail/{ => config-bail-false-overriden}/test/test.js (100%) create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/config-bail-false/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/config-bail-false/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/config-bail-false/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/config-bail-true/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/config-bail-true/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/config-bail-true/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/default-with-bail-flag/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/default-with-bail-flag/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/default-with-bail-flag/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/default/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/default/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/bail/default/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/failing-tests/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/minimal-config/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/mixed-test-files/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/mixed-test-files/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/mixed-test-files/test/test.cjs create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/mixed-test-files/test/test.js create mode 
100644 packages/hardhat-core/test/fixture-projects/test-task/mixed-test-files/test/test.mjs create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel-config-false-overriden/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel-config-false-overriden/package.json rename packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/{ => parallel-config-false-overriden}/test/check-parallel.js (100%) create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel-config-false/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel-config-false/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel-config-false/test/check-parallel.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel-config-true/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel-config-true/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel-config-true/test/check-parallel.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/parallel/test/check-parallel.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/serial/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/serial/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/parallel-tests/serial/test/check-parallel.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/run-tests-twice-mjs/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/run-tests-twice-mjs/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/run-tests-twice-mjs/test/test.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/run-tests-twice-mjs/test/test.mjs create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/run-tests-twice/hardhat.config.js create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/run-tests-twice/package.json create mode 100644 packages/hardhat-core/test/fixture-projects/test-task/run-tests-twice/test/test.js create mode 100644 packages/hardhat-core/test/internal/hardhat-network/provider/forking-different-hardfork.ts create mode 100644 packages/hardhat-core/test/internal/hardhat-network/stack-traces/compilers-list.ts create mode 100644 packages/hardhat-core/test/internal/hardhat-network/stack-traces/test-files/0_8/call-message/no-calls/overloaded-function-params-error/c.sol create mode 100644 packages/hardhat-core/test/internal/hardhat-network/stack-traces/test-files/0_8/call-message/no-calls/overloaded-function-params-error/test.json delete mode 100644 packages/hardhat-docker/.eslintrc.js delete mode 100644 packages/hardhat-docker/.gitignore delete mode 100644 packages/hardhat-docker/.prettierignore delete mode 100644 packages/hardhat-docker/CHANGELOG.md delete mode 100644 packages/hardhat-docker/README.md delete mode 100644 
packages/hardhat-docker/package.json delete mode 100644 packages/hardhat-docker/src/errors.ts delete mode 100644 packages/hardhat-docker/src/hardhat-docker.ts delete mode 100644 packages/hardhat-docker/src/index.ts delete mode 100644 packages/hardhat-docker/src/streams.ts delete mode 100644 packages/hardhat-docker/src/types.ts delete mode 100644 packages/hardhat-docker/test/tests.ts delete mode 100644 packages/hardhat-docker/tsconfig.json delete mode 100644 packages/hardhat-etherscan/.eslintrc.js delete mode 100644 packages/hardhat-etherscan/DEVELOPING.md delete mode 100644 packages/hardhat-etherscan/LICENSE delete mode 100644 packages/hardhat-etherscan/src/ABIEncoder.ts delete mode 100644 packages/hardhat-etherscan/src/config.ts delete mode 100644 packages/hardhat-etherscan/src/constants.ts delete mode 100644 packages/hardhat-etherscan/src/errors.ts delete mode 100644 packages/hardhat-etherscan/src/etherscan/EtherscanService.ts delete mode 100644 packages/hardhat-etherscan/src/etherscan/EtherscanVerifyContractRequest.ts delete mode 100644 packages/hardhat-etherscan/src/index.ts delete mode 100644 packages/hardhat-etherscan/src/network/prober.ts delete mode 100644 packages/hardhat-etherscan/src/resolveEtherscanApiKey.ts delete mode 100644 packages/hardhat-etherscan/src/solc/bytecode.ts delete mode 100644 packages/hardhat-etherscan/src/solc/libraries.ts delete mode 100644 packages/hardhat-etherscan/src/solc/metadata.ts delete mode 100644 packages/hardhat-etherscan/src/solc/version.ts delete mode 100644 packages/hardhat-etherscan/src/types.ts delete mode 100644 packages/hardhat-etherscan/src/util.ts delete mode 100644 packages/hardhat-etherscan/test/.eslintrc.js delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-defined-config/.gitignore delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-defined-config/contracts/TestContract.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-defined-config/hardhat.config.js delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/.gitignore delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/ReentrancyGuard.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/TestContract.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/TestContract1.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/TestLibrary.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/TestParamList.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/TestReentrancyGuardImported.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/TestReentrancyGuardLocal.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/imported/ReentrancyGuard.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/libraries/SafeMath.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/hardhat.config.js delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/paramList.js delete mode 100644 
packages/hardhat-etherscan/test/fixture-projects/hardhat-project-multiple-apikeys-config/.gitignore delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-multiple-apikeys-config/contracts/TestContract.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-multiple-apikeys-config/hardhat.config.js delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-undefined-config/.gitignore delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-undefined-config/contracts/TestContract.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project-undefined-config/hardhat.config.js delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/.gitignore delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/ReentrancyGuard.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/TestContract.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/TestContract1.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/TestLibrary.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/TestParamList.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/TestReentrancyGuardImported.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/TestReentrancyGuardLocal.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/WithLibs.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/imported/ReentrancyGuard.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/libraries/SafeMath.sol delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/hardhat.config.js delete mode 100644 packages/hardhat-etherscan/test/fixture-projects/hardhat-project/paramList.js delete mode 100644 packages/hardhat-etherscan/test/helpers.ts delete mode 100644 packages/hardhat-etherscan/test/integration/HardhatRuntimeEnvironmentExtensionTests.ts delete mode 100644 packages/hardhat-etherscan/test/integration/PluginTests.ts delete mode 100644 packages/hardhat-etherscan/test/integration/solc/version.ts delete mode 100644 packages/hardhat-etherscan/test/unit/ABIEncoder.ts delete mode 100644 packages/hardhat-etherscan/test/unit/ChainConfig.ts delete mode 100644 packages/hardhat-etherscan/test/unit/etherscanConfigExtender.ts delete mode 100644 packages/hardhat-etherscan/test/unit/resolveEtherscanApiKey.ts delete mode 100644 packages/hardhat-etherscan/test/unit/solc/bytecode.ts delete mode 100644 packages/hardhat-etherscan/test/unit/solc/metadata.ts delete mode 100644 packages/hardhat-etherscan/test/unit/solc/version.ts delete mode 100644 packages/hardhat-etherscan/test/unit/util.ts create mode 100644 packages/hardhat-foundry/CHANGELOG.md delete mode 100644 packages/hardhat-ganache/.eslintignore delete mode 100644 packages/hardhat-ganache/.eslintrc.js delete mode 100644 packages/hardhat-ganache/.gitignore delete mode 100644 packages/hardhat-ganache/.mocharc.json delete mode 100644 packages/hardhat-ganache/.prettierignore delete mode 100644 packages/hardhat-ganache/LICENSE delete mode 100644 packages/hardhat-ganache/README.md delete mode 100644 
packages/hardhat-ganache/package.json delete mode 100644 packages/hardhat-ganache/src/ganache-options-ti.ts delete mode 100644 packages/hardhat-ganache/src/ganache-service.ts delete mode 100644 packages/hardhat-ganache/src/index.ts delete mode 100644 packages/hardhat-ganache/test/.eslintrc.js delete mode 100644 packages/hardhat-ganache/test/fixture-projects/hardhat-project-with-configs/hardhat.config.ts delete mode 100644 packages/hardhat-ganache/test/fixture-projects/hardhat-project-with-configs/scripts/custom-accounts-sample.js delete mode 100644 packages/hardhat-ganache/test/fixture-projects/hardhat-project/.gitignore delete mode 100644 packages/hardhat-ganache/test/fixture-projects/hardhat-project/contracts/EVMInspector.sol delete mode 100644 packages/hardhat-ganache/test/fixture-projects/hardhat-project/hardhat.config.ts delete mode 100644 packages/hardhat-ganache/test/fixture-projects/hardhat-project/scripts/accounts-sample.js delete mode 100644 packages/hardhat-ganache/test/fixture-projects/hardhat-project/scripts/delayed-sample.js delete mode 100644 packages/hardhat-ganache/test/fixture-projects/hardhat-project/test/test.js delete mode 100644 packages/hardhat-ganache/test/helpers.ts delete mode 100644 packages/hardhat-ganache/test/index.ts delete mode 100644 packages/hardhat-ganache/tsconfig.json create mode 100644 packages/hardhat-network-helpers/test/fixture-projects/allow-blocks-same-timestamp/hardhat.config.js rename packages/{e2e => hardhat-verify}/.eslintrc.js (100%) rename packages/{hardhat-etherscan => hardhat-verify}/.gitignore (95%) rename packages/{hardhat-etherscan => hardhat-verify}/.mocharc.json (78%) rename packages/{hardhat-etherscan => hardhat-verify}/.prettierignore (92%) rename packages/{e2e => hardhat-verify}/LICENSE (100%) create mode 100644 packages/hardhat-verify/README.md rename packages/{hardhat-etherscan => hardhat-verify}/package.json (65%) rename packages/{hardhat-etherscan/src/ABITypes.ts => hardhat-verify/src/abi-validation-extras.ts} (70%) rename packages/{hardhat-etherscan/src/ChainConfig.ts => hardhat-verify/src/chain-config.ts} (64%) create mode 100644 packages/hardhat-verify/src/config.ts create mode 100644 packages/hardhat-verify/src/errors.ts create mode 100644 packages/hardhat-verify/src/etherscan.ts create mode 100644 packages/hardhat-verify/src/index.ts create mode 100644 packages/hardhat-verify/src/solc/artifacts.ts create mode 100644 packages/hardhat-verify/src/solc/bytecode.ts create mode 100644 packages/hardhat-verify/src/solc/metadata.ts create mode 100644 packages/hardhat-verify/src/task-names.ts rename packages/{hardhat-etherscan => hardhat-verify}/src/type-extensions.ts (63%) create mode 100644 packages/hardhat-verify/src/types.ts create mode 100644 packages/hardhat-verify/src/undici.ts create mode 100644 packages/hardhat-verify/src/utilities.ts rename packages/{hardhat-docker => hardhat-verify}/test/.eslintrc.js (100%) create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/constructor-args.js rename packages/{hardhat-etherscan/test/fixture-projects/hardhat-project/contracts/NewContract.sol => hardhat-verify/test/fixture-projects/hardhat-project/contracts/DuplicatedContract.sol} (79%) rename packages/{hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock/contracts/NewContract.sol => hardhat-verify/test/fixture-projects/hardhat-project/contracts/DuplicatedContractCopy.sol} (79%) create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/contracts/SimpleContract.sol rename 
packages/{hardhat-etherscan/test/fixture-projects/hardhat-project-goerli-mock => hardhat-verify/test/fixture-projects/hardhat-project}/contracts/WithLibs.sol (100%) create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/duplicated-libraries.js create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/hardhat.config.js create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/invalid-libraries.js create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/libraries.js create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/mismatched-address-libraries.js create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/missing-undetectable-libraries.js create mode 100644 packages/hardhat-verify/test/fixture-projects/hardhat-project/not-used-libraries.js create mode 100644 packages/hardhat-verify/test/helpers.ts create mode 100644 packages/hardhat-verify/test/integration/index.ts create mode 100644 packages/hardhat-verify/test/integration/mocks/etherscan.ts create mode 100644 packages/hardhat-verify/test/setup.ts create mode 100644 packages/hardhat-verify/test/unit/chain-config.ts create mode 100644 packages/hardhat-verify/test/unit/config.ts create mode 100644 packages/hardhat-verify/test/unit/etherscan.ts create mode 100644 packages/hardhat-verify/test/unit/index.ts create mode 100644 packages/hardhat-verify/test/unit/mocks/invalid-constructor-args.js create mode 100644 packages/hardhat-verify/test/unit/mocks/invalid-libraries.js create mode 100644 packages/hardhat-verify/test/unit/mocks/valid-constructor-args.js create mode 100644 packages/hardhat-verify/test/unit/mocks/valid-libraries.js create mode 100644 packages/hardhat-verify/test/unit/solc/artifacts.ts create mode 100644 packages/hardhat-verify/test/unit/solc/metadata.ts create mode 100644 packages/hardhat-verify/test/unit/utilities.ts rename packages/{hardhat-etherscan => hardhat-verify}/tsconfig.json (100%) delete mode 100644 packages/hardhat-waffle/.eslintignore delete mode 100644 packages/hardhat-waffle/.eslintrc.js delete mode 100644 packages/hardhat-waffle/.gitignore delete mode 100644 packages/hardhat-waffle/.mocharc.json delete mode 100644 packages/hardhat-waffle/.prettierignore delete mode 100644 packages/hardhat-waffle/CHANGELOG.md delete mode 100644 packages/hardhat-waffle/LICENSE delete mode 100644 packages/hardhat-waffle/package.json delete mode 100644 packages/hardhat-waffle/src/constants.ts delete mode 100644 packages/hardhat-waffle/src/deploy.ts delete mode 100644 packages/hardhat-waffle/src/fixtures.ts delete mode 100644 packages/hardhat-waffle/src/index.ts delete mode 100644 packages/hardhat-waffle/src/link.ts delete mode 100644 packages/hardhat-waffle/src/matchers.ts delete mode 100644 packages/hardhat-waffle/src/type-extensions.ts delete mode 100644 packages/hardhat-waffle/src/waffle-chai.ts delete mode 100644 packages/hardhat-waffle/src/waffle-provider-adapter.ts delete mode 100644 packages/hardhat-waffle/test/.eslintrc.js delete mode 100644 packages/hardhat-waffle/test/fixture-projects/hardhat-project-custom-accounts/hardhat.config.js delete mode 100644 packages/hardhat-waffle/test/fixture-projects/hardhat-project/.gitignore delete mode 100644 packages/hardhat-waffle/test/fixture-projects/hardhat-project/contracts/Contract.sol delete mode 100644 packages/hardhat-waffle/test/fixture-projects/hardhat-project/contracts/Token.sol delete mode 100644 
packages/hardhat-waffle/test/fixture-projects/hardhat-project/hardhat.config.js delete mode 100644 packages/hardhat-waffle/test/fixture-projects/hardhat-project/test/test-with-explicit-chai-use.js delete mode 100644 packages/hardhat-waffle/test/fixture-projects/hardhat-project/test/tests.js delete mode 100644 packages/hardhat-waffle/test/helpers.ts delete mode 100644 packages/hardhat-waffle/test/index.ts delete mode 100644 packages/hardhat-waffle/tsconfig.json diff --git a/.changeset/config.json b/.changeset/config.json index e3e909d0a4..e35708c6a6 100644 --- a/.changeset/config.json +++ b/.changeset/config.json @@ -6,7 +6,7 @@ "access": "public", "baseBranch": "main", "updateInternalDependencies": "minor", - "ignore": ["@nomiclabs/common", "@nomiclabs/hardhat-e2e-tests"], + "ignore": ["@nomiclabs/common"], "___experimentalUnsafeOptions_WILL_CHANGE_IN_PATCH": { "onlyUpdatePeerDependentsWhenOutOfRange": true } diff --git a/.changeset/kind-humans-grow.md b/.changeset/kind-humans-grow.md deleted file mode 100644 index a25cbc2184..0000000000 --- a/.changeset/kind-humans-grow.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"@nomicfoundation/hardhat-network-helpers": patch ---- - -Added a new 'reset' network helper diff --git a/.changeset/lazy-hornets-clap.md b/.changeset/lazy-hornets-clap.md new file mode 100644 index 0000000000..61606d5fb9 --- /dev/null +++ b/.changeset/lazy-hornets-clap.md @@ -0,0 +1,5 @@ +--- +"@nomicfoundation/hardhat-chai-matchers": patch +--- + +Fixed a problem when `.withArgs` was used with arrays with different length diff --git a/.changeset/plenty-comics-bow.md b/.changeset/plenty-comics-bow.md deleted file mode 100644 index 9e2f2c928e..0000000000 --- a/.changeset/plenty-comics-bow.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"hardhat": patch ---- - -Added support for sending batch requests through WebSocket to the Hardhat node (thanks @tenbits!) diff --git a/.changeset/polite-mugs-cry.m b/.changeset/polite-mugs-cry.m new file mode 100644 index 0000000000..b03cb1f534 --- /dev/null +++ b/.changeset/polite-mugs-cry.m @@ -0,0 +1,5 @@ +--- +"@nomiclabs/hardhat-etherscan": patch +--- + +Fix URLs for the Aurora networks (thanks @zZoMROT!) diff --git a/.changeset/stale-garlics-mate.md b/.changeset/stale-garlics-mate.md deleted file mode 100644 index e71f44aad3..0000000000 --- a/.changeset/stale-garlics-mate.md +++ /dev/null @@ -1,5 +0,0 @@ ---- -"hardhat": patch ---- - -Added a config validation for the number of optimizer runs used (thanks @konarshankar07!) 
diff --git a/.github/workflows/e2e-ci.yml b/.github/workflows/e2e-ci.yml deleted file mode 100644 index e7d129c312..0000000000 --- a/.github/workflows/e2e-ci.yml +++ /dev/null @@ -1,48 +0,0 @@ -name: E2E tests CI - -on: - push: - branches: [$default-branch] - paths: - - "packages/e2e/**" - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - pull_request: - branches: - - "**" - - "!rethnet*/**" - - "!49-implement-read-only-struct-remotedatabase-for-querying-the-blockchain" - paths: - - "packages/e2e/**" - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - -defaults: - run: - working-directory: packages/e2e - -concurrency: - group: ${{github.workflow}}-${{github.ref}} - cancel-in-progress: true - -jobs: - test_on_linux: - name: Run E2E tests on Ubuntu with Node ${{ matrix.node }} - runs-on: ubuntu-latest - strategy: - matrix: - node: [18] - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: ${{ matrix.node }} - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Build - run: yarn build - - name: Run tests - run: yarn test diff --git a/.github/workflows/hardhat-etherscan-ci.yml b/.github/workflows/hardhat-etherscan-ci.yml deleted file mode 100644 index 4e60011dfd..0000000000 --- a/.github/workflows/hardhat-etherscan-ci.yml +++ /dev/null @@ -1,80 +0,0 @@ -name: hardhat-etherscan CI - -on: - push: - branches: [$default-branch] - paths: - - "packages/hardhat-etherscan/**" - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - pull_request: - branches: - - "**" - paths: - - "packages/hardhat-etherscan/**" - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - -defaults: - run: - working-directory: packages/hardhat-etherscan - -concurrency: - group: ${{github.workflow}}-${{github.ref}} - cancel-in-progress: true - -jobs: - test_on_windows: - name: Test hardhat-etherscan on Windows with Node 18 - runs-on: windows-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: 18 - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Build - run: yarn build - - name: Run tests - run: yarn test - - test_on_macos: - name: Test hardhat-etherscan on MacOS with Node 18 - runs-on: macos-latest - # disable until actions/virtual-environments#4896 is fixed - if: ${{ false }} - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: 18 - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Build - run: yarn build - - name: Run tests - run: yarn test - - test_on_linux: - name: Test hardhat-etherscan on Ubuntu with Node ${{ matrix.node }} - runs-on: ubuntu-latest - strategy: - matrix: - node: [18] - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: ${{ matrix.node }} - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Build - run: yarn build - - name: Run tests - run: yarn test diff --git a/.github/workflows/hardhat-ganache-ci.yml b/.github/workflows/hardhat-verify-ci.yml similarity index 85% rename from .github/workflows/hardhat-ganache-ci.yml rename to .github/workflows/hardhat-verify-ci.yml index f0215c8049..bcc9cc182a 100644 --- a/.github/workflows/hardhat-ganache-ci.yml +++ b/.github/workflows/hardhat-verify-ci.yml @@ -1,10 +1,10 @@ -name: hardhat-ganache CI +name: hardhat-verify CI on: push: branches: [$default-branch] paths: - - "packages/hardhat-ganache/**" + - 
"packages/hardhat-verify/**" - "packages/hardhat-core/**" - "packages/hardhat-common/**" - "config/**" @@ -12,14 +12,14 @@ on: branches: - "**" paths: - - "packages/hardhat-ganache/**" + - "packages/hardhat-verify/**" - "packages/hardhat-core/**" - "packages/hardhat-common/**" - "config/**" defaults: run: - working-directory: packages/hardhat-ganache + working-directory: packages/hardhat-verify concurrency: group: ${{github.workflow}}-${{github.ref}} @@ -43,7 +43,7 @@ jobs: run: yarn test test_on_macos: - name: Test hardhat-ganache on MacOS with Node 18 + name: Test hardhat-verify on MacOS with Node 18 runs-on: macos-latest # disable until actions/virtual-environments#4896 is fixed if: ${{ false }} @@ -61,7 +61,7 @@ jobs: run: yarn test test_on_linux: - name: Test hardhat-ganache on Ubuntu with Node ${{ matrix.node }} + name: Test hardhat-verify on Ubuntu with Node ${{ matrix.node }} runs-on: ubuntu-latest strategy: matrix: diff --git a/.github/workflows/hardhat-waffle-ci.yml b/.github/workflows/hardhat-waffle-ci.yml deleted file mode 100644 index 4068094aea..0000000000 --- a/.github/workflows/hardhat-waffle-ci.yml +++ /dev/null @@ -1,82 +0,0 @@ -name: hardhat-waffle CI - -on: - push: - branches: [$default-branch] - paths: - - "packages/hardhat-waffle/**" - - "packages/hardhat-ethers/**" - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - pull_request: - branches: - - "**" - paths: - - "packages/hardhat-waffle/**" - - "packages/hardhat-ethers/**" - - "packages/hardhat-core/**" - - "packages/hardhat-common/**" - - "config/**" - -defaults: - run: - working-directory: packages/hardhat-waffle - -concurrency: - group: ${{github.workflow}}-${{github.ref}} - cancel-in-progress: true - -jobs: - test_on_windows: - name: Test hardhat-waffle on Windows with Node 18 - runs-on: windows-latest - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: 18 - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Build - run: yarn build - - name: Run tests - run: yarn test - - test_on_macos: - name: Test hardhat-waffle on MacOS with Node 18 - runs-on: macos-latest - # disable until actions/virtual-environments#4896 is fixed - if: ${{ false }} - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: 18 - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Build - run: yarn build - - name: Run tests - run: yarn test - - test_on_linux: - name: Test hardhat-waffle on Ubuntu with Node ${{ matrix.node }} - runs-on: ubuntu-latest - strategy: - matrix: - node: [18] - steps: - - uses: actions/checkout@v2 - - uses: actions/setup-node@v2 - with: - node-version: ${{ matrix.node }} - cache: yarn - - name: Install - run: yarn --frozen-lockfile - - name: Build - run: yarn build - - name: Run tests - run: yarn test diff --git a/.github/workflows/test-recent-mainnet-block.yml b/.github/workflows/test-recent-mainnet-block.yml index b4ef2dc1af..375b6484c0 100644 --- a/.github/workflows/test-recent-mainnet-block.yml +++ b/.github/workflows/test-recent-mainnet-block.yml @@ -24,7 +24,7 @@ jobs: run: yarn build - name: Run test env: - INFURA_URL: ${{ secrets.INFURA_URL }} + ALCHEMY_URL: ${{ secrets.ALCHEMY_URL }} run: yarn ts-node scripts/test-recent-mainnet-block.ts - name: Notify failures if: failure() diff --git a/config/eslint/eslintrc.js b/config/eslint/eslintrc.js index 9b563bae71..0d563e5981 100644 --- a/config/eslint/eslintrc.js +++ b/config/eslint/eslintrc.js @@ -91,7 +91,7 @@ 
module.exports = { }, { selector: ["objectLiteralProperty"], - format: null + format: null, }, { selector: ["objectLiteralMethod"], @@ -137,9 +137,12 @@ module.exports = { "@typescript-eslint/prefer-function-type": "error", "@typescript-eslint/prefer-namespace-keyword": "error", "@typescript-eslint/restrict-plus-operands": "error", - "@typescript-eslint/restrict-template-expressions": ["error", { - allowAny: true, - }], + "@typescript-eslint/restrict-template-expressions": [ + "error", + { + allowAny: true, + }, + ], "@typescript-eslint/strict-boolean-expressions": [ "error", { @@ -219,8 +222,11 @@ module.exports = { }, ], "use-isnan": "error", - "no-restricted-imports": ["error", { - patterns: ["hardhat/src", "@nomiclabs/*/src"] - }], + "no-restricted-imports": [ + "error", + { + patterns: ["hardhat/src", "@nomiclabs/*/src"], + }, + ], }, }; diff --git a/config/typescript/@types/adm-zip/index.d.ts b/config/typescript/@types/adm-zip/index.d.ts deleted file mode 100644 index e01e67278c..0000000000 --- a/config/typescript/@types/adm-zip/index.d.ts +++ /dev/null @@ -1,3 +0,0 @@ -// there is a @types/adm-zip in definitely-typed, -// but it breaks the build -declare module "adm-zip"; diff --git a/config/typescript/@types/ethereumjs-abi/index.d.ts b/config/typescript/@types/ethereumjs-abi/index.d.ts deleted file mode 100644 index 50bf5b2824..0000000000 --- a/config/typescript/@types/ethereumjs-abi/index.d.ts +++ /dev/null @@ -1 +0,0 @@ -declare module "ethereumjs-abi"; diff --git a/config/typescript/@types/solc/index.d.ts b/config/typescript/@types/solc/index.d.ts deleted file mode 100644 index b407e90b31..0000000000 --- a/config/typescript/@types/solc/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -declare module "solc"; -declare module "solc/wrapper"; \ No newline at end of file diff --git a/config/typescript/@types/solhint/index.d.ts b/config/typescript/@types/solhint/index.d.ts deleted file mode 100644 index e68b7e7e7a..0000000000 --- a/config/typescript/@types/solhint/index.d.ts +++ /dev/null @@ -1,2 +0,0 @@ -declare module "solhint/lib/index"; -declare module "solhint/lib/config/config-file"; diff --git a/config/typescript/@types/solpp/index.d.ts b/config/typescript/@types/solpp/index.d.ts deleted file mode 100644 index 6ea5577cfb..0000000000 --- a/config/typescript/@types/solpp/index.d.ts +++ /dev/null @@ -1 +0,0 @@ -declare module "solpp"; diff --git a/config/typescript/@types/tabtab/index.d.ts b/config/typescript/@types/tabtab/index.d.ts deleted file mode 100644 index fd48faf5bf..0000000000 --- a/config/typescript/@types/tabtab/index.d.ts +++ /dev/null @@ -1 +0,0 @@ -declare module "@fvictorio/tabtab"; diff --git a/config/typescript/tsconfig.json b/config/typescript/tsconfig.json index 2457b096cf..fbbc0efe2c 100644 --- a/config/typescript/tsconfig.json +++ b/config/typescript/tsconfig.json @@ -7,7 +7,6 @@ "sourceMap": true, "strict": true, "esModuleInterop": true, - "typeRoots": ["../../node_modules/@types", "./@types"], "noEmitOnError": true, "skipDefaultLibCheck": true, "skipLibCheck": true diff --git a/docs/package.json b/docs/package.json index d4018839c3..b051b4e0a6 100644 --- a/docs/package.json +++ b/docs/package.json @@ -33,7 +33,7 @@ "remark-prism": "^1.3.6", "remark-unwrap-images": "^3.0.1", "ts-node": "^10.8.0", - "undici": "^5.8.2", + "undici": "^5.19.1", "unist-util-visit": "^4.1.0" }, "devDependencies": { diff --git a/docs/public/privacy-policy.html b/docs/public/privacy-policy.html index 74684011a1..37fbcecfa7 100644 --- a/docs/public/privacy-policy.html +++ 
b/docs/public/privacy-policy.html @@ -1 +1,1956 @@ -

Nomic Foundation Privacy Policy

Nomic Foundation (the “Nomic Foundation,” “we,” or “us”), a Swiss Foundation located at c/o Kaiser Odermatt & Partner AG, Baarerstrasse 12, 6300 Zug, is a software development foundation building open-source software to unlock developer productivity in the Ethereum ecosystem, and is the controller and processor of your personal data.

This Privacy Policy applies to the Nomic Foundation website (https://nomic.foundation) (“website”), the Hardhat software (“Hardhat”), and the Hardhat tools and plugins, including the hardhat runner, the hardhat vscode, the @nomicfoundation/coc-solidity, and the hardhat network, which, with any other features, tools and/or materials made available from time to time by the Foundation (the “Tools”), are all referred to as “services” in this Privacy Policy. Users of the website, Hardhat and the Tools are referred to collectively as “Clients.”

This Privacy Policy describes what information Nomic Foundation collects, how we use that information, and how we protect it. Nomic Foundation collects and processes personal data in order to run our organization and offer our services to Clients. We do not share Client information with third parties except for the limited purposes described in this Privacy Policy.

By using our services, Clients understand and agree that we will collect, process and use their information as described in this Privacy Policy, and in compliance with the Swiss Federal Act on Data Protection (“FADP”), the Swiss Ordinance to the Federal Act on Data Protection (“OFADP”), and the General European Data Protection Regulation (“GDPR”). We recommend that Clients read this Privacy Policy in full to ensure they are fully informed.

If you have any questions about this Privacy Policy or how we handle personal data, please contact us at privacy@nomic.foundation or use the contact information set out at the bottom of this Privacy Policy.

What Personal Data Does Nomic Foundation Collect, and Why?

The information that Nomic Foundation gathers from Clients allows us to deliver and improve our services. For example, it allows us to communicate with Clients, provide support, and (with Clients’ permission) monitor deployments of Hardhat and the Tools for performance-improvement and error-correction purposes.

A.        Information Our Clients Provide

We receive and store the information our Clients supply to us when they communicate with us by email, GitHub, and social media. This information may include the Client’s name, company, email address, username, postal address, and telephone number.

Personal data processed through social media is subject to the privacy policies of those platforms. You should check each social media platform’s privacy policy. The Nomic Foundation is not responsible for the data collected by these platforms.

B.        Information Automatically Collected from Clients

We also collect certain information automatically, including:

  1. Error and performance monitoring data

Hardhat collects automated error and performance monitoring data to help Nomic Foundation fix bugs and improve the performance of our software. This data is only collected and processed if the Client opts into this feature.

The information collected includes: 1) a unique cryptographic identifier to identify data from a single user; 2) the Hardhat version number; 3) whether Hardhat is running on a server or desktop; 4) the Client’s operating system (e.g. Windows, Mac, or Linux); 5) whether the Hardhat task running is a built-in or user-defined task; 6) the versions of Node.js and Sentry running on the Client’s computer; 7) error message text; 8) stack trace data; and 9) timestamps.

The Nomic Foundation uses third-party services, Sentry and Google Analytics, to collect and process this data: i) Google Analytics: extension version, machine ID, operating system, user agent; and ii) Sentry: extension name + version, environment, machine ID as mentioned above, tags that provide context.

In the case of Sentry, the data is anonymized before being sent to Sentry’s servers. Any piece of data resembling a path or a private key is removed on a best-effort basis. The data sent to Sentry is subject to a data processing agreement limiting Sentry’s use of the data and requiring Sentry to take appropriate security measures to protect the data.

  2. Website cookies

Like many websites, the Nomic Foundation website uses cookies to obtain certain types of information when your web browser accesses our site. Cookies are used most commonly to do things like tracking page views, identifying repeat users and utilizing login tokens for a session.

  • Session cookies: these are used to anonymously track a user’s session on our website to deliver a better experience. Served by: Nomic Foundation. How to control these: you can block or delete these by changing your browser settings.

  • Performance and Targeting cookies: these cookies collect information that is used either in aggregate form to help us understand how our website is being used or how effective our marketing campaigns are, or to help us customize our website for you. Such cookies may record site and display-related activity for a session so that a client does not see displays that are irrelevant or have already been dismissed. Served by: Google. How to control these: Google offers a Google Analytics Opt-Out Browser Add-on for most browsers.

You can set or amend your web browser controls to accept or refuse cookies. If you choose to reject cookies, you may still use our website though your access to some functionality and areas of our website may be restricted. As the means by which you can refuse cookies through your web browser controls vary from browser-to-browser, you should visit your browser's help menu for more information.

Other information we collect and analyze includes the Internet Protocol (IP) address used to connect your computer to the Internet, computer and connection information such as browser type, version, language, and time zone setting, browser plug-in type and version, screen resolution, and operating system and platform. This information is stored in log files and is collected automatically. We collect this technical information to better understand user needs and provide Clients with an optimal online experience.

We also collect aggregate usage data for our website, which may include browsing patterns and broad demographic information, to enable us to understand how our website is being used and to develop and refine it to better serve our Clients.

Our Legal Bases for Processing Personal Information

For personal data under Nomic Foundation’s control, we rely on two bases to lawfully obtain and process personal information. First, where Clients have given us valid consent to use their data in certain ways, we rely on that consent. Second, as described in more detail below, in certain cases we may process information where this is necessary to meet legal obligations, such as compliance with law enforcement subpoenas or warrants, and/or to further our legitimate interests, so long as any such legitimate interests are not overridden by your rights or interests.

How and When Do We Share Information?

Nomic Foundation does not sell your information. As set out below, we only share information on a limited basis in order to enable us to offer our services. We do not otherwise make Client data available to third parties.  

Service Providers

We employ other organizations and service providers to perform certain functions on our behalf. These third parties have only limited access to your information, may use your information only to perform these tasks on our behalf, and are obligated to Nomic Foundation not to disclose or use your information for other purposes.

All Service Providers warrant that they are in compliance with the GDPR and provide sufficient security for the information they have access to.

Our use of Service Providers includes:

  • Google: for website analytics
  • Sentry: for collection and processing of error and performance monitoring data for Hardhat

If you have any questions about the specific Service Providers we currently use, please contact us at privacy@nomic.foundation or by using the contact information set out at the bottom of this Privacy Policy.

Legal Compliance / Protection of the Public and Our Business / Legitimate Interests 

We will release personal and account information: to comply with a subpoena, court order, legal process, or other legal requirement when we believe in good faith that such disclosure is necessary to comply with the law; to protect, establish, or exercise our legal rights or defend against legal claims; when we believe doing so is reasonably necessary to prevent harm to an individual; or to take action regarding illegal activities, suspected fraud, threats to our property, or violations of our legal terms.

We may also share your information during an organizational transaction like a merger or distribution of our assets to a successor organization. If such a transaction occurs, we will provide notification of any changes to control of your information, as well as choices you may have.

Children’s Privacy

The services are not intended for children under the age of 16. We do not knowingly collect personal information from anyone under the age of sixteen. If you are under the age of sixteen, your parent or guardian must provide their consent for you to use the services.

Data Transfers

Nomic Foundation provides a voluntary service and Clients can choose whether or not they wish to use it. Consequently, when you decide to use our Services, you entitle Nomic Foundation to transfer your personal data to the Service Providers for the purposes of the data processing described in this Privacy Policy.

When your data is moved from its home country to another country, the laws and rules that protect your personal information in the country to which your information is transferred may be different from those in the country where you reside. Because we offer our services to people in different countries and use technical infrastructure based in different jurisdictions, we may need to transfer your personal information across borders in order to deliver our services.

Our Service Providers are obliged to protect data privacy at least to the same extent as ourselves. We contractually ensure that the protection of your personal data corresponds to the applicable laws by using the standard contractual clauses to comply with the GDPR.

How Secure Is Your Information?

We maintain administrative, technical, and physical safeguards designed to protect the privacy and security of the information we maintain about you. The connection between your computer and our website server is encrypted using Secure Sockets Layer (SSL) software that encrypts that information.

We use a Digital Certificate and secure pages will be identified by a padlock sign and “https://” in the address bar. Likewise, all Hardhat error and performance monitoring data is transmitted over HTTPS transport layer security (TLS)-secured connections.

However, no method of transmission or storage is 100% secure. As a result, while we strive to protect your personal information, you acknowledge that: (a) there are security and privacy limitations inherent to the Internet which are beyond our control; and (b) security, integrity, and privacy of any and all information and data exchanged between you and us through this Site cannot be guaranteed.

What Are Your Rights?

Upon request, Nomic Foundation will provide Clients with information about whether we hold any of their personal information (“Right to confirmation”). In certain cases, subject to relevant legal rights, Clients have the right to object to the processing of their personal information, to request changes, corrections, or the deletion of their personal information, and to obtain a copy of their personal information in an easily accessible format (“Right to access”, “Right to object”, “Right to rectification”).

In order to do this, Clients can contact us using the contact information set out at the bottom of this Privacy Policy. We will respond to every request within a reasonable timeframe and may need to take reasonable steps to confirm identity before proceeding.

You can also withdraw your consent to our processing of your information and the use of our services, and/or delete your Client account at any time, by using the contact information below to request that your personal information be deleted (“right to be forgotten”).

If you are an EU resident and believe that our processing of your personal data is contrary to the EU General Data Protection Regulation, you have the right to lodge a complaint with the appropriate supervisory authority.

If you withdraw your consent to the use or sharing of your personal information for the purposes set out in this policy, we may not be able to provide you with our services. Please note that in certain cases we may continue to process your information after you have withdrawn consent and requested that we delete your information if we have a legal basis/need to do so.

Data Retention

For personal data under its control, Nomic Foundation will retain such data only for as long as is necessary for the purposes set out in this policy, or as needed to provide Clients with our services.

If a Client no longer wishes to use our services, it may request deletion of its data at any time.

Notwithstanding the above, Nomic Foundation will retain and use Client information to the extent necessary to comply with our legal obligations (for example, if we are required to retain your information to comply with applicable tax/revenue laws), resolve disputes, and enforce our agreements.

We may also retain log files for the purpose of internal analysis, for site safety, security and fraud prevention, to improve site functionality, or where we are legally required to retain them for longer time periods.

Contact Us

If you have any questions, comments or suggestions about how we handle personal information you can contact Nomic Foundation at privacy@nomic.foundation

+ + + + + + + + + + + + + + + + + + + + + +
+ +

Nomic Foundation Privacy Policy

+ +

Nomic Foundation (the “Nomic Foundation,” “we,” or “us”), a Swiss +Foundation, located at c/o Kaiser Odermatt & +Partner AG, Baarerstrasse 12, 6300 Zug, is a software +development foundation building open-source software to unlock developer +productivity in the Ethereum ecosystem is the controller and processor of your +personal data.

+ +

 

+ +

This Privacy Policy applies to: (i) the Nomic Foundation +website (https://nomic.foundation), the Hardhat website (https://hardhat.org/), and any other website +published by us (“website”), (ii) the Hardhat software (“Hardhat”), and +the (iii) Hardhat tools and plugins, including the hardhat runner, the hardhat vscode, the @nomicfoundation/coc-solidity, and the hardhat +network which, with any other features, tools and/or materials, domains and +subdomains, projects, made available from time to time by the Foundation (the +“Tools”), all of which are referred to as “services” in this Privacy Policy. +Users of the websites, Hardhat and the Tools are referred to collectively +as “Clients.”

+ +

 

+ +

This Privacy Policy describes what information Nomic Foundation collects, +how we use that information, and how we protect it. Nomic Foundation collects +and processes personal data in order to run our organization and offer our +services to Clients. We do not share Client information with third parties +except for the limited purposes described in this Privacy Policy.

+ +

 

+ +

By using our services, Clients understand and agree that we will collect, +process and use their information as described in this Privacy Policy, and in +compliance with the Swiss Federal Act on Data Protection (“FADP”), the Swiss +Ordinance to the Federal Act on Data Protection (“OFADP”), and the General +European Data Protection Regulation (“GDPR”). We recommend that Clients read +this Privacy Policy in full to ensure they are fully informed.

+ +

 

+ +

If you have any questions about this Privacy Policy or how we handle +personal data, please contact us at privacy@nomic.foundation or use the +contact information set out at bottom of this Privacy Policy.

+ +

 

+ +

What Personal Data Does Nomic Foundation Collect, +and Why?

+ +

The information that Nomic Foundation gathers from Clients allows us to +deliver and improve our services. For example, it allows us to communicate with +Clients, provide support, and (with Clients’ permission) monitor deployments +of Hardhat or any other website and the Tools for +performance-improvement and error-correction purposes.

+ +

 

+ +

A.        Information Our +Clients Provide

+ +

We receive and store the information our Clients +supply to us when they communicate with us by email, GitHub, and social media. +This information may include the Client’s name, company, email address, +username, postal address, and telephone number.

+ +

Personal data processed through social media is subject to their privacy +policies. You should check each social media privacy policies. The Nomic +Foundation is not responsible for the data collected by these platforms.

+ +

 

+ +

B.        Information +Automatically Collected from Clients

+ +

We also collect certain information automatically, including:

+ +

1.     Error and performance monitoring data

+ +

Hardhat collects automated error and performance monitoring data to help +Nomic Foundation fix bugs and improve the performance of our software. This +data is only collected and processed if the Client opts into this feature.

+ +

 

+ +

The information collected includes: 1) a unique cryptographic identifier +to identify data from a single user; 2) the Hardhat version number; 3) whether +Hardhat is running on a server or desktop; 4) the Client’s operating system +(e.g. Windows, Mac, or Linux); 5) whether the Hardhat task running is a +build-in or user-defined task; 6) the versions of Node.js and Sentry running on +the Client’s computer; 7) error message text; 8) stack trace data; and 9) +timestamps.

+ +

 

+ +

The Nomic Foundation uses +third-party services, Sentry and Google analytics, to collect and process this +data: i) Google +analytics: extension version, machine Id, operating system, user agent; and ii) +Sentry: extension name + version, environment, machine id as mentioned above, +tags that provide context.

+ +

In the case of Sentry, the data is anonymized before being sent to +Sentry’s servers. Any piece of data resembling a path +or a private key is removed on a best-effort basis. The data sent to Sentry is +subject to a data processing agreement limiting Sentry’s use of the data and +requiring Sentry to take appropriate security measures to protect the data.

+ +

2.     Website cookies

+ +

Like many websites, the Nomic Foundation websites use cookies to obtain +certain types of information when your web browser accesses our site. Cookies +are used most commonly to do things like tracking page +views, identifying repeat users and utilizing login tokens for a session.

+ + + + + + + + + + + + + + + + + + + + +
+

Type of Cookie

+
+

Served By

+
+

How to Control These

+
+

Session cookies: these are used to anonymously track a user’s session + on our websites to deliver a better experience.

+
+

Nomic Foundation

+
+

You can block or delete these by changing your browser settings.

+
+

Performance and Targeting cookies: These cookies collect information + that is used either in aggregate form to help us understand how our website + is being used or how effective our marketing campaigns are, or to help us + customize our website for you. Such cookies may record site and + display-related activity for a session so that a client does not see displays + that are irrelevant or have already been dismissed.

+
+

Google

+
+

Google offers a Google + Analytics Opt-Out Browser Add-on for + most browsers.

+
+

You can set or amend your web browser controls to accept or refuse + cookies. If you choose to reject cookies, you may still use our website + though your access to some functionality and areas of our website may be + restricted. As the means by which you can refuse cookies through your web + browser controls vary from browser-to-browser, you should visit your + browser's help menu for more information.

+
+ +

 

+ +

Other information we collect and analyze includes the Internet Protocol +(IP) address used to connect your computer to the Internet, computer and +connection information such as browser type, version, language, and time zone +setting, browser plug-in type and version, screen resolution, and operating +system and platform. This information is stored in log files and is collected +automatically. We collect this technical information to better understand user +needs and provide Clients with an optimal online experience.

+ +

 

+ +

We also collect aggregate usage data for our websites, which may include +browsing patterns and broad demographic information, to enable us to understand +how our websites are being used and to develop and refine them to better serve +our Clients.

+ +

 

+ +

Our Legal Bases for Processing Personal Information

+ +

For personal data under Nomic Foundation’s control, we rely on two bases +to lawfully obtain and process personal information. First, where Clients have +given us valid consent to use their data in certain ways, we rely on that +consent. Second, as described in more detail below, in certain cases we may +process information where this is necessary to meet legal obligations, such as +compliance with law enforcement subpoenas or warrants, and/or to further our +legitimate interests, so long as any such legitimate interests are not +overridden by your rights or interests.

+ +

 

+ +

How and When Do We Share Information?

+ +

Nomic Foundation does not sell your information. As set out below, we +only share information on a limited basis in order to enable us to offer our +services. We do not otherwise make Client data available +to third parties.  

+ +

 

+ +

Service Providers

+ +

We employ other organizations and service providers to perform certain +functions on our behalf. These third parties have only limited access to your +information, may use your information only to perform these tasks on our +behalf, and are obligated to Nomic Foundation not to disclose or use your +information for other purposes.

+ +

 

+ +

All Service Providers warrant to be in compliance with +the GDPR and provide sufficient security to the information they access to.

+ +

 

+ +

Our use of Service Providers includes:

+ +

·       Google: for website analytics

+ +

·       Sentry: for +collection and processing of error and performance monitoring data +for Hardhat

+ +

If you have any questions about the specific Service Providers we +currently use, please contact us at privacy@nomic.foundation or by +using the contact information set out at the bottom of this Privacy Policy.

+ +

 

+ +

Legal Compliance / Protection of the Public and Our Business / +Legitimate Interests 

+ +

We will release personal and account information: to comply with a +subpoena, court order, legal process, or other legal requirement when we +believe in good faith that such disclosure is necessary to comply with the law; +to protect, establish, or exercise our legal rights or defend against legal +claims; when we believe doing so is reasonably necessary to prevent harm to an +individual; or take action regarding illegal activities, suspected fraud, +threats to our property, or violations of our legal terms.

+ +

 

+ +

We may also share your information during an organizational transaction +like a merger or distribution of our assets to a successor organization. If +such a transaction occurs, we will provide notification of any changes to +control of your information, as well as choices you may have.

+ +

 

+ +

Children’s Privacy

+ +

The services are not intended for children under the age of 16. We do +not knowingly collect personal information from anyone under the age of +sixteen. If you are under the age of sixteen, your parent or guardian must +provide their consent for you to use the services.

+ +

 

+ +

Data Transfers

+ +

Nomic Foundation provides a voluntary service and Clients can choose whether or not they wish to use it. Consequently, when +decided to use our Services you entitled Nomic Foundation to transfer your +personal data to the Services Providers for the purposes of the data processing +described in this Privacy Policy.

+ +

 

+ +

When your data is moved from its home country to another country, the +laws and rules that protect your personal information in the country to which +your information is transferred may be different from those in the country +where you reside. Because we offer our services to people in different +countries and use technical infrastructure based in different jurisdictions, we +may need to transfer your personal information across borders in order to +deliver our services.

+ +

 

+ +

Our Services Providers are obliged to protect data privacy at least to +the same extent as ourselves. We contractually ensure that the protection of +your personal data corresponds to the applicable laws by using the standard +contractual clauses to comply with the GDPR.

+ +

 

+ +

How Secure Is Your Information?

+ +

We maintain administrative, technical, and physical safeguards designed +to protect the privacy and security of the information we maintain about you. +The connection between your computer and our website server is encrypted using +Secure Sockets Layer (SSL) software that encrypts that information.

+ +

 

+ +

We use a Digital Certificate and secure pages will be identified by a +padlock sign and “https://” in the address bar. Likewise, +all Hardhat error and performance monitoring data is transmitted over +HTTPS transport layer security (TLS)-secured connections.

+ +

 

+ +

However, no method of transmission or storage is 100% secure. As a +result, while we strive to protect your personal information, you acknowledge +that: (a) there are security and privacy limitations inherent to the Internet +which are beyond our control; and (b) the security, integrity, and privacy of any and all information and data exchanged between you and +us through this Site cannot be guaranteed.

+ +

 

+ +

What Are Your Rights?

+ +

Upon request, Nomic Foundation will provide Clients with information +about whether we hold any of their personal information (“Right to +confirmation”). In certain cases, subject to relevant legal rights, Clients +have the right to object to the processing of their personal information, to +request changes, corrections, or the deletion of their personal information, +and to obtain a copy of their personal information in an easily accessible +format (“Right to access”, “Right to object”, “Right to rectification”).

+ +

 

+ +

In order to do this, Clients can contact us using the contact +information set out at the bottom of this Privacy Policy. We will respond to +every request within a reasonable timeframe and may need to take reasonable +steps to confirm identity before proceeding.

+ +

 

+ +

You can also withdraw your consent to our processing of your information +and the use of our services, and/or delete your Client +account at any time, by using the contact information below to request that +your personal information be deleted (“right to be forgotten”).

+ +

 

+ +

If you are an EU resident and believe that our processing of your +personal data is contrary to the EU General Data Protection Regulation, you +have the right to lodge a complaint with the appropriate supervisory authority.

+ +

 

+ +

If you withdraw your consent to the use or sharing of your personal +information for the purposes set out in this policy, we may not be able to +provide you with our services. Please note that in certain cases we may +continue to process your information after you have withdrawn consent and +requested that we delete your information if we have a legal basis/need to do +so.

+ +

 

+ +

Data Retention

+ +

For personal data under its control, Nomic Foundation will retain such +data only for as long as is necessary for the purposes set out in this policy, +or as needed to provide Clients with our services.

+ +

 

+ +

If a Client no longer wishes to use our +services then it may request deletion of its data at any time.

+ +

 

+ +

Notwithstanding the above, Nomic Foundation will retain and use Client +information to the extent necessary to comply with our legal obligations (for +example, if we are required to retain your information to comply with +applicable tax/revenue laws), resolve disputes, and enforce our agreements.

+ +

 

+ +

We may also retain log files for the purpose of internal analysis, for +site safety, security and fraud prevention, to improve site functionality, or +where we are legally required to retain them for longer time periods.

+ +

 

+ +

Contact Us

+ +

If you have any questions, comments or suggestions about how we handle +personal information, you can contact Nomic Foundation at privacy@nomic.foundation.

+ +

 

+ +
+ + + + diff --git a/docs/redirects.config.js b/docs/redirects.config.js index c2ce1aa690..0d801afd81 100644 --- a/docs/redirects.config.js +++ b/docs/redirects.config.js @@ -63,12 +63,12 @@ const customRedirects = [ }, { source: "/verify-custom-networks", - destination: "/plugins/nomiclabs-hardhat-etherscan#adding-support-for-other-networks", + destination: "/plugins/nomicfoundation-hardhat-verify#adding-support-for-other-networks", permanent: false }, { source: "/verify-multiple-networks", - destination: "plugins/nomiclabs-hardhat-etherscan.html#multiple-api-keys-and-alternative-block-explorers", + destination: "plugins/nomicfoundation-hardhat-verify.html#multiple-api-keys-and-alternative-block-explorers", permanent: false }, { @@ -78,7 +78,7 @@ const customRedirects = [ }, { source: "/migrate-from-waffle", - destination: "/hardhat-chai-matchers/docs/migrate-from-waffle", + destination: "/hardhat-runner/docs/guides/migrating-from-hardhat-waffle", permanent: false }, { @@ -91,6 +91,11 @@ const customRedirects = [ destination: "https://cryptpad.fr/form/#/2/form/view/HuPIRv4gvziSV0dPV1SJncKzYJXTVc8LGCaMfLUoj2c/", permanent: false }, + { + source: "/solc-viair", + destination: "/hardhat-runner/docs/reference/solidity-support#support-for-ir-based-codegen", + permanent: false + }, // top-level component URLs { @@ -256,6 +261,11 @@ const customRedirects = [ destination: "/hardhat-network-helpers/docs/:slug", permanent: false }, + { + source: "/hardhat-runner/plugins/nomiclabs-hardhat-etherscan", + destination: "/hardhat-runner/plugins/nomicfoundation-hardhat-verify", + permanent: false + }, ...loadErrorRedirects() ]; diff --git a/docs/src/assets/trustedTeamsLogos/logos.ts b/docs/src/assets/trustedTeamsLogos/logos.ts index 62608d679b..0275075713 100644 --- a/docs/src/assets/trustedTeamsLogos/logos.ts +++ b/docs/src/assets/trustedTeamsLogos/logos.ts @@ -44,7 +44,7 @@ export const TrustedTeamsLogos = [ }, { img: require("./aragonOne.png"), - alt: "ARAGONE ONE company logo", + alt: "ARAGON ONE company logo", }, { img: require("./kleros.png"), @@ -215,7 +215,7 @@ export const TrustedTeamsLogosDark = [ }, { img: require("./dark/aragonOne-dark.png"), - alt: "ARAGONE ONE company logo", + alt: "ARAGON ONE company logo", }, { img: require("./dark/kleros-dark.png"), diff --git a/docs/src/components/DocumentationLayout.tsx b/docs/src/components/DocumentationLayout.tsx index 16dcb70cf2..667cf21df5 100644 --- a/docs/src/components/DocumentationLayout.tsx +++ b/docs/src/components/DocumentationLayout.tsx @@ -92,7 +92,7 @@ export const SidebarContainer = styled.aside<{ isSidebarOpen: boolean }>` left: ${({ isSidebarOpen }) => (isSidebarOpen ? 
"0px" : "-120vw")}; height: calc(100vh - 136px); display: flex; - overflow-y: scroll; + overflow-y: auto; transition: all ease-out 0.25s; z-index: 50; background-color: ${tm(({ colors }) => colors.neutral0)}; diff --git a/docs/src/components/LandingFooter.tsx b/docs/src/components/LandingFooter.tsx index 4f105ec319..58605160db 100644 --- a/docs/src/components/LandingFooter.tsx +++ b/docs/src/components/LandingFooter.tsx @@ -141,7 +141,7 @@ const LandingFooter = () => { - Copyright 2022 Nomic Foundation | + Copyright {new Date().getFullYear()} Nomic Foundation | Privacy Policy diff --git a/docs/src/components/Navigation.mocks.json b/docs/src/components/Navigation.mocks.json index 9d39851677..f542795d7e 100644 --- a/docs/src/components/Navigation.mocks.json +++ b/docs/src/components/Navigation.mocks.json @@ -162,8 +162,8 @@ "href": "/plugins/nomiclabs-hardhat-waffle" }, { - "label": "@nomiclabs/hardhat-etherscan", - "href": "/plugins/nomiclabs-hardhat-etherscan" + "label": "@nomicfoundation/hardhat-verify", + "href": "/plugins/nomicfoundation-hardhat-verify" }, { "label": "@nomiclabs/hardhat-web3", @@ -177,10 +177,6 @@ "label": "@nomiclabs/hardhat-solhint", "href": "/plugins/nomiclabs-hardhat-solhint" }, - { - "label": "@nomiclabs/hardhat-ganache", - "href": "/plugins/nomiclabs-hardhat-ganache" - }, { "label": "@nomiclabs/hardhat-solpp", "href": "/plugins/nomiclabs-hardhat-solpp" diff --git a/docs/src/components/landingBlocks/HeroBlock.tsx b/docs/src/components/landingBlocks/HeroBlock.tsx index c076ed5ca9..5b30532280 100644 --- a/docs/src/components/landingBlocks/HeroBlock.tsx +++ b/docs/src/components/landingBlocks/HeroBlock.tsx @@ -45,7 +45,7 @@ const Block = styled.div` padding: 0 0 24px; min-height: 100px; &.content { - z-index: 1; + z-index: 2; } & svg { margin: 0 auto; diff --git a/docs/src/content/hardhat-chai-matchers/docs/migrate-from-waffle.md b/docs/src/content/hardhat-chai-matchers/docs/migrate-from-waffle.md index dfa71cd36c..485704db0d 100644 --- a/docs/src/content/hardhat-chai-matchers/docs/migrate-from-waffle.md +++ b/docs/src/content/hardhat-chai-matchers/docs/migrate-from-waffle.md @@ -1,6 +1,6 @@ # Migrating from Waffle -This page explains how to migrate from Waffle to Hardhat Chai Matchers, and the advantages of doing it. Migrating should only take a few minutes. +If you want to replace Waffle with Hardhat Chai Matchers, we recommend you [migrate to the Hardhat Toolbox](/hardhat-runner/docs/guides/migrating-from-hardhat-waffle). If for some reason you want to migrate without using the Toolbox, read on. ## How to migrate @@ -90,38 +90,8 @@ The `@nomicfoundation/hardhat-chai-matchers` plugin is meant to be a drop-in rep :::: -4. If you were not importing the `@nomiclabs/hardhat-ethers` plugin explicitly (because the Hardhat Waffle plugin already imported it), then add it to your config: - - ::::tabsgroup{options=TypeScript,JavaScript} - - :::tab{value=TypeScript} - - ```ts - import "@nomiclabs/hardhat-ethers"; - ``` - - ::: - - :::tab{value=JavaScript} - - ```js - require("@nomiclabs/hardhat-ethers"); - ``` - - ::: - - :::: - :::tip -Looking for a replacement of Waffle's `loadFixture`? You can find our version of it in [Hardhat Network Helpers](/hardhat-network-helpers/docs/reference#fixtures). +Looking for a replacement for Waffle's `loadFixture`? You can find our version of it in [Hardhat Network Helpers](/hardhat-network-helpers/docs/reference#fixtures). ::: - -## Why migrate? 
- -The Hardhat Chai Matchers are compatible with Waffle's API and offer several advantages: - -- **More features**: the Hardhat Chai Matchers include new matchers, like [`.revertedWithCustomError`](./reference#.revertedwithcustomerror) and [`.revertedWithPanic`](/chai-matchers/reference.md#.revertedwithpanic), which let you perform better assertions of a transaction's revert reason. -- **Support for native BigInts**: Besides numbers and ethers’s BigNumbers, you can also use JavaScript's native BigInts in your assertions, which means being able to do things like `expect(await token.totalSupply()).to.equal(10n**18n)` instead of `expect(await token.totalSupply()).to.equal(ethers.BigNumber.from("1000000000000000000"))`. -- **More reliable**: Several problems and minor bugs in Waffle's matchers are fixed in the Hardhat Chai Matchers. diff --git a/docs/src/content/hardhat-chai-matchers/docs/overview.md b/docs/src/content/hardhat-chai-matchers/docs/overview.md index 8c164b4a6c..a61f57b0c8 100644 --- a/docs/src/content/hardhat-chai-matchers/docs/overview.md +++ b/docs/src/content/hardhat-chai-matchers/docs/overview.md @@ -49,7 +49,7 @@ A few other helpers, such as argument predicates and panic code constants, must ### Events -You can easily write tests to verify that your contract emitted a certain event. For example, `await expect(contract.call()).to.emit(contract, "Event")` would detect the event emitted by the following Solidtity code: +You can easily write tests to verify that your contract emitted a certain event. For example, `await expect(contract.call()).to.emit(contract, "Event")` would detect the event emitted by the following Solidity code: ```solidity contract C { @@ -179,7 +179,7 @@ These matchers support not just [ethers' `BigNumber`](https://docs.ethers.io/v5/ ### Balance Changes -Often times, a transaction you're testing will be expected to have some effect on a wallet's balance, either its balance of Ether or its balance of some ERC-20 token. Another set of matchers allows you to verify that a transaction resulted in such a balance change: +Oftentimes, a transaction you're testing will be expected to have some effect on a wallet's balance, either its balance of Ether or its balance of some ERC-20 token. Another set of matchers allows you to verify that a transaction resulted in such a balance change: ```js await expect(() => diff --git a/docs/src/content/hardhat-chai-matchers/docs/reference.md b/docs/src/content/hardhat-chai-matchers/docs/reference.md index 660724a11c..42f99a9c64 100644 --- a/docs/src/content/hardhat-chai-matchers/docs/reference.md +++ b/docs/src/content/hardhat-chai-matchers/docs/reference.md @@ -223,7 +223,7 @@ await expect(factory.create(9999)) .withArgs(anyValue, 9999); ``` -Predicates are just function that return true if the value is correct, and return false if it isn't, so you can create your own predicates: +Predicates are just functions that return true if the value is correct, and return false if it isn't, so you can create your own predicates: ```ts function isEven(x: BigNumber): boolean { diff --git a/docs/src/content/hardhat-network/docs/guides/forking-other-networks.md b/docs/src/content/hardhat-network/docs/guides/forking-other-networks.md index 8fb6a87257..e987e66e87 100644 --- a/docs/src/content/hardhat-network/docs/guides/forking-other-networks.md +++ b/docs/src/content/hardhat-network/docs/guides/forking-other-networks.md @@ -2,29 +2,68 @@ You can start an instance of Hardhat Network that forks mainnet. 
This means that it will simulate having the same state as mainnet, but it will work as a local development network. That way you can interact with deployed protocols and test complex interactions locally. -To use this feature you need to connect to an archive node. We recommend using [Alchemy]. +To use this feature you need to connect to an archive node. We recommend using [Infura] or [Alchemy]. ## Forking from mainnet The easiest way to try this feature is to start a node from the command line: +::::tabsgroup{options=Infura,Alchemy} + +:::tab{value=Infura} + ``` -npx hardhat node --fork https://eth-mainnet.alchemyapi.io/v2/ +npx hardhat node --fork https://mainnet.infura.io/v3/ ``` +::: + +:::tab{value=Alchemy} + +``` +npx hardhat node --fork https://eth-mainnet.g.alchemy.com/v2/ + +``` + +::: + +:::: + You can also configure Hardhat Network to always do this: +::::tabsgroup{options=Infura,Alchemy} + +:::tab{value=Infura} + ```js networks: { hardhat: { forking: { - url: "https://eth-mainnet.alchemyapi.io/v2/", + url: "https://mainnet.infura.io/v3/", } } } ``` -(Note that you'll need to replace the `` component of the URL with your personal Alchemy API key.) +::: + +:::tab{value=Alchemy} + +```js +networks: { + hardhat: { + forking: { + url: "https://eth-mainnet.g.alchemy.com/v2/", + } + } +} +``` + +::: + +:::: + +(Note that you'll need to replace the `` component of the URL with your personal Infura or Alchemy API key.) By accessing any state that exists on mainnet, Hardhat Network will pull the data and expose it transparently as if it was available locally. @@ -37,27 +76,66 @@ There are two reasons for this: - The state your tests run against may change between runs. This could cause your tests or scripts to behave differently. - Pinning enables caching. Every time data is fetched from mainnet, Hardhat Network caches it on disk to speed up future access. If you don't pin the block, there's going to be new data with each new block and the cache won't be useful. We measured up to 20x speed improvements with block pinning. -**You will need access to a node with archival data for this to work.** This is why we recommend [Alchemy], since their free plans include archival data. +**You will need access to a node with archival data for this to work.** This is why we recommend [Infura] or [Alchemy], since their free plans include archival data. To pin the block number: +::::tabsgroup{options=Infura,Alchemy} + +:::tab{value=Infura} + ```js networks: { hardhat: { forking: { - url: "https://eth-mainnet.alchemyapi.io/v2/", + url: "https://mainnet.infura.io/v3/", blockNumber: 14390000 } } } ``` +::: + +:::tab{value=Alchemy} + +```js +networks: { + hardhat: { + forking: { + url: "https://eth-mainnet.g.alchemy.com/v2/", + blockNumber: 14390000 + } + } +} +``` + +::: + +:::: + If you are using the `node` task, you can also specify a block number with the `--fork-block-number` flag: +::::tabsgroup{options=Infura,Alchemy} + +:::tab{value=Infura} + +``` +npx hardhat node --fork https://mainnet.infura.io/v3/ --fork-block-number 14390000 +``` + +::: + +:::tab{value=Alchemy} + ``` -npx hardhat node --fork https://eth-mainnet.alchemyapi.io/v2/ --fork-block-number 14390000 +npx hardhat node --fork https://eth-mainnet.g.alchemy.com/v2/ --fork-block-number 14390000 ``` +::: + +:::: + ## Custom HTTP headers You can add extra HTTP headers that will be used in any request made to the forked node. 
One reason to do this is for authorization: instead of including your credentials in the URL, you can use a bearer token via a custom HTTP header: @@ -149,11 +227,11 @@ See also [the `chains` entry in the Hardhat Network configuration reference](../ ### "Project ID does not have access to archive state" -When using Infura without the archival add-on, you will only have access to the state of the blockchain from recent blocks. To avoid this problem, you can use either a local archive node or a service that provides archival data, like [Alchemy] or [Infura]. +When using a node that doesn't have archival data, you will only have access to the state of the blockchain from recent blocks. To avoid this problem, you can use either a local archive node or a service that provides archival data, like [Infura] or [Alchemy]. ## See also For full details on what's supported, see [the Hardhat Network Config Reference](../reference/#config). -[alchemy]: https://alchemyapi.io/ -[infura]: https://alchemyapi.io/ +[infura]: https://infura.io +[alchemy]: https://alchemy.com diff --git a/docs/src/content/hardhat-network/docs/metamask-issue/index.md b/docs/src/content/hardhat-network/docs/metamask-issue/index.md index fa4f93589d..ade466ab9b 100644 --- a/docs/src/content/hardhat-network/docs/metamask-issue/index.md +++ b/docs/src/content/hardhat-network/docs/metamask-issue/index.md @@ -13,7 +13,9 @@ Incompatible EIP155-based V 2710 and chain id 31337. See the second parameter of This is because MetaMask mistakenly assumes all networks in `http://127.0.0.1:8545` to have a chain id of `1337`, but Hardhat uses a different number by default. **Please upvote [the MetaMask issue about it](https://github.com/MetaMask/metamask-extension/issues/10290) if you want this fixed.** -In the meantime, to resolve this you can set the `chainId` of Hardhat Network to `1337` in your Hardhat config: +In the meantime, consider using an alternative wallet that doesn't have this problem, like [Coinbase Wallet](https://www.coinbase.com/wallet). + +If you want to use MetaMask, you can work around this issue by setting the `chainId` of Hardhat Network to `1337` in your Hardhat config: ``` networks: { diff --git a/docs/src/content/hardhat-network/docs/overview/index.md b/docs/src/content/hardhat-network/docs/overview/index.md index ba68ca6d18..4ef156214c 100644 --- a/docs/src/content/hardhat-network/docs/overview/index.md +++ b/docs/src/content/hardhat-network/docs/overview/index.md @@ -29,7 +29,7 @@ Hardhat Network is simply another network. If you wanted to be explicit, you cou ### Running stand-alone in order to support wallets and other software -Alternatively, Hardhat Network can run in a stand-alone fashion so that external clients can connect to it. This could be MetaMask, your Dapp front-end, or a script. To run Hardhat Network in this way, run: +Alternatively, Hardhat Network can run in a stand-alone fashion so that external clients can connect to it. This could be a wallet, your Dapp front-end, or a script. 
To run Hardhat Network in this way, run: ``` $ npx hardhat node diff --git a/docs/src/content/hardhat-network/docs/reference/index.md b/docs/src/content/hardhat-network/docs/reference/index.md index 8cb82ae59b..45dde36436 100644 --- a/docs/src/content/hardhat-network/docs/reference/index.md +++ b/docs/src/content/hardhat-network/docs/reference/index.md @@ -7,9 +7,12 @@ - petersburg - istanbul - muirGlacier +- berlin - london - arrowGlacier +- grayGlacier - merge +- shanghai ## Config @@ -62,7 +65,7 @@ The block gas limit to use in Hardhat Network's blockchain. Default value: `30_0 #### `hardfork` -This setting changes how Hardhat Network works, to mimic Ethereum's mainnet at a given hardfork. It must be one of `"byzantium"`, `"constantinople"`, `"petersburg"`, `"istanbul"`, `"muirGlacier"`, `"berlin"`, `"london"`, `"arrowGlacier"`, `"grayGlacier"` and `"merge"`. Default value: `"merge"` +This setting changes how Hardhat Network works, to mimic Ethereum's mainnet at a given hardfork. It must be one of `"byzantium"`, `"constantinople"`, `"petersburg"`, `"istanbul"`, `"muirGlacier"`, `"berlin"`, `"london"`, `"arrowGlacier"`, `"grayGlacier"`, `"merge"` and `"shanghai"`. Default value: `"shanghai"` #### `throwOnTransactionFailures` @@ -84,6 +87,10 @@ An optional string setting the date of the blockchain. Valid values are [Javascr An optional boolean that disables the contract size limit imposed by the [EIP 170](https://eips.ethereum.org/EIPS/eip-170). Default value: `false` +#### `allowBlocksWithSameTimestamp` + +A boolean to allow mining blocks that have the same timestamp. This is not allowed by default because Ethereum's consensus rules specify that each block should have a different timestamp. Default value: `false` + #### `forking` An object that describes the [forking](./guides/forking-other-networks.md) configuration that can have the following fields: @@ -207,7 +214,7 @@ networks: { - `console.log` implements the same formatting options that can be found in Node.js' [`console.log`](https://nodejs.org/dist/latest-v12.x/docs/api/console.html#console_console_log_data_args), which in turn uses [`util.format`](https://nodejs.org/dist/latest-v12.x/docs/api/util.html#util_util_format_format_args). - Example: `console.log("Changing owner from %s to %s", currentOwner, newOwner)` - `console.log` is implemented in standard Solidity and then detected in Hardhat Network. This makes its compilation work with any other tools (like Remix, Waffle or Truffle). -- `console.log` calls can run in other networks, like mainnet, goerli, sepolia, etc. They do nothing in those networks, but do spend a minimal amount of gas. +- `console.log` calls can run in other networks, like mainnet, sepolia, goerli, etc. They do nothing in those networks, but do spend a minimal amount of gas. - `console.log` output can also be viewed for testnets and mainnet via [Tenderly](https://tenderly.co/). - `console.log` works by sending static calls to a well-known contract address. At runtime, Hardhat Network detects calls to that address, decodes the input data to the calls, and writes it to the console. @@ -378,7 +385,7 @@ Remove a transaction from the mempool Hardhat Network allows you to send transactions impersonating specific account and contract addresses. 
-To impersonate an account use the this method, passing the address to impersonate as its parameter: +To impersonate an account use this method, passing the address to impersonate as its parameter: ```tsx await hre.network.provider.request({ @@ -440,13 +447,35 @@ Also note that blocks created via `hardhat_mine` may not trigger new-block event You can manipulate forking during runtime to reset back to a fresh forked state, fork from another block number or disable forking by calling `hardhat_reset`: +::::tabsgroup{options=Infura,Alchemy} + +:::tab{value=Infura} + +```ts +await network.provider.request({ + method: "hardhat_reset", + params: [ + { + forking: { + jsonRpcUrl: "https://mainnet.infura.io/v3/", + blockNumber: 14390000, + }, + }, + ], +}); +``` + +::: + +:::tab{value=Alchemy} + ```ts await network.provider.request({ method: "hardhat_reset", params: [ { forking: { - jsonRpcUrl: "https://eth-mainnet.alchemyapi.io/v2/", + jsonRpcUrl: "https://eth-mainnet.g.alchemy.com/v2/", blockNumber: 14390000, }, }, @@ -454,6 +483,10 @@ await network.provider.request({ }); ``` +::: + +:::: + You can disable forking by passing empty params: ```ts diff --git a/docs/src/content/hardhat-runner/docs/advanced/_dirinfo.yaml b/docs/src/content/hardhat-runner/docs/advanced/_dirinfo.yaml index d2d3246fac..6365297f03 100644 --- a/docs/src/content/hardhat-runner/docs/advanced/_dirinfo.yaml +++ b/docs/src/content/hardhat-runner/docs/advanced/_dirinfo.yaml @@ -11,3 +11,4 @@ order: - /flattening - href: /vscode-tests title: Running tests in VS Code + - /using-esm diff --git a/docs/src/content/hardhat-runner/docs/advanced/hardhat-and-foundry.md b/docs/src/content/hardhat-runner/docs/advanced/hardhat-and-foundry.md index 9df071d55d..189e422f32 100644 --- a/docs/src/content/hardhat-runner/docs/advanced/hardhat-and-foundry.md +++ b/docs/src/content/hardhat-runner/docs/advanced/hardhat-and-foundry.md @@ -8,6 +8,12 @@ How to set up a project that combines Hardhat and Foundry depends on whether you ### Adding Foundry to a Hardhat project +:::tip + +Foundry relies on Git to work properly. Make sure your project is already a Git repository, or type `git init` to initialize one. + +::: + If you have an existing Hardhat project and you want to use Foundry in it, you should follow these steps. First, run `forge --version` to make sure that you have Foundry installed. If you don't, go [here](https://getfoundry.sh/) to get it. diff --git a/docs/src/content/hardhat-runner/docs/advanced/hardhat-runtime-environment.md b/docs/src/content/hardhat-runner/docs/advanced/hardhat-runtime-environment.md index dcde2a666a..6f9d550920 100644 --- a/docs/src/content/hardhat-runner/docs/advanced/hardhat-runtime-environment.md +++ b/docs/src/content/hardhat-runner/docs/advanced/hardhat-runtime-environment.md @@ -14,7 +14,7 @@ The HRE has a role of centralizing coordination across all Hardhat components. T By default, the HRE gives you programmatic access to the task runner and the config system, and exports an [EIP1193-compatible](https://eips.ethereum.org/EIPS/eip-1193) Ethereum provider. -Plugins can extend the HRE. For example, [hardhat-ethers](https://github.com/NomicFoundation/hardhat/tree/main/packages/hardhat-ethers) adds a Ethers.js instance to it, making it available to tasks, tests and scripts. +Plugins can extend the HRE. For example, [hardhat-ethers](https://github.com/NomicFoundation/hardhat/tree/main/packages/hardhat-ethers) adds an Ethers.js instance to it, making it available to tasks, tests and scripts. 
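As an illustration of that extension mechanism, here is a minimal sketch of extending the HRE from a config file with Hardhat's `extendEnvironment` hook; the added `greeting` field is made up for this example and is not part of Hardhat itself:

```js
// hardhat.config.js — sketch only; the `greeting` field is hypothetical
const { extendEnvironment } = require("hardhat/config");

extendEnvironment((hre) => {
  // Anything attached here becomes available as hre.greeting in tasks, tests and scripts
  hre.greeting = "hello from the HRE";
});
```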
### As global variables diff --git a/docs/src/content/hardhat-runner/docs/advanced/using-esm.md b/docs/src/content/hardhat-runner/docs/advanced/using-esm.md new file mode 100644 index 0000000000..291a4cf4f6 --- /dev/null +++ b/docs/src/content/hardhat-runner/docs/advanced/using-esm.md @@ -0,0 +1,132 @@ +# Using ES modules + +Node.js projects can use one of two module systems: CommonJS and ES Modules (ESM). Hardhat was designed mainly with CommonJS in mind, but in the last years adoption of ESM has been growing. + +This guide explains where you can use ESM in your Hardhat project and how to do it. + +## Hardhat support for ES modules + +You can write your scripts and tests as both CommonJS and ES modules. However, your Hardhat config, and any file imported by it, **must** be CommonJS modules. + +If your package uses ESM by default (that is, you have [`"type": "module"`](https://nodejs.org/api/packages.html#type) in your `package.json`), then your Hardhat config file must be named `hardhat.config.cjs`. + +Hardhat doesn't support [ESM in TypeScript projects](#esm-and-typescript-projects). + +## Using ES Modules in Hardhat + +The following sections explain how to use ES modules in new or existing Hardhat projects. + +### Starting an ESM-first Hardhat project + +If you want to start a Hardhat project that uses ES modules by default, first you have to initialize a Node.js project: + +::::tabsgroup{options="npm 7+,npm 6,yarn"} + +:::tab{value="npm 7+"} + +``` +npm init -y +``` + +::: + +:::tab{value="npm 6"} + +``` +npm init -y +``` + +::: + +:::tab{value="yarn"} + +``` +yarn init -y +``` + +::: + +:::: + +Open the `package.json` that was created and add a `"type": "module"` entry. This will make the project use ESM by default. + +After that, install Hardhat: + +::::tabsgroup{options="npm 7+,npm 6,yarn"} + +:::tab{value="npm 7+"} + +``` +npm install --save-dev hardhat +``` + +::: + +:::tab{value="npm 6"} + +``` +npm install --save-dev hardhat +``` + +::: + +:::tab{value="yarn"} + +``` +yarn add --dev hardhat +``` + +::: + +:::: + +and run `npx hardhat` to create a Hardhat project: + +``` +888 888 888 888 888 +888 888 888 888 888 +888 888 888 888 888 +8888888888 8888b. 888d888 .d88888 88888b. 8888b. 888888 +888 888 "88b 888P" d88" 888 888 "88b "88b 888 +888 888 .d888888 888 888 888 888 888 .d888888 888 +888 888 888 888 888 Y88b 888 888 888 888 888 Y88b. +888 888 "Y888888 888 "Y88888 888 888 "Y888888 "Y888 + +Welcome to Hardhat v2.13.0 + +? What do you want to do? … +▸ Create a JavaScript project + Create a TypeScript project (not available for ESM projects) + Create an empty hardhat.config.cjs + Quit +``` + +Select the `Create a JavaScript project` option. This will initialize a Hardhat project where the scripts and tests are ES modules, and where the configuration has a `.cjs` extension. + +### Migrating a project to ESM + +If you have an existing Hardhat project and you want to convert it into an ESM project, follow these steps: + +1. Edit your `package.json` and add a `"type": "module"` entry. +2. Rename your `hardhat.config.js` file to `hardhat.config.cjs`. +3. Migrate all your scripts and tests from CommonJS to ESM. Alternatively, you can rename them to have a `.cjs` extension instead of `.js`. + +### Adding ESM files to an existing Hardhat project + +It's also possible to write ESM scripts and tests without making your whole project ESM by default. To do this, just create your scripts and tests with an `.mjs` extension. 
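For example, a script saved with an `.mjs` extension can use ESM imports and top-level `await`. The following is a minimal sketch: the file name is illustrative, and `hre.ethers` assumes the `hardhat-ethers` plugin (or the Toolbox) is installed.

```js
// scripts/accounts.mjs — run with: npx hardhat run scripts/accounts.mjs
import hre from "hardhat";

// Top-level await is available because this file is an ES module
const accounts = await hre.ethers.getSigners();
console.log(accounts.map((account) => account.address));
```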
+ +## ESM and TypeScript projects + +At the moment, it's not possible to use ESM in TypeScript projects. + +Hardhat uses [`ts-node`](https://typestrong.org/ts-node/) to run TypeScript projects, which in turn relies on Node's loader hooks. This is all experimental and the current functionality is not enough for Hardhat's needs. + +If you need this feature, please let us know in [this issue](https://github.com/NomicFoundation/hardhat/issues/3385). + +## Learn more + +To learn more about ES modules in general, check these resources: + +- [Node.js docs](https://nodejs.org/api/packages.html) +- [ES modules: A cartoon deep-dive](https://hacks.mozilla.org/2018/03/es-modules-a-cartoon-deep-dive/) +- The [Modules chapter](https://exploringjs.com/impatient-js/ch_modules.html) of "JavaScript for impatient programmers" diff --git a/docs/src/content/hardhat-runner/docs/config/index.md b/docs/src/content/hardhat-runner/docs/config/index.md index 0c7857a865..0b818c2c6b 100644 --- a/docs/src/content/hardhat-runner/docs/config/index.md +++ b/docs/src/content/hardhat-runner/docs/config/index.md @@ -10,14 +10,18 @@ To set up your config, you have to export an object from `hardhat.config.js`. This object can have entries like `defaultNetwork`, [`networks`](#networks-configuration), [`solidity`](#solidity-configuration), [`paths`](#path-configuration) and [`mocha`](#mocha-configuration). For example: +::::tabsgroup{options=Infura,Alchemy} + +:::tab{value=Infura} + ```js module.exports = { - defaultNetwork: "goerli", + defaultNetwork: "sepolia", networks: { hardhat: { }, - goerli: { - url: "https://eth-goerli.alchemyapi.io/v2/123abc123abc123abc123abc123abcde", + sepolia: { + url: "https://sepolia.infura.io/v3/", accounts: [privateKey1, privateKey2, ...] } }, @@ -42,6 +46,46 @@ module.exports = { } ``` +::: + +:::tab{value=Alchemy} + +```js +module.exports = { + defaultNetwork: "sepolia", + networks: { + hardhat: { + }, + sepolia: { + url: "https://eth-sepolia.g.alchemy.com/v2/", + accounts: [privateKey1, privateKey2, ...] + } + }, + solidity: { + version: "0.5.15", + settings: { + optimizer: { + enabled: true, + runs: 200 + } + } + }, + paths: { + sources: "./contracts", + tests: "./test", + cache: "./cache", + artifacts: "./artifacts" + }, + mocha: { + timeout: 40000 + } +} +``` + +::: + +:::: + ## Networks configuration The `networks` config field is an optional object where network names map to their configuration. @@ -60,7 +104,7 @@ See [the Hardhat Network Configuration Reference](/hardhat-network/docs/referenc ### JSON-RPC based networks -These are networks that connect to an external node. Nodes can be running in your computer, like Ganache, or remotely, like Alchemy or Infura. +These are networks that connect to an external node. Nodes can be running in your computer, like Ganache, or remotely, like Infura or Alchemy. 
This kind of network is configured with objects with the following fields: @@ -101,7 +145,7 @@ For example: ```js module.exports = { networks: { - goerli: { + sepolia: { url: "...", accounts: { mnemonic: "test test test test test test test test test test test junk", diff --git a/docs/src/content/hardhat-runner/docs/getting-started/index.md b/docs/src/content/hardhat-runner/docs/getting-started/index.md index 8aac017a3a..b1bc062deb 100644 --- a/docs/src/content/hardhat-runner/docs/getting-started/index.md +++ b/docs/src/content/hardhat-runner/docs/getting-started/index.md @@ -284,7 +284,7 @@ Lock with 1 ETH deployed to: 0x5FbDB2315678afecb367f032d93F642f64180aa3 ### Connecting a wallet or Dapp to Hardhat Network -By default, Hardhat will spin up a new in-memory instance of Hardhat Network on startup. It's also possible to run Hardhat Network in a standalone fashion so that external clients can connect to it. This could be MetaMask, your Dapp front-end, or a script. +By default, Hardhat will spin up a new in-memory instance of Hardhat Network on startup. It's also possible to run Hardhat Network in a standalone fashion so that external clients can connect to it. This could be a wallet, your Dapp front-end, or a script. To run Hardhat Network in this way, run `npx hardhat node`: diff --git a/docs/src/content/hardhat-runner/docs/guides/migrating-from-hardhat-waffle.md b/docs/src/content/hardhat-runner/docs/guides/migrating-from-hardhat-waffle.md index f5a93f3fe9..5d2a12c130 100644 --- a/docs/src/content/hardhat-runner/docs/guides/migrating-from-hardhat-waffle.md +++ b/docs/src/content/hardhat-runner/docs/guides/migrating-from-hardhat-waffle.md @@ -1,29 +1,111 @@ # Migrating away from hardhat-waffle -Our recommended setup used to include [Waffle] using our [`hardhat-waffle`] plugin. +In the past, our recommended setup included [Waffle], by using the [`hardhat-waffle`] plugin. -We now recommend using our [Hardhat Chai Matchers] and [Hardhat Network Helpers] instead. +However, we now recommend using [Hardhat Toolbox], a plugin that bundles a curated set of useful packages. This set includes [Hardhat Chai Matchers] and [Hardhat Network Helpers], which work as an improved replacement for `hardhat-waffle`. -Migrating to these packages only takes a few minutes, as they were designed as a drop-in replacement. If you do so, you'll get more functionality, like support for Solidity custom errors and native `bigint` support, and a more reliable testing experience. +Migrating to the Toolbox only takes a few minutes. If you do so, you'll get more functionality, like support for Solidity custom errors and native `bigint` support, and a more reliable testing experience. It will also make it easier for you to keep up to date with our recommended setup. -To learn how to start using them, read [this guide](../../../hardhat-chai-matchers/docs/migrate-from-waffle.md). +## Migrating to Hardhat Toolbox -## Using the Hardhat Toolbox +Follow these steps to migrate your project to Hardhat Toolbox. -You can get our recommended setup by installing [`@nomicfoundation/hardhat-toolbox`], a single plugin that has everything you need. +1. First you'll need to remove some packages from your project. -When you use it, you'll be able to: + ::::tabsgroup{options="npm 7+,npm 6,yarn"} -- Deploy and interact with your contracts using [ethers.js](https://docs.ethers.io/v5/) and the [`hardhat-ethers`](/hardhat-runner/plugins/nomiclabs-hardhat-ethers) plugin. 
-- Test your contracts with [Mocha](https://mochajs.org/), [Chai](https://chaijs.com/) and our own [Hardhat Chai Matchers](/hardhat-chai-matchers) plugin. -- Interact with Hardhat Network with our [Hardhat Network Helpers](/hardhat-network-helpers). -- Verify the source code of your contracts with the [hardhat-etherscan](/hardhat-runner/plugins/nomiclabs-hardhat-etherscan) plugin. -- Get metrics on the gas used by your contracts with the [hardhat-gas-reporter](https://github.com/cgewecke/hardhat-gas-reporter) plugin. -- Measure your tests coverage with [solidity-coverage](https://github.com/sc-forks/solidity-coverage). -- And, if you are using TypeScript, get type bindings for your contracts with [Typechain](https://github.com/dethcrypto/TypeChain/). + :::tab{value="npm 7+"} + + ``` + npm uninstall @nomiclabs/hardhat-waffle ethereum-waffle @nomiclabs/hardhat-ethers @nomiclabs/hardhat-etherscan chai ethers hardhat-gas-reporter solidity-coverage @typechain/hardhat typechain @typechain/ethers-v5 @ethersproject/abi @ethersproject/providers + ``` + + ::: + + :::tab{value="npm 6"} + + ``` + npm uninstall @nomiclabs/hardhat-waffle ethereum-waffle @nomiclabs/hardhat-ethers @nomiclabs/hardhat-etherscan chai ethers hardhat-gas-reporter solidity-coverage @typechain/hardhat typechain @typechain/ethers-v5 @ethersproject/abi @ethersproject/providers + ``` + + ::: + + :::tab{value=yarn} + + ``` + yarn remove @nomiclabs/hardhat-waffle ethereum-waffle + ``` + + ::: + + :::: + +2. Then you need to install the Toolbox. If you are using yarn or an old version of npm, you'll also have to install some other packages (the peer dependencies of the Toolbox). + + ::::tabsgroup{options="npm 7+,npm 6,yarn"} + + :::tab{value="npm 7+"} + + ``` + npm install --save-dev @nomicfoundation/hardhat-toolbox + ``` + + ::: + + :::tab{value="npm 6"} + + ``` + npm install --save-dev @nomicfoundation/hardhat-toolbox @nomicfoundation/hardhat-network-helpers @nomicfoundation/hardhat-chai-matchers @nomiclabs/hardhat-ethers @nomiclabs/hardhat-etherscan chai ethers hardhat-gas-reporter solidity-coverage @typechain/hardhat typechain @typechain/ethers-v5 @ethersproject/abi @ethersproject/providers + ``` + + ::: + + :::tab{value="yarn"} + + ``` + yarn add --dev @nomicfoundation/hardhat-toolbox @nomicfoundation/hardhat-network-helpers @nomicfoundation/hardhat-chai-matchers @nomiclabs/hardhat-ethers @nomiclabs/hardhat-etherscan chai ethers hardhat-gas-reporter solidity-coverage @typechain/hardhat typechain @typechain/ethers-v5 @ethersproject/abi @ethersproject/providers + ``` + + ::: + + :::: + +3. Finally, remove `hardhat-waffle` from your imported plugins and import the Toolbox instead: + + ::::tabsgroup{options=TypeScript,JavaScript} + + :::tab{value=TypeScript} + + ```diff + - import "@nomiclabs/hardhat-waffle"; + + import "@nomicfoundation/hardhat-toolbox"; + ``` + + ::: + + :::tab{value=JavaScript} + + ```diff + - require("@nomiclabs/hardhat-waffle"); + + require("@nomicfoundation/hardhat-toolbox"); + ``` + + ::: + + :::: + + Adding the Toolbox will make many other imports redundant, so you can remove any of these if you want: + + - `@nomiclabs/hardhat-ethers` + - `@nomiclabs/hardhat-etherscan` + - `hardhat-gas-reporter` + - `solidity-coverage` + - `@typechain/hardhat` + +Check the [Hardhat Chai Matchers] and [Hardhat Network Helpers] docs to learn more about the functionality included in the Toolbox. 
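As a rough sketch of what a test can look like once the Toolbox is installed, the example below combines the Chai matchers and the network helpers. It assumes the `Lock` contract from Hardhat's sample project; adjust the names for your own contracts.

```js
// test/Lock.js — sketch only; uses packages bundled with @nomicfoundation/hardhat-toolbox
const { loadFixture, time } = require("@nomicfoundation/hardhat-network-helpers");
const { expect } = require("chai");
const { ethers } = require("hardhat");

describe("Lock", function () {
  // Deployed once and reused across tests thanks to loadFixture
  async function deployLockFixture() {
    const unlockTime = (await time.latest()) + 60;
    const Lock = await ethers.getContractFactory("Lock");
    const lock = await Lock.deploy(unlockTime, { value: 1_000_000_000 });
    return { lock, unlockTime };
  }

  it("reverts withdrawals before the unlock time", async function () {
    const { lock } = await loadFixture(deployLockFixture);
    await expect(lock.withdraw()).to.be.reverted; // Chai matcher from the Toolbox
  });

  it("allows withdrawals after the unlock time", async function () {
    const { lock, unlockTime } = await loadFixture(deployLockFixture);
    await time.increaseTo(unlockTime); // network helper that advances the block timestamp
    await expect(lock.withdraw()).not.to.be.reverted;
  });
});
```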
[waffle]: https://getwaffle.io [`hardhat-waffle`]: ../../plugins/nomiclabs-hardhat-waffle -[`@nomicfoundation/hardhat-toolbox`]: ../../plugins/nomicfoundation-hardhat-toolbox [hardhat chai matchers]: /hardhat-chai-matchers [hardhat network helpers]: /hardhat-network-helpers +[hardhat toolbox]: /hardhat-runner/plugins/nomicfoundation-hardhat-toolbox diff --git a/docs/src/content/hardhat-runner/docs/guides/verifying.md b/docs/src/content/hardhat-runner/docs/guides/verifying.md index 85f0d17bb1..35e91c571e 100644 --- a/docs/src/content/hardhat-runner/docs/guides/verifying.md +++ b/docs/src/content/hardhat-runner/docs/guides/verifying.md @@ -42,31 +42,33 @@ module.exports = { :::: -## Deploying and verifying a contract in the Goerli testnet +## Deploying and verifying a contract in the Sepolia testnet -We are going to use the [Goerli testnet](https://ethereum.org/en/developers/docs/networks/#goerli) to deploy and verify our contract, so you need to add this network in your Hardhat config. Here we are using [Alchemy](https://www.alchemy.com/) to connect to the network, but you can use an alternative JSON-RPC URL like [Infura](https://www.infura.io/) if you want. +We are going to use the [Sepolia testnet](https://ethereum.org/en/developers/docs/networks/#sepolia) to deploy and verify our contract, so you need to add this network in your Hardhat config. Here we are using [Infura](https://infura.io/) to connect to the network, but you can use an alternative JSON-RPC URL like [Alchemy](https://alchemy.com/) if you want. -::::tabsgroup{options=TypeScript,JavaScript} +::::tabsgroup{options=Infura,Alchemy} -:::tab{value=TypeScript} +:::tab{value=Infura} -```ts -// Go to https://www.alchemyapi.io, sign up, create -// a new App in its dashboard, and replace "KEY" with its key -const ALCHEMY_API_KEY = "KEY"; +```js +// Go to https://infura.io, sign up, create a new API key +// in its dashboard, and replace "KEY" with it +const INFURA_API_KEY = "KEY"; -// Replace this private key with your Goerli account private key. +// Replace this private key with your Sepolia account private key +// To export your private key from Coinbase Wallet, go to +// Settings > Developer Settings > Show private key // To export your private key from Metamask, open Metamask and -// go to Account Details > Export Private Key. +// go to Account Details > Export Private Key // Beware: NEVER put real Ether into testing accounts -const GOERLI_PRIVATE_KEY = "YOUR GOERLI PRIVATE KEY"; +const SEPOLIA_PRIVATE_KEY = "YOUR SEPOLIA PRIVATE KEY"; -export default { +module.exports = { // ...rest of your config... 
networks: { - goerli: { - url: `https://eth-goerli.alchemyapi.io/v2/${ALCHEMY_API_KEY}`, - accounts: [GOERLI_PRIVATE_KEY], + sepolia: { + url: `https://sepolia.infura.io/v3/${INFURA_API_KEY}`, + accounts: [SEPOLIA_PRIVATE_KEY], }, }, }; @@ -74,25 +76,27 @@ export default { ::: -:::tab{value=JavaScript} +:::tab{value=Alchemy} ```js -// Go to https://www.alchemyapi.io, sign up, create -// a new App in its dashboard, and replace "KEY" with its key +// Go to https://alchemy.com, sign up, create a new App in +// its dashboard, and replace "KEY" with its key const ALCHEMY_API_KEY = "KEY"; -// Replace this private key with your Goerli account private key +// Replace this private key with your Sepolia account private key +// To export your private key from Coinbase Wallet, go to +// Settings > Developer Settings > Show private key // To export your private key from Metamask, open Metamask and // go to Account Details > Export Private Key // Beware: NEVER put real Ether into testing accounts -const GOERLI_PRIVATE_KEY = "YOUR GOERLI PRIVATE KEY"; +const SEPOLIA_PRIVATE_KEY = "YOUR SEPOLIA PRIVATE KEY"; module.exports = { // ...rest of your config... networks: { - goerli: { - url: `https://eth-goerli.alchemyapi.io/v2/${ALCHEMY_API_KEY}`, - accounts: [GOERLI_PRIVATE_KEY], + sepolia: { + url: `https://eth-sepolia.g.alchemy.com/v2/${ALCHEMY_API_KEY}`, + accounts: [SEPOLIA_PRIVATE_KEY], }, }, }; @@ -102,12 +106,11 @@ module.exports = { :::: -To deploy on Goerli you need to send some Goerli ether to the address that's going to be making the deployment. You can get testnet ether from a faucet, a service that distributes testing-ETH for free. Here are some for Goerli: +To deploy on Sepolia you need to send some Sepolia ether to the address that's going to be making the deployment. You can get testnet ether from a faucet, a service that distributes testing-ETH for free. Here is one for Sepolia: -- [Chainlink faucet](https://faucets.chain.link/) -- [Alchemy Goerli Faucet](https://goerlifaucet.com/) +- [Alchemy Sepolia Faucet](https://sepoliafaucet.com/) -Now you are ready to deploy your contract, but first we are going to make the source code of our contract unique. The reason we need to do this is that the sample code from the previous section is already verified in Goerli, so if you try to verify it you'll get an error. +Now you are ready to deploy your contract, but first we are going to make the source code of our contract unique. The reason we need to do this is that the sample code from the previous section is already verified in Sepolia, so if you try to verify it you'll get an error. Open your contract and add a comment with something unique, like your GitHub's username. 
Keep in mind that whatever you include here will be, like the rest of the code, publicly available on Etherscan: @@ -116,14 +119,14 @@ Open your contract and add a comment with something unique, like your GitHub's u contract Lock { ``` -You can now run the deploy script using the newly added Goerli network: +You can now run the deploy script using the newly added Sepolia network: ::::tabsgroup{options=TypeScript,JavaScript} :::tab{value=TypeScript} ``` -npx hardhat run scripts/deploy.ts --network goerli +npx hardhat run scripts/deploy.ts --network sepolia ``` ::: @@ -131,7 +134,7 @@ npx hardhat run scripts/deploy.ts --network goerli :::tab{value=JavaScript} ``` -npx hardhat run scripts/deploy.js --network goerli +npx hardhat run scripts/deploy.js --network sepolia ``` ::: @@ -141,7 +144,7 @@ npx hardhat run scripts/deploy.js --network goerli Take note of the address and the unlock time and run the `verify` task with them: ``` -npx hardhat verify --network goerli
+npx hardhat verify --network sepolia
``` :::tip @@ -152,4 +155,4 @@ If you get an error saying that the address does not have bytecode, it probably After the task is successfully executed, you'll see a link to the publicly verified code of your contract. -To learn more about verifying, read the [hardhat-etherscan](/hardhat-runner/plugins/nomiclabs-hardhat-etherscan) documentation. +To learn more about verifying, read the [hardhat-verify](/hardhat-runner/plugins/nomicfoundation-hardhat-verify) documentation. diff --git a/docs/src/content/hardhat-runner/docs/reference/solidity-support.md b/docs/src/content/hardhat-runner/docs/reference/solidity-support.md index 418c6180f0..39516b2ded 100644 --- a/docs/src/content/hardhat-runner/docs/reference/solidity-support.md +++ b/docs/src/content/hardhat-runner/docs/reference/solidity-support.md @@ -13,7 +13,7 @@ These are the versions of Solidity that you can expect to fully work with Hardha - Any 0.5.x version starting from 0.5.1 - Any 0.6.x version - Any 0.7.x version -- Any 0.8.x version up to and including 0.8.17 +- Any 0.8.x version up to and including 0.8.18 We recommend against using Hardhat with newer, unsupported versions of Solidity. But if you need to do so; please read on. @@ -24,3 +24,28 @@ When running an unsupported version of Solidity, our integration may not work or This could mean that Solidity stack traces stop working, are incorrect, or incomplete. It could also mean that `console.log` stops working. Despite these features possibly being affected, the compilation and execution of your smart contracts won't be affected. You can still trust your test results and deploy smart contracts, but Hardhat may be less useful in the process. + +## Support for IR-based codegen + +The solc compiler has a newer, alternative way of generating bytecode through an [intermediate representation](https://docs.soliditylang.org/en/latest/ir-breaking-changes.html). This mode of compilation can be enabled with the `viaIR` setting. + +At the moment, this option only works well [when the optimizer is enabled](https://github.com/ethereum/solidity/issues/12533). Since Hardhat works much better when the optimizer is disabled, we don't completely support the `viaIR` option yet. You can still enable it to compile your contracts and run your tests, but things like stack traces might not work correctly. 
+ +If you use the `viaIR` option, we recommend you set the [optimization step sequence](https://docs.soliditylang.org/en/v0.8.17/yul.html#optimization-step-sequence) to `"u"`, to make Hardhat work as well as possible: + +``` +solidity: { + version: "0.8.17", // any version you want + settings: { + viaIR: true, + optimizer: { + enabled: true, + details: { + yulDetails: { + optimizerSteps: "u", + }, + }, + }, + }, +} +``` diff --git a/docs/src/content/hardhat-runner/docs/supporter-guides/_dirinfo.yaml b/docs/src/content/hardhat-runner/docs/supporter-guides/_dirinfo.yaml new file mode 100644 index 0000000000..a524a210d2 --- /dev/null +++ b/docs/src/content/hardhat-runner/docs/supporter-guides/_dirinfo.yaml @@ -0,0 +1,5 @@ +section-type: group +section-title: Supporter guides +order: + - title: Working with oracles + href: /oracles diff --git a/docs/src/content/hardhat-runner/docs/supporter-guides/oracles.md b/docs/src/content/hardhat-runner/docs/supporter-guides/oracles.md new file mode 100644 index 0000000000..4a1c7e90ca --- /dev/null +++ b/docs/src/content/hardhat-runner/docs/supporter-guides/oracles.md @@ -0,0 +1,229 @@ +--- +title: Oracles +description: Oracles help get real-world data into your Ethereum application because smart contracts can't query real-world data on their own. +--- + +# Working with blockchain oracles + +_This guide is based on the [ethereum.org oracles guide](https://ethereum.org/en/developers/docs/oracles)_ + +Oracles provide a bridge between the real-world and on-chain smart contracts by being a source of data that smart contracts can rely on, and act upon. + +Oracles play a critical role in facilitating the full potential of smart contract utility. Without a reliable connection to real-world data, smart contracts cannot effectively serve the real-world. + + + +## Why are they needed? + +With a blockchain like Ethereum, you need every node in the network to replay every transaction and end up with the same result, guaranteed. APIs introduce potentially variable data. If you were sending ETH based on an agreed $USD value using a price API, the query would return a different result from one day to the next. Not to mention, the API could be hacked or deprecated. If this happens, the nodes in the network wouldn't be able to agree on Ethereum's current state, effectively breaking [consensus](https://ethereum.org/developers/docs/consensus-mechanisms/). + +Oracles solve this problem by posting the data on the blockchain. So any node replaying the transaction will use the same immutable data that's posted for all to see. To do this, an oracle is typically made up of a smart contract and some off-chain components that can query APIs, then periodically send transactions to update the smart contract's data. + +## The oracle problem + +As we mentioned, blockchain transactions cannot access off-chain data directly. At the same time, relying on a single source of truth to provide data is insecure and invalidates the decentralization of a smart contract. This is known as the oracle problem. + +We can avoid the oracle problem by using a decentralized oracle network, which pulls data from multiple sources; if one data source is hacked or fails, the smart contract will still function as intended. + +## Security + +An oracle is only as secure as its data source(s). If a dapp uses Uniswap as an oracle for its ETH/DAI price feed, an attacker can move the price on Uniswap to manipulate the dapp's understanding of the current price. 
An example of how to combat this is [a feed system](https://developer.makerdao.com/feeds/) like the one used by MakerDAO, which collates price data from many external price feeds instead of just relying on a single source. + +## Architecture + +This is an example of simple Oracle architecture, however, there are more ways to trigger off-chain computation. + +1. Emit a log with your [smart contract event](https://ethereum.org/developers/docs/smart-contracts/anatomy/#events-and-logs) +2. An off-chain service has subscribed (usually using something like the JSON-RPC `eth_subscribe` command) to these specific logs. +3. The off-chain service proceeds to do some tasks as defined by the log. +4. The off-chain service responds with the data requested in a secondary transaction to the smart contract. + +This is how to get data in a 1 to 1 manner, however to improve security you may want to decentralize how you collect your off-chain data. + +## Getting Price Data + +Below is an example of how you can retrieve the latest ETH price in your smart contract using a Chainlink price feed on Goerli: + +```solidity +// SPDX-License-Identifier: MIT +pragma solidity ^0.8.7; + +import "@chainlink/contracts/src/v0.8/interfaces/AggregatorV3Interface.sol"; + +contract PriceConsumerV3 { + + AggregatorV3Interface internal priceFeed; + + /** + * Network: Goerli + * Aggregator: ETH/USD + * Address: 0xD4a33860578De61DBAbDc8BFdb98FD742fA7028e + */ + constructor() { + priceFeed = AggregatorV3Interface(0xD4a33860578De61DBAbDc8BFdb98FD742fA7028e); + } + + /** + * Returns the latest price + */ + function getLatestPrice() public view returns (int) { + (int price) = priceFeed.latestRoundData(); + return price; + } +} + +``` + +## Randomness + +Randomness in computer systems, especially on blockchains, is challenging to achieve because general-purpose blockchains like Ethereum do not have inherent randomness. Another problem is the public nature of blockchain technology which makes finding a secure source of entropy difficult. Almost any mechanism of generating on-chain randomness using Solidity is vulnerable to [MEV attacks](https://ethereum.org/en/developers/docs/mev/). + +It is possible to generate the random value off-chain and send it on-chain, but doing so imposes high trust requirements on users. They must believe the value was truly generated via unpredictable mechanisms and wasn’t altered in transit. + +Oracles designed for off-chain computation solve this problem by securely generating random outcomes off-chain that they broadcast on-chain along with cryptographic proofs attesting to the unpredictability of the process. An example is Chainlink VRF (Verifiable Random Function), which is a provably-fair and verifiable source of randomness designed for smart contracts. Smart contract developers can use Chainlink VRF as a tamper-proof random number generation (RNG) to build smart contracts for any applications which rely on unpredictable outcomes: + +- Blockchain games and NFTs +- Random assignment of duties and resources (e.g. randomly assigning judges to cases) +- Choosing a representative sample for consensus mechanisms + +Random numbers are difficult because blockchains are deterministic. + +To start with Chainlink VRF, create a new `VRFv2Consumer.sol` smart contract, which you can get from the [Official Chainlink Documentation](https://docs.chain.link/vrf/v2/subscription/examples/get-a-random-number). 
+ +Usually, you will create and manage your subscriptions on the [VRF Subscription Management](https://vrf.chain.link/) page, but with the [`@chainlink/hardhat-chainlink`](https://www.npmjs.com/package/@chainlink/hardhat-chainlink) plugin, you can automate that process. This plugin will help you to use the Chainlink protocol inside your tests, scripts & tasks. + +You will need to install it by typing: + +::::tabsgroup{options="npm 7+,npm 6,yarn"} + +:::tab{value="npm 7+"} + +``` +npm install --save-dev @chainlink/hardhat-chainlink +``` + +::: + +:::tab{value="npm 6"} + +``` +npm install --save-dev @chainlink/hardhat-chainlink +``` + +::: + +:::tab{value="yarn"} + +``` +yarn add --dev @chainlink/hardhat-chainlink +``` + +::: + +:::: + +And import it inside the `hardhat.config` file: + +::::tabsgroup{options="TypeScript,JavaScript"} + +:::tab{value="TypeScript"} + +```ts +import "@chainlink/hardhat-chainlink"; +``` + +::: + +:::tab{value="JavaScript"} + +```js +require("@chainlink/hardhat-chainlink"); +``` + +::: + +:::: + +Then you can just expand the deployment script which will deploy the above `VRFv2Consumer` smart contract and do the VRF Managment part. + +To do so, first prepare the `hardhat.config` file for the deployment on the Goerli network: + +```ts + networks: { + goerli: { + url: GOERLI_RPC_URL, + accounts: [PRIVATE_KEY] + } + } +``` + +And after that, expand your deployment script: + +```ts +// scripts/deploy.ts +import { chainlink, ethers } from "hardhat"; + +async function main() { + // NOTE: If you already have an active VRF Subscription, proceed to step 3 + + // Step 1: Create a new VRF Subscription + const vrfCoordinatorAddress = `0x2Ca8E0C643bDe4C2E08ab1fA0da3401AdAD7734D`; + const { subscriptionId } = await chainlink.createVrfSubscription( + vrfCoordinatorAddress + ); + + // Step 2: Fund VRF Subscription + const linkTokenAddress = `0x326C977E6efc84E512bB9C30f76E30c160eD06FB`; + const amountInJuels = ethers.BigNumber.from(`1000000000000000000`); // 1 LINK + await chainlink.fundVrfSubscription( + vrfCoordinatorAddress, + linkTokenAddress, + amountInJuels, + subscriptionId + ); + + // Step 3: Deploy your smart contract + const VRFv2ConsumerFactory = await ethers.getContractFactory("VRFv2Consumer"); + const VRFv2Consumer = await VRFv2ConsumerFactory.deploy(subscriptionId); + await VRFv2Consumer.deployed(); + console.log("VRFv2Consumer deployed to:", VRFv2Consumer.address); + + // Step 4: Add VRF Consumer contract to your VRF Subscription + await chainlink.addVrfConsumer( + vrfCoordinatorAddress, + VRFv2Consumer.address, + subscriptionId + ); +} + +main().catch((error) => { + console.error(error); + process.exitCode = 1; +}); +``` + +Finnaly, run the deployment script by typing: + +```sh +npx hardhat run scripts/deploy.ts --network goerli +``` + +## Use blockchain oracles + +There are multiple oracle applications you can integrate into your dapp: + +- [Chainlink](https://chain.link/) - _Chainlink decentralized oracle networks provide tamper-proof inputs, outputs, and computations to support advanced smart contracts on any blockchain._ + +- [Witnet](https://witnet.io/) - _Witnet is a permissionless, decentralized, and censorship-resistant oracle helping smart contracts to react to real world events with strong crypto-economic guarantees._ + +- [UMA Oracle](https://umaproject.org/products/optimistic-oracle) - _UMA's optimistic oracle allows smart contracts to quickly and receive any kind of data for different applications, including insurance, financial derivatives, and 
prediction markets._
+
+- [Tellor](https://tellor.io/) - _Tellor is a transparent and permissionless oracle protocol for your smart contract to easily get any data whenever it needs it._
+
+- [Band Protocol](https://bandprotocol.com/) - _Band Protocol is a cross-chain data oracle platform that aggregates and connects real-world data and APIs to smart contracts._
+
+- [Provable](https://provable.xyz/) - _Provable connects blockchain dapps with any external Web API and leverages TLSNotary proofs, Trusted Execution Environments (TEEs), and secure cryptographic primitives to guarantee data authenticity._
+
+- [Paralink](https://paralink.network/) - _Paralink provides an open source and decentralized oracle platform for smart contracts running on Ethereum and other popular blockchains._
+
+- [Dos.Network](https://dos.network/) - _DOS Network is a decentralized oracle service network to boost blockchain usability with real-world data and computation power._
diff --git a/docs/src/content/hardhat-runner/docs/troubleshooting/common-problems.md b/docs/src/content/hardhat-runner/docs/troubleshooting/common-problems.md
index d4c0b789bd..56eecfe38a 100644
--- a/docs/src/content/hardhat-runner/docs/troubleshooting/common-problems.md
+++ b/docs/src/content/hardhat-runner/docs/troubleshooting/common-problems.md
@@ -13,3 +13,9 @@ npx hardhat --max-memory 4096 compile
```
If you find yourself using this all the time, you can set it with an environment variable in your `.bashrc` (if using bash) or `.zshrc` (if using zsh): `export HARDHAT_MAX_MEMORY=4096`.
+
+## Using Hardhat with a proxy server
+
+Hardhat supports the `http_proxy` environment variable. When this variable is set, Hardhat will send its requests through the given proxy for things like JSON-RPC requests, mainnet forking and downloading compilers.
+
+There's also support for the `no_proxy` variable, which accepts a comma-separated list of hosts or `"*"`. Any host included in this list will not be proxied. Note that requests to `"localhost"` or `"127.0.0.1"` are never proxied.
diff --git a/docs/src/content/hardhat-runner/plugins/_dirinfo.yaml b/docs/src/content/hardhat-runner/plugins/_dirinfo.yaml index b8799e9ff4..1728c8daff 100644 --- a/docs/src/content/hardhat-runner/plugins/_dirinfo.yaml +++ b/docs/src/content/hardhat-runner/plugins/_dirinfo.yaml @@ -4,13 +4,12 @@ order: - "@nomicfoundation/hardhat-toolbox" - "@nomicfoundation/hardhat-chai-matchers" - "@nomiclabs/hardhat-ethers" - - "@nomiclabs/hardhat-etherscan" + - "@nomicfoundation/hardhat-verify" - "@nomicfoundation/hardhat-foundry" - "@nomiclabs/hardhat-vyper" - "@nomiclabs/hardhat-solhint" - "@nomiclabs/hardhat-solpp" - "@nomiclabs/hardhat-waffle" - - "@nomiclabs/hardhat-ganache" - "@nomiclabs/hardhat-web3" - "@nomiclabs/hardhat-truffle5" - "@nomiclabs/hardhat-web3-legacy" diff --git a/docs/src/content/hardhat-runner/plugins/plugins.ts b/docs/src/content/hardhat-runner/plugins/plugins.ts index 13a914fe9c..435cf481ea 100644 --- a/docs/src/content/hardhat-runner/plugins/plugins.ts +++ b/docs/src/content/hardhat-runner/plugins/plugins.ts @@ -368,14 +368,6 @@ const communityPlugins: IPlugin[] = [ "Hardhat plugin for solidity contract verification on Blockscout block explorer.", tags: ["Blockscout", "Deployment", "Solidity", "Verification"], }, - { - name: "@georacle/hardhat-georacle", - author: "Georacle", - authorUrl: "https://georacle.io", - description: - "A Hardhat plugin for integrating smart contracts with Georacle.", - tags: ["Georacle", "oracle", "geospatial"], - }, { name: "@muzamint/hardhat-etherspot", author: "muzamint", @@ -501,6 +493,21 @@ const communityPlugins: IPlugin[] = [ "Handy set of utilities for testing contracts in Hardhat projects", tags: ["testing", "solidity"], }, + { + name: "hardhat-deals", + author: "use less", + authorUrl: "https://github.com/Karuzzzo", + description: "Small hardhat task for debug money transfers 🤝💰", + tags: ["Deployment", "Testing", "Security", "Debug", "helper", "Tasks"], + }, + { + name: "hardhat-contract-dumper", + author: "use less", + authorUrl: "https://github.com/Karuzzzo", + description: + "Another small hardhat task for printing contract's storage layout 📐", + tags: ["Deployment", "Testing", "Security", "Debug", "helper", "Tasks"], + }, { name: "@0xweb/hardhat", author: "Alex Kit", @@ -671,6 +678,101 @@ const communityPlugins: IPlugin[] = [ "Hardhat plugin to check and lock the storage layout of contracts", tags: ["Tooling", "Testing", "Storage"], }, + { + name: "hardhat-insight", + author: "Andres Adjimann", + authorUrl: "https://www.npmjs.com/package/hardhat-insight", + description: + "Hardhat plugin to get contract storage, gas and code size insights using the compiler ast output", + tags: ["Tooling", "Storage", "Gas", "Size", "Bytecode"], + }, + { + name: "hardhat-scilla-plugin", + author: "Saeed Dadkhah", + authorUrl: "https://www.github.com/its-saeed", + description: + "Hardhat plugin to test scilla contracts. 
Scilla is Zillqa's programming language to develop smart contracts.", + tags: ["Scilla", "Testing", "Zilliqa"], + }, + { + name: "hardhat-flat-exporter", + author: "Laz", + authorUrl: "https://github.com/Saszr", + description: "Export flat contract on compilation via Hardhat.", + tags: ["Flatten", "Smart contract", "Solidity", "Deployment"], + }, + { + name: "@chainlink/hardhat-chainlink", + author: "Chainlink Labs", + authorUrl: "https://github.com/smartcontractkit/hardhat-chainlink", + description: "Integrates Chainlink into Hardhat projects.", + tags: ["chainlink", "hardhat"], + }, + { + name: "@dlsl/hardhat-markup", + author: "Distributed Lab", + authorUrl: "https://distributedlab.com/", + description: + "Hardhat plugin to generate customizable smart contracts documentation", + tags: ["Documentation", "NatSpec", "Markdown"], + }, + { + name: "hardhat-contract-clarity", + author: "Marc-Aurele Besner", + authorUrl: "https://github.com/marc-aurele-besner", + description: + "This Hardhat plugin add 3 tasks to Hardhat, to summarize a smart contract in human readable format using OpenAI GPT-3, to create a readme looking at your package.json and a task to ask question to chatGPT when running into errors.", + tags: ["chatGPT", "openai", "gpt3", "ai"], + }, + { + name: "transaction-retry-tool", + author: "Marc-Aurele Besner", + authorUrl: "https://github.com/marc-aurele-besner", + description: + "This Hardhat plugin provides two tasks and two functions to help you manage and optimize your transactions on Ethereum compatible blockchain. The two tasks include the ability to retry a transaction and retrieve the current gas cost.", + tags: ["transaction", "gasPrice", "retry", "helper"], + }, + { + name: "hardhat-fireblocks", + author: "Fireblocks", + authorUrl: "https://github.com/fireblocks", + description: "Hardhat plugin for integrating with Fireblocks", + tags: ["Deployment", "Security"], + }, + { + name: "hardhat-uniswap-v2-deploy-plugin", + author: "Cyrille Derché", + authorUrl: "https://github.com/onmychain/hardhat-uniswap-v2-deploy-plugin", + description: + "Hardhat plugin for Uniswap V2 (pancakeswap protocol) testing and deployment. 
You can use it to test features such as pair creation, liquidity provisioning, and swaps.", + tags: ["uniswap", "pancakeswap", "testing", "deployment", "automated"], + }, + { + name: "hardhat-deal", + author: "Rubilmax", + authorUrl: "https://github.com/rubilmax/hardhat-deal", + description: + "Hardhat plugin to ease dealing ERC20 tokens in hardhat forks.", + tags: [ + "erc20", + "deal", + "foundry", + "forge", + "mock", + "balanceOf", + "fork", + "testing", + "tests", + ], + }, + { + name: "@truffle/dashboard-hardhat-plugin", + author: "Truffle", + authorUrl: "https://trufflesuite.com/", + description: + "Enable project-specific features inside Truffle Dashboard, including advanced calldata decoding and more", + tags: ["truffle-dashboard", "transaction", "signing", "decoding"], + }, ]; const officialPlugins: IPlugin[] = [ @@ -696,10 +798,10 @@ const officialPlugins: IPlugin[] = [ tags: ["Ethers.js", "Testing", "Tasks", "Scripts"], }, { - name: "@nomiclabs/hardhat-etherscan", + name: "@nomicfoundation/hardhat-verify", author: "Nomic Foundation", authorUrl: "https://twitter.com/NomicFoundation", - description: "Automatically verify contracts on Etherscan", + description: "Automatically verify contracts", tags: ["Etherscan", "Verification"], }, { @@ -740,13 +842,6 @@ const officialPlugins: IPlugin[] = [ "Adds a Waffle-compatible provider to the Hardhat Runtime Environment and automatically initializes the Waffle Chai matchers", tags: ["Waffle", "Testing"], }, - { - name: "@nomiclabs/hardhat-ganache", - author: "Nomic Foundation", - authorUrl: "https://twitter.com/NomicFoundation", - description: "Hardhat plugin for managing Ganache", - tags: ["Ganache", "Testing network"], - }, { name: "@nomiclabs/hardhat-web3", author: "Nomic Foundation", diff --git a/docs/src/content/home.ts b/docs/src/content/home.ts index 83bd4b523f..0ba2fd12e9 100644 --- a/docs/src/content/home.ts +++ b/docs/src/content/home.ts @@ -174,7 +174,7 @@ const reviewsBlockContent = [ position: "CTO at Aragon One", personImage: reviewsBlock.brett, companyImage: "/images/reveiws-logo/aone.svg", - alt: "Aragone One logo", + alt: "Aragon One logo", comment: '"Our interest in Hardhat was driven by our own experience of building and maintaining developer tooling for the Aragon ecosystem. Not only were these efforts time consuming, difficult, and error-prone, we also found ourselves constantly re-inventing the wheel in areas we did not want to care about or force opinions on (e.g. Ganache connections, Truffle providers, test strategy). Hardhat, with its plugin ecosystem, has effectively eliminated many of these problems for us. 
We feel confident piggybacking on the best for the underlying layers so that we can focus our attention on exposing the power of the Aragon ecosystem to our community."', }, diff --git a/docs/src/content/layouts.yaml b/docs/src/content/layouts.yaml index ac403efff4..c984269661 100644 --- a/docs/src/content/layouts.yaml +++ b/docs/src/content/layouts.yaml @@ -7,6 +7,7 @@ hardhat-runner: - hardhat-runner/docs/config - hardhat-runner/docs/guides - hardhat-runner/docs/advanced + - hardhat-runner/docs/supporter-guides - hardhat-runner/docs/troubleshooting - hardhat-runner/docs/reference - hardhat-runner/plugins diff --git a/docs/src/content/tutorial/boilerplate-project.md b/docs/src/content/tutorial/boilerplate-project.md index f1388d60c7..8137fb3168 100644 --- a/docs/src/content/tutorial/boilerplate-project.md +++ b/docs/src/content/tutorial/boilerplate-project.md @@ -45,7 +45,7 @@ npm install npx hardhat node ``` -Here we just install the npm project's dependencies, and by running `npx hardhat node` we spin up an instance of Hardhat Network that you can connect to using MetaMask. In a different terminal in the same directory, run: +Here we just install the npm project's dependencies, and by running `npx hardhat node` we spin up an instance of Hardhat Network that you can connect to using your wallet. In a different terminal in the same directory, run: ``` npx hardhat --network localhost run scripts/deploy.js @@ -61,9 +61,9 @@ npm run start Then open [http://127.0.0.1:3000/](http://127.0.0.1:3000/) in your browser and you should see this: ![](/front-5.png) -Set your network in MetaMask to `127.0.0.1:8545`. +Click the button to connect your wallet. If you are using MetaMask, make sure you have selected the `Localhost 8545` network. -Now click the button in the web app. You should then see this: +After connecting your wallet, you should see this: ![](/front-2.png) diff --git a/docs/src/content/tutorial/creating-a-new-hardhat-project.md b/docs/src/content/tutorial/creating-a-new-hardhat-project.md index 0357fe99c9..8f6d0d39a5 100644 --- a/docs/src/content/tutorial/creating-a-new-hardhat-project.md +++ b/docs/src/content/tutorial/creating-a-new-hardhat-project.md @@ -164,6 +164,6 @@ require("@nomicfoundation/hardhat-toolbox"); /** @type import('hardhat/config').HardhatUserConfig */ module.exports = { - solidity: "0.8.17", + solidity: "0.8.18", }; ``` diff --git a/docs/src/content/tutorial/debugging-with-hardhat-network.md b/docs/src/content/tutorial/debugging-with-hardhat-network.md index 037595cd61..3e2f3c13e2 100644 --- a/docs/src/content/tutorial/debugging-with-hardhat-network.md +++ b/docs/src/content/tutorial/debugging-with-hardhat-network.md @@ -1,6 +1,6 @@ # 6. Debugging with Hardhat Network -Hardhat comes built-in with Hardhat Network, a local Ethereum network designed for development. It allows you to deploy your contracts, run your tests and debug your code, all within the confines of your local machine. It's the default network Hardhat that connects to, so you don't need to set up anything for it to work. Just run your tests. +Hardhat comes built-in with Hardhat Network, a local Ethereum network designed for development. It allows you to deploy your contracts, run your tests and debug your code, all within the confines of your local machine. It's the default network that Hardhat connects to, so you don't need to set up anything for it to work. Just run your tests. 
## Solidity `console.log` diff --git a/docs/src/content/tutorial/deploying-to-a-live-network.md b/docs/src/content/tutorial/deploying-to-a-live-network.md index 077e1fb310..088f044b51 100644 --- a/docs/src/content/tutorial/deploying-to-a-live-network.md +++ b/docs/src/content/tutorial/deploying-to-a-live-network.md @@ -2,7 +2,7 @@ Once you're ready to share your dApp with other people, you may want to deploy it to a live network. This way others can access an instance that's not running locally on your system. -The "mainnet" Ethereum network deals with real money, but there are separate "testnet" networks that do not. These testnets provide shared staging environments that do a good job of mimicking the real world scenario without putting real money at stake, and [Ethereum has several](https://ethereum.org/en/developers/docs/networks/#ethereum-testnets), like _Goerli_ and _Sepolia_. We recommend you deploy your contracts to the _Goerli_ testnet. +The "mainnet" Ethereum network deals with real money, but there are separate "testnet" networks that do not. These testnets provide shared staging environments that do a good job of mimicking the real world scenario without putting real money at stake, and [Ethereum has several](https://ethereum.org/en/developers/docs/networks/#ethereum-testnets), like _Sepolia_ and _Goerli_. We recommend you deploy your contracts to the _Sepolia_ testnet. At the software level, deploying to a testnet is the same as deploying to mainnet. The only difference is which network you connect to. Let's look into what the code to deploy your contracts using ethers.js would look like. @@ -49,39 +49,80 @@ Token address: 0x5FbDB2315678afecb367f032d93F642f64180aa3 ## Deploying to remote networks -To deploy to a remote network such as mainnet or any testnet, you need to add a `network` entry to your `hardhat.config.js` file. We’ll use Goerli for this example, but you can add any network similarly: +To deploy to a remote network such as mainnet or any testnet, you need to add a `network` entry to your `hardhat.config.js` file. 
We’ll use Sepolia for this example, but you can add any network similarly:
+
+::::tabsgroup{options=Infura,Alchemy}
+
+:::tab{value=Infura}
+
+```js{5,11,15-20}
+require("@nomicfoundation/hardhat-toolbox");
+
+// Go to https://infura.io, sign up, create a new API key
+// in its dashboard, and replace "KEY" with it
+const INFURA_API_KEY = "KEY";
+
+// Replace this private key with your Sepolia account private key
+// To export your private key from Coinbase Wallet, go to
+// Settings > Developer Settings > Show private key
+// To export your private key from Metamask, open Metamask and
+// go to Account Details > Export Private Key
+// Beware: NEVER put real Ether into testing accounts
+const SEPOLIA_PRIVATE_KEY = "YOUR SEPOLIA PRIVATE KEY";
+
+module.exports = {
+  solidity: "0.8.9",
+  networks: {
+    sepolia: {
+      url: `https://sepolia.infura.io/v3/${INFURA_API_KEY}`,
+      accounts: [SEPOLIA_PRIVATE_KEY]
+    }
+  }
+};
+```
+
+:::
+
+:::tab{value=Alchemy}
```js{5,11,15-20}
require("@nomicfoundation/hardhat-toolbox");
-// Go to https://www.alchemyapi.io, sign up, create
-// a new App in its dashboard, and replace "KEY" with its key
+// Go to https://alchemy.com, sign up, create a new App in
+// its dashboard, and replace "KEY" with its key
const ALCHEMY_API_KEY = "KEY";
-// Replace this private key with your Goerli account private key
+// Replace this private key with your Sepolia account private key
+// To export your private key from Coinbase Wallet, go to
+// Settings > Developer Settings > Show private key
// To export your private key from Metamask, open Metamask and
// go to Account Details > Export Private Key
// Beware: NEVER put real Ether into testing accounts
-const GOERLI_PRIVATE_KEY = "YOUR GOERLI PRIVATE KEY";
+const SEPOLIA_PRIVATE_KEY = "YOUR SEPOLIA PRIVATE KEY";
module.exports = {
  solidity: "0.8.9",
  networks: {
-    goerli: {
-      url: `https://eth-goerli.alchemyapi.io/v2/${ALCHEMY_API_KEY}`,
-      accounts: [GOERLI_PRIVATE_KEY]
+    sepolia: {
+      url: `https://eth-sepolia.g.alchemy.com/v2/${ALCHEMY_API_KEY}`,
+      accounts: [SEPOLIA_PRIVATE_KEY]
    }
  }
};
```
-We're using [Alchemy](https://www.alchemyapi.io), but pointing `url` to any Ethereum node or gateway, like [Infura](https://www.infura.io/), would work. Go grab your `ALCHEMY_API_KEY` and come back.
+:::
+
+::::
+
+We're using [Infura](https://infura.io) or [Alchemy](https://alchemy.com/), but pointing `url` to any Ethereum node or gateway would work. Go grab your API key and come back.
-To deploy on Goerli you need to send some Goerli ether to the address that's going to be making the deployment. You can get testnet ether from a faucet, a service that distributes testing-ETH for free. Here is one for Goerli:
+To deploy on Sepolia you need to send some Sepolia ether to the address that's going to be making the deployment. You can get testnet ether from a faucet, a service that distributes testing-ETH for free. Here is one for Sepolia:
-- [Alchemy Goerli Faucet](https://goerlifaucet.com/)
+- [Alchemy Sepolia Faucet](https://sepoliafaucet.com/)
+- [Coinbase Sepolia Faucet](https://coinbase.com/faucets/ethereum-sepolia-faucet) (only works if you are using the Coinbase Wallet)
-You'll have to change Metamask's network to Goerli before transacting.
+You'll have to change your wallet's network to Sepolia before transacting.
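+
+Before running the deployment, it can be handy to confirm that the configured account actually holds some Sepolia ETH. The following TypeScript sketch is an optional, illustrative helper (the file name is arbitrary, and it assumes the same Hardhat and ethers setup used throughout this tutorial); adapt it to JavaScript if that is what your project uses:
+
+```ts
+// scripts/check-balance.ts (optional helper, not one of the tutorial's required steps)
+import { ethers } from "hardhat";
+
+async function main() {
+  // The first configured account is the one that will make the deployment
+  const [deployer] = await ethers.getSigners();
+  const balance = await deployer.getBalance();
+
+  console.log(`Deployer ${deployer.address} balance: ${ethers.utils.formatEther(balance)} ETH`);
+}
+
+main().catch((error) => {
+  console.error(error);
+  process.exitCode = 1;
+});
+```
+
+Running it with `npx hardhat run scripts/check-balance.ts --network sepolia` should show a non-zero balance before you attempt the deployment.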
:::tip @@ -92,7 +133,7 @@ You can learn more about other testnets and find links to their faucets on the [ Finally, run: ``` -npx hardhat run scripts/deploy.js --network goerli +npx hardhat run scripts/deploy.js --network sepolia ``` If everything went well, you should see the deployed contract address. diff --git a/docs/src/content/tutorial/final-thoughts.md b/docs/src/content/tutorial/final-thoughts.md index 33659bfbef..419b85a365 100644 --- a/docs/src/content/tutorial/final-thoughts.md +++ b/docs/src/content/tutorial/final-thoughts.md @@ -11,6 +11,7 @@ Here are some links you might find useful throughout your journey: - [Ethers.js Documentation](https://docs.ethers.io/) - [Mocha Documentation](https://mochajs.org/) - [Chai Documentation](https://www.chaijs.com/) +- [Alchemy's smart contract tutorial](https://docs.alchemy.com/docs/hello-world-smart-contract) to also learn how to use Metamask and Solidity as well as an RPC endpoint like the one that Alchemy provides. Further things to do with your smart contract using Hardhat: diff --git a/docs/src/content/tutorial/index.md b/docs/src/content/tutorial/index.md index d252d1da37..9c73ff2dfd 100644 --- a/docs/src/content/tutorial/index.md +++ b/docs/src/content/tutorial/index.md @@ -27,6 +27,6 @@ To follow this tutorial you should be able to: - Operate a [terminal](https://en.wikipedia.org/wiki/Terminal_emulator) - Use [git](https://git-scm.com/doc) - Understand the basics of how [smart contracts](https://ethereum.org/learn/#smart-contracts) work -- Set up a [Metamask](https://metamask.io/) wallet +- Set up a [Coinbase](https://www.coinbase.com/wallet) or [Metamask](https://metamask.io/) wallet If you can't do any of the above, follow the links and take some time to learn the basics. diff --git a/docs/src/content/tutorial/testing-contracts.md b/docs/src/content/tutorial/testing-contracts.md index a4e48bce2b..4d87502e75 100644 --- a/docs/src/content/tutorial/testing-contracts.md +++ b/docs/src/content/tutorial/testing-contracts.md @@ -256,7 +256,7 @@ describe("Token contract", function () { ).to.changeTokenBalances(hardhatToken, [addr1, addr2], [-50, 50]); }); - it("should emit Transfer events", async function () { + it("Should emit Transfer events", async function () { const { hardhatToken, owner, addr1, addr2 } = await loadFixture( deployTokenFixture ); diff --git a/docs/temp/tabsConfig.json b/docs/temp/tabsConfig.json index 7f4f9243f7..b4e9357913 100644 --- a/docs/temp/tabsConfig.json +++ b/docs/temp/tabsConfig.json @@ -1,5 +1,5 @@ { - "npm/yarn": "npm", "TypeScript/JavaScript": "TypeScript", - "npm 7+/npm 6/yarn": "npm 7+" + "npm 7+/npm 6/yarn": "npm 7+", + "Infura/Alchemy": "Infura" } diff --git a/docs/yarn.lock b/docs/yarn.lock index 8f1f36d449..862a255546 100644 --- a/docs/yarn.lock +++ b/docs/yarn.lock @@ -4257,6 +4257,13 @@ builtin-status-codes@^3.0.0: resolved "https://registry.npmjs.org/builtin-status-codes/-/builtin-status-codes-3.0.0.tgz" integrity sha1-hZgoeOIbmOHGZCXgPQF0eI9Wnug= +busboy@^1.6.0: + version "1.6.0" + resolved "https://registry.yarnpkg.com/busboy/-/busboy-1.6.0.tgz#966ea36a9502e43cdb9146962523b92f531f6893" + integrity sha512-8SFQbg/0hQ9xy3UNTB0YEnsNBbWfhf7RtnzpL7TkBiTBRfrQ9Fxcnz7VJsleJpyp6rVLvXiuORqjlHi5q+PYuA== + dependencies: + streamsearch "^1.1.0" + bytes@3.0.0: version "3.0.0" resolved "https://registry.npmjs.org/bytes/-/bytes-3.0.0.tgz" @@ -11611,6 +11618,11 @@ stream-shift@^1.0.0: resolved "https://registry.npmjs.org/stream-shift/-/stream-shift-1.0.1.tgz" integrity 
sha512-AiisoFqQ0vbGcZgQPY1cdP2I76glaVA/RauYR4G4thNFgkTqr90yXTo4LYX60Jl+sIlPNHHdGSwo01AvbKUSVQ== +streamsearch@^1.1.0: + version "1.1.0" + resolved "https://registry.yarnpkg.com/streamsearch/-/streamsearch-1.1.0.tgz#404dd1e2247ca94af554e841a8ef0eaa238da764" + integrity sha512-Mcc5wHehp9aXz1ax6bZUyY5afg9u2rv5cqQI3mRrYkGC8rW2hM02jWuwjtL++LS5qinSyhj2QfLyNsuc+VsExg== + "string-width@^1.0.2 || 2 || 3 || 4", string-width@^4.0.0, string-width@^4.1.0, string-width@^4.2.0, string-width@^4.2.2, string-width@^4.2.3: version "4.2.3" resolved "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz" @@ -12158,9 +12170,9 @@ typescript@4.5.5: integrity sha512-TCTIul70LyWe6IJWT8QSYeA54WQe8EjQFU4wY52Fasj5UKx88LNYKCgBEHcOMOrFF1rKGbD8v/xcNWVUq9SymA== ua-parser-js@^0.7.30: - version "0.7.31" - resolved "https://registry.npmjs.org/ua-parser-js/-/ua-parser-js-0.7.31.tgz" - integrity sha512-qLK/Xe9E2uzmYI3qLeOmI0tEOt+TBBQyUIAh4aAgU05FVYzeZrKUdkAZfBNVGRaHVgV0TDkdEngJSw/SyQchkQ== + version "0.7.33" + resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.33.tgz#1d04acb4ccef9293df6f70f2c3d22f3030d8b532" + integrity sha512-s8ax/CeZdK9R/56Sui0WM6y9OFREJarMRHqLB2EwkovemBxNQ+Bqu8GAsUnVcXKgphb++ghr/B2BZx4mahujPw== uglify-js@^3.1.4: version "3.15.2" @@ -12177,10 +12189,12 @@ unbox-primitive@^1.0.1: has-symbols "^1.0.2" which-boxed-primitive "^1.0.2" -undici@^5.8.2: - version "5.8.2" - resolved "https://registry.yarnpkg.com/undici/-/undici-5.8.2.tgz#071fc8a6a5d24db0ad510ad442f607d9b09d5eec" - integrity sha512-3KLq3pXMS0Y4IELV045fTxqz04Nk9Ms7yfBBHum3yxsTR4XNn+ZCaUbf/mWitgYDAhsplQ0B1G4S5D345lMO3A== +undici@^5.19.1: + version "5.19.1" + resolved "https://registry.yarnpkg.com/undici/-/undici-5.19.1.tgz#92b1fd3ab2c089b5a6bd3e579dcda8f1934ebf6d" + integrity sha512-YiZ61LPIgY73E7syxCDxxa3LV2yl3sN8spnIuTct60boiiRaE1J8mNWHO8Im2Zi/sFrPusjLlmRPrsyraSqX6A== + dependencies: + busboy "^1.6.0" unfetch@^4.2.0: version "4.2.0" diff --git a/package.json b/package.json index 6cee820d33..f4adfcc33e 100644 --- a/package.json +++ b/package.json @@ -4,26 +4,26 @@ "author": "Nomic Labs LLC", "license": "SEE LICENSE IN EACH PACKAGE'S LICENSE FILE", "private": true, - "workspaces": [ - "packages/*", - "crates/rethnet_evm_napi" - ], + "workspaces": { + "packages": ["packages/*", "crates/rethnet_evm_napi"], + "nohoist": ["**/find-up", "**/find-up/**"] + }, "devDependencies": { "@changesets/cli": "^2.16.0", "@open-rpc/typings": "^1.12.1", "prettier": "2.4.1", "shelljs": "^0.8.5", - "typescript": "~4.5.2", + "typescript": "~4.7.4", "wsrun": "^5.2.2" }, "scripts": { "prebuild": "cd crates/rethnet_evm_napi && yarn build", - "build": "tsc --build packages/hardhat-core packages/hardhat-docker packages/hardhat-ethers packages/hardhat-etherscan packages/hardhat-ganache packages/hardhat-solhint packages/hardhat-solpp packages/hardhat-truffle4 packages/hardhat-truffle5 packages/hardhat-vyper packages/hardhat-waffle packages/hardhat-web3 packages/hardhat-web3-legacy packages/hardhat-chai-matchers packages/hardhat-network-helpers packages/hardhat-toolbox packages/hardhat-foundry", - "watch": "tsc --build --watch packages/hardhat-core/src packages/hardhat-docker packages/hardhat-ethers packages/hardhat-etherscan packages/hardhat-ganache packages/hardhat-solhint packages/hardhat-solpp packages/hardhat-truffle4 packages/hardhat-truffle5 packages/hardhat-vyper packages/hardhat-waffle packages/hardhat-web3 packages/hardhat-web3-legacy packages/hardhat-chai-matchers packages/hardhat-network-helpers packages/hardhat-toolbox 
packages/hardhat-foundry", + "build": "tsc --build packages/hardhat-core packages/hardhat-ethers packages/hardhat-verify packages/hardhat-solhint packages/hardhat-solpp packages/hardhat-truffle4 packages/hardhat-truffle5 packages/hardhat-vyper packages/hardhat-web3 packages/hardhat-web3-legacy packages/hardhat-chai-matchers packages/hardhat-network-helpers packages/hardhat-toolbox packages/hardhat-foundry", + "watch": "tsc --build --watch packages/hardhat-core/src packages/hardhat-ethers packages/hardhat-verify packages/hardhat-solhint packages/hardhat-solpp packages/hardhat-truffle4 packages/hardhat-truffle5 packages/hardhat-vyper packages/hardhat-web3 packages/hardhat-web3-legacy packages/hardhat-chai-matchers packages/hardhat-network-helpers packages/hardhat-toolbox packages/hardhat-foundry", "clean": "wsrun --exclude-missing clean", "test": "node scripts/run-tests.js", "lint": "wsrun --exclude-missing --stages lint && yarn prettier --check", "lint:fix": "wsrun --exclude-missing --stages lint:fix && yarn prettier --write", - "prettier": "prettier *.md \"{docs,.github}/**/*.{md,yml}\" \"scripts/**/*.js\"" + "prettier": "prettier *.md \"{docs,.github}/**/*.{md,yml,ts}\" \"scripts/**/*.js\"" } } diff --git a/packages/e2e/.eslintignore b/packages/e2e/.eslintignore deleted file mode 100644 index a379b5efcb..0000000000 --- a/packages/e2e/.eslintignore +++ /dev/null @@ -1 +0,0 @@ -test/fixture-projects/**/* diff --git a/packages/e2e/.gitignore b/packages/e2e/.gitignore deleted file mode 100644 index ef6cb5fb9f..0000000000 --- a/packages/e2e/.gitignore +++ /dev/null @@ -1,95 +0,0 @@ -# Node modules -node_modules - -# Compilation output -/build-test/ -/dist - -# Code coverage artifacts -/coverage -/.nyc_output - -# Below is Github's node gitignore template, -# ignoring the node_modules part, as it'd ignore every node_modules, and we have some for testing - -# Logs -logs -*.log -npm-debug.log* -yarn-debug.log* -yarn-error.log* -lerna-debug.log* - -# Diagnostic reports (https://nodejs.org/api/report.html) -report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json - -# Runtime data -pids -*.pid -*.seed -*.pid.lock - -# Directory for instrumented libs generated by jscoverage/JSCover -lib-cov - -# Coverage directory used by tools like istanbul -coverage - -# nyc test coverage -.nyc_output - -# Grunt intermediate storage (https://gruntjs.com/creating-plugins#storing-task-files) -.grunt - -# Bower dependency directory (https://bower.io/) -bower_components - -# node-waf configuration -.lock-wscript - -# Compiled binary addons (https://nodejs.org/api/addons.html) -build/Release - -# Dependency directories -#node_modules/ -jspm_packages/ - -# TypeScript v1 declaration files -typings/ - -# Optional npm cache directory -.npm - -# Optional eslint cache -.eslintcache - -# Optional REPL history -.node_repl_history - -# Output of 'npm pack' -*.tgz - -# Yarn Integrity file -.yarn-integrity - -# parcel-bundler cache (https://parceljs.org/) -.cache - -# next.js build output -.next - -# nuxt.js build output -.nuxt - -# vuepress build output -.vuepress/dist - -# Serverless directories -.serverless/ - -# FuseBox cache -.fusebox/ - -# DynamoDB Local files -.dynamodb/ - diff --git a/packages/e2e/.mocharc.json b/packages/e2e/.mocharc.json deleted file mode 100644 index ba554c9b04..0000000000 --- a/packages/e2e/.mocharc.json +++ /dev/null @@ -1,5 +0,0 @@ -{ - "require": "ts-node/register/files", - "ignore": ["test/fixture-projects/**/*"], - "timeout": 180000 -} diff --git a/packages/e2e/.prettierignore b/packages/e2e/.prettierignore 
deleted file mode 100644 index 8225baa4a7..0000000000 --- a/packages/e2e/.prettierignore +++ /dev/null @@ -1,2 +0,0 @@ -/node_modules -/dist diff --git a/packages/e2e/README.md b/packages/e2e/README.md deleted file mode 100644 index a25413feb9..0000000000 --- a/packages/e2e/README.md +++ /dev/null @@ -1,11 +0,0 @@ -# hardhat e2e tests - -This package has end to end tests for the packaged version of Hardhat. You can run them with `yarn test`. This will build Hardhat, package it as tgz, and run the tests. - -## How it works - -The entry point of this package is the `run-tests.js` file. This script expects an argument that can be `npm` or `yarn`, and it indicates which package manager will be used to package hardhat before the tests are executed. - -After that, `mocha` will be executed to run the test suites under `test`. This means that `mocha` shouldn't be run directly, because these tests assume that the tgz file has been built and that its path is available as an environment variable. - -The tests copy each fixture project directory in a temporary directory, and then install hardhat there using the tgz file. diff --git a/packages/e2e/package.json b/packages/e2e/package.json deleted file mode 100644 index 8663850bdb..0000000000 --- a/packages/e2e/package.json +++ /dev/null @@ -1,44 +0,0 @@ -{ - "name": "@nomiclabs/hardhat-e2e-tests", - "private": true, - "version": "0.0.1", - "description": "e2e tests for Hardhat", - "homepage": "https://github.com/nomiclabs/hardhat/tree/main/packages/e2e", - "repository": "github:nomiclabs/hardhat", - "author": "Nomic Labs LLC", - "license": "MIT", - "scripts": { - "lint": "yarn prettier --check && yarn eslint", - "lint:fix": "yarn prettier --write && yarn eslint --fix", - "prettier": "prettier \"**/*.{js,md,json}\"", - "eslint": "eslint run-tests.js 'test/**/*.ts'", - "test:npm": "node run-tests.js npm", - "test:yarn": "node run-tests.js yarn", - "test": "npm run test:npm && npm run test:yarn", - "prebuild": "cd ../../crates/rethnet_evm_napi && yarn build", - "build": "tsc --build .", - "clean": "rimraf dist" - }, - "dependencies": { - "@types/chai": "^4.2.0", - "@types/fs-extra": "^5.1.0", - "@types/mocha": ">=9.1.0", - "@types/node": "^14.0.0", - "@types/shelljs": "^0.8.6", - "@typescript-eslint/eslint-plugin": "^5.30.7", - "@typescript-eslint/parser": "4.29.2", - "chai": "^4.2.0", - "eslint": "^7.29.0", - "eslint-config-prettier": "8.3.0", - "eslint-plugin-import": "2.24.1", - "eslint-plugin-no-only-tests": "3.0.0", - "eslint-plugin-prettier": "3.4.0", - "fs-extra": "^7.0.1", - "mocha": "^10.0.0", - "prettier": "2.4.1", - "rimraf": "^3.0.2", - "shelljs": "^0.8.5", - "ts-node": "^8.1.0", - "typescript": "~4.5.2" - } -} diff --git a/packages/e2e/run-tests.js b/packages/e2e/run-tests.js deleted file mode 100644 index 29f2fbca5a..0000000000 --- a/packages/e2e/run-tests.js +++ /dev/null @@ -1,92 +0,0 @@ -const fsExtra = require("fs-extra"); -const path = require("path"); -const shell = require("shelljs"); - -shell.set("-e"); - -// make sure that this env var is not set -delete process.env.TS_NODE_TRANSPILE_ONLY; - -const rootDir = path.join(__dirname, "..", ".."); -const hardhatCoreDir = path.join(rootDir, "packages", "hardhat-core"); - -if (process.argv[2] !== "npm" && process.argv[2] !== "yarn") { - console.error("Usage: node run-tests.js "); - process.exit(1); -} - -const isYarn = process.argv[2] === "yarn"; - -async function main() { - // build hardhat and geth the path to the tgz - const hardhatPackagePath = buildHardhat(); - shell.cd(__dirname); - - 
// we don't throw if the tests fail so that we can cleanup things properly - shell.set("+e"); - const mochaResult = shell.exec(`mocha --recursive \"test/**/*.ts\"`, { - env: { - ...process.env, - // the tests need this information to setup the fixture projects - HARDHAT_E2E_PATH_TO_HARDHAT_TGZ: hardhatPackagePath, - HARDHAT_E2E_IS_YARN: isYarn, - }, - }); - shell.set("-e"); - - // Remove the built package. If we don't do this, the hardhat-core directory - // will have a new random tgz file in each e2e run. - shell.rm(hardhatPackagePath); - - process.exit(mochaResult.code); -} - -/** - * Build and package hardhat as a tgz file and install it in each fixture project. - */ -function buildHardhat() { - // cd into packages/hardhat-core - shell.cd(hardhatCoreDir); - - // build and pack the project - if (isYarn) { - shell.exec("yarn build"); - shell.exec("yarn pack"); - } else { - shell.exec("npm run build"); - shell.exec("npm pack"); - } - - // get the path to the tgz file - const { version } = fsExtra.readJsonSync( - path.join(hardhatCoreDir, "package.json") - ); - - let hardhatPackageName; - if (isYarn) { - hardhatPackageName = `hardhat-v${version}.tgz`; - } else { - hardhatPackageName = `hardhat-${version}.tgz`; - } - - // We rename the tgz file to a unique name because apparently yarn uses the - // path to a tgz to cache it, but we don't want it to ever be cached when we - // are working on the e2e tests locally. - // - // To err on the side of safety, we always do this, even if it's only needed - // for yarn. - const newHardhatPackageName = `hardhat-${Date.now()}.tgz`; - shell.mv( - path.join(hardhatCoreDir, hardhatPackageName), - path.join(hardhatCoreDir, newHardhatPackageName) - ); - - return path.join(hardhatCoreDir, newHardhatPackageName); -} - -main() - .then(() => process.exit(0)) - .catch((error) => { - console.error(error); - process.exit(1); - }); diff --git a/packages/e2e/test/fixture-projects/basic-project/.gitignore b/packages/e2e/test/fixture-projects/basic-project/.gitignore deleted file mode 100644 index 4a4ecc528f..0000000000 --- a/packages/e2e/test/fixture-projects/basic-project/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -cache/ -artifacts/ diff --git a/packages/e2e/test/fixture-projects/basic-project/contracts/Contract.sol b/packages/e2e/test/fixture-projects/basic-project/contracts/Contract.sol deleted file mode 100644 index fd057843ed..0000000000 --- a/packages/e2e/test/fixture-projects/basic-project/contracts/Contract.sol +++ /dev/null @@ -1,4 +0,0 @@ -pragma solidity ^0.7.0; - -contract Contract { -} diff --git a/packages/e2e/test/fixture-projects/basic-project/hardhat.config.js b/packages/e2e/test/fixture-projects/basic-project/hardhat.config.js deleted file mode 100644 index 38f2a55388..0000000000 --- a/packages/e2e/test/fixture-projects/basic-project/hardhat.config.js +++ /dev/null @@ -1,3 +0,0 @@ -module.exports = { - solidity: "0.7.3", -}; diff --git a/packages/e2e/test/fixture-projects/basic-project/package.json b/packages/e2e/test/fixture-projects/basic-project/package.json deleted file mode 100644 index 4539747ffa..0000000000 --- a/packages/e2e/test/fixture-projects/basic-project/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "basic-project", - "version": "1.0.0", - "license": "MIT", - "dependencies": {} -} diff --git a/packages/e2e/test/fixture-projects/basic-project/scripts/multi-run-test.js b/packages/e2e/test/fixture-projects/basic-project/scripts/multi-run-test.js deleted file mode 100644 index 4d2b3509e2..0000000000 --- 
a/packages/e2e/test/fixture-projects/basic-project/scripts/multi-run-test.js +++ /dev/null @@ -1,25 +0,0 @@ -// eslint-disable-next-line import/no-extraneous-dependencies -const hre = require("hardhat"); - -async function main() { - const code = await hre.run("test"); - - if (code > 0) { - console.error("Failed first test run"); - process.exit(1); - } - - const secondCode = await hre.run("test"); - - if (secondCode > 0) { - console.error("Failed second test run"); - process.exit(1); - } -} - -main() - .then(() => process.exit(0)) - .catch((error) => { - console.error(error); - process.exit(1); - }); diff --git a/packages/e2e/test/fixture-projects/basic-project/test/another-test.js b/packages/e2e/test/fixture-projects/basic-project/test/another-test.js deleted file mode 100644 index 43908218d4..0000000000 --- a/packages/e2e/test/fixture-projects/basic-project/test/another-test.js +++ /dev/null @@ -1,3 +0,0 @@ -describe("simple", () => { - it("should pass", () => {}); -}); diff --git a/packages/e2e/test/fixture-projects/basic-project/test/simple.js b/packages/e2e/test/fixture-projects/basic-project/test/simple.js deleted file mode 100644 index 43908218d4..0000000000 --- a/packages/e2e/test/fixture-projects/basic-project/test/simple.js +++ /dev/null @@ -1,3 +0,0 @@ -describe("simple", () => { - it("should pass", () => {}); -}); diff --git a/packages/e2e/test/fixture-projects/empty/.gitkeep b/packages/e2e/test/fixture-projects/empty/.gitkeep deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/packages/e2e/test/fixture-projects/javascript-sample-project/package.json b/packages/e2e/test/fixture-projects/javascript-sample-project/package.json deleted file mode 100644 index 08770e7334..0000000000 --- a/packages/e2e/test/fixture-projects/javascript-sample-project/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "javascript-sample-project", - "version": "1.0.0", - "license": "MIT", - "dependencies": {} -} diff --git a/packages/e2e/test/fixture-projects/type-error-in-config/.gitignore b/packages/e2e/test/fixture-projects/type-error-in-config/.gitignore deleted file mode 100644 index 4a4ecc528f..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-config/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -cache/ -artifacts/ diff --git a/packages/e2e/test/fixture-projects/type-error-in-config/hardhat.config.ts b/packages/e2e/test/fixture-projects/type-error-in-config/hardhat.config.ts deleted file mode 100644 index 995724d9c7..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-config/hardhat.config.ts +++ /dev/null @@ -1,6 +0,0 @@ -// this produces a compilation error but not a runtime error -const x: string = 10; - -export default { - solidity: "0.7.3", -}; diff --git a/packages/e2e/test/fixture-projects/type-error-in-config/package.json b/packages/e2e/test/fixture-projects/type-error-in-config/package.json deleted file mode 100644 index e20e7bee0a..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-config/package.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name": "basic-project", - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "ts-node": "^8.1.0", - "typescript": "~4.5.2" - } -} diff --git a/packages/e2e/test/fixture-projects/type-error-in-script/.gitignore b/packages/e2e/test/fixture-projects/type-error-in-script/.gitignore deleted file mode 100644 index 4a4ecc528f..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-script/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -cache/ -artifacts/ diff --git 
a/packages/e2e/test/fixture-projects/type-error-in-script/hardhat.config.ts b/packages/e2e/test/fixture-projects/type-error-in-script/hardhat.config.ts deleted file mode 100644 index c5df47fb48..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-script/hardhat.config.ts +++ /dev/null @@ -1,3 +0,0 @@ -export default { - solidity: "0.7.3", -}; diff --git a/packages/e2e/test/fixture-projects/type-error-in-script/package.json b/packages/e2e/test/fixture-projects/type-error-in-script/package.json deleted file mode 100644 index e20e7bee0a..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-script/package.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name": "basic-project", - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "ts-node": "^8.1.0", - "typescript": "~4.5.2" - } -} diff --git a/packages/e2e/test/fixture-projects/type-error-in-script/script.ts b/packages/e2e/test/fixture-projects/type-error-in-script/script.ts deleted file mode 100644 index 55f33c2090..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-script/script.ts +++ /dev/null @@ -1,2 +0,0 @@ -// this produces a compilation error but not a runtime error -const x: string = 10; diff --git a/packages/e2e/test/fixture-projects/type-error-in-test/.gitignore b/packages/e2e/test/fixture-projects/type-error-in-test/.gitignore deleted file mode 100644 index 4a4ecc528f..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-test/.gitignore +++ /dev/null @@ -1,2 +0,0 @@ -cache/ -artifacts/ diff --git a/packages/e2e/test/fixture-projects/type-error-in-test/hardhat.config.ts b/packages/e2e/test/fixture-projects/type-error-in-test/hardhat.config.ts deleted file mode 100644 index c5df47fb48..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-test/hardhat.config.ts +++ /dev/null @@ -1,3 +0,0 @@ -export default { - solidity: "0.7.3", -}; diff --git a/packages/e2e/test/fixture-projects/type-error-in-test/package.json b/packages/e2e/test/fixture-projects/type-error-in-test/package.json deleted file mode 100644 index e20e7bee0a..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-test/package.json +++ /dev/null @@ -1,9 +0,0 @@ -{ - "name": "basic-project", - "version": "1.0.0", - "license": "MIT", - "dependencies": { - "ts-node": "^8.1.0", - "typescript": "~4.5.2" - } -} diff --git a/packages/e2e/test/fixture-projects/type-error-in-test/test/test.ts b/packages/e2e/test/fixture-projects/type-error-in-test/test/test.ts deleted file mode 100644 index 572728b34c..0000000000 --- a/packages/e2e/test/fixture-projects/type-error-in-test/test/test.ts +++ /dev/null @@ -1,6 +0,0 @@ -describe("describe", function () { - it("it", async function () { - // this produces a compilation error but not a runtime error - const x: string = 10; - }); -}); diff --git a/packages/e2e/test/fixture-projects/typescript-sample-project/package.json b/packages/e2e/test/fixture-projects/typescript-sample-project/package.json deleted file mode 100644 index d715de688f..0000000000 --- a/packages/e2e/test/fixture-projects/typescript-sample-project/package.json +++ /dev/null @@ -1,6 +0,0 @@ -{ - "name": "typescript-sample-project", - "version": "1.0.0", - "license": "MIT", - "dependencies": {} -} diff --git a/packages/e2e/test/helpers.ts b/packages/e2e/test/helpers.ts deleted file mode 100644 index 1ffd69ebc1..0000000000 --- a/packages/e2e/test/helpers.ts +++ /dev/null @@ -1,48 +0,0 @@ -import fsExtra from "fs-extra"; -import os from "os"; -import path from "path"; -import shell from "shelljs"; - 
-declare module "mocha" { - interface Context { - testDirPath: string; - } -} - -export function useFixture(project: string) { - before(`using project "${project}"`, function () { - const fixturePath = path.join(__dirname, "fixture-projects", project); - - const tmpDirContainer = os.tmpdir(); - this.testDirPath = path.join(tmpDirContainer, `hardhat-e2e-${project}`); - - fsExtra.ensureDirSync(this.testDirPath); - fsExtra.emptyDirSync(this.testDirPath); - - fsExtra.copySync(fixturePath, this.testDirPath); - - shell.cd(this.testDirPath); - - // install hardhat locally - const isYarn = process.env.HARDHAT_E2E_IS_YARN === "true"; - const hardhatPackagePath = process.env.HARDHAT_E2E_PATH_TO_HARDHAT_TGZ; - - if (hardhatPackagePath === undefined || hardhatPackagePath === "") { - throw new Error( - "Undefined or empty environment variable: HARDHAT_E2E_PATH_TO_HARDHAT_TGZ" - ); - } - - if (isYarn) { - if (fsExtra.existsSync("package.json")) { - shell.exec("yarn"); - } - shell.exec(`yarn add ${hardhatPackagePath}`); - } else { - if (fsExtra.existsSync("package.json")) { - shell.exec("npm install"); - } - shell.exec(`npm install ${hardhatPackagePath}`); - } - }); -} diff --git a/packages/e2e/test/index.ts b/packages/e2e/test/index.ts deleted file mode 100644 index 14f4a31f2c..0000000000 --- a/packages/e2e/test/index.ts +++ /dev/null @@ -1,315 +0,0 @@ -import os from "os"; - -import { assert } from "chai"; -import fsExtra from "fs-extra"; -import path from "path"; -import shell from "shelljs"; - -import { useFixture } from "./helpers"; - -const hardhatBinary = path.join("node_modules", ".bin", "hardhat"); - -const versionRegExp = /^\d+\.\d+\.\d+\n$/; - -describe("e2e tests", function () { - before(function () { - shell.set("-e"); // Ensure that shell failures will induce test failures - }); - - describe("basic-project", function () { - useFixture("basic-project"); - - it("should print the hardhat version", function () { - const { code, stdout } = shell.exec(`${hardhatBinary} --version`); - assert.equal(code, 0); - assert.match(stdout, versionRegExp); - }); - - it("should compile", function () { - // hh clean - const { code: hhCleanCode1 } = shell.exec(`${hardhatBinary} clean`); - assert.equal(hhCleanCode1, 0); - - // hh compile - const { code: hhCompileCode, stdout } = shell.exec( - `${hardhatBinary} compile` - ); - assert.equal(hhCompileCode, 0); - - // check artifacts were created - const artifactsDir = path.join(this.testDirPath, "artifacts"); - assert.isTrue(fsExtra.existsSync(artifactsDir)); - - // check stdout - assert.match(stdout, /Compiled \d+ Solidity files? 
successfully/); - - // hh clean - const { code: hhCleanCode2 } = shell.exec(`${hardhatBinary} clean`); - assert.equal(hhCleanCode2, 0); - }); - - it("should test programmatically", function () { - // hh clean - const { code: hhCleanCode1 } = shell.exec(`${hardhatBinary} clean`); - assert.equal(hhCleanCode1, 0); - - // hh compile - const { code: testRunCode, stdout } = shell.exec( - `${hardhatBinary} run ./scripts/multi-run-test.js` - ); - assert.equal(testRunCode, 0); - - // check stdout - - // check we get passing runs - assert.match(stdout, /2 passing/); - // check we get no runs without tests - assert.notMatch( - stdout, - /0 passing/, - "A test run occured with 0 tests - potential caching issue" - ); - - // hh clean - const { code: hhCleanCode2 } = shell.exec(`${hardhatBinary} clean`); - assert.equal(hhCleanCode2, 0); - }); - - it("the test task should accept test files", async function () { - // hh clean - const { code: hhCleanCode1 } = shell.exec(`${hardhatBinary} clean`); - assert.equal(hhCleanCode1, 0); - - // hh test without ./ - const { code: testRunCode1 } = shell.exec( - `${hardhatBinary} test test/simple.js` - ); - assert.equal(testRunCode1, 0); - - // hh test with ./ - const { code: testRunCode2 } = shell.exec( - `${hardhatBinary} test ./test/simple.js` - ); - assert.equal(testRunCode2, 0); - }); - - it("should run tests in parallel", function () { - // hh clean - const { code: hhCleanCode1 } = shell.exec(`${hardhatBinary} clean`); - assert.equal(hhCleanCode1, 0); - - // hh test --parallel - const { code: hhCompileCode, stdout } = shell.exec( - `${hardhatBinary} test --parallel` - ); - assert.equal(hhCompileCode, 0); - - // check we get passing runs - assert.match(stdout, /2 passing/); - - // hh clean - const { code: hhCleanCode2 } = shell.exec(`${hardhatBinary} clean`); - assert.equal(hhCleanCode2, 0); - }); - }); - - describe("sample projects", function () { - // These tests generate the sample project and then exercise the commands - // that are suggested to the user after project generation. It would be - // better if that list of commands were externalized somewhere, in a place - // from which we could consume them here, so that the lists of commands - // executed here cannot fall out of sync with what's actually suggested to - // the user, but this approach was more expedient. 
- - before(function () { - if (os.type() === "Windows_NT") { - // See https://github.com/nomiclabs/hardhat/issues/1698 - this.skip(); - } - }); - - describe("javascript sample project", function () { - useFixture("javascript-sample-project"); - - before(function () { - shell.exec(`${hardhatBinary}`, { - env: { - ...process.env, - HARDHAT_CREATE_JAVASCRIPT_PROJECT_WITH_DEFAULTS: "true", - }, - }); - }); - - for (const suggestedCommand of [ - // This list should be kept reasonably in sync with - // packages/hardhat-core/sample-projects/javascript/README.md - `${hardhatBinary} help`, - `${hardhatBinary} test`, - `${hardhatBinary} run scripts/deploy.js`, - ]) { - it(`should permit successful execution of the suggested command "${suggestedCommand}"`, async function () { - shell.exec(suggestedCommand, { - env: { - ...process.env, - }, - }); - }); - } - - it("should report gas", async function () { - const { stdout } = shell.exec(`${hardhatBinary} test`, { - env: { - ...process.env, - REPORT_GAS: "true", - }, - }); - - // check that some row has the gas report headers - // this will break if hardhat-gas-reporter changes its output - const lines = stdout.split(os.EOL); - const hasGasReport = lines.some((x) => - x.match(/Contract.*Method.*Min.*Max.*Avg/) - ); - - assert.isTrue(hasGasReport); - }); - }); - - describe("typescript sample project", function () { - useFixture("typescript-sample-project"); - - before(function () { - shell.exec(`${hardhatBinary}`, { - env: { - ...process.env, - HARDHAT_CREATE_TYPESCRIPT_PROJECT_WITH_DEFAULTS: "true", - }, - }); - }); - - for (const suggestedCommand of [ - // This list should be kept reasonably in sync with - // packages/hardhat-core/sample-projects/typescript/README.md - `${hardhatBinary} help`, - `${hardhatBinary} test`, - `${hardhatBinary} run scripts/deploy.ts`, - ]) { - it(`should permit successful execution of the suggested command "${suggestedCommand}"`, async function () { - shell.exec(suggestedCommand, { - env: { - ...process.env, - }, - }); - }); - } - - it("should report gas", async function () { - const { stdout } = shell.exec(`${hardhatBinary} test`, { - env: { - ...process.env, - REPORT_GAS: "true", - }, - }); - - const lines = stdout.split(os.EOL); - const hasGasReport = lines.some((x) => - x.match(/Contract.*Method.*Min.*Max.*Avg/) - ); - - assert.isTrue(hasGasReport); - }); - }); - }); - - describe("no project", function () { - useFixture("empty"); - - it("should print the hardhat version", function () { - const { code, stdout } = shell.exec(`${hardhatBinary} --version`); - assert.equal(code, 0); - assert.match(stdout, versionRegExp); - }); - - it(`should print an error message if you try to compile`, function () { - shell.set("+e"); - const { code, stderr } = shell.exec(`${hardhatBinary} compile`); - shell.set("-e"); - assert.equal(code, 1); - // This is a loose match to check HH1 and HH15 - assert.match(stderr, /You are not inside/); - assert.match(stderr, /HH15?/); - }); - }); - - describe("--typecheck", function () { - // we don't want to throw for failed executions in these tests - before(() => shell.set("+e")); - after(() => shell.set("-e")); - - describe("javascript project", function () { - useFixture("basic-project"); - - it("should throw if --typecheck is used", async function () { - const { code, stderr } = shell.exec(`${hardhatBinary} --typecheck`); - - assert.equal(code, 1); - assert.include(stderr, "Error HH313"); - }); - }); - - describe("type error in config", function () { - useFixture("type-error-in-config"); - - 
it("should not throw by default", async function () { - const { code } = shell.exec(`${hardhatBinary}`); - - assert.equal(code, 0); - }); - - it("should throw if --typecheck is used", async function () { - const { code, stderr } = shell.exec(`${hardhatBinary} --typecheck`); - - assert.equal(code, 1); - assert.include(stderr, "error TS"); - }); - }); - - describe("type error in script", function () { - useFixture("type-error-in-script"); - - it("should not throw by default", async function () { - const { code } = shell.exec(`${hardhatBinary} run script.ts`); - - assert.equal(code, 0); - }); - - it("should throw if --typecheck is used", async function () { - const { code, stderr } = shell.exec( - `${hardhatBinary} run script.ts --typecheck` - ); - - assert.equal(code, 1); - assert.include(stderr, "error TS"); - }); - }); - - describe("type error in test", function () { - useFixture("type-error-in-test"); - - it("should not throw by default", async function () { - const { code } = shell.exec(`${hardhatBinary} test`); - - assert.equal(code, 0); - }); - - it("should throw if --typecheck is used", async function () { - const { code, stderr } = shell.exec( - `${hardhatBinary} test --typecheck` - ); - - assert.equal(code, 1); - assert.include(stderr, "error TS"); - }); - }); - }); -}); diff --git a/packages/e2e/tsconfig.json b/packages/e2e/tsconfig.json deleted file mode 100644 index 7c6c380560..0000000000 --- a/packages/e2e/tsconfig.json +++ /dev/null @@ -1,13 +0,0 @@ -{ - "extends": "../../config/typescript/tsconfig.json", - "compilerOptions": { - "outDir": "./dist", - "composite": true - }, - "exclude": ["./dist", "./node_modules", "./test/fixture-projects/**/*"], - "references": [ - { - "path": "../hardhat-core/src" - } - ] -} diff --git a/packages/eslint-plugin/CHANGELOG.md b/packages/eslint-plugin/CHANGELOG.md new file mode 100644 index 0000000000..9a3724f684 --- /dev/null +++ b/packages/eslint-plugin/CHANGELOG.md @@ -0,0 +1,13 @@ +# @nomiclabs/eslint-plugin-hardhat-internal-rules + +## 1.0.2 + +### Patch Changes + +- 53f7a5f80: allow linting classes that extend the HardhatPluginError class + +## 1.0.1 + +### Patch Changes + +- 337456b8a: add eslint rule for hardhat plugin errors diff --git a/packages/eslint-plugin/index.js b/packages/eslint-plugin/index.js index 8d75155512..26cc501e2a 100644 --- a/packages/eslint-plugin/index.js +++ b/packages/eslint-plugin/index.js @@ -1,4 +1,4 @@ -const { onlyHardhatErrorRule } = require("./onlyHardhatErrorRule"); +const { onlyHardhatErrorRule, onlyHardhatPluginErrorRule } = require("./onlyHardhatErrorRule"); const rules = { "only-hardhat-error": { @@ -11,6 +11,16 @@ const rules = { }, }, }, + "only-hardhat-plugin-error": { + create: onlyHardhatPluginErrorRule, + meta: { + type: "problem", + schema: [], + docs: { + description: "Enforces that only HardhatPluginError is thrown.", + }, + }, + } }; module.exports = { rules }; diff --git a/packages/eslint-plugin/onlyHardhatErrorRule.js b/packages/eslint-plugin/onlyHardhatErrorRule.js index ea90694928..3819076dda 100644 --- a/packages/eslint-plugin/onlyHardhatErrorRule.js +++ b/packages/eslint-plugin/onlyHardhatErrorRule.js @@ -1,12 +1,14 @@ const { ESLintUtils } = require("@typescript-eslint/experimental-utils"); function onlyHardhatErrorRule(context) { - const parserServices = ESLintUtils.getParserServices(context) + const parserServices = ESLintUtils.getParserServices(context); const checker = parserServices.program.getTypeChecker(); return { ThrowStatement(node) { - const expression = 
parserServices.esTreeNodeToTSNodeMap.get(node.argument); + const expression = parserServices.esTreeNodeToTSNodeMap.get( + node.argument + ); if (!isHardhatError(expression, checker)) { const exceptionName = getExpressionClassName(expression, checker); @@ -20,6 +22,28 @@ function onlyHardhatErrorRule(context) { }; } +function onlyHardhatPluginErrorRule(context) { + const parserServices = ESLintUtils.getParserServices(context); + const checker = parserServices.program.getTypeChecker(); + + return { + ThrowStatement(node) { + const expression = parserServices.esTreeNodeToTSNodeMap.get( + node.argument + ); + + if (!isHardhatPluginError(expression, checker)) { + const exceptionName = getExpressionClassName(expression, checker); + + context.report({ + node, + message: `Only HardhatPluginError must be thrown, ${exceptionName} found.`, + }); + } + }, + }; +} + function getExpressionClassName(expression, tc) { const exceptionType = tc.getTypeAtLocation(expression); @@ -30,8 +54,29 @@ function getExpressionClassName(expression, tc) { return exceptionType.symbol.getName(); } +function getExpressionClassNameAndBaseClass(expression, tc) { + const exceptionType = tc.getTypeAtLocation(expression); + + if (exceptionType.symbol === undefined) { + return ["[UNKNOWN EXCEPTION TYPE]"]; + } + + const className = exceptionType.symbol.getName(); + const baseClass = + exceptionType.resolvedBaseConstructorType?.symbol?.getName() ?? + "[UNKNOWN BASE CLASS]"; + + return [className, baseClass]; +} + function isHardhatError(expression, tc) { return getExpressionClassName(expression, tc) === "HardhatError"; } -module.exports = { onlyHardhatErrorRule } +function isHardhatPluginError(expression, tc) { + return getExpressionClassNameAndBaseClass(expression, tc).includes( + "HardhatPluginError" + ); +} + +module.exports = { onlyHardhatErrorRule, onlyHardhatPluginErrorRule }; diff --git a/packages/eslint-plugin/package.json b/packages/eslint-plugin/package.json index 8f8ae3e6a9..f80eafa69b 100644 --- a/packages/eslint-plugin/package.json +++ b/packages/eslint-plugin/package.json @@ -1,6 +1,6 @@ { "name": "@nomiclabs/eslint-plugin-hardhat-internal-rules", - "version": "1.0.0", + "version": "1.0.2", "main": "index.js", "license": "MIT" } diff --git a/packages/hardhat-chai-matchers/CHANGELOG.md b/packages/hardhat-chai-matchers/CHANGELOG.md index 02396578bc..5da3f4d0ba 100644 --- a/packages/hardhat-chai-matchers/CHANGELOG.md +++ b/packages/hardhat-chai-matchers/CHANGELOG.md @@ -1,5 +1,11 @@ # @nomicfoundation/hardhat-chai-matchers +## 1.0.6 + +### Patch Changes + +- 8fa00c97c: Improved the warning shown when both `@nomicfoundation/hardhat-chai-matchers` and `@nomiclabs/hardhat-waffle` are used. 
+ ## 1.0.5 ### Patch Changes diff --git a/packages/hardhat-chai-matchers/package.json b/packages/hardhat-chai-matchers/package.json index c5f99a277a..a3732936d3 100644 --- a/packages/hardhat-chai-matchers/package.json +++ b/packages/hardhat-chai-matchers/package.json @@ -1,6 +1,6 @@ { "name": "@nomicfoundation/hardhat-chai-matchers", - "version": "1.0.5", + "version": "1.0.6", "description": "Hardhat utils for testing", "homepage": "https://github.com/nomicfoundation/hardhat/tree/main/packages/hardhat-chai-matchers", "repository": "github:nomicfoundation/hardhat", @@ -42,8 +42,8 @@ "@types/chai": "^4.2.0", "@types/mocha": ">=9.1.0", "@types/node": "^14.0.0", - "@typescript-eslint/eslint-plugin": "^5.30.7", - "@typescript-eslint/parser": "4.29.2", + "@typescript-eslint/eslint-plugin": "5.53.0", + "@typescript-eslint/parser": "5.53.0", "bignumber.js": "^9.0.2", "bn.js": "^5.1.0", "chai": "^4.2.0", @@ -58,8 +58,8 @@ "mocha": "^10.0.0", "prettier": "2.4.1", "rimraf": "^3.0.2", - "ts-node": "^8.1.0", - "typescript": "~4.5.2" + "ts-node": "^10.8.0", + "typescript": "~4.7.4" }, "peerDependencies": { "@nomiclabs/hardhat-ethers": "^2.0.0", @@ -71,7 +71,6 @@ "@ethersproject/abi": "^5.1.2", "@types/chai-as-promised": "^7.1.3", "chai-as-promised": "^7.1.1", - "chalk": "^2.4.2", "deep-eql": "^4.0.1", "ordinal": "^1.0.3" } diff --git a/packages/hardhat-chai-matchers/src/index.ts b/packages/hardhat-chai-matchers/src/index.ts index 57eaaa3459..658ffa8acd 100644 --- a/packages/hardhat-chai-matchers/src/index.ts +++ b/packages/hardhat-chai-matchers/src/index.ts @@ -2,7 +2,7 @@ import "@nomiclabs/hardhat-ethers"; import "./types"; -import { checkIfWaffleIsInstalled } from "./internal/checkIfWaffleIsInstalled"; +import { hardhatWaffleIncompatibilityCheck } from "./internal/hardhatWaffleIncompatibilityCheck"; import "./internal/add-chai-matchers"; -checkIfWaffleIsInstalled(); +hardhatWaffleIncompatibilityCheck(); diff --git a/packages/hardhat-chai-matchers/src/internal/checkIfWaffleIsInstalled.ts b/packages/hardhat-chai-matchers/src/internal/checkIfWaffleIsInstalled.ts deleted file mode 100644 index 00fccecd94..0000000000 --- a/packages/hardhat-chai-matchers/src/internal/checkIfWaffleIsInstalled.ts +++ /dev/null @@ -1,15 +0,0 @@ -import chalk from "chalk"; - -export function checkIfWaffleIsInstalled() { - try { - require.resolve("ethereum-waffle"); - - console.warn( - chalk.yellow( - `You have both ethereum-waffle and @nomicfoundation/hardhat-chai-matchers installed. They don't work correctly together, so please make sure you only use one. - -We recommend you migrate to @nomicfoundation/hardhat-chai-matchers. Learn how to do it here: https://hardhat.org/migrate-from-waffle` - ) - ); - } catch {} -} diff --git a/packages/hardhat-chai-matchers/src/internal/emit.ts b/packages/hardhat-chai-matchers/src/internal/emit.ts index cd6988cfb6..d6ea26791c 100644 --- a/packages/hardhat-chai-matchers/src/internal/emit.ts +++ b/packages/hardhat-chai-matchers/src/internal/emit.ts @@ -165,6 +165,17 @@ function assertArgsArraysEqual( expectedArgs[index]?.length !== undefined && typeof expectedArgs[index] !== "string" ) { + const expectedLength = expectedArgs[index].length; + const actualLength = actualArgs[index].length; + assert( + expectedLength === actualLength, + `Expected the ${ordinal( + index + 1 + )} argument of the "${eventName}" event to have ${expectedLength} ${ + expectedLength === 1 ? 
"element" : "elements" + }, but it has ${actualLength}` + ); + for (let j = 0; j < expectedArgs[index].length; j++) { new Assertion(actualArgs[index][j], undefined, ssfi, true).equal( expectedArgs[index][j] diff --git a/packages/hardhat-chai-matchers/src/internal/hardhatWaffleIncompatibilityCheck.ts b/packages/hardhat-chai-matchers/src/internal/hardhatWaffleIncompatibilityCheck.ts new file mode 100644 index 0000000000..2f2638fb9d --- /dev/null +++ b/packages/hardhat-chai-matchers/src/internal/hardhatWaffleIncompatibilityCheck.ts @@ -0,0 +1,11 @@ +export function hardhatWaffleIncompatibilityCheck() { + if ((global as any).__HARDHAT_WAFFLE_IS_LOADED === true) { + throw new Error( + `You are using both @nomicfoundation/hardhat-chai-matchers and @nomiclabs/hardhat-waffle. They don't work correctly together, so please make sure you only use one. + +We recommend you migrate to @nomicfoundation/hardhat-chai-matchers. Learn how to do it here: https://hardhat.org/migrate-from-waffle` + ); + } + + (global as any).__HARDHAT_CHAI_MATCHERS_IS_LOADED = true; +} diff --git a/packages/hardhat-chai-matchers/src/internal/reverted/revertedWithCustomError.ts b/packages/hardhat-chai-matchers/src/internal/reverted/revertedWithCustomError.ts index 329b10082c..f5e0ea26fd 100644 --- a/packages/hardhat-chai-matchers/src/internal/reverted/revertedWithCustomError.ts +++ b/packages/hardhat-chai-matchers/src/internal/reverted/revertedWithCustomError.ts @@ -1,4 +1,5 @@ import { AssertionError } from "chai"; +import ordinal from "ordinal"; import { buildAssert, Ssfi } from "../../utils"; import { decodeReturnData, getReturnDataFromError } from "./utils"; @@ -193,6 +194,16 @@ export async function revertedWithCustomErrorWithArgs( throw e; } } else if (Array.isArray(expectedArg)) { + const expectedLength = expectedArg.length; + const actualLength = actualArg.length; + assert( + expectedLength === actualLength, + `Expected the ${ordinal(i + 1)} argument of the "${ + customError.name + }" custom error to have ${expectedLength} ${ + expectedLength === 1 ? 
"element" : "elements" + }, but it has ${actualLength}` + ); new Assertion(actualArg).to.deep.equal(expectedArg); } else { new Assertion(actualArg).to.equal(expectedArg); diff --git a/packages/hardhat-chai-matchers/test/events.ts b/packages/hardhat-chai-matchers/test/events.ts index 1c9f8faef1..7bd6dd7f6d 100644 --- a/packages/hardhat-chai-matchers/test/events.ts +++ b/packages/hardhat-chai-matchers/test/events.ts @@ -337,6 +337,26 @@ describe(".to.emit (contract events)", () => { "expected 1 to equal 3" ); }); + + it("Should fail when the arrays don't have the same length", async function () { + await expect( + expect(contract.emitUintArray(1, 2)) + .to.emit(contract, "WithUintArray") + .withArgs([1]) + ).to.be.eventually.rejectedWith( + AssertionError, + 'Expected the 1st argument of the "WithUintArray" event to have 1 element, but it has 2' + ); + + await expect( + expect(contract.emitUintArray(1, 2)) + .to.emit(contract, "WithUintArray") + .withArgs([1, 2, 3]) + ).to.be.eventually.rejectedWith( + AssertionError, + 'Expected the 1st argument of the "WithUintArray" event to have 3 elements, but it has 2' + ); + }); }); describe("with a bytes32 array argument", function () { diff --git a/packages/hardhat-chai-matchers/test/reverted/revertedWithCustomError.ts b/packages/hardhat-chai-matchers/test/reverted/revertedWithCustomError.ts index e791eaac8a..e5a9d2a037 100644 --- a/packages/hardhat-chai-matchers/test/reverted/revertedWithCustomError.ts +++ b/packages/hardhat-chai-matchers/test/reverted/revertedWithCustomError.ts @@ -317,6 +317,26 @@ describe("INTEGRATION: Reverted with custom error", function () { ); }); + it("should fail when the arrays don't have the same length", async function () { + await expect( + expect(matchers.revertWithCustomErrorWithPair(1, 2)) + .to.be.revertedWithCustomError(matchers, "CustomErrorWithPair") + .withArgs([1]) + ).to.be.rejectedWith( + AssertionError, + 'Expected the 1st argument of the "CustomErrorWithPair" custom error to have 1 element, but it has 2' + ); + + await expect( + expect(matchers.revertWithCustomErrorWithPair(1, 2)) + .to.be.revertedWithCustomError(matchers, "CustomErrorWithPair") + .withArgs([1, 2, 3]) + ).to.be.rejectedWith( + AssertionError, + 'Expected the 1st argument of the "CustomErrorWithPair" custom error to have 3 elements, but it has 2' + ); + }); + it("Should fail when used with .not.", async function () { expect(() => expect(matchers.revertWithSomeCustomError()) diff --git a/packages/hardhat-core/CHANGELOG.md b/packages/hardhat-core/CHANGELOG.md index 32d78accd4..6d618fd1f4 100644 --- a/packages/hardhat-core/CHANGELOG.md +++ b/packages/hardhat-core/CHANGELOG.md @@ -1,5 +1,56 @@ # hardhat +## 2.14.0 + +### Minor Changes + +- d69020f72: Set Shanghai as the default hardfork + +## 2.13.1 + +### Patch Changes + +- 5d4d1edba: Fixed a problem when importing scoped packages in a Yarn Berry monorepo that uses PnP (thanks @zouguangxian!) +- cdd9aa578: Added support for the shanghai hardfork + +## 2.13.0 + +### Minor Changes + +- 83ef755f3: Hardhat's task runner now allows you to override the arguments passed to subtasks. +- 50779cd10: Added support for writing scripts and tests as ES modules. + + To learn how to start using ESM with Hardhat read [this guide](https://hardhat.org/hardhat-runner/docs/advanced/using-esm). + +### Patch Changes + +- f55a3a769: Reduce the amount of ETH sent to the Lock contract in the sample project's deploy script (Thanks @mutedSpectre!) 
+- 929b26849: The `resolveJsonModule` compiler option is now enabled by default in the sample tsconfig (thanks @mlshv!) +- 071e6bc89: Stop colorizing the entire message when an error is printed +- 0fa7ac548: Make Hardhat more tolerant to unsupported Node.js versions +- 7a5bc5512: Send less ETH and lock it for less time on sample deployment scripts. +- 7ceb5f90d: Added basic support for solc `viaIR` setting +- e6f07b4b6: Fixed an issue with a warning showing the same solc version multiple times (thanks @shark0der!) +- 6e51edf4d: Added support for Solidity 0.8.18 (thanks @taxio!) +- b9c34f36f: Fix an error that could happen when a download failed. +- 1c833bf04: Propagate HttpProviderError exception messages. + +## 2.12.7 + +### Patch Changes + +- e443b3667: Added an option in Hardhat Network to allow mining blocks with the same timestamp +- c23a1cac4: Added support for the `http_proxy` environment variable. When this variable is set, Hardhat will send its requests through the given proxy for things like JSON-RPC requests, mainnet forking and downloading compilers. + + We also removed support for the `HTTP_PROXY` and `HTTPS_PROXY` environment variables, since `http_proxy` is the most commonly used environment variable for this kind of thing. Those variables could only be used for downloading compilers. + + Finally, we also added support for `no_proxy`, which accepts a comma separated list of hosts or `"*"`. Any host included in this list will not be proxied. + + Note that requests to `"localhost"` or `"127.0.0.1"` are never proxied. + +- 69546655e: Added support for sending batch requests through WebSocket to the Hardhat node (thanks @tenbits!) +- 6bf1673bb: Added a config validation for the number of optimizer runs used (thanks @konarshankar07!) + ## 2.12.6 ### Patch Changes diff --git a/packages/hardhat-core/package.json b/packages/hardhat-core/package.json index c35f74d6f1..8e4ad24189 100644 --- a/packages/hardhat-core/package.json +++ b/packages/hardhat-core/package.json @@ -1,6 +1,6 @@ { "name": "hardhat", - "version": "2.12.6", + "version": "2.14.0", "author": "Nomic Labs LLC", "license": "MIT", "homepage": "https://hardhat.org", @@ -19,9 +19,11 @@ "task-runner", "solidity" ], - "bin": "internal/cli/cli.js", + "bin": { + "hardhat": "internal/cli/bootstrap.js" + }, "engines": { - "node": "^14.0.0 || ^16.0.0 || ^18.0.0" + "node": ">=14.0.0" }, "scripts": { "lint": "yarn prettier --check && yarn eslint", @@ -77,8 +79,8 @@ "@types/sinon": "^9.0.8", "@types/uuid": "^8.3.1", "@types/ws": "^7.2.1", - "@typescript-eslint/eslint-plugin": "^5.30.7", - "@typescript-eslint/parser": "4.29.2", + "@typescript-eslint/eslint-plugin": "5.53.0", + "@typescript-eslint/parser": "5.53.0", "bignumber.js": "^9.0.2", "bn.js": "^5.1.0", "chai": "^4.2.0", @@ -91,26 +93,25 @@ "ethers": "^5.0.0", "mocha": "^10.0.0", "prettier": "2.4.1", - "proxy": "^1.0.2", "rimraf": "^3.0.2", "sinon": "^9.0.0", "time-require": "^0.1.2", - "ts-node": "^8.1.0", - "typescript": "~4.5.2" + "ts-node": "^10.8.0", + "typescript": "~4.7.4" }, "dependencies": { "@ethersproject/abi": "^5.1.2", "@metamask/eth-sig-util": "^4.0.0", - "@nomicfoundation/ethereumjs-block": "^4.0.0", - "@nomicfoundation/ethereumjs-blockchain": "^6.0.0", - "@nomicfoundation/ethereumjs-common": "^3.0.0", - "@nomicfoundation/ethereumjs-evm": "^1.0.0", - "@nomicfoundation/ethereumjs-rlp": "^4.0.0", - "@nomicfoundation/ethereumjs-statemanager": "^1.0.0", - "@nomicfoundation/ethereumjs-trie": "^5.0.0", - "@nomicfoundation/ethereumjs-tx": "^4.0.0", - 
"@nomicfoundation/ethereumjs-util": "^8.0.0", - "@nomicfoundation/ethereumjs-vm": "^6.0.0", + "@nomicfoundation/ethereumjs-block": "5.0.1", + "@nomicfoundation/ethereumjs-blockchain": "7.0.1", + "@nomicfoundation/ethereumjs-common": "4.0.1", + "@nomicfoundation/ethereumjs-evm": "2.0.1", + "@nomicfoundation/ethereumjs-rlp": "5.0.1", + "@nomicfoundation/ethereumjs-statemanager": "2.0.1", + "@nomicfoundation/ethereumjs-trie": "6.0.1", + "@nomicfoundation/ethereumjs-tx": "5.0.1", + "@nomicfoundation/ethereumjs-util": "9.0.1", + "@nomicfoundation/ethereumjs-vm": "7.0.1", "@nomicfoundation/solidity-analyzer": "^0.1.0", "@sentry/node": "^5.18.1", "@types/bn.js": "^5.1.0", diff --git a/packages/hardhat-core/sample-projects/javascript-esm/LICENSE.md b/packages/hardhat-core/sample-projects/javascript-esm/LICENSE.md new file mode 100644 index 0000000000..c539a505b4 --- /dev/null +++ b/packages/hardhat-core/sample-projects/javascript-esm/LICENSE.md @@ -0,0 +1,11 @@ +# License + +This is free and unencumbered software released into the public domain. + +Anyone is free to copy, modify, publish, use, compile, sell, or distribute this software, either in source code form or as a compiled binary, for any purpose, commercial or non-commercial, and by any means. + +In jurisdictions that recognize copyright laws, the author or authors of this software dedicate any and all copyright interest in the software to the public domain. We make this dedication for the benefit of the public at large and to the detriment of our heirs and successors. We intend this dedication to be an overt act of relinquishment in perpetuity of all present and future rights to this software under copyright law. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +For more information, please refer to [https://unlicense.org](https://unlicense.org) diff --git a/packages/hardhat-core/sample-projects/javascript-esm/README.md b/packages/hardhat-core/sample-projects/javascript-esm/README.md new file mode 100644 index 0000000000..e9dd5f3485 --- /dev/null +++ b/packages/hardhat-core/sample-projects/javascript-esm/README.md @@ -0,0 +1,13 @@ +# Sample Hardhat Project + +This project demonstrates a basic Hardhat use case. It comes with a sample contract, a test for that contract, and a script that deploys that contract. 
+ +Try running some of the following tasks: + +```shell +npx hardhat help +npx hardhat test +REPORT_GAS=true npx hardhat test +npx hardhat node +npx hardhat run scripts/deploy.js +``` diff --git a/packages/hardhat-core/sample-projects/javascript-esm/contracts/Lock.sol b/packages/hardhat-core/sample-projects/javascript-esm/contracts/Lock.sol new file mode 100644 index 0000000000..50935f61fd --- /dev/null +++ b/packages/hardhat-core/sample-projects/javascript-esm/contracts/Lock.sol @@ -0,0 +1,34 @@ +// SPDX-License-Identifier: UNLICENSED +pragma solidity ^0.8.9; + +// Uncomment this line to use console.log +// import "hardhat/console.sol"; + +contract Lock { + uint public unlockTime; + address payable public owner; + + event Withdrawal(uint amount, uint when); + + constructor(uint _unlockTime) payable { + require( + block.timestamp < _unlockTime, + "Unlock time should be in the future" + ); + + unlockTime = _unlockTime; + owner = payable(msg.sender); + } + + function withdraw() public { + // Uncomment this line, and the import of "hardhat/console.sol", to print a log in your terminal + // console.log("Unlock time is %o and block timestamp is %o", unlockTime, block.timestamp); + + require(block.timestamp >= unlockTime, "You can't withdraw yet"); + require(msg.sender == owner, "You aren't the owner"); + + emit Withdrawal(address(this).balance, block.timestamp); + + owner.transfer(address(this).balance); + } +} diff --git a/packages/hardhat-core/sample-projects/javascript-esm/hardhat.config.cjs b/packages/hardhat-core/sample-projects/javascript-esm/hardhat.config.cjs new file mode 100644 index 0000000000..86913e7054 --- /dev/null +++ b/packages/hardhat-core/sample-projects/javascript-esm/hardhat.config.cjs @@ -0,0 +1,6 @@ +require("@nomicfoundation/hardhat-toolbox"); + +/** @type import('hardhat/config').HardhatUserConfig */ +module.exports = { + solidity: "0.8.17", +}; diff --git a/packages/hardhat-core/sample-projects/javascript-esm/scripts/deploy.js b/packages/hardhat-core/sample-projects/javascript-esm/scripts/deploy.js new file mode 100644 index 0000000000..05f8a19cd9 --- /dev/null +++ b/packages/hardhat-core/sample-projects/javascript-esm/scripts/deploy.js @@ -0,0 +1,23 @@ +// We require the Hardhat Runtime Environment explicitly here. This is optional +// but useful for running the script in a standalone fashion through `node