diff --git a/.github/workflows/bench.yml b/.github/workflows/bench.yml index 77dce0abfd..ddad7d5781 100644 --- a/.github/workflows/bench.yml +++ b/.github/workflows/bench.yml @@ -62,7 +62,7 @@ jobs: bench-success: if: always() name: bench success - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: # Note: This check is a dummy because we don't have any bench checks enabled. - - run: echo OK. \ No newline at end of file + - run: echo OK. diff --git a/.github/workflows/book.yml b/.github/workflows/book.yml index e5eac1efd2..9879611567 100644 --- a/.github/workflows/book.yml +++ b/.github/workflows/book.yml @@ -8,7 +8,7 @@ on: jobs: test: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest name: test timeout-minutes: 60 @@ -31,7 +31,7 @@ jobs: run: mdbook test lint: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest name: lint timeout-minutes: 60 @@ -50,7 +50,7 @@ jobs: run: mdbook-linkcheck --standalone build: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest timeout-minutes: 60 steps: - uses: actions/checkout@v3 @@ -69,6 +69,8 @@ jobs: echo $(pwd)/mdbook-template >> $GITHUB_PATH - uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true - name: Build book run: mdbook build @@ -111,7 +113,7 @@ jobs: deploy: # Only deploy if a push to main if: github.ref_name == 'main' && github.event_name == 'push' - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest needs: [test, lint, build] # Grant GITHUB_TOKEN the permissions required to make a Pages deployment diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 7fecee5a77..86a668335d 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -17,6 +17,8 @@ jobs: - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@clippy - uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true - run: cargo clippy --workspace --all-targets --all-features env: RUSTFLAGS: -D warnings @@ -29,6 +31,8 @@ jobs: - uses: actions/checkout@v3 - uses: dtolnay/rust-toolchain@nightly - uses: Swatinem/rust-cache@v2 + with: + cache-on-failure: true - run: cargo docs --document-private-items env: # Keep in sync with ./book.yml:jobs.build diff --git a/.github/workflows/deny.yml b/.github/workflows/deny.yml index 8332723b81..f4ee009810 100644 --- a/.github/workflows/deny.yml +++ b/.github/workflows/deny.yml @@ -18,7 +18,7 @@ concurrency: deny-${{ github.head_ref || github.run_id }} jobs: deny: name: deny - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - uses: actions/checkout@v3 - uses: EmbarkStudios/cargo-deny-action@v1 diff --git a/.github/workflows/fuzz.yml b/.github/workflows/fuzz.yml index 07a31797af..d1175b61ea 100644 --- a/.github/workflows/fuzz.yml +++ b/.github/workflows/fuzz.yml @@ -64,7 +64,7 @@ jobs: fuzz-success: if: always() name: fuzz success - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest needs: all steps: # Note: This check is a dummy because we currently have fuzz tests disabled. diff --git a/.github/workflows/hive.yml b/.github/workflows/hive.yml index 332f11be08..1abe4ed46d 100644 --- a/.github/workflows/hive.yml +++ b/.github/workflows/hive.yml @@ -27,6 +27,7 @@ jobs: with: context: . 
tags: paradigmxyz/reth:main + build-args: BUILD_PROFILE=hivetests outputs: type=docker,dest=./artifacts/reth_image.tar cache-from: type=gha cache-to: type=gha,mode=max diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 1417695755..8b980a5b1b 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -15,38 +15,42 @@ env: jobs: extract-version: name: extract version - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest steps: - name: Extract version run: echo "VERSION=$(echo ${GITHUB_REF#refs/tags/})" >> $GITHUB_OUTPUT id: extract_version outputs: VERSION: ${{ steps.extract_version.outputs.VERSION }} + build: name: build release strategy: matrix: - arch: [aarch64-unknown-linux-gnu, - x86_64-unknown-linux-gnu, - x86_64-apple-darwin, - aarch64-apple-darwin, - x86_64-pc-windows-gnu] + arch: + [ + aarch64-unknown-linux-gnu, + x86_64-unknown-linux-gnu, + x86_64-apple-darwin, + aarch64-apple-darwin, + x86_64-pc-windows-gnu, + ] include: - - arch: aarch64-unknown-linux-gnu - platform: ubuntu-20.04 - profile: maxperf - - arch: x86_64-unknown-linux-gnu - platform: ubuntu-20.04 - profile: maxperf - - arch: x86_64-apple-darwin - platform: macos-latest - profile: maxperf - - arch: aarch64-apple-darwin - platform: macos-latest - profile: maxperf - - arch: x86_64-pc-windows-gnu - platform: ubuntu-20.04 - profile: maxperf + - arch: aarch64-unknown-linux-gnu + platform: ubuntu-20.04 + profile: maxperf + - arch: x86_64-unknown-linux-gnu + platform: ubuntu-20.04 + profile: maxperf + - arch: x86_64-apple-darwin + platform: macos-latest + profile: maxperf + - arch: aarch64-apple-darwin + platform: macos-latest + profile: maxperf + - arch: x86_64-pc-windows-gnu + platform: ubuntu-20.04 + profile: maxperf runs-on: ${{ matrix.platform }} needs: extract-version @@ -76,18 +80,18 @@ jobs: # ============================== - name: Build reth for ${{ matrix.arch }} - run: | + run: | cargo install cross env PROFILE=${{ matrix.profile }} make build-${{ matrix.arch }} - name: Move cross-compiled binary - if: matrix.arch != 'x86_64-pc-windows-gnu' + if: matrix.arch != 'x86_64-pc-windows-gnu' run: | mkdir artifacts mv target/${{ matrix.arch }}/${{ matrix.profile }}/reth ./artifacts - name: Move cross-compiled binary (Windows) - if: matrix.arch == 'x86_64-pc-windows-gnu' + if: matrix.arch == 'x86_64-pc-windows-gnu' run: | mkdir artifacts mv target/${{ matrix.arch }}/${{ matrix.profile }}/reth.exe ./artifacts @@ -113,8 +117,8 @@ jobs: # Upload artifacts # This is required to share artifacts between different jobs # ======================================================================= - - name: Upload artifact - uses: actions/upload-artifact@v3 + - name: Upload artifact + uses: actions/upload-artifact@v3 with: name: reth-${{ needs.extract-version.outputs.VERSION }}-${{ matrix.arch }}.tar.gz path: reth-${{ needs.extract-version.outputs.VERSION }}-${{ matrix.arch }}.tar.gz @@ -127,10 +131,10 @@ jobs: draft-release: name: draft release - needs: [build, extract-version] - runs-on: ubuntu-20.04 + needs: [build, extract-version] + runs-on: ubuntu-latest env: - VERSION: ${{ needs.extract-version.outputs.VERSION }} + VERSION: ${{ needs.extract-version.outputs.VERSION }} permissions: # Required to post the release contents: write @@ -162,54 +166,55 @@ jobs: GITHUB_USER: ${{ github.repository_owner }} GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - # The formatting here is borrowed from Lighthouse (which is borrowed from OpenEthereum): 
https://github.com/openethereum/openethereum/blob/main/.github/workflows/build.yml + # The formatting here is borrowed from Lighthouse (which is borrowed from OpenEthereum): + # https://github.com/openethereum/openethereum/blob/main/.github/workflows/build.yml run: | body=$(cat <<- "ENDBODY" - + ## Testing Checklist (DELETE ME) - + - [ ] Run on testnet for 1-3 days. - [ ] Resync a mainnet node. - [ ] Ensure all CI checks pass. - + ## Release Checklist (DELETE ME) - + - [ ] Ensure all crates have had their versions bumped. - [ ] Write the summary. - [ ] Fill out the update priority. - [ ] Ensure all binaries have been added. - [ ] Prepare release posts (Twitter, ...). - + ## Summary - + Add a summary, including: - + - Critical bug fixes - New features - Any breaking changes (and what to expect) - + ## Update Priority - + This table provides priorities for which classes of users should update particular components. - + | User Class | Priority | |----------------------|-----------------| | Payload Builders | | | Non-Payload Builders | | - + *See [Update Priorities](https://paradigmxyz.github.io/reth/installation/priorities.html) for more information about this table.* - + ## All Changes - + ${{ steps.changelog.outputs.CHANGELOG }} - + ## Binaries - + [See pre-built binaries documentation.](https://paradigmxyz.github.io/reth/installation/binaries.html) - + The binaries are signed with the PGP key: `A3AE 097C 8909 3A12 4049 DF1F 5391 A3C4 1005 30B4` - + | System | Architecture | Binary | PGP Signature | |:---:|:---:|:---:|:---| | | x86_64 | [reth-${{ env.VERSION }}-x86_64-unknown-linux-gnu.tar.gz](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/reth-${{ env.VERSION }}-x86_64-unknown-linux-gnu.tar.gz) | [PGP Signature](https://github.com/${{ env.REPO_NAME }}/releases/download/${{ env.VERSION }}/reth-${{ env.VERSION }}-x86_64-unknown-linux-gnu.tar.gz.asc) | diff --git a/.github/workflows/sanity.yml b/.github/workflows/sanity.yml index 71ec1b495c..8b1e6f6716 100644 --- a/.github/workflows/sanity.yml +++ b/.github/workflows/sanity.yml @@ -15,7 +15,7 @@ env: name: sanity jobs: dep-version-constraints: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest name: dep version constraints test (partition ${{ matrix.partition }}/${{ strategy.job-total }}) strategy: matrix: @@ -64,7 +64,7 @@ jobs: filename: .github/SANITY_DEPS_ISSUE_TEMPLATE.md unused-deps: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest name: unused dependencies steps: - name: Checkout sources diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index 31ce92c268..5328cc95e8 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -6,7 +6,7 @@ on: jobs: close-issues: - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest permissions: issues: write pull-requests: write diff --git a/.github/workflows/unit.yml b/.github/workflows/unit.yml index ba5f0ff530..3d398593b5 100644 --- a/.github/workflows/unit.yml +++ b/.github/workflows/unit.yml @@ -69,7 +69,7 @@ jobs: repository: ethereum/tests path: testing/ef-tests/ethereum-tests submodules: recursive - depth: 1 + fetch-depth: 1 - name: Install toolchain uses: actions-rs/toolchain@v1 @@ -102,7 +102,7 @@ jobs: unit-success: if: always() name: unit success - runs-on: ubuntu-20.04 + runs-on: ubuntu-latest needs: [test, eth-blockchain, doc-test] timeout-minutes: 60 steps: diff --git a/Cargo.lock b/Cargo.lock index acc57da381..6f5e3adac8 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -119,9 +119,9 @@ dependencies = [ [[package]] name = 
"aho-corasick" -version = "1.0.5" +version = "1.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0c378d78423fdad8089616f827526ee33c19f2fddbd5de1629152c9593ba4783" +checksum = "ea5d730647d4fadd988536d06fecce94b7b4f2a7efdae548f1cf4b63205518ab" dependencies = [ "memchr", ] @@ -148,16 +148,118 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0942ffc6dcaadf03badf6e6a2d0228460359d5e34b57ccdc720b7382dfbd5ec5" [[package]] -name = "alloy-rlp" -version = "0.3.2" +name = "alloy-dyn-abi" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f938f00332d63a5b0ac687bd6f46d03884638948921d9f8b50c59563d421ae25" +checksum = "b4a2c94da79130a80677c497eb56e465f72e376e0d85720228be2cf6c85ec5b0" dependencies = [ + "alloy-json-abi", + "alloy-primitives", + "alloy-sol-type-parser", + "alloy-sol-types", + "const-hex", + "derive_more", + "itoa", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-json-abi" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d187c265879ea8fc1fb574f75f95942e9502d2a67eba7e5c9f6ba9879375ddd6" +dependencies = [ + "alloy-primitives", + "alloy-sol-type-parser", + "serde", + "serde_json", +] + +[[package]] +name = "alloy-primitives" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "6b4084879b7257d5b95b9009837c07a1868bd7d60e66418a7764b9b580ae64e0" +dependencies = [ + "alloy-rlp", + "arbitrary", + "bytes", + "cfg-if", + "const-hex", + "derive_arbitrary", + "derive_more", + "getrandom 0.2.10", + "hex-literal", + "itoa", + "proptest", + "proptest-derive", + "rand 0.8.5", + "ruint", + "serde", + "tiny-keccak", +] + +[[package]] +name = "alloy-rlp" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "cc0fac0fc16baf1f63f78b47c3d24718f3619b0714076f6a02957d808d52cbef" +dependencies = [ + "alloy-rlp-derive", "arrayvec", "bytes", "smol_str", ] +[[package]] +name = "alloy-rlp-derive" +version = "0.3.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c0391754c09fab4eae3404d19d0d297aa1c670c1775ab51d8a5312afeca23157" +dependencies = [ + "proc-macro2", + "quote", + "syn 2.0.37", +] + +[[package]] +name = "alloy-sol-macro" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "de84480ac5979d8513164f7e668f837839cd6d5c4bdb8beecbb8cf062b61cb48" +dependencies = [ + "const-hex", + "dunce", + "heck", + "proc-macro2", + "quote", + "syn 2.0.37", + "syn-solidity", + "tiny-keccak", +] + +[[package]] +name = "alloy-sol-type-parser" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "3427d2135d3c28696b437fdf44b86e334f36639d367abc8a5af2f718b3c1992b" +dependencies = [ + "winnow", +] + +[[package]] +name = "alloy-sol-types" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fa7e42aa2983db6676af5d762bc8d9371dd74f5948739790d3080c3d652a957b" +dependencies = [ + "alloy-primitives", + "alloy-sol-macro", + "const-hex", + "serde", +] + [[package]] name = "android-tzdata" version = "0.1.1" @@ -380,15 +482,6 @@ dependencies = [ "rand 0.8.5", ] -[[package]] -name = "array-init" -version = "0.0.4" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "23589ecb866b460d3a0f1278834750268c607e8e28a1b982c907219f3178cd72" -dependencies = [ - "nodrop", -] - [[package]] name = "arrayvec" version = "0.7.4" 
@@ -434,7 +527,7 @@ checksum = "bc00ceb34980c03614e35a3a4e218276a0a824e911d07651cd0d858a51e8c0f0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -619,7 +712,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.36", + "syn 2.0.37", "which", ] @@ -640,7 +733,7 @@ dependencies = [ "regex", "rustc-hash", "shlex", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -754,23 +847,23 @@ dependencies = [ [[package]] name = "boa_ast" -version = "0.17.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7c261398db3b2f9ba05f76872721d6a8a142d10ae6c0a58d3ddc5c2853cc02d" +checksum = "73498e9b2f0aa7db74977afa4d594657611e90587abf0dd564c0b55b4a130163" dependencies = [ "bitflags 2.4.0", "boa_interner", "boa_macros", - "indexmap 2.0.0", + "indexmap 2.0.1", "num-bigint", "rustc-hash", ] [[package]] name = "boa_engine" -version = "0.17.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f31e7a37b855625f1615a07414fb341361475950e57bb9396afe1389bbc2ccdc" +checksum = "16377479d5d6d33896e7acdd1cc698d04a8f72004025bbbddf47558cd29146a6" dependencies = [ "bitflags 2.4.0", "boa_ast", @@ -784,7 +877,7 @@ dependencies = [ "dashmap", "fast-float", "icu_normalizer", - "indexmap 2.0.0", + "indexmap 2.0.1", "itertools 0.11.0", "num-bigint", "num-integer", @@ -807,9 +900,9 @@ dependencies = [ [[package]] name = "boa_gc" -version = "0.17.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2346f8ac7b736236de0608a7c75a9a32bac0a1137b98574cfebde6343e4ff6b7" +checksum = "c97b44beaef9d4452342d117d94607fdfa8d474280f1ba0fd97853834e3a49b2" dependencies = [ "boa_macros", "boa_profiler", @@ -818,28 +911,29 @@ dependencies = [ [[package]] name = "boa_icu_provider" -version = "0.17.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "07652c6f1ca97bbe16bd2ab1ebc39313ac81568d2671aeb24a4a45964d2291a4" +checksum = "b30e52e34e451dd0bfc2c654a9a43ed34b0073dbd4ae3394b40313edda8627aa" dependencies = [ "icu_collections", "icu_normalizer", "icu_properties", "icu_provider", + "icu_provider_adapters", + "icu_provider_blob", "once_cell", - "zerovec", ] [[package]] name = "boa_interner" -version = "0.17.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7b968bd467737cace9723a5d01a3d32fe95471526d36db9654a1779c4b766fb6" +checksum = "f3e5afa991908cfbe79bd3109b824e473a1dc5f74f31fab91bb44c9e245daa77" dependencies = [ "boa_gc", "boa_macros", "hashbrown 0.14.0", - "indexmap 2.0.0", + "indexmap 2.0.1", "once_cell", "phf", "rustc-hash", @@ -848,21 +942,21 @@ dependencies = [ [[package]] name = "boa_macros" -version = "0.17.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca3de43b7806061fccfba716fef51eea462d636de36803b62d10f902608ffef4" +checksum = "005fa0c5bd20805466dda55eb34cd709bb31a2592bb26927b47714eeed6914d8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", - "synstructure 0.13.0", + "syn 2.0.37", + "synstructure", ] [[package]] name = "boa_parser" -version = "0.17.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2ff1108bda6d573049191b6452490844c5ba4b12f7bdcc512a33e5c3f5037196" +checksum = "9e09afb035377a9044443b598187a7d34cd13164617182a4d7c348522ee3f052" dependencies = [ "bitflags 2.4.0", "boa_ast", @@ -871,19 +965,23 @@ dependencies = [ "boa_macros", 
"boa_profiler", "fast-float", + "icu_locid", "icu_properties", + "icu_provider", + "icu_provider_macros", "num-bigint", "num-traits", "once_cell", "regress", "rustc-hash", + "tinystr", ] [[package]] name = "boa_profiler" -version = "0.17.0" +version = "0.17.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a24f6aa1ecc56e797506437b1f9a172e4a5f207894e74196c682cb656d2c2d60" +checksum = "3190f92dfe48224adc92881c620f08ccf37ff62b91a094bb357fe53bd5e84647" [[package]] name = "boyer-moore-magiclen" @@ -971,8 +1069,9 @@ dependencies = [ [[package]] name = "c-kzg" -version = "0.1.0" -source = "git+https://github.com/ethereum/c-kzg-4844?rev=f5f6f863d475847876a2bd5ee252058d37c3a15d#f5f6f863d475847876a2bd5ee252058d37c3a15d" +version = "0.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ac926d808fb72fe09ebf471a091d6d72918876ccf0b4989766093d2d0d24a0ef" dependencies = [ "bindgen 0.66.1", "blst", @@ -983,19 +1082,6 @@ dependencies = [ "serde", ] -[[package]] -name = "c-kzg" -version = "0.1.0" -source = "git+https://github.com/ethereum/c-kzg-4844#fbef59a3f9e8fa998bdb5069d212daf83d586aa5" -dependencies = [ - "bindgen 0.66.1", - "blst", - "cc", - "glob", - "hex", - "libc", -] - [[package]] name = "camino" version = "1.1.6" @@ -1022,7 +1108,7 @@ checksum = "eee4243f1f26fc7a42710e7439c149e2b10b05472f88090acce52632f231a73a" dependencies = [ "camino", "cargo-platform", - "semver 1.0.18", + "semver 1.0.19", "serde", "serde_json", "thiserror", @@ -1036,7 +1122,7 @@ checksum = "e7daec1a2a2129eeba1644b220b4647ec537b0b5d4bfd6876fcc5a540056b592" dependencies = [ "camino", "cargo-platform", - "semver 1.0.18", + "semver 1.0.19", "serde", "serde_json", "thiserror", @@ -1153,9 +1239,9 @@ dependencies = [ [[package]] name = "clap" -version = "4.4.3" +version = "4.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "84ed82781cea27b43c9b106a979fe450a13a31aab0500595fb3fc06616de08e6" +checksum = "824956d0dca8334758a5b7f7e50518d66ea319330cbceedcf76905c2f6ab30e3" dependencies = [ "clap_builder", "clap_derive", @@ -1163,9 +1249,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.4.2" +version = "4.4.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2bb9faaa7c2ef94b2743a21f5a29e6f0010dff4caa69ac8e9d6cf8b6fa74da08" +checksum = "122ec64120a49b4563ccaedcbea7818d069ed8e9aa6d829b82d8a4128936b2ab" dependencies = [ "anstream", "anstyle", @@ -1182,7 +1268,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -1203,10 +1289,11 @@ version = "0.1.0-alpha.10" dependencies = [ "convert_case 0.6.0", "parity-scale-codec", + "pretty_assertions", "proc-macro2", "quote", "serde", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -1305,9 +1392,9 @@ dependencies = [ [[package]] name = "const-hex" -version = "1.8.0" +version = "1.9.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08849ed393c907c90016652a01465a12d86361cd38ad2a7de026c56a520cc259" +checksum = "aa72a10d0e914cad6bcad4e7409e68d230c1c2db67896e19a37f758b1fcbdab5" dependencies = [ "cfg-if", "cpufeatures", @@ -1605,9 +1692,9 @@ dependencies = [ [[package]] name = "curve25519-dalek" -version = "4.1.0" +version = "4.1.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "622178105f911d937a42cdb140730ba4a3ed2becd8ae6ce39c7d28b5d75d4588" +checksum = "e89b8c6a2e4b1f45971ad09761aafb85514a84744b67a95e32c3cc1352d1f65c" dependencies = [ "cfg-if", 
"cpufeatures", @@ -1628,7 +1715,7 @@ checksum = "83fdaf97f4804dcebfa5862639bc9ce4121e82140bec2a987ac5140294865b5b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -1676,7 +1763,7 @@ dependencies = [ "proc-macro2", "quote", "strsim 0.10.0", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -1698,7 +1785,7 @@ checksum = "836a9bbc7ad63342d6d6e7b815ccab164bc77a2d95d84bc3117a8c0d5c98e2d5" dependencies = [ "darling_core 0.20.3", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -1783,7 +1870,7 @@ checksum = "53e0efad4403bfc52dc201159c4b842a246a14b98c64b55dfd0f2d89729dfeb8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -1940,7 +2027,7 @@ dependencies = [ "parking_lot 0.11.2", "rand 0.8.5", "rlp", - "smallvec 1.11.0", + "smallvec", "socket2 0.4.9", "tokio", "tracing", @@ -1957,7 +2044,7 @@ checksum = "487585f4d0c6655fe74905e2504d8ad6908e4db67f744eb140876906c2f3175d" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -1986,9 +2073,9 @@ checksum = "56ce8c6da7551ec6c462cbaf3bfbc75131ebbfa1c944aeaa9dab51ca1c5f0c3b" [[package]] name = "dyn-clone" -version = "1.0.13" +version = "1.0.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bbfc4744c1b8f2a09adc0e55242f60b1af195d88596bd8700be74418c056c555" +checksum = "23d2f3407d9a573d666de4b5bdf10569d73ca9478087346697dcbae6244bfbcd" [[package]] name = "dyn_size_of" @@ -2050,12 +2137,12 @@ dependencies = [ name = "ef-tests" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "reth-db", "reth-interfaces", "reth-primitives", "reth-provider", "reth-revm", - "reth-rlp", "reth-stages", "serde", "serde_json", @@ -2089,6 +2176,12 @@ dependencies = [ "zeroize", ] +[[package]] +name = "embedded-io" +version = "0.4.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ef1a6892d9eef45c8fa6b9e0086428a2cca8491aca8f787c534a3d6d0bcb3ced" + [[package]] name = "encode_unicode" version = "0.3.6" @@ -2112,9 +2205,9 @@ checksum = "c34f04666d835ff5d62e058c3995147c06f42fe86ff053337632bca83e42702d" [[package]] name = "enr" -version = "0.9.0" +version = "0.9.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0be7b2ac146c1f99fe245c02d16af0696450d8e06c135db75e10eeb9e642c20d" +checksum = "fe81b5c06ecfdbc71dd845216f225f53b62a10cb8a16c946836a3467f701d05b" dependencies = [ "base64 0.21.4", "bytes", @@ -2126,7 +2219,6 @@ dependencies = [ "rlp", "secp256k1", "serde", - "serde-hex", "sha3", "zeroize", ] @@ -2152,7 +2244,7 @@ dependencies = [ "heck", "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -2165,7 +2257,7 @@ dependencies = [ "num-traits", "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -2176,7 +2268,7 @@ checksum = "c2ad8cef1d801a4686bfd8919f0b30eac4c8e48968c437a6405ded4fb5272d2b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -2323,7 +2415,7 @@ dependencies = [ "regex", "serde", "serde_json", - "syn 2.0.36", + "syn 2.0.37", "toml 0.7.8", "walkdir", ] @@ -2341,7 +2433,7 @@ dependencies = [ "proc-macro2", "quote", "serde_json", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -2367,7 +2459,7 @@ dependencies = [ "serde", "serde_json", "strum 0.25.0", - "syn 2.0.36", + "syn 2.0.37", "tempfile", "thiserror", "tiny-keccak", @@ -2382,7 +2474,7 @@ checksum = "0e53451ea4a8128fbce33966da71132cf9e1040dcfd2a2084fd7733ada7b2045" dependencies = [ "ethers-core", "reqwest", - "semver 
1.0.18", + "semver 1.0.19", "serde", "serde_json", "thiserror", @@ -2473,12 +2565,6 @@ dependencies = [ "tracing", ] -[[package]] -name = "ethnum" -version = "1.4.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6c8ff382b2fa527fb7fb06eeebfc5bbb3f17e3cc6b9d70b006c41daa8824adac" - [[package]] name = "event-listener" version = "2.5.3" @@ -2534,9 +2620,9 @@ dependencies = [ [[package]] name = "fastrand" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6999dc1837253364c2ebb0704ba97994bd874e8f195d665c50b7548f6ea92764" +checksum = "25cbce373ec4653f1a01a31e8a5e5ec0c622dc27ff9c4e6606eefef5cbbed4a5" [[package]] name = "fastrlp" @@ -2710,7 +2796,7 @@ checksum = "89ca545a94061b6365f2c7355b4b32bd20df3ff95f02da9329b34ccc3bd6ee72" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -3002,9 +3088,9 @@ checksum = "95505c38b4572b2d910cecb0281560f54b440a19336cbbcb27bf6ce6adc6f5a8" [[package]] name = "hermit-abi" -version = "0.3.2" +version = "0.3.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "443144c8cdadd93ebf52ddb4056d257f5b52c04d3c804e657d19eb73fc33668b" +checksum = "d77f7ec81a6d05a3abb01ab6eb7590f6083d08449fe5a1c8b1e620283546ccb7" [[package]] name = "hex" @@ -3211,6 +3297,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ef8302d8dfd6044d3ddb3f807a5ef3d7bbca9a574959c6d6e4dc39aa7012d0d5" dependencies = [ "displaydoc", + "serde", "yoke", "zerofrom", "zerovec", @@ -3224,8 +3311,10 @@ checksum = "3003f85dccfc0e238ff567693248c59153a46f4e6125ba4020b973cef4d1d335" dependencies = [ "displaydoc", "litemap", + "serde", "tinystr", "writeable", + "zerovec", ] [[package]] @@ -3238,7 +3327,8 @@ dependencies = [ "icu_collections", "icu_properties", "icu_provider", - "smallvec 1.11.0", + "serde", + "smallvec", "utf16_iter", "utf8_iter", "write16", @@ -3254,6 +3344,7 @@ dependencies = [ "displaydoc", "icu_collections", "icu_provider", + "serde", "tinystr", "zerovec", ] @@ -3267,6 +3358,7 @@ dependencies = [ "displaydoc", "icu_locid", "icu_provider_macros", + "postcard", "serde", "stable_deref_trait", "writeable", @@ -3275,6 +3367,34 @@ dependencies = [ "zerovec", ] +[[package]] +name = "icu_provider_adapters" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4ae1e2bd0c41728b77e7c46e9afdec5e2127d1eedacc684724667d50c126bd3" +dependencies = [ + "icu_locid", + "icu_provider", + "serde", + "tinystr", + "yoke", + "zerovec", +] + +[[package]] +name = "icu_provider_blob" +version = "1.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "fd364c9a01f791a4bc04a74cf2a1d01d9f6926a40fd5ae1c28004e1e70d8338b" +dependencies = [ + "icu_provider", + "postcard", + "serde", + "writeable", + "yoke", + "zerovec", +] + [[package]] name = "icu_provider_macros" version = "1.2.0" @@ -3412,9 +3532,9 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.0.0" +version = "2.0.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d5477fe2230a79769d8dc68e0eabf5437907c0457a5614a9e8dddb67f65eb65d" +checksum = "ad227c3af19d4914570ad36d30409928b75967c298feb9ea1969db3a610bb14e" dependencies = [ "equivalent", "hashbrown 0.14.0", @@ -3428,7 +3548,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c50453ec3a6555fad17b1cd1a80d16af5bc7cb35094f64e429fd46549018c6a3" dependencies = [ "ahash 0.8.3", - "indexmap 2.0.0", + 
"indexmap 2.0.1", "is-terminal", "itoa", "log", @@ -3504,7 +3624,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb0889898416213fab133e1d33a0e5858a48177452750691bde3666d0fdbaf8b" dependencies = [ "hermit-abi", - "rustix 0.38.13", + "rustix 0.38.14", "windows-sys 0.48.0", ] @@ -3865,9 +3985,9 @@ checksum = "1a9bad9f94746442c783ca431b22403b519cd7fbeed0533fdd6328b2f2212128" [[package]] name = "litemap" -version = "0.7.0" +version = "0.7.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3a04a5b2b6f54acba899926491d0a6c59d98012938ca2ab5befb281c034e8f94" +checksum = "77a1a2647d5b7134127971a6de0d533c49de2159167e7f259c427195f87168a1" [[package]] name = "lock_api" @@ -3958,12 +4078,6 @@ version = "0.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "2532096657941c2fea9c289d370a250971c689d4f143798ff67113ec042024a5" -[[package]] -name = "maybe-uninit" -version = "2.0.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60302e4db3a61da70c0cb7991976248362f30319e88850c487b9b95bbf059e00" - [[package]] name = "memchr" version = "2.6.3" @@ -4034,7 +4148,7 @@ checksum = "ddece26afd34c31585c74a4db0630c376df271c285d682d1e55012197830b6df" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -4174,7 +4288,7 @@ version = "0.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "77a5d83df9f36fe23f0c3648c6bbb8b0298bb5f1939c8f2704431371f4b84d43" dependencies = [ - "smallvec 1.11.0", + "smallvec", ] [[package]] @@ -4188,12 +4302,6 @@ dependencies = [ "libc", ] -[[package]] -name = "nodrop" -version = "0.1.14" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72ef4a56884ca558e5ddb05a1d1e7e1bfd9a68d9ed024c21704cc98872dae1bb" - [[package]] name = "nom" version = "7.1.3" @@ -4345,7 +4453,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -4357,7 +4465,7 @@ dependencies = [ "proc-macro-crate", "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -4534,7 +4642,7 @@ dependencies = [ "instant", "libc", "redox_syscall 0.2.16", - "smallvec 1.11.0", + "smallvec", "winapi", ] @@ -4547,7 +4655,7 @@ dependencies = [ "cfg-if", "libc", "redox_syscall 0.3.5", - "smallvec 1.11.0", + "smallvec", "windows-targets 0.48.5", ] @@ -4599,9 +4707,9 @@ checksum = "9b2a4787296e9989611394c33f193f676704af1686e70b8f8033ab5ba9a35a94" [[package]] name = "pest" -version = "2.7.3" +version = "2.7.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7a4d085fd991ac8d5b05a147b437791b4260b76326baf0fc60cf7c9c27ecd33" +checksum = "c022f1e7b65d6a24c0dbbd5fb344c66881bc01f3e5ae74a1c8100f2f985d98a4" dependencies = [ "memchr", "thiserror", @@ -4661,7 +4769,7 @@ dependencies = [ "phf_shared", "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -4690,7 +4798,7 @@ checksum = "4359fd9c9171ec6e8c62926d6faaf553a8dc3f64e1507e76da7911b4f6a04405" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -4790,11 +4898,12 @@ checksum = "31114a898e107c51bb1609ffaf55a0e011cf6a4d7f1170d0015a165082c0338b" [[package]] name = "postcard" -version = "1.0.7" +version = "1.0.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d534c6e61df1c7166e636ca612d9820d486fe96ddad37f7abc671517b297488e" +checksum = "a55c51ee6c0db07e68448e336cf8ea4131a620edefebf9893e759b2d793420f8" 
dependencies = [ "cobs", + "embedded-io", "heapless", "serde", ] @@ -4815,7 +4924,7 @@ dependencies = [ "nix", "once_cell", "parking_lot 0.12.1", - "smallvec 1.11.0", + "smallvec", "symbolic-demangle", "tempfile", "thiserror", @@ -4874,7 +4983,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "ae005bd773ab59b4725093fd7df83fd7892f7d8eafb48dbd7de6e024e4215f9d" dependencies = [ "proc-macro2", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -5146,9 +5255,9 @@ dependencies = [ [[package]] name = "rayon" -version = "1.7.0" +version = "1.8.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1d2df5196e37bcc87abebc0053e20787d73847bb33134a69841207dd0a47f03b" +checksum = "9c27db03db7734835b3f53954b534c91069375ce6ccaa2e065441e07d9b6cdb1" dependencies = [ "either", "rayon-core", @@ -5156,14 +5265,12 @@ dependencies = [ [[package]] name = "rayon-core" -version = "1.11.0" +version = "1.12.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4b8f95bd6966f5c87776639160a66bd8ab9895d9d4ab01ddba9fc60661aebe8d" +checksum = "5ce3fb6ad83f861aac485e76e1985cd109d9a3713802152be56c3b1f0e0658ed" dependencies = [ - "crossbeam-channel", "crossbeam-deque", "crossbeam-utils", - "num_cpus", ] [[package]] @@ -5201,7 +5308,7 @@ version = "1.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "697061221ea1b4a94a624f67d0ae2bfe4e22b8a17b6a192afb11046542cc8c47" dependencies = [ - "aho-corasick 1.0.5", + "aho-corasick 1.1.1", "memchr", "regex-automata 0.3.8", "regex-syntax 0.7.5", @@ -5222,7 +5329,7 @@ version = "0.3.8" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "c2f401f4955220693b56f8ec66ee9c78abffd8d1c4f23dc41a23839eb88f0795" dependencies = [ - "aho-corasick 1.0.5", + "aho-corasick 1.1.1", "memchr", "regex-syntax 0.7.5", ] @@ -5297,6 +5404,7 @@ dependencies = [ name = "reth" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "aquamarine", "backon", "boyer-moore-magiclen", @@ -5309,7 +5417,6 @@ dependencies = [ "eyre", "fdlimit", "futures", - "hex", "human_bytes", "humantime", "hyper", @@ -5342,13 +5449,13 @@ dependencies = [ "reth-prune", "reth-revm", "reth-revm-inspectors", - "reth-rlp", "reth-rpc", "reth-rpc-api", "reth-rpc-builder", "reth-rpc-engine-api", "reth-rpc-types", "reth-rpc-types-compat", + "reth-snapshot", "reth-stages", "reth-tasks", "reth-tracing", @@ -5388,6 +5495,7 @@ dependencies = [ name = "reth-basic-payload-builder" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "futures-core", "futures-util", "metrics", @@ -5397,7 +5505,6 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-revm", - "reth-rlp", "reth-tasks", "reth-transaction-pool", "revm", @@ -5425,6 +5532,7 @@ dependencies = [ "reth-revm", "reth-rpc-types", "reth-rpc-types-compat", + "reth-snapshot", "reth-stages", "reth-tasks", "reth-tracing", @@ -5528,6 +5636,7 @@ dependencies = [ "reth-interfaces", "reth-libmdbx", "reth-metrics", + "reth-nippy-jar", "reth-primitives", "secp256k1", "serde", @@ -5544,18 +5653,17 @@ dependencies = [ name = "reth-discv4" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "discv5", "enr", "generic-array", - "hex", "parking_lot 0.12.1", "rand 0.8.5", "reth-net-common", "reth-net-nat", "reth-primitives", - "reth-rlp", - "reth-rlp-derive", "reth-tracing", + "rlp", "secp256k1", "serde", "thiserror", @@ -5568,6 +5676,7 @@ dependencies = [ name = "reth-dns-discovery" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "async-trait", 
"data-encoding", "enr", @@ -5575,7 +5684,6 @@ dependencies = [ "parking_lot 0.12.1", "reth-net-common", "reth-primitives", - "reth-rlp", "reth-tracing", "schnellru", "secp256k1", @@ -5592,6 +5700,7 @@ dependencies = [ name = "reth-downloaders" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "assert_matches", "futures", "futures-util", @@ -5603,7 +5712,6 @@ dependencies = [ "reth-interfaces", "reth-metrics", "reth-primitives", - "reth-rlp", "reth-tasks", "reth-tracing", "tempfile", @@ -5619,6 +5727,7 @@ name = "reth-ecies" version = "0.1.0-alpha.10" dependencies = [ "aes 0.8.3", + "alloy-rlp", "block-padding", "byteorder", "cipher 0.4.4", @@ -5627,13 +5736,11 @@ dependencies = [ "educe", "futures", "generic-array", - "hex-literal", "hmac", "pin-project", "rand 0.8.5", "reth-net-common", "reth-primitives", - "reth-rlp", "secp256k1", "sha2", "sha3", @@ -5649,13 +5756,12 @@ dependencies = [ name = "reth-eth-wire" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "arbitrary", "async-trait", "bytes", "ethers-core", "futures", - "hex", - "hex-literal", "metrics", "pin-project", "proptest", @@ -5666,11 +5772,9 @@ dependencies = [ "reth-ecies", "reth-metrics", "reth-primitives", - "reth-rlp", "reth-tracing", "secp256k1", "serde", - "smol_str", "snap", "test-fuzz", "thiserror", @@ -5689,7 +5793,6 @@ dependencies = [ "auto_impl", "clap", "futures", - "hex-literal", "modular-bitfield", "parity-scale-codec", "parking_lot 0.12.1", @@ -5698,6 +5801,7 @@ dependencies = [ "reth-db", "reth-eth-wire", "reth-network-api", + "reth-nippy-jar", "reth-primitives", "reth-rpc-types", "revm-primitives", @@ -5735,7 +5839,7 @@ dependencies = [ "byteorder", "criterion", "derive_more", - "indexmap 2.0.0", + "indexmap 2.0.1", "libc", "lifetimed-bytes", "parking_lot 0.12.1", @@ -5777,7 +5881,7 @@ dependencies = [ "quote", "regex", "serial_test", - "syn 2.0.36", + "syn 2.0.37", "trybuild", ] @@ -5808,6 +5912,7 @@ dependencies = [ name = "reth-network" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "aquamarine", "async-trait", "auto_impl", @@ -5818,7 +5923,6 @@ dependencies = [ "ethers-signers", "fnv", "futures", - "hex", "humantime-serde", "linked-hash-map", "linked_hash_set", @@ -5837,8 +5941,6 @@ dependencies = [ "reth-network-api", "reth-primitives", "reth-provider", - "reth-rlp", - "reth-rlp-derive", "reth-rpc-types", "reth-tasks", "reth-tracing", @@ -5892,16 +5994,17 @@ dependencies = [ name = "reth-payload-builder" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "futures-util", "metrics", "reth-interfaces", "reth-metrics", "reth-primitives", "reth-revm-primitives", - "reth-rlp", "reth-rpc-types", "reth-rpc-types-compat", "reth-transaction-pool", + "revm", "revm-primitives", "sha2", "thiserror", @@ -5914,22 +6017,22 @@ dependencies = [ name = "reth-primitives" version = "0.1.0-alpha.10" dependencies = [ + "alloy-primitives", + "alloy-rlp", + "alloy-sol-types", "arbitrary", "assert_matches", + "byteorder", "bytes", - "c-kzg 0.1.0 (git+https://github.com/ethereum/c-kzg-4844?rev=f5f6f863d475847876a2bd5ee252058d37c3a15d)", + "c-kzg", "crc", "criterion", - "crunchy", "derive_more", "ethers-core", - "fixed-hash", "hash-db", - "hex", - "hex-literal", - "impl-serde", "itertools 0.11.0", "modular-bitfield", + "num_enum 0.7.0", "once_cell", "paste", "plain_hasher", @@ -5939,10 +6042,7 @@ dependencies = [ "rand 0.8.5", "rayon", "reth-codecs", - "reth-rlp", - "reth-rlp-derive", "revm-primitives", - "ruint", "secp256k1", "serde", "serde_json", @@ -5953,7 +6053,6 @@ dependencies = [ 
"tempfile", "test-fuzz", "thiserror", - "tiny-keccak", "tokio", "tokio-stream", "toml 0.7.8", @@ -5967,17 +6066,19 @@ dependencies = [ name = "reth-provider" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "assert_matches", "auto_impl", "itertools 0.11.0", "parking_lot 0.12.1", "pin-project", + "rand 0.8.5", "rayon", "reth-db", "reth-interfaces", + "reth-nippy-jar", "reth-primitives", "reth-revm-primitives", - "reth-rlp", "reth-trie", "revm", "tempfile", @@ -5999,8 +6100,10 @@ dependencies = [ "reth-metrics", "reth-primitives", "reth-provider", + "reth-snapshot", "reth-stages", "thiserror", + "tokio", "tokio-stream", "tracing", ] @@ -6023,18 +6126,15 @@ dependencies = [ name = "reth-revm-inspectors" version = "0.1.0-alpha.10" dependencies = [ + "alloy-sol-types", "boa_engine", "boa_gc", - "hashbrown 0.14.0", - "icu_collections", - "icu_provider_macros", "reth-primitives", "reth-rpc-types", "revm", "serde", "serde_json", "thiserror", - "tinystr", "tokio", ] @@ -6043,47 +6143,20 @@ name = "reth-revm-primitives" version = "0.1.0-alpha.10" dependencies = [ "reth-primitives", - "revm", -] - -[[package]] -name = "reth-rlp" -version = "0.1.0-alpha.10" -dependencies = [ - "arrayvec", - "auto_impl", - "bytes", - "c-kzg 0.1.0 (git+https://github.com/ethereum/c-kzg-4844?rev=f5f6f863d475847876a2bd5ee252058d37c3a15d)", - "criterion", - "ethereum-types", - "ethnum", - "hex-literal", - "pprof", - "reth-rlp", - "reth-rlp-derive", - "revm-primitives", - "smol_str", -] - -[[package]] -name = "reth-rlp-derive" -version = "0.1.0-alpha.10" -dependencies = [ - "proc-macro2", - "quote", - "syn 2.0.36", ] [[package]] name = "reth-rpc" version = "0.1.0-alpha.10" dependencies = [ + "alloy-dyn-abi", + "alloy-primitives", + "alloy-rlp", + "alloy-sol-types", "assert_matches", "async-trait", "bytes", - "ethers-core", "futures", - "hex", "http", "http-body", "hyper", @@ -6101,7 +6174,6 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-revm", - "reth-rlp", "reth-rpc-api", "reth-rpc-engine-api", "reth-rpc-types", @@ -6185,6 +6257,7 @@ dependencies = [ name = "reth-rpc-engine-api" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "assert_matches", "async-trait", "jsonrpsee-core", @@ -6194,7 +6267,6 @@ dependencies = [ "reth-payload-builder", "reth-primitives", "reth-provider", - "reth-rlp", "reth-rpc-api", "reth-rpc-types", "reth-rpc-types-compat", @@ -6208,11 +6280,11 @@ dependencies = [ name = "reth-rpc-types" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "itertools 0.11.0", "jsonrpsee-types", "rand 0.8.5", "reth-primitives", - "reth-rlp", "serde", "serde_json", "similar-asserts", @@ -6223,15 +6295,31 @@ dependencies = [ name = "reth-rpc-types-compat" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "reth-primitives", - "reth-rlp", "reth-rpc-types", ] +[[package]] +name = "reth-snapshot" +version = "0.1.0-alpha.10" +dependencies = [ + "assert_matches", + "reth-db", + "reth-interfaces", + "reth-primitives", + "reth-provider", + "reth-stages", + "thiserror", + "tokio", + "tracing", +] + [[package]] name = "reth-stages" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "aquamarine", "assert_matches", "async-trait", @@ -6255,7 +6343,6 @@ dependencies = [ "reth-primitives", "reth-provider", "reth-revm", - "reth-rlp", "reth-trie", "revm", "serde", @@ -6295,6 +6382,7 @@ dependencies = [ name = "reth-transaction-pool" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "aquamarine", "assert_matches", "async-trait", @@ -6312,7 +6400,6 @@ dependencies = [ 
"reth-metrics", "reth-primitives", "reth-provider", - "reth-rlp", "reth-tasks", "serde", "thiserror", @@ -6325,16 +6412,15 @@ dependencies = [ name = "reth-trie" version = "0.1.0-alpha.10" dependencies = [ + "alloy-rlp", "criterion", "derive_more", - "hex", "pretty_assertions", "proptest", "reth-db", "reth-interfaces", "reth-primitives", "reth-provider", - "reth-rlp", "thiserror", "tokio", "tokio-stream", @@ -6345,7 +6431,7 @@ dependencies = [ [[package]] name = "revm" version = "3.3.0" -source = "git+https://github.com/bluealloy/revm?rev=516f62cc#516f62ccc1c5f2a62e5fc58115213fe04c7f7a8c" +source = "git+https://github.com/Evalir/revm/?branch=reintroduce-alloy-rebased#988e0d7920ab20c3d0a12dd5582654a58e7f9f2c" dependencies = [ "auto_impl", "revm-interpreter", @@ -6355,21 +6441,17 @@ dependencies = [ [[package]] name = "revm-interpreter" version = "1.1.2" -source = "git+https://github.com/bluealloy/revm?rev=516f62cc#516f62ccc1c5f2a62e5fc58115213fe04c7f7a8c" +source = "git+https://github.com/Evalir/revm/?branch=reintroduce-alloy-rebased#988e0d7920ab20c3d0a12dd5582654a58e7f9f2c" dependencies = [ - "derive_more", - "enumn", "revm-primitives", - "sha3", ] [[package]] name = "revm-precompile" version = "2.0.3" -source = "git+https://github.com/bluealloy/revm?rev=516f62cc#516f62ccc1c5f2a62e5fc58115213fe04c7f7a8c" +source = "git+https://github.com/Evalir/revm/?branch=reintroduce-alloy-rebased#988e0d7920ab20c3d0a12dd5582654a58e7f9f2c" dependencies = [ - "c-kzg 0.1.0 (git+https://github.com/ethereum/c-kzg-4844)", - "hex", + "c-kzg", "k256", "num", "once_cell", @@ -6377,35 +6459,25 @@ dependencies = [ "ripemd", "secp256k1", "sha2", - "sha3", "substrate-bn", ] [[package]] name = "revm-primitives" version = "1.1.2" -source = "git+https://github.com/bluealloy/revm?rev=516f62cc#516f62ccc1c5f2a62e5fc58115213fe04c7f7a8c" +source = "git+https://github.com/Evalir/revm/?branch=reintroduce-alloy-rebased#988e0d7920ab20c3d0a12dd5582654a58e7f9f2c" dependencies = [ - "arbitrary", + "alloy-primitives", + "alloy-rlp", "auto_impl", "bitflags 2.4.0", "bitvec", - "bytes", - "c-kzg 0.1.0 (git+https://github.com/ethereum/c-kzg-4844)", - "derive_more", + "c-kzg", "enumn", - "fixed-hash", "hashbrown 0.14.0", "hex", - "hex-literal", "once_cell", - "primitive-types", - "proptest", - "proptest-derive", - "rlp", - "ruint", "serde", - "sha3", ] [[package]] @@ -6571,7 +6643,7 @@ version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "bfa0f585226d2e68097d4f95d113b15b83a82e819ab25717ec0590d9584ef366" dependencies = [ - "semver 1.0.18", + "semver 1.0.19", ] [[package]] @@ -6590,9 +6662,9 @@ dependencies = [ [[package]] name = "rustix" -version = "0.38.13" +version = "0.38.14" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d7db8590df6dfcd144d22afd1b83b36c21a18d7cbc1dc4bb5295a8712e9eb662" +checksum = "747c788e9ce8e92b12cd485c49ddf90723550b654b32508f979b71a7b1ecda4f" dependencies = [ "bitflags 2.4.0", "errno 0.3.3", @@ -6636,9 +6708,9 @@ dependencies = [ [[package]] name = "rustls-webpki" -version = "0.101.5" +version = "0.101.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "45a27e3b59326c16e23d30aeb7a36a24cc0d29e71d68ff611cdfb4a01d013bed" +checksum = "3c7d5dece342910d9ba34d259310cae3e0154b873b35408b787b59bce53d34fe" dependencies = [ "ring", "untrusted", @@ -6832,9 +6904,9 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b0293b4b29daaf487284529cc2f5675b8e57c61f70167ba415a463651fd6a918" +checksum = "ad977052201c6de01a8ef2aa3378c4bd23217a056337d1d6da40468d267a4fb0" dependencies = [ "serde", ] @@ -6869,17 +6941,6 @@ dependencies = [ "serde_derive", ] -[[package]] -name = "serde-hex" -version = "0.1.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ca37e3e4d1b39afd7ff11ee4e947efae85adfddf4841787bfa47c470e96dc26d" -dependencies = [ - "array-init", - "serde", - "smallvec 0.6.14", -] - [[package]] name = "serde_bytes" version = "0.11.12" @@ -6897,7 +6958,7 @@ checksum = "4eca7ac642d82aa35b60049a6eccb4be6be75e599bd2e9adb5f875a737654af2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -6942,7 +7003,7 @@ dependencies = [ "chrono", "hex", "indexmap 1.9.3", - "indexmap 2.0.0", + "indexmap 2.0.1", "serde", "serde_json", "serde_with_macros", @@ -6958,7 +7019,7 @@ dependencies = [ "darling 0.20.3", "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -6983,7 +7044,7 @@ checksum = "91d129178576168c589c9ec973feedf7d3126c01ac2bf08795109aa35b69fb8f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -7012,9 +7073,9 @@ dependencies = [ [[package]] name = "sha1" -version = "0.10.5" +version = "0.10.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f04293dc80c3993519f2d7f6f511707ee7094fe0c6d3406feb330cdb3540eba3" +checksum = "e3bf829a2d51ab4a5ddf1352d8470c140cadc8301b2ae1789db023f01cedd6ba" dependencies = [ "cfg-if", "cpufeatures", @@ -7023,9 +7084,9 @@ dependencies = [ [[package]] name = "sha2" -version = "0.10.7" +version = "0.10.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "479fb9d862239e610720565ca91403019f2f00410f1864c5aa7479b950a76ed8" +checksum = "793db75ad2bcafc3ffa7c68b215fee268f537982cd901d132f89c6343f3a3dc8" dependencies = [ "cfg-if", "cpufeatures", @@ -7044,9 +7105,9 @@ dependencies = [ [[package]] name = "sharded-slab" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "900fba806f70c630b0a382d0d825e17a0f19fcd059a2ade1ff237bcddf446b31" +checksum = "c1b21f559e07218024e7e9f90f96f601825397de0e25420135f7f952453fed0b" dependencies = [ "lazy_static", ] @@ -7167,18 +7228,9 @@ dependencies = [ [[package]] name = "smallvec" -version = "0.6.14" +version = "1.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b97fcaeba89edba30f044a10c6a3cc39df9c3f17d7cd829dd1446cab35f890e0" -dependencies = [ - "maybe-uninit", -] - -[[package]] -name = "smallvec" -version = "1.11.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9" +checksum = "942b4a808e05215192e39f4ab80813e599068285906cc91aa64f923db842bd5a" [[package]] name = "smol_str" @@ -7330,7 +7382,7 @@ dependencies = [ "proc-macro2", "quote", "rustversion", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -7383,9 +7435,9 @@ dependencies = [ [[package]] name = "symbolic-common" -version = "12.4.0" +version = "12.4.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9e0e9bc48b3852f36a84f8d0da275d50cb3c2b88b59b9ec35fdd8b7fa239e37d" +checksum = "fac08504d60cf5bdffeb8a6a028f1a4868a5da1098bb19eb46239440039163fb" dependencies = [ "debugid", "memmap2 0.5.10", @@ -7395,9 +7447,9 @@ dependencies = [ [[package]] name = "symbolic-demangle" -version = "12.4.0" +version = "12.4.1" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "691e53bdc0702aba3a5abc2cffff89346fcbd4050748883c7e2f714b33a69045" +checksum = "8b212728d4f6c527c1d50d6169e715f6e02d849811843c13e366d8ca6d0cf5c4" dependencies = [ "cpp_demangle", "rustc-demangle", @@ -7417,9 +7469,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.36" +version = "2.0.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "91e02e55d62894af2a08aca894c6577281f76769ba47c94d5756bec8ac6e7373" +checksum = "7303ef2c05cd654186cb250d29049a24840ca25d2747c25c0381c8d9e2f582e8" dependencies = [ "proc-macro2", "quote", @@ -7427,15 +7479,15 @@ dependencies = [ ] [[package]] -name = "synstructure" -version = "0.12.6" +name = "syn-solidity" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f36bdaa60a83aca3921b5259d5400cbf5e90fc51931376a9bd4a0eb79aa7210f" +checksum = "1b8a5a633f1172a0c80b1516a988e7e8efa7ce9cededf56590f54e593e4513b3" dependencies = [ + "paste", "proc-macro2", "quote", - "syn 1.0.109", - "unicode-xid", + "syn 2.0.37", ] [[package]] @@ -7446,7 +7498,7 @@ checksum = "285ba80e733fac80aa4270fbcdf83772a79b80aa35c97075320abfee4a915b06" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", "unicode-xid", ] @@ -7463,17 +7515,17 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "cb94d2f3cc536af71caac6b6fcebf65860b347e7ce0cc9ebe8f70d3e521054ef" dependencies = [ "cfg-if", - "fastrand 2.0.0", + "fastrand 2.0.1", "redox_syscall 0.3.5", - "rustix 0.38.13", + "rustix 0.38.14", "windows-sys 0.48.0", ] [[package]] name = "termcolor" -version = "1.2.0" +version = "1.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "be55cf8942feac5c765c2c993422806843c9a9a45d4d5c407ad6dd2ea95eb9b6" +checksum = "6093bad37da69aab9d123a8091e4be0aa4a03e4d601ec641c327398315f62b64" dependencies = [ "winapi-util", ] @@ -7522,7 +7574,7 @@ dependencies = [ "proc-macro2", "quote", "subprocess", - "syn 2.0.36", + "syn 2.0.37", "test-fuzz-internal", "toolchain_find", ] @@ -7549,22 +7601,22 @@ checksum = "aac81b6fd6beb5884b0cf3321b8117e6e5d47ecb6fc89f414cfdcca8b2fe2dd8" [[package]] name = "thiserror" -version = "1.0.48" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9d6d7a740b8a666a7e828dd00da9c0dc290dff53154ea77ac109281de90589b7" +checksum = "1177e8c6d7ede7afde3585fd2513e611227efd6481bd78d2e82ba1ce16557ed4" dependencies = [ "thiserror-impl", ] [[package]] name = "thiserror-impl" -version = "1.0.48" +version = "1.0.49" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49922ecae66cc8a249b77e68d1d0623c1b2c514f0060c27cdc68bd62a1219d35" +checksum = "10712f02019e9288794769fba95cd6847df9874d49d871d062172f9dd41bc4cc" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -7588,9 +7640,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.28" +version = "0.3.29" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "17f6bb557fd245c28e6411aa56b6403c689ad95061f50e4be16c274e70a17e48" +checksum = "426f806f4089c493dcac0d24c29c01e2c38baf8e30f1b716ee37e83d200b18fe" dependencies = [ "deranged", "itoa", @@ -7603,15 +7655,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.1" +version = "0.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7300fbefb4dadc1af235a9cef3737cea692a9d97e1b9cbcd4ebdae6f8868e6fb" +checksum = 
"ef927ca75afb808a4d64dd374f00a2adf8d0fcff8e7b184af886c3c87ec4a3f3" [[package]] name = "time-macros" -version = "0.2.14" +version = "0.2.15" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a942f44339478ef67935ab2bbaec2fb0322496cf3cbe84b261e06ac3814c572" +checksum = "4ad70d68dba9e1f8aceda7aa6711965dfec1cac869f311a51bd08b3a2ccbce20" dependencies = [ "time-core", ] @@ -7627,11 +7679,12 @@ dependencies = [ [[package]] name = "tinystr" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7ac3f5b6856e931e15e07b478e98c8045239829a65f9156d4fa7e7788197a5ef" +checksum = "8faa444297615a4e020acb64146b0603c9c395c03a97c17fd9028816d3b4d63e" dependencies = [ "displaydoc", + "serde", "zerovec", ] @@ -7687,7 +7740,7 @@ checksum = "630bdcf245f78637c13ec01ffae6187cca34625e8c63150d424b59e55af2675e" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -7714,9 +7767,9 @@ dependencies = [ [[package]] name = "tokio-tungstenite" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b2dbec703c26b00d74844519606ef15d09a7d6857860f84ad223dec002ddea2" +checksum = "212d5dcb2a1ce06d81107c3d0ffa3121fe974b73f068c8282cb1c32328113b6c" dependencies = [ "futures-util", "log", @@ -7726,9 +7779,9 @@ dependencies = [ [[package]] name = "tokio-util" -version = "0.7.8" +version = "0.7.9" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "806fe8c2c87eccc8b3267cbae29ed3ab2d0bd37fca70ab622e46aaa9375ddb7d" +checksum = "1d68074620f57a0b21594d9735eb2e98ab38b17f80d3fcb189fca266771ca60d" dependencies = [ "bytes", "futures-core", @@ -7776,7 +7829,7 @@ version = "0.19.15" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1b5bb770da30e5cbfde35a2d7b9b8a2c4b8ef89548a7a6aeab5c9a576e3e7421" dependencies = [ - "indexmap 2.0.0", + "indexmap 2.0.1", "serde", "serde_spanned", "toml_datetime", @@ -7792,7 +7845,7 @@ dependencies = [ "home", "once_cell", "regex", - "semver 1.0.18", + "semver 1.0.19", "walkdir", ] @@ -7891,7 +7944,7 @@ checksum = "5f4f31f56159e98206da9efd823404b79b6ef3143b4a7ab76e67b1751b25a4ab" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] @@ -7949,7 +8002,7 @@ dependencies = [ "once_cell", "regex", "sharded-slab", - "smallvec 1.11.0", + "smallvec", "thread_local", "tracing", "tracing-core", @@ -8004,7 +8057,7 @@ dependencies = [ "lazy_static", "log", "rand 0.8.5", - "smallvec 1.11.0", + "smallvec", "thiserror", "tinyvec", "tokio", @@ -8028,7 +8081,7 @@ dependencies = [ "ipnet", "once_cell", "rand 0.8.5", - "smallvec 1.11.0", + "smallvec", "thiserror", "tinyvec", "tokio", @@ -8050,7 +8103,7 @@ dependencies = [ "parking_lot 0.12.1", "rand 0.8.5", "resolv-conf", - "smallvec 1.11.0", + "smallvec", "thiserror", "tokio", "tracing", @@ -8093,9 +8146,9 @@ dependencies = [ [[package]] name = "tungstenite" -version = "0.20.0" +version = "0.20.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e862a1c4128df0112ab625f55cd5c934bcb4312ba80b39ae4b4835a3fd58e649" +checksum = "9e3dac10fd62eaf6617d3a904ae222845979aec67c615d1c842b4002c7666fb9" dependencies = [ "byteorder", "bytes", @@ -8178,9 +8231,9 @@ checksum = "1dd624098567895118886609431a7c3b8f516e41d30e0643f03d94592a147e36" [[package]] name = "unicode-width" -version = "0.1.10" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"c0edd1e5b14653f783770bce4a4dabb4a5108a5370a5f5d8cfe8710c361f6c8b" +checksum = "e51733f11c9c4f72aa0c160008246859e340b00807569a0da0e7a1079b27ba85" [[package]] name = "unicode-xid" @@ -8342,7 +8395,7 @@ dependencies = [ "once_cell", "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", "wasm-bindgen-shared", ] @@ -8376,7 +8429,7 @@ checksum = "54681b18a46765f095758388f2d0cf16eb8d4169b639ab575a8f5693af210c7b" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", "wasm-bindgen-backend", "wasm-bindgen-shared", ] @@ -8412,7 +8465,7 @@ dependencies = [ "either", "home", "once_cell", - "rustix 0.38.13", + "rustix 0.38.14", ] [[package]] @@ -8445,9 +8498,9 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.5" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "70ec6ce85bb158151cae5e5c87f95a8e97d2c0c4b001223f33a334e3ce5de178" +checksum = "f29e6f9198ba0d26b4c9f07dbe6f9ed633e1f3d5b8b414090084349e46a52596" dependencies = [ "winapi", ] @@ -8645,9 +8698,9 @@ checksum = "d1890f4022759daae28ed4fe62859b1236caebfc61ede2f63ed4e695f3f6d936" [[package]] name = "writeable" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "60e49e42bdb1d5dc76f4cd78102f8f0714d32edfa3efb82286eb0f0b1fc0da0f" +checksum = "c0af0c3d13faebf8dda0b5256fa7096a2d5ccb662f7b9f54a40fe201077ab1c2" [[package]] name = "ws_stream_wasm" @@ -8688,9 +8741,9 @@ dependencies = [ [[package]] name = "xml-rs" -version = "0.8.18" +version = "0.8.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bab77e97b50aee93da431f2cee7cd0f43b4d1da3c408042f2d7d164187774f0a" +checksum = "0fcb9cbac069e033553e8bb871be2fbdffcab578eb25bd0f7c508cedc6dcd75a" [[package]] name = "xmltree" @@ -8709,9 +8762,9 @@ checksum = "09041cd90cf85f7f8b2df60c646f853b7f535ce68f85244eb6731cf89fa498ec" [[package]] name = "yoke" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1848075a23a28f9773498ee9a0f2cf58fcbad4f8c0ccf84a210ab33c6ae495de" +checksum = "61e38c508604d6bbbd292dadb3c02559aa7fff6b654a078a36217cad871636e4" dependencies = [ "serde", "stable_deref_trait", @@ -8721,35 +8774,35 @@ dependencies = [ [[package]] name = "yoke-derive" -version = "0.7.1" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "af46c169923ed7516eef0aa32b56d2651b229f57458ebe46b49ddd6efef5b7a2" +checksum = "d5e19fb6ed40002bab5403ffa37e53e0e56f914a4450c8765f533018db1db35f" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", - "synstructure 0.12.6", + "syn 2.0.37", + "synstructure", ] [[package]] name = "zerofrom" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "df54d76c3251de27615dfcce21e636c172dafb2549cd7fd93e21c66f6ca6bea2" +checksum = "655b0814c5c0b19ade497851070c640773304939a6c0fd5f5fb43da0696d05b7" dependencies = [ "zerofrom-derive", ] [[package]] name = "zerofrom-derive" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b4eae7c1f7d4b8eafce526bc0771449ddc2f250881ae31c50d22c032b5a1c499" +checksum = "e6a647510471d372f2e6c2e6b7219e44d8c574d24fdc11c610a61455782f18c3" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", - "synstructure 0.12.6", + "syn 2.0.37", + "synstructure", ] [[package]] @@ -8769,15 +8822,16 @@ checksum = 
"ce36e65b0d2999d2aafac989fb249189a141aee1f53c612c1f37d72631959f69" dependencies = [ "proc-macro2", "quote", - "syn 2.0.36", + "syn 2.0.37", ] [[package]] name = "zerovec" -version = "0.9.4" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "198f54134cd865f437820aa3b43d0ad518af4e68ee161b444cdd15d8e567c8ea" +checksum = "591691014119b87047ead4dcf3e6adfbf73cb7c38ab6980d4f18a32138f35d46" dependencies = [ + "serde", "yoke", "zerofrom", "zerovec-derive", @@ -8785,14 +8839,13 @@ dependencies = [ [[package]] name = "zerovec-derive" -version = "0.9.4" +version = "0.9.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "486558732d5dde10d0f8cb2936507c1bb21bc539d924c949baf5f36a58e51bac" +checksum = "7a4a1638a1934450809c2266a70362bfc96cd90550c073f5b8a55014d1010157" dependencies = [ "proc-macro2", "quote", - "syn 1.0.109", - "synstructure 0.12.6", + "syn 2.0.37", ] [[package]] diff --git a/Cargo.toml b/Cargo.toml index c286e9e7b1..eeba5880ed 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -25,8 +25,6 @@ members = [ "crates/revm", "crates/revm/revm-primitives", "crates/revm/revm-inspectors", - "crates/rlp", - "crates/rlp/rlp-derive", "crates/rpc/ipc", "crates/rpc/rpc", "crates/rpc/rpc-api", @@ -34,6 +32,7 @@ members = [ "crates/rpc/rpc-engine-api", "crates/rpc/rpc-types", "crates/rpc/rpc-testing-util", + "crates/snapshot", "crates/stages", "crates/storage/codecs", "crates/storage/db", @@ -72,6 +71,12 @@ exclude = [".github/"] inherits = "release" debug = true +# Meant for testing - all optimizations, but with debug assertions and overflow +# checks +[profile.hivetests] +inherits = "test" +opt-level = 3 + [profile.maxperf] inherits = "release" lto = "fat" @@ -85,7 +90,6 @@ reth-primitives = { path = "./crates/primitives" } reth-interfaces = { path = "./crates/interfaces" } reth-provider = { path = "./crates/storage/provider" } reth-db = { path = "./crates/storage/db" } -reth-rlp = { path = "./crates/rlp" } reth-rpc-types = { path = "./crates/rpc/rpc-types" } reth-rpc-builder = { path = "./crates/rpc/rpc-builder" } reth-blockchain-tree = { path = "./crates/blockchain-tree" } @@ -103,15 +107,19 @@ reth-eth-wire = { path = "./crates/net/eth-wire" } reth-ecies = { path = "./crates/net/ecies" } # revm -revm = { git = "https://github.com/bluealloy/revm", rev = "516f62cc" } -revm-primitives = { git = "https://github.com/bluealloy/revm", rev = "516f62cc" } +# TODO: Switch back to bluealloy/revm once #724 lands +revm = { git = "https://github.com/Evalir/revm/", branch = "reintroduce-alloy-rebased" } +revm-primitives = { git = "https://github.com/Evalir/revm/", branch = "reintroduce-alloy-rebased" } ## eth +alloy-primitives = "0.4" +alloy-dyn-abi = "0.4" +alloy-sol-types = "0.4" +alloy-rlp = "0.3" ethers-core = { version = "2.0", default-features = false } ethers-providers = { version = "2.0", default-features = false } ethers-signers = { version = "2.0", default-features = false } ethers-middleware = { version = "2.0", default-features = false } - discv5 = { git = "https://github.com/sigp/discv5", rev = "d2e30e04ee62418b9e57278cee907c02b99d5bd1" } igd = { git = "https://github.com/stevefan1999-personal/rust-igd", rev = "c2d1f83eb1612a462962453cb0703bc93258b173" } @@ -133,7 +141,7 @@ strum = "0.25" rayon = "1.7" itertools = "0.11" parking_lot = "0.12" -metrics = "0.21.1" # Needed for `metrics-macro` to resolve the crate using `::metrics` notation +metrics = "0.21.1" # Needed for `metrics-macro` to resolve the crate using `::metrics` notation 
hex-literal = "0.4" ### proc-macros @@ -164,7 +172,7 @@ secp256k1 = { version = "0.27.0", default-features = false, features = [ ] } enr = { version = "0.9", default-features = false, features = ["k256"] } # for eip-4844 -c-kzg = { git = "https://github.com/ethereum/c-kzg-4844", rev = "f5f6f863d475847876a2bd5ee252058d37c3a15d" } +c-kzg = "0.1.1" ## config confy = "0.5" diff --git a/Dockerfile b/Dockerfile index dd35ce2d44..0a68f2e427 100644 --- a/Dockerfile +++ b/Dockerfile @@ -26,12 +26,16 @@ RUN cargo chef cook --profile $BUILD_PROFILE --recipe-path recipe.json COPY . . RUN cargo build --profile $BUILD_PROFILE --locked --bin reth +# ARG is not resolved in COPY so we have to hack around it by copying the +# binary to a temporary location +RUN cp /app/target/$BUILD_PROFILE/reth /app/reth + # Use Ubuntu as the release image FROM ubuntu AS runtime WORKDIR /app # Copy reth over from the build stage -COPY --from=builder /app/target/release/reth /usr/local/bin +COPY --from=builder /app/reth /usr/local/bin # Copy licenses COPY LICENSE-* ./ diff --git a/bin/reth/Cargo.toml b/bin/reth/Cargo.toml index fb8923cf16..05f9279594 100644 --- a/bin/reth/Cargo.toml +++ b/bin/reth/Cargo.toml @@ -38,7 +38,6 @@ reth-rpc = { path = "../../crates/rpc/rpc" } reth-rpc-types = { path = "../../crates/rpc/rpc-types" } reth-rpc-types-compat = { path = "../../crates/rpc/rpc-types-compat" } reth-rpc-api = { path = "../../crates/rpc/rpc-api" } -reth-rlp.workspace = true reth-network = { path = "../../crates/net/network", features = ["serde"] } reth-network-api.workspace = true reth-downloaders = { path = "../../crates/net/downloaders", features = ["test-utils"] } @@ -49,9 +48,11 @@ reth-payload-builder.workspace = true reth-basic-payload-builder = { path = "../../crates/payload/basic" } reth-discv4 = { path = "../../crates/net/discv4" } reth-prune = { path = "../../crates/prune" } +reth-snapshot = { path = "../../crates/snapshot" } reth-trie = { path = "../../crates/trie" } # crypto +alloy-rlp.workspace = true secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recovery"] } # tracing @@ -96,7 +97,6 @@ eyre = "0.6.8" clap = { version = "4", features = ["derive"] } tempfile = { version = "3.3.0" } backon = "0.4" -hex = "0.4" thiserror.workspace = true pretty_assertions = "1.3.0" humantime = "2.1.0" diff --git a/bin/reth/src/args/debug_args.rs b/bin/reth/src/args/debug_args.rs index aecb693b06..ffc5e36a27 100644 --- a/bin/reth/src/args/debug_args.rs +++ b/bin/reth/src/args/debug_args.rs @@ -1,7 +1,7 @@ //! clap [Args](clap::Args) for debugging purposes use clap::Args; -use reth_primitives::{TxHash, H256}; +use reth_primitives::{TxHash, B256}; /// Parameters for debugging purposes #[derive(Debug, Args, PartialEq, Default)] @@ -21,7 +21,7 @@ pub struct DebugArgs { /// /// NOTE: This is a temporary flag #[arg(long = "debug.tip", help_heading = "Debug", conflicts_with = "continuous")] - pub tip: Option, + pub tip: Option, /// Runs the sync only up to the specified block. 
#[arg(long = "debug.max-block", help_heading = "Debug")] diff --git a/bin/reth/src/args/network_args.rs b/bin/reth/src/args/network_args.rs index 21a6584313..afaf8cf423 100644 --- a/bin/reth/src/args/network_args.rs +++ b/bin/reth/src/args/network_args.rs @@ -3,11 +3,12 @@ use crate::version::P2P_CLIENT_VERSION; use clap::Args; use reth_config::Config; +use reth_discv4::{DEFAULT_DISCOVERY_ADDR, DEFAULT_DISCOVERY_PORT}; use reth_net_nat::NatResolver; use reth_network::{HelloMessage, NetworkConfigBuilder}; use reth_primitives::{mainnet_nodes, ChainSpec, NodeRecord}; use secp256k1::SecretKey; -use std::{path::PathBuf, sync::Arc}; +use std::{net::Ipv4Addr, path::PathBuf, sync::Arc}; /// Parameters for configuring the network more granularity via CLI #[derive(Debug, Args)] @@ -57,9 +58,13 @@ pub struct NetworkArgs { #[arg(long, default_value = "any")] pub nat: NatResolver, - /// Network listening port. default: 30303 - #[arg(long = "port", value_name = "PORT")] - pub port: Option, + /// Network listening address + #[arg(long = "addr", value_name = "ADDR", default_value_t = DEFAULT_DISCOVERY_ADDR)] + pub addr: Ipv4Addr, + + /// Network listening port + #[arg(long = "port", value_name = "PORT", default_value_t = DEFAULT_DISCOVERY_PORT)] + pub port: u16, /// Maximum number of outbound requests. default: 100 #[arg(long)] @@ -133,9 +138,13 @@ pub struct DiscoveryArgs { #[arg(long, conflicts_with = "disable_discovery")] pub disable_discv4_discovery: bool, - /// The UDP port to use for P2P discovery/networking. default: 30303 - #[arg(long = "discovery.port", name = "discovery.port", value_name = "DISCOVERY_PORT")] - pub port: Option, + /// The UDP address to use for P2P discovery/networking + #[arg(long = "discovery.addr", name = "discovery.addr", value_name = "DISCOVERY_ADDR", default_value_t = DEFAULT_DISCOVERY_ADDR)] + pub addr: Ipv4Addr, + + /// The UDP port to use for P2P discovery/networking + #[arg(long = "discovery.port", name = "discovery.port", value_name = "DISCOVERY_PORT", default_value_t = DEFAULT_DISCOVERY_PORT)] + pub port: u16, } impl DiscoveryArgs { diff --git a/bin/reth/src/args/pruning_args.rs b/bin/reth/src/args/pruning_args.rs index 550634f0e9..43a449dfd2 100644 --- a/bin/reth/src/args/pruning_args.rs +++ b/bin/reth/src/args/pruning_args.rs @@ -11,8 +11,8 @@ use std::sync::Arc; #[derive(Debug, Args, PartialEq, Default)] #[command(next_help_heading = "Pruning")] pub struct PruningArgs { - /// Run full node. Only the most recent 128 block states are stored. This flag takes - /// priority over pruning configuration in reth.toml. + /// Run full node. Only the most recent [`MINIMUM_PRUNING_DISTANCE`] block states are stored. + /// This flag takes priority over pruning configuration in reth.toml. #[arg(long, default_value_t = false)] pub full: bool, } diff --git a/bin/reth/src/args/rpc_server_args.rs b/bin/reth/src/args/rpc_server_args.rs index 6c09e191aa..180fedaccb 100644 --- a/bin/reth/src/args/rpc_server_args.rs +++ b/bin/reth/src/args/rpc_server_args.rs @@ -24,6 +24,7 @@ use reth_rpc::{ }, JwtError, JwtSecret, }; + use reth_rpc_builder::{ auth::{AuthServerConfig, AuthServerHandle}, constants, @@ -50,7 +51,7 @@ pub(crate) const RPC_DEFAULT_MAX_REQUEST_SIZE_MB: u32 = 15; /// This is only relevant for very large trace responses. pub(crate) const RPC_DEFAULT_MAX_RESPONSE_SIZE_MB: u32 = 115; /// Default number of incoming connections. 
-pub(crate) const RPC_DEFAULT_MAX_CONNECTIONS: u32 = 100; +pub(crate) const RPC_DEFAULT_MAX_CONNECTIONS: u32 = 500; /// Parameters for configuring the rpc more granularity via CLI #[derive(Debug, Args)] diff --git a/bin/reth/src/args/secret_key.rs b/bin/reth/src/args/secret_key.rs index 9dbb83078f..7ff36bfb3b 100644 --- a/bin/reth/src/args/secret_key.rs +++ b/bin/reth/src/args/secret_key.rs @@ -1,6 +1,5 @@ -use hex::encode as hex_encode; use reth_network::config::rng_secret_key; -use reth_primitives::{fs, fs::FsPathError}; +use reth_primitives::{fs, fs::FsPathError, hex::encode as hex_encode}; use secp256k1::{Error as SecretKeyBaseError, SecretKey}; use std::{ io, diff --git a/bin/reth/src/args/utils.rs b/bin/reth/src/args/utils.rs index b243c6a592..3a59556aab 100644 --- a/bin/reth/src/args/utils.rs +++ b/bin/reth/src/args/utils.rs @@ -1,9 +1,9 @@ //! Clap parser utilities use reth_primitives::{ - fs, AllGenesisFormats, BlockHashOrNumber, ChainSpec, DEV, GOERLI, HOLESKY, MAINNET, SEPOLIA, + fs, AllGenesisFormats, BlockHashOrNumber, ChainSpec, B256, DEV, GOERLI, HOLESKY, MAINNET, + SEPOLIA, }; -use reth_revm::primitives::B256 as H256; use std::{ net::{IpAddr, Ipv4Addr, SocketAddr, ToSocketAddrs}, path::PathBuf, @@ -53,7 +53,7 @@ pub fn genesis_value_parser(s: &str) -> eyre::Result, eyre::Error /// Parse [BlockHashOrNumber] pub fn hash_or_num_value_parser(value: &str) -> eyre::Result { - match H256::from_str(value) { + match B256::from_str(value) { Ok(hash) => Ok(BlockHashOrNumber::Hash(hash)), Err(_) => Ok(BlockHashOrNumber::Number(value.parse()?)), } diff --git a/bin/reth/src/chain/import.rs b/bin/reth/src/chain/import.rs index 19d6ec1d56..f60db7e7c8 100644 --- a/bin/reth/src/chain/import.rs +++ b/bin/reth/src/chain/import.rs @@ -18,7 +18,7 @@ use reth_downloaders::{ headers::reverse_headers::ReverseHeadersDownloaderBuilder, test_utils::FileClient, }; use reth_interfaces::consensus::Consensus; -use reth_primitives::{stage::StageId, ChainSpec, H256}; +use reth_primitives::{stage::StageId, ChainSpec, B256}; use reth_stages::{ prelude::*, stages::{ @@ -55,6 +55,7 @@ pub struct ImportCommand { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", @@ -156,7 +157,7 @@ impl ImportCommand { .build(file_client.clone(), consensus.clone(), db.clone()) .into_task(); - let (tip_tx, tip_rx) = watch::channel(H256::zero()); + let (tip_tx, tip_rx) = watch::channel(B256::ZERO); let factory = reth_revm::Factory::new(self.chain.clone()); let max_block = file_client.max_block().unwrap_or(0); diff --git a/bin/reth/src/chain/init.rs b/bin/reth/src/chain/init.rs index e25cbf8a2c..0f8f6b8bb6 100644 --- a/bin/reth/src/chain/init.rs +++ b/bin/reth/src/chain/init.rs @@ -30,6 +30,7 @@ pub struct InitCommand { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/cli/config.rs b/bin/reth/src/cli/config.rs index b87ed6be07..5c10a001a3 100644 --- a/bin/reth/src/cli/config.rs +++ b/bin/reth/src/cli/config.rs @@ -1,7 +1,7 @@ //! Config traits for various node components. 
-use reth_revm::primitives::bytes::BytesMut; -use reth_rlp::Encodable; +use alloy_rlp::Encodable; +use reth_primitives::{Bytes, BytesMut}; use reth_rpc::{eth::gas_oracle::GasPriceOracleConfig, JwtError, JwtSecret}; use reth_rpc_builder::{ auth::AuthServerConfig, error::RpcError, EthConfig, IpcServerBuilder, RpcServerConfig, @@ -72,10 +72,10 @@ pub trait PayloadBuilderConfig { fn extradata(&self) -> Cow<'_, str>; /// Returns the rlp-encoded extradata bytes. - fn extradata_rlp_bytes(&self) -> reth_primitives::bytes::Bytes { + fn extradata_rlp_bytes(&self) -> Bytes { let mut extradata = BytesMut::new(); self.extradata().as_bytes().encode(&mut extradata); - extradata.freeze() + extradata.freeze().into() } /// The interval at which the job should build a new payload after the last. diff --git a/bin/reth/src/cli/mod.rs b/bin/reth/src/cli/mod.rs index a17ab8a30e..bc2b9adb1f 100644 --- a/bin/reth/src/cli/mod.rs +++ b/bin/reth/src/cli/mod.rs @@ -40,6 +40,7 @@ pub struct Cli { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/db/get.rs b/bin/reth/src/db/get.rs index 09534ec00c..4f4c2ba31d 100644 --- a/bin/reth/src/db/get.rs +++ b/bin/reth/src/db/get.rs @@ -81,7 +81,7 @@ mod tests { models::{storage_sharded_key::StorageShardedKey, ShardedKey}, AccountHistory, HashedAccount, Headers, StorageHistory, SyncStage, }; - use reth_primitives::{H160, H256}; + use reth_primitives::{Address, B256}; use std::str::FromStr; /// A helper type to parse Args more easily @@ -104,7 +104,7 @@ mod tests { .args; assert_eq!( args.table_key::().unwrap(), - H256::from_str("0x0ac361fe774b78f8fc4e86c1916930d150865c3fc2e21dca2e58833557608bac") + B256::from_str("0x0ac361fe774b78f8fc4e86c1916930d150865c3fc2e21dca2e58833557608bac") .unwrap() ); } @@ -122,8 +122,8 @@ mod tests { assert_eq!( args.table_key::().unwrap(), StorageShardedKey::new( - H160::from_str("0x01957911244e546ce519fbac6f798958fafadb41").unwrap(), - H256::from_str( + Address::from_str("0x01957911244e546ce519fbac6f798958fafadb41").unwrap(), + B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000000003" ) .unwrap(), @@ -138,7 +138,7 @@ mod tests { assert_eq!( args.table_key::().unwrap(), ShardedKey::new( - H160::from_str("0x4448e1273fd5a8bfdb9ed111e96889c960eee145").unwrap(), + Address::from_str("0x4448e1273fd5a8bfdb9ed111e96889c960eee145").unwrap(), 18446744073709551615 ) ); diff --git a/bin/reth/src/db/list.rs b/bin/reth/src/db/list.rs index 4fc1fb7cad..c3356991e9 100644 --- a/bin/reth/src/db/list.rs +++ b/bin/reth/src/db/list.rs @@ -3,6 +3,7 @@ use crate::utils::{DbTool, ListFilter}; use clap::Parser; use eyre::WrapErr; use reth_db::{database::Database, table::Table, DatabaseEnvRO, TableType, TableViewer, Tables}; +use reth_primitives::hex; use std::cell::RefCell; use tracing::error; diff --git a/bin/reth/src/db/mod.rs b/bin/reth/src/db/mod.rs index 53a24b718a..1c9f15bcd6 100644 --- a/bin/reth/src/db/mod.rs +++ b/bin/reth/src/db/mod.rs @@ -48,6 +48,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/debug_cmd/execution.rs b/bin/reth/src/debug_cmd/execution.rs index afd8385695..5dbdf3d701 100644 --- a/bin/reth/src/debug_cmd/execution.rs +++ b/bin/reth/src/debug_cmd/execution.rs @@ -12,7 +12,6 @@ use futures::{stream::select as stream_select, StreamExt}; use reth_beacon_consensus::BeaconConsensus; use reth_config::Config; use reth_db::{database::Database, init_db, 
DatabaseEnv}; -use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_downloaders::{ bodies::bodies::BodiesDownloaderBuilder, headers::reverse_headers::ReverseHeadersDownloaderBuilder, @@ -23,7 +22,7 @@ use reth_interfaces::{ }; use reth_network::NetworkHandle; use reth_network_api::NetworkInfo; -use reth_primitives::{fs, stage::StageId, BlockHashOrNumber, BlockNumber, ChainSpec, H256}; +use reth_primitives::{fs, stage::StageId, BlockHashOrNumber, BlockNumber, ChainSpec, B256}; use reth_provider::{BlockExecutionWriter, ProviderFactory, StageCheckpointReader}; use reth_stages::{ sets::DefaultStages, @@ -35,7 +34,7 @@ use reth_stages::{ }; use reth_tasks::TaskExecutor; use std::{ - net::{Ipv4Addr, SocketAddr, SocketAddrV4}, + net::{SocketAddr, SocketAddrV4}, path::PathBuf, sync::Arc, }; @@ -63,6 +62,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", @@ -112,7 +112,7 @@ impl Command { let stage_conf = &config.stages; - let (tip_tx, tip_rx) = watch::channel(H256::zero()); + let (tip_tx, tip_rx) = watch::channel(B256::ZERO); let factory = reth_revm::Factory::new(self.chain.clone()); let header_mode = HeaderSyncMode::Tip(tip_rx); @@ -166,13 +166,10 @@ impl Command { .network .network_config(config, self.chain.clone(), secret_key, default_peers_path) .with_task_executor(Box::new(task_executor)) - .listener_addr(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::UNSPECIFIED, - self.network.port.unwrap_or(DEFAULT_DISCOVERY_PORT), - ))) + .listener_addr(SocketAddr::V4(SocketAddrV4::new(self.network.addr, self.network.port))) .discovery_addr(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::UNSPECIFIED, - self.network.discovery.port.unwrap_or(DEFAULT_DISCOVERY_PORT), + self.network.discovery.addr, + self.network.discovery.port, ))) .build(ProviderFactory::new(db, self.chain.clone())) .start_network() @@ -186,7 +183,7 @@ impl Command { &self, client: Client, block: BlockNumber, - ) -> eyre::Result { + ) -> eyre::Result { info!(target: "reth::cli", ?block, "Fetching block from the network."); loop { match get_single_header(&client, BlockHashOrNumber::Number(block)).await { diff --git a/bin/reth/src/debug_cmd/in_memory_merkle.rs b/bin/reth/src/debug_cmd/in_memory_merkle.rs index bddece7650..1b338ab9de 100644 --- a/bin/reth/src/debug_cmd/in_memory_merkle.rs +++ b/bin/reth/src/debug_cmd/in_memory_merkle.rs @@ -9,7 +9,6 @@ use backon::{ConstantBuilder, Retryable}; use clap::Parser; use reth_config::Config; use reth_db::{init_db, DatabaseEnv}; -use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_network::NetworkHandle; use reth_network_api::NetworkInfo; use reth_primitives::{fs, stage::StageId, BlockHashOrNumber, ChainSpec}; @@ -21,7 +20,7 @@ use reth_provider::{ use reth_tasks::TaskExecutor; use reth_trie::{hashed_cursor::HashedPostStateCursorFactory, updates::TrieKey, StateRoot}; use std::{ - net::{Ipv4Addr, SocketAddr, SocketAddrV4}, + net::{SocketAddr, SocketAddrV4}, path::PathBuf, sync::Arc, }; @@ -51,6 +50,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", @@ -89,13 +89,10 @@ impl Command { .network .network_config(config, self.chain.clone(), secret_key, default_peers_path) .with_task_executor(Box::new(task_executor)) - .listener_addr(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::UNSPECIFIED, - self.network.port.unwrap_or(DEFAULT_DISCOVERY_PORT), - ))) + .listener_addr(SocketAddr::V4(SocketAddrV4::new(self.network.addr, self.network.port))) 
.discovery_addr(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::UNSPECIFIED, - self.network.discovery.port.unwrap_or(DEFAULT_DISCOVERY_PORT), + self.network.discovery.addr, + self.network.discovery.port, ))) .build(ProviderFactory::new(db, self.chain.clone())) .start_network() diff --git a/bin/reth/src/debug_cmd/merkle.rs b/bin/reth/src/debug_cmd/merkle.rs index 37e41e8fc6..f413713f35 100644 --- a/bin/reth/src/debug_cmd/merkle.rs +++ b/bin/reth/src/debug_cmd/merkle.rs @@ -10,7 +10,6 @@ use clap::Parser; use reth_beacon_consensus::BeaconConsensus; use reth_config::Config; use reth_db::{cursor::DbCursorRO, init_db, tables, transaction::DbTx, DatabaseEnv}; -use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_interfaces::{consensus::Consensus, p2p::full_block::FullBlockClient}; use reth_network::NetworkHandle; use reth_network_api::NetworkInfo; @@ -29,7 +28,7 @@ use reth_stages::{ }; use reth_tasks::TaskExecutor; use std::{ - net::{Ipv4Addr, SocketAddr, SocketAddrV4}, + net::{SocketAddr, SocketAddrV4}, path::PathBuf, sync::Arc, }; @@ -56,6 +55,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", @@ -98,13 +98,10 @@ impl Command { .network .network_config(config, self.chain.clone(), secret_key, default_peers_path) .with_task_executor(Box::new(task_executor)) - .listener_addr(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::UNSPECIFIED, - self.network.port.unwrap_or(DEFAULT_DISCOVERY_PORT), - ))) + .listener_addr(SocketAddr::V4(SocketAddrV4::new(self.network.addr, self.network.port))) .discovery_addr(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::UNSPECIFIED, - self.network.discovery.port.unwrap_or(DEFAULT_DISCOVERY_PORT), + self.network.discovery.addr, + self.network.discovery.port, ))) .build(ProviderFactory::new(db, self.chain.clone())) .start_network() diff --git a/bin/reth/src/init.rs b/bin/reth/src/init.rs index 352c8cf38f..28cf9de033 100644 --- a/bin/reth/src/init.rs +++ b/bin/reth/src/init.rs @@ -6,7 +6,9 @@ use reth_db::{ transaction::{DbTx, DbTxMut}, }; use reth_interfaces::{db::DatabaseError, RethError}; -use reth_primitives::{stage::StageId, Account, Bytecode, ChainSpec, StorageEntry, H256, U256}; +use reth_primitives::{ + stage::StageId, Account, Bytecode, ChainSpec, Receipts, StorageEntry, B256, U256, +}; use reth_provider::{ bundle_state::{BundleStateInit, RevertsInit}, BundleStateWithReceipts, DatabaseProviderRW, HashingWriter, HistoryWriter, OriginalValuesKnown, @@ -26,9 +28,9 @@ pub enum InitDatabaseError { #[error("Genesis hash in the database does not match the specified chainspec: chainspec is {chainspec_hash}, database is {database_hash}")] GenesisHashMismatch { /// Expected genesis hash. - chainspec_hash: H256, + chainspec_hash: B256, /// Actual genesis hash. - database_hash: H256, + database_hash: B256, }, /// Low-level database error. 
@@ -45,7 +47,7 @@ pub enum InitDatabaseError { pub fn init_genesis( db: Arc, chain: Arc, -) -> Result { +) -> Result { let genesis = chain.genesis(); let hash = chain.genesis_hash(); @@ -95,11 +97,11 @@ pub fn insert_genesis_state( ) -> Result<(), InitDatabaseError> { let mut state_init: BundleStateInit = HashMap::new(); let mut reverts_init = HashMap::new(); - let mut contracts: HashMap = HashMap::new(); + let mut contracts: HashMap = HashMap::new(); for (address, account) in &genesis.alloc { let bytecode_hash = if let Some(code) = &account.code { - let bytecode = Bytecode::new_raw(code.0.clone()); + let bytecode = Bytecode::new_raw(code.clone()); let hash = bytecode.hash_slow(); contracts.insert(hash, bytecode); Some(hash) @@ -145,7 +147,7 @@ pub fn insert_genesis_state( state_init, all_reverts_init, contracts.into_iter().collect(), - vec![], + Receipts::new(), 0, ); @@ -287,9 +289,9 @@ mod tests { #[test] fn init_genesis_history() { - let address_with_balance = Address::from_low_u64_be(1); - let address_with_storage = Address::from_low_u64_be(2); - let storage_key = H256::from_low_u64_be(1); + let address_with_balance = Address::with_last_byte(1); + let address_with_storage = Address::with_last_byte(2); + let storage_key = B256::with_last_byte(1); let chain_spec = Arc::new(ChainSpec { chain: Chain::Id(1), genesis: Genesis { @@ -301,7 +303,7 @@ mod tests { ( address_with_storage, GenesisAccount { - storage: Some(HashMap::from([(storage_key, H256::random())])), + storage: Some(HashMap::from([(storage_key, B256::random())])), ..Default::default() }, ), diff --git a/bin/reth/src/lib.rs b/bin/reth/src/lib.rs index 461d6580f2..8c8906d34f 100644 --- a/bin/reth/src/lib.rs +++ b/bin/reth/src/lib.rs @@ -19,7 +19,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/bin/reth/src/node/events.rs b/bin/reth/src/node/events.rs index a89cf101f3..81169dd090 100644 --- a/bin/reth/src/node/events.rs +++ b/bin/reth/src/node/events.rs @@ -34,18 +34,18 @@ struct NodeState { eta: Eta, /// The current checkpoint of the executing stage. current_checkpoint: StageCheckpoint, - /// The latest canonical block added in the consensus engine. - latest_canonical_engine_block: Option, + /// The latest block reached by either pipeline or consensus engine. 
+ latest_block: Option, } impl NodeState { - fn new(network: Option, latest_block_number: Option) -> Self { + fn new(network: Option, latest_block: Option) -> Self { Self { network, current_stage: None, eta: Eta::default(), current_checkpoint: StageCheckpoint::new(0), - latest_canonical_engine_block: latest_block_number, + latest_block, } } @@ -79,6 +79,9 @@ impl NodeState { result: ExecOutput { checkpoint, done }, } => { self.current_checkpoint = checkpoint; + if stage_id.is_finish() { + self.latest_block = Some(checkpoint.block_number); + } self.eta.update(self.current_checkpoint); info!( @@ -124,11 +127,11 @@ impl NodeState { ); } BeaconConsensusEngineEvent::CanonicalBlockAdded(block) => { - self.latest_canonical_engine_block = Some(block.number); - info!(number=block.number, hash=?block.hash, "Block added to canonical chain"); } BeaconConsensusEngineEvent::CanonicalChainCommitted(head, elapsed) => { + self.latest_block = Some(head.number); + info!(number=head.number, hash=?head.hash, ?elapsed, "Canonical chain committed"); } BeaconConsensusEngineEvent::ForkBlockAdded(block) => { @@ -138,18 +141,22 @@ impl NodeState { } fn handle_consensus_layer_health_event(&self, event: ConsensusLayerHealthEvent) { - match event { - ConsensusLayerHealthEvent::NeverSeen => { - warn!("Post-merge network, but never seen beacon client. Please launch one to follow the chain!") - } - ConsensusLayerHealthEvent::HasNotBeenSeenForAWhile(period) => { - warn!(?period, "Post-merge network, but no beacon client seen for a while. Please launch one to follow the chain!") - } - ConsensusLayerHealthEvent::NeverReceivedUpdates => { - warn!("Beacon client online, but never received consensus updates. Please ensure your beacon client is operational to follow the chain!") - } - ConsensusLayerHealthEvent::HaveNotReceivedUpdatesForAWhile(period) => { - warn!(?period, "Beacon client online, but no consensus updates received for a while. Please fix your beacon client to follow the chain!") + // If pipeline is running, it's fine to not receive any messages from the CL. + // So we need to report about CL health only when pipeline is idle. + if self.current_stage.is_none() { + match event { + ConsensusLayerHealthEvent::NeverSeen => { + warn!("Post-merge network, but never seen beacon client. Please launch one to follow the chain!") + } + ConsensusLayerHealthEvent::HasNotBeenSeenForAWhile(period) => { + warn!(?period, "Post-merge network, but no beacon client seen for a while. Please launch one to follow the chain!") + } + ConsensusLayerHealthEvent::NeverReceivedUpdates => { + warn!("Beacon client online, but never received consensus updates. Please ensure your beacon client is operational to follow the chain!") + } + ConsensusLayerHealthEvent::HaveNotReceivedUpdatesForAWhile(period) => { + warn!(?period, "Beacon client online, but no consensus updates received for a while. 
Please fix your beacon client to follow the chain!") + } } } } @@ -266,7 +273,7 @@ where info!( target: "reth::cli", connected_peers = this.state.num_connected_peers(), - latest_block = this.state.latest_canonical_engine_block.unwrap_or(this.state.current_checkpoint.block_number), + latest_block = this.state.latest_block.unwrap_or(this.state.current_checkpoint.block_number), "Status" ); } diff --git a/bin/reth/src/node/mod.rs b/bin/reth/src/node/mod.rs index 68a106d9be..cc6627a07f 100644 --- a/bin/reth/src/node/mod.rs +++ b/bin/reth/src/node/mod.rs @@ -34,7 +34,6 @@ use reth_blockchain_tree::{ }; use reth_config::{config::PruneConfig, Config}; use reth_db::{database::Database, init_db, DatabaseEnv}; -use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_downloaders::{ bodies::bodies::BodiesDownloaderBuilder, headers::reverse_headers::ReverseHeadersDownloaderBuilder, @@ -54,7 +53,7 @@ use reth_primitives::{ constants::eip4844::{LoadKzgSettingsError, MAINNET_KZG_TRUSTED_SETUP}, kzg::KzgSettings, stage::StageId, - BlockHashOrNumber, BlockNumber, ChainSpec, DisplayHardforks, Head, SealedHeader, H256, + BlockHashOrNumber, BlockNumber, ChainSpec, DisplayHardforks, Head, SealedHeader, B256, }; use reth_provider::{ providers::BlockchainProvider, BlockHashReader, BlockReader, CanonStateSubscriptions, @@ -78,7 +77,7 @@ use reth_transaction_pool::{ }; use secp256k1::SecretKey; use std::{ - net::{Ipv4Addr, SocketAddr, SocketAddrV4}, + net::{SocketAddr, SocketAddrV4}, path::PathBuf, sync::Arc, }; @@ -113,6 +112,7 @@ pub struct NodeCommand { /// - mainnet /// - goerli /// - sepolia + /// - holesky /// - dev #[arg( long, @@ -450,6 +450,8 @@ impl NodeCommand { None }; + let (highest_snapshots_tx, highest_snapshots_rx) = watch::channel(None); + let mut hooks = EngineHooks::new(); let pruner_events = if let Some(prune_config) = prune_config { @@ -460,6 +462,7 @@ impl NodeCommand { prune_config.block_interval, prune_config.parts, self.chain.prune_batch_sizes, + highest_snapshots_rx, ); let events = pruner.events(); hooks.add(PruneHook::new(pruner, Box::new(ctx.task_executor.clone()))); @@ -468,6 +471,13 @@ impl NodeCommand { Either::Right(stream::empty()) }; + let _snapshotter = reth_snapshot::Snapshotter::new( + db, + self.chain.clone(), + self.chain.snapshot_block_interval, + highest_snapshots_tx, + ); + // Configure the consensus engine let (beacon_consensus_engine, beacon_engine_handle) = BeaconConsensusEngine::with_channel( client, @@ -705,7 +715,7 @@ impl NodeCommand { &self, db: DB, client: Client, - tip: H256, + tip: B256, ) -> RethResult where DB: Database, @@ -766,20 +776,14 @@ impl NodeCommand { .with_task_executor(Box::new(executor)) .set_head(head) .listener_addr(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::UNSPECIFIED, + self.network.addr, // set discovery port based on instance number - match self.network.port { - Some(port) => port + self.instance - 1, - None => DEFAULT_DISCOVERY_PORT + self.instance - 1, - }, + self.network.port + self.instance - 1, ))) .discovery_addr(SocketAddr::V4(SocketAddrV4::new( - Ipv4Addr::UNSPECIFIED, + self.network.addr, // set discovery port based on instance number - match self.network.port { - Some(port) => port + self.instance - 1, - None => DEFAULT_DISCOVERY_PORT + self.instance - 1, - }, + self.network.port + self.instance - 1, ))) .build(ProviderFactory::new(db, self.chain.clone())) } @@ -811,7 +815,7 @@ impl NodeCommand { builder = builder.with_max_block(max_block) } - let (tip_tx, tip_rx) = watch::channel(H256::zero()); + let (tip_tx, tip_rx) = 
watch::channel(B256::ZERO); use reth_revm_inspectors::stack::InspectorStackConfig; let factory = reth_revm::Factory::new(self.chain.clone()); @@ -943,8 +947,12 @@ async fn run_network_until_shutdown( #[cfg(test)] mod tests { use super::*; + use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_primitives::DEV; - use std::{net::IpAddr, path::Path}; + use std::{ + net::{IpAddr, Ipv4Addr}, + path::Path, + }; #[test] fn parse_help_node_command() { @@ -960,10 +968,31 @@ mod tests { } } + #[test] + fn parse_discovery_addr() { + let cmd = + NodeCommand::<()>::try_parse_from(["reth", "--discovery.addr", "127.0.0.1"]).unwrap(); + assert_eq!(cmd.network.discovery.addr, Ipv4Addr::LOCALHOST); + } + + #[test] + fn parse_addr() { + let cmd = NodeCommand::<()>::try_parse_from([ + "reth", + "--discovery.addr", + "127.0.0.1", + "--addr", + "127.0.0.1", + ]) + .unwrap(); + assert_eq!(cmd.network.discovery.addr, Ipv4Addr::LOCALHOST); + assert_eq!(cmd.network.addr, Ipv4Addr::LOCALHOST); + } + #[test] fn parse_discovery_port() { let cmd = NodeCommand::<()>::try_parse_from(["reth", "--discovery.port", "300"]).unwrap(); - assert_eq!(cmd.network.discovery.port, Some(300)); + assert_eq!(cmd.network.discovery.port, 300); } #[test] @@ -971,8 +1000,8 @@ mod tests { let cmd = NodeCommand::<()>::try_parse_from(["reth", "--discovery.port", "300", "--port", "99"]) .unwrap(); - assert_eq!(cmd.network.discovery.port, Some(300)); - assert_eq!(cmd.network.port, Some(99)); + assert_eq!(cmd.network.discovery.port, 300); + assert_eq!(cmd.network.port, 99); } #[test] @@ -1041,32 +1070,32 @@ mod tests { fn parse_instance() { let mut cmd = NodeCommand::<()>::parse_from(["reth"]); cmd.adjust_instance_ports(); - cmd.network.port = Some(DEFAULT_DISCOVERY_PORT + cmd.instance - 1); + cmd.network.port = DEFAULT_DISCOVERY_PORT + cmd.instance - 1; // check rpc port numbers assert_eq!(cmd.rpc.auth_port, 8551); assert_eq!(cmd.rpc.http_port, 8545); assert_eq!(cmd.rpc.ws_port, 8546); // check network listening port number - assert_eq!(cmd.network.port.unwrap(), 30303); + assert_eq!(cmd.network.port, 30303); let mut cmd = NodeCommand::<()>::parse_from(["reth", "--instance", "2"]); cmd.adjust_instance_ports(); - cmd.network.port = Some(DEFAULT_DISCOVERY_PORT + cmd.instance - 1); + cmd.network.port = DEFAULT_DISCOVERY_PORT + cmd.instance - 1; // check rpc port numbers assert_eq!(cmd.rpc.auth_port, 8651); assert_eq!(cmd.rpc.http_port, 8544); assert_eq!(cmd.rpc.ws_port, 8548); // check network listening port number - assert_eq!(cmd.network.port.unwrap(), 30304); + assert_eq!(cmd.network.port, 30304); let mut cmd = NodeCommand::<()>::parse_from(["reth", "--instance", "3"]); cmd.adjust_instance_ports(); - cmd.network.port = Some(DEFAULT_DISCOVERY_PORT + cmd.instance - 1); + cmd.network.port = DEFAULT_DISCOVERY_PORT + cmd.instance - 1; // check rpc port numbers assert_eq!(cmd.rpc.auth_port, 8751); assert_eq!(cmd.rpc.http_port, 8543); assert_eq!(cmd.rpc.ws_port, 8550); // check network listening port number - assert_eq!(cmd.network.port.unwrap(), 30305); + assert_eq!(cmd.network.port, 30305); } } diff --git a/bin/reth/src/p2p/mod.rs b/bin/reth/src/p2p/mod.rs index cc0010486b..864c62ee41 100644 --- a/bin/reth/src/p2p/mod.rs +++ b/bin/reth/src/p2p/mod.rs @@ -33,6 +33,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/recover/storage_tries.rs b/bin/reth/src/recover/storage_tries.rs index d1e8a87f54..01a00c3d40 100644 --- 
a/bin/reth/src/recover/storage_tries.rs +++ b/bin/reth/src/recover/storage_tries.rs @@ -37,6 +37,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/stage/drop.rs b/bin/reth/src/stage/drop.rs index 073c14c2a0..aaee453541 100644 --- a/bin/reth/src/stage/drop.rs +++ b/bin/reth/src/stage/drop.rs @@ -32,6 +32,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/stage/dump/mod.rs b/bin/reth/src/stage/dump/mod.rs index 792777d1a6..1c4c46feeb 100644 --- a/bin/reth/src/stage/dump/mod.rs +++ b/bin/reth/src/stage/dump/mod.rs @@ -46,6 +46,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/stage/run.rs b/bin/reth/src/stage/run.rs index d06ee8e7b2..3a8e7003ad 100644 --- a/bin/reth/src/stage/run.rs +++ b/bin/reth/src/stage/run.rs @@ -50,6 +50,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/stage/unwind.rs b/bin/reth/src/stage/unwind.rs index d88346bc71..4c0c5a1e2d 100644 --- a/bin/reth/src/stage/unwind.rs +++ b/bin/reth/src/stage/unwind.rs @@ -31,6 +31,7 @@ pub struct Command { /// - mainnet /// - goerli /// - sepolia + /// - holesky #[arg( long, value_name = "CHAIN_OR_PATH", diff --git a/bin/reth/src/utils.rs b/bin/reth/src/utils.rs index 64a798dc9e..57519da642 100644 --- a/bin/reth/src/utils.rs +++ b/bin/reth/src/utils.rs @@ -6,7 +6,7 @@ use reth_consensus_common::validation::validate_block_standalone; use reth_db::{ cursor::DbCursorRO, database::Database, - table::{Table, TableRow}, + table::{Decode, Decompress, Table, TableRow}, transaction::{DbTx, DbTxMut}, DatabaseError, RawTable, TableRawRow, }; @@ -128,16 +128,22 @@ impl<'a, DB: Database> DbTool<'a, DB> { let map_filter = |row: Result, _>| { if let Ok((k, v)) = row { + let (key, value) = (k.into_key(), v.into_value()); + let result = || { if filter.only_count { return None } - Some((k.key().unwrap(), v.value().unwrap())) + Some(( + ::Key::decode(&key).unwrap(), + ::Value::decompress(&value).unwrap(), + )) }; + match &*bmb { Some(searcher) => { - if searcher.find_first_in(v.raw_value()).is_some() || - searcher.find_first_in(k.raw_key()).is_some() + if searcher.find_first_in(&value).is_some() || + searcher.find_first_in(&key).is_some() { hits += 1; return result() diff --git a/book/cli/cli.md b/book/cli/cli.md index 72db454386..304711adfd 100644 --- a/book/cli/cli.md +++ b/book/cli/cli.md @@ -52,6 +52,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/config.md b/book/cli/config.md index e9610f7b04..12d8c83f79 100644 --- a/book/cli/config.md +++ b/book/cli/config.md @@ -23,6 +23,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/db.md b/book/cli/db.md index 5b4de6a57d..30e41d711e 100644 --- a/book/cli/db.md +++ b/book/cli/db.md @@ -39,6 +39,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/debug.md b/book/cli/debug.md index 3824bcea77..b250cb3d23 100644 --- a/book/cli/debug.md +++ b/book/cli/debug.md @@ -34,6 +34,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/import.md b/book/cli/import.md index d295e42f85..e73fb600cb 100644 --- a/book/cli/import.md +++ 
b/book/cli/import.md @@ -31,6 +31,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/init.md b/book/cli/init.md index 1c6d962a75..582c229d9e 100644 --- a/book/cli/init.md +++ b/book/cli/init.md @@ -28,6 +28,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/node.md b/book/cli/node.md index 8d414afa19..38bf646de5 100644 --- a/book/cli/node.md +++ b/book/cli/node.md @@ -31,6 +31,7 @@ Options: - mainnet - goerli - sepolia + - holesky - dev [default: mainnet] @@ -196,7 +197,7 @@ RPC: --rpc-max-connections Maximum number of RPC server connections - [default: 100] + [default: 500] --rpc-max-tracing-requests Maximum number of concurrent tracing requests diff --git a/book/cli/p2p.md b/book/cli/p2p.md index fc83304228..7708928886 100644 --- a/book/cli/p2p.md +++ b/book/cli/p2p.md @@ -25,6 +25,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/recover.md b/book/cli/recover.md index 019b85f428..25db4a2605 100644 --- a/book/cli/recover.md +++ b/book/cli/recover.md @@ -21,6 +21,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/stage.md b/book/cli/stage.md index 665eec0593..31892c9291 100644 --- a/book/cli/stage.md +++ b/book/cli/stage.md @@ -24,6 +24,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/cli/test-vectors.md b/book/cli/test-vectors.md index 87fced2d8e..8164af82ca 100644 --- a/book/cli/test-vectors.md +++ b/book/cli/test-vectors.md @@ -21,6 +21,7 @@ Options: - mainnet - goerli - sepolia + - holesky [default: mainnet] diff --git a/book/run/private-testnet.md b/book/run/private-testnet.md index e6bb168970..1ae0257919 100644 --- a/book/run/private-testnet.md +++ b/book/run/private-testnet.md @@ -2,15 +2,15 @@ For those who need a private testnet to validate functionality or scale with Reth. ## Using Docker locally -This guide uses [Kurtosis' eth2-package](https://github.com/kurtosis-tech/eth2-package) and assumes you have Kurtosis and Docker installed and have Docker already running on your machine. +This guide uses [Kurtosis' ethereum-package](https://github.com/kurtosis-tech/ethereum-package) and assumes you have Kurtosis and Docker installed and have Docker already running on your machine. * Go [here](https://docs.kurtosis.com/install/) to install Kurtosis * Go [here](https://docs.docker.com/get-docker/) to install Docker -The [`eth2-package`](https://github.com/kurtosis-tech/eth2-package) is a [package](https://docs.kurtosis.com/concepts-reference/packages) for a general purpose Ethereum testnet definition used for instantiating private testnets at any scale over Docker or Kubernetes, locally or in the cloud. This guide will go through how to spin up a local private testnet with Reth various CL clients locally. Specifically, you will instantiate a 2-node network over Docker with Reth/Lighthouse and Reth/Teku client combinations. +The [`ethereum-package`](https://github.com/kurtosis-tech/ethereum-package) is a [package](https://docs.kurtosis.com/concepts-reference/packages) for a general purpose Ethereum testnet definition used for instantiating private testnets at any scale over Docker or Kubernetes, locally or in the cloud. This guide will go through how to spin up a local private testnet with Reth and various CL clients. Specifically, you will instantiate a 2-node network over Docker with Reth/Lighthouse and Reth/Teku client combinations.
-To see all possible configurations and flags you can use, including metrics and observability tools (e.g. Grafana, Prometheus, etc), go [here](https://github.com/kurtosis-tech/eth2-package#configuration). +To see all possible configurations and flags you can use, including metrics and observability tools (e.g. Grafana, Prometheus, etc), go [here](https://github.com/kurtosis-tech/ethereum-package#configuration). -Genesis data will be generated using this [genesis-generator](https://github.com/ethpandaops/ethereum-genesis-generator) to be used to bootstrap the EL and CL clients for each node. The end result will be a private testnet with nodes deployed as Docker containers in an ephemeral, isolated environment on your machine called an [enclave](https://docs.kurtosis.com/concepts-reference/enclaves/). Read more about how the `eth2-package` works by going [here](https://github.com/kurtosis-tech/eth2-package/). +Genesis data will be generated using this [genesis-generator](https://github.com/ethpandaops/ethereum-genesis-generator) to be used to bootstrap the EL and CL clients for each node. The end result will be a private testnet with nodes deployed as Docker containers in an ephemeral, isolated environment on your machine called an [enclave](https://docs.kurtosis.com/concepts-reference/enclaves/). Read more about how the `ethereum-package` works by going [here](https://github.com/kurtosis-tech/ethereum-package/). ### Step 1: Define the parameters and shape of your private network First, in your home directory, create a file with the name `network_params.json` with the following contents: @@ -39,7 +39,7 @@ First, in your home directory, create a file with the name `network_params.json` Next, run the following command from your command line: ```bash -kurtosis run github.com/kurtosis-tech/eth2-package "$(cat ~/network_params.json)" +kurtosis run github.com/kurtosis-tech/ethereum-package "$(cat ~/network_params.json)" ``` Kurtosis will spin up an [enclave](https://docs.kurtosis.com/concepts-reference/enclaves) (i.e. an ephemeral, isolated environment) and begin to configure and instantiate the nodes in your network. In the end, Kurtosis will print the services running in your enclave that form your private testnet alongside all the container ports and files that were generated & used to start up the private testnet. Here is a sample output: ```console @@ -96,11 +96,11 @@ Great! You now have a private network with 2 full Ethereum nodes on your local m Kurtosis packages are portable and reproducible, meaning they will work the same way over Docker or Kubernetes, locally or on remote infrastructure. For use cases that require a larger scale, Kurtosis can be deployed on Kubernetes by following these docs [here](https://docs.kurtosis.com/k8s/). ## Running the network with additional services -The [`eth2-package`](https://github.com/kurtosis-tech/eth2-package) comes with many optional flags and arguments you can enable for your private network. Some include: +The [`ethereum-package`](https://github.com/kurtosis-tech/ethereum-package) comes with many optional flags and arguments you can enable for your private network. Some include: - A Grafana + Prometheus instance - A transaction spammer called [`tx-fuzz`](https://github.com/MariusVanDerWijden/tx-fuzz) - [A network metrics collector](https://github.com/dapplion/beacon-metrics-gazer) - Flashbot's `mev-boost` implementation of PBS (to test/simulate MEV workflows) ### Questions?
-Please reach out to the [Kurtosis discord](https://discord.com/invite/6Jjp9c89z9) should you have any questions about how to use the `eth2-package` for your private testnet needs. Thanks! +Please reach out to the [Kurtosis discord](https://discord.com/invite/6Jjp9c89z9) should you have any questions about how to use the `ethereum-package` for your private testnet needs. Thanks! diff --git a/crates/blockchain-tree/src/block_buffer.rs b/crates/blockchain-tree/src/block_buffer.rs index 8d8bbddeaa..1132791159 100644 --- a/crates/blockchain-tree/src/block_buffer.rs +++ b/crates/blockchain-tree/src/block_buffer.rs @@ -215,13 +215,13 @@ impl BlockBuffer { #[cfg(test)] mod tests { - use reth_interfaces::test_utils::generators; - use std::collections::HashMap; - - use reth_interfaces::test_utils::generators::{random_block, Rng}; - use reth_primitives::{BlockHash, BlockNumHash, SealedBlockWithSenders}; - use crate::BlockBuffer; + use reth_interfaces::test_utils::{ + generators, + generators::{random_block, Rng}, + }; + use reth_primitives::{BlockHash, BlockNumHash, SealedBlockWithSenders}; + use std::collections::HashMap; fn create_block(rng: &mut R, number: u64, parent: BlockHash) -> SealedBlockWithSenders { let block = random_block(rng, number, Some(parent), None, None); @@ -231,7 +231,8 @@ mod tests { #[test] fn simple_insertion() { let mut rng = generators::rng(); - let block1 = create_block(&mut rng, 10, BlockHash::random()); + let parent = rng.gen(); + let block1 = create_block(&mut rng, 10, parent); let mut buffer = BlockBuffer::new(3); buffer.insert_block(block1.clone()); @@ -244,11 +245,12 @@ mod tests { fn take_all_chain_of_childrens() { let mut rng = generators::rng(); - let main_parent = BlockNumHash::new(9, BlockHash::random()); + let main_parent = BlockNumHash::new(9, rng.gen()); let block1 = create_block(&mut rng, 10, main_parent.hash); let block2 = create_block(&mut rng, 11, block1.hash); let block3 = create_block(&mut rng, 12, block2.hash); - let block4 = create_block(&mut rng, 14, BlockHash::random()); + let parent4 = rng.gen(); + let block4 = create_block(&mut rng, 14, parent4); let mut buffer = BlockBuffer::new(5); @@ -273,7 +275,7 @@ mod tests { fn take_all_multi_level_childrens() { let mut rng = generators::rng(); - let main_parent = BlockNumHash::new(9, BlockHash::random()); + let main_parent = BlockNumHash::new(9, rng.gen()); let block1 = create_block(&mut rng, 10, main_parent.hash); let block2 = create_block(&mut rng, 11, block1.hash); let block3 = create_block(&mut rng, 11, block1.hash); @@ -307,7 +309,7 @@ mod tests { fn take_self_with_childs() { let mut rng = generators::rng(); - let main_parent = BlockNumHash::new(9, BlockHash::random()); + let main_parent = BlockNumHash::new(9, rng.gen()); let block1 = create_block(&mut rng, 10, main_parent.hash); let block2 = create_block(&mut rng, 11, block1.hash); let block3 = create_block(&mut rng, 11, block1.hash); @@ -341,11 +343,12 @@ mod tests { fn clean_chain_of_children() { let mut rng = generators::rng(); - let main_parent = BlockNumHash::new(9, BlockHash::random()); + let main_parent = BlockNumHash::new(9, rng.gen()); let block1 = create_block(&mut rng, 10, main_parent.hash); let block2 = create_block(&mut rng, 11, block1.hash); let block3 = create_block(&mut rng, 12, block2.hash); - let block4 = create_block(&mut rng, 14, BlockHash::random()); + let parent4 = rng.gen(); + let block4 = create_block(&mut rng, 14, parent4); let mut buffer = BlockBuffer::new(5); @@ -363,7 +366,7 @@ mod tests { fn 
clean_all_multi_level_childrens() { let mut rng = generators::rng(); - let main_parent = BlockNumHash::new(9, BlockHash::random()); + let main_parent = BlockNumHash::new(9, rng.gen()); let block1 = create_block(&mut rng, 10, main_parent.hash); let block2 = create_block(&mut rng, 11, block1.hash); let block3 = create_block(&mut rng, 11, block1.hash); @@ -385,14 +388,17 @@ mod tests { fn clean_multi_chains() { let mut rng = generators::rng(); - let main_parent = BlockNumHash::new(9, BlockHash::random()); + let main_parent = BlockNumHash::new(9, rng.gen()); let block1 = create_block(&mut rng, 10, main_parent.hash); let block1a = create_block(&mut rng, 10, main_parent.hash); let block2 = create_block(&mut rng, 11, block1.hash); let block2a = create_block(&mut rng, 11, block1.hash); - let random_block1 = create_block(&mut rng, 10, BlockHash::random()); - let random_block2 = create_block(&mut rng, 11, BlockHash::random()); - let random_block3 = create_block(&mut rng, 12, BlockHash::random()); + let random_parent1 = rng.gen(); + let random_block1 = create_block(&mut rng, 10, random_parent1); + let random_parent2 = rng.gen(); + let random_block2 = create_block(&mut rng, 11, random_parent2); + let random_parent3 = rng.gen(); + let random_block3 = create_block(&mut rng, 12, random_parent3); let mut buffer = BlockBuffer::new(10); @@ -436,11 +442,12 @@ mod tests { fn evict_with_gap() { let mut rng = generators::rng(); - let main_parent = BlockNumHash::new(9, BlockHash::random()); + let main_parent = BlockNumHash::new(9, rng.gen()); let block1 = create_block(&mut rng, 10, main_parent.hash); let block2 = create_block(&mut rng, 11, block1.hash); let block3 = create_block(&mut rng, 12, block2.hash); - let block4 = create_block(&mut rng, 13, BlockHash::random()); + let parent4 = rng.gen(); + let block4 = create_block(&mut rng, 13, parent4); let mut buffer = BlockBuffer::new(3); @@ -472,11 +479,12 @@ mod tests { fn simple_eviction() { let mut rng = generators::rng(); - let main_parent = BlockNumHash::new(9, BlockHash::random()); + let main_parent = BlockNumHash::new(9, rng.gen()); let block1 = create_block(&mut rng, 10, main_parent.hash); let block2 = create_block(&mut rng, 11, block1.hash); let block3 = create_block(&mut rng, 12, block2.hash); - let block4 = create_block(&mut rng, 13, BlockHash::random()); + let parent4 = rng.gen(); + let block4 = create_block(&mut rng, 13, parent4); let mut buffer = BlockBuffer::new(3); diff --git a/crates/blockchain-tree/src/blockchain_tree.rs b/crates/blockchain-tree/src/blockchain_tree.rs index b28004f3f1..a5bcb03c85 100644 --- a/crates/blockchain-tree/src/blockchain_tree.rs +++ b/crates/blockchain-tree/src/blockchain_tree.rs @@ -946,7 +946,7 @@ impl BlockchainTree } let Some(chain_id) = self.block_indices.get_blocks_chain_id(block_hash) else { - warn!(target: "blockchain_tree", ?block_hash, "Block hash not found in block indices"); + debug!(target: "blockchain_tree", ?block_hash, "Block hash not found in block indices"); return Err(CanonicalError::from(BlockchainTreeError::BlockHashNotFoundInChain { block_hash: *block_hash, }) @@ -1178,7 +1178,7 @@ mod tests { use reth_db::{test_utils::create_test_rw_db, transaction::DbTxMut, DatabaseEnv}; use reth_interfaces::test_utils::TestConsensus; use reth_primitives::{ - proofs::EMPTY_ROOT, stage::StageCheckpoint, ChainSpecBuilder, H256, MAINNET, + proofs::EMPTY_ROOT, stage::StageCheckpoint, ChainSpecBuilder, B256, MAINNET, }; use reth_provider::{ test_utils::{blocks::BlockChainTestData, TestExecutorFactory}, @@ -1218,7 +1218,7 
@@ mod tests { for i in 0..10 { provider .tx_ref() - .put::(i, H256([100 + i as u8; 32])) + .put::(i, B256::new([100 + i as u8; 32])) .unwrap(); } provider @@ -1321,10 +1321,10 @@ mod tests { BlockchainTree::new(externals, sender, config, None).expect("failed to create tree"); // genesis block 10 is already canonical - tree.make_canonical(&H256::zero()).unwrap(); + tree.make_canonical(&B256::ZERO).unwrap(); // make sure is_block_hash_canonical returns true for genesis block - tree.is_block_hash_canonical(&H256::zero()).unwrap(); + tree.is_block_hash_canonical(&B256::ZERO).unwrap(); // make genesis block 10 as finalized tree.finalize_block(10); @@ -1356,7 +1356,7 @@ mod tests { ); // check if random block is known - let old_block = BlockNumHash::new(1, H256([32; 32])); + let old_block = BlockNumHash::new(1, B256::new([32; 32])); let err = BlockchainTreeError::PendingBlockIsFinalized { last_finalized: 10 }; assert_eq!(tree.is_block_known(old_block).unwrap_err().as_tree_error(), Some(err)); @@ -1424,10 +1424,10 @@ mod tests { /**** INSERT SIDE BLOCKS *** */ let mut block1a = block1.clone(); - let block1a_hash = H256([0x33; 32]); + let block1a_hash = B256::new([0x33; 32]); block1a.hash = block1a_hash; let mut block2a = block2.clone(); - let block2a_hash = H256([0x34; 32]); + let block2a_hash = B256::new([0x34; 32]); block2a.hash = block2a_hash; // reinsert two blocks that point to canonical chain @@ -1627,8 +1627,8 @@ mod tests { // insert unconnected block2b let mut block2b = block2a.clone(); - block2b.hash = H256([0x99; 32]); - block2b.parent_hash = H256([0x88; 32]); + block2b.hash = B256::new([0x99; 32]); + block2b.parent_hash = B256::new([0x88; 32]); assert_eq!( tree.insert_block(block2b.clone()).unwrap(), diff --git a/crates/blockchain-tree/src/lib.rs b/crates/blockchain-tree/src/lib.rs index 9c370a5e50..919a69f424 100644 --- a/crates/blockchain-tree/src/lib.rs +++ b/crates/blockchain-tree/src/lib.rs @@ -13,7 +13,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -48,3 +48,6 @@ mod canonical_chain; pub mod metrics; pub use block_buffer::BlockBuffer; + +/// Implementation of Tree traits that does nothing. +pub mod noop; diff --git a/crates/blockchain-tree/src/noop.rs b/crates/blockchain-tree/src/noop.rs new file mode 100644 index 0000000000..d9c87e6d71 --- /dev/null +++ b/crates/blockchain-tree/src/noop.rs @@ -0,0 +1,133 @@ +use reth_interfaces::{ + blockchain_tree::{ + error::{BlockchainTreeError, InsertBlockError}, + BlockchainTreeEngine, BlockchainTreeViewer, CanonicalOutcome, InsertPayloadOk, + }, + RethResult, +}; +use reth_primitives::{ + BlockHash, BlockNumHash, BlockNumber, Receipt, SealedBlock, SealedBlockWithSenders, + SealedHeader, +}; +use reth_provider::{ + BlockchainTreePendingStateProvider, BundleStateDataProvider, CanonStateNotificationSender, + CanonStateNotifications, CanonStateSubscriptions, +}; +use std::collections::{BTreeMap, HashSet}; + +/// A BlockchainTree that does nothing. +/// +/// Caution: this is only intended for testing purposes, or for wiring components together. 
+#[derive(Debug, Clone, Default)] +#[non_exhaustive] +pub struct NoopBlockchainTree {} + +impl BlockchainTreeEngine for NoopBlockchainTree { + fn buffer_block(&self, _block: SealedBlockWithSenders) -> Result<(), InsertBlockError> { + Ok(()) + } + + fn insert_block( + &self, + block: SealedBlockWithSenders, + ) -> Result { + Err(InsertBlockError::tree_error( + BlockchainTreeError::BlockHashNotFoundInChain { block_hash: block.hash }, + block.block, + )) + } + + fn finalize_block(&self, _finalized_block: BlockNumber) {} + + fn connect_buffered_blocks_to_canonical_hashes_and_finalize( + &self, + _last_finalized_block: BlockNumber, + ) -> RethResult<()> { + Ok(()) + } + + fn connect_buffered_blocks_to_canonical_hashes(&self) -> RethResult<()> { + Ok(()) + } + + fn make_canonical(&self, block_hash: &BlockHash) -> RethResult { + Err(BlockchainTreeError::BlockHashNotFoundInChain { block_hash: *block_hash }.into()) + } + + fn unwind(&self, _unwind_to: BlockNumber) -> RethResult<()> { + Ok(()) + } +} + +impl BlockchainTreeViewer for NoopBlockchainTree { + fn blocks(&self) -> BTreeMap> { + Default::default() + } + + fn header_by_hash(&self, _hash: BlockHash) -> Option { + None + } + + fn block_by_hash(&self, _hash: BlockHash) -> Option { + None + } + + fn buffered_block_by_hash(&self, _block_hash: BlockHash) -> Option { + None + } + + fn buffered_header_by_hash(&self, _block_hash: BlockHash) -> Option { + None + } + + fn canonical_blocks(&self) -> BTreeMap { + Default::default() + } + + fn find_canonical_ancestor(&self, _parent_hash: BlockHash) -> Option { + None + } + + fn is_canonical(&self, block_hash: BlockHash) -> RethResult { + Err(BlockchainTreeError::BlockHashNotFoundInChain { block_hash }.into()) + } + + fn lowest_buffered_ancestor(&self, _hash: BlockHash) -> Option { + None + } + + fn canonical_tip(&self) -> BlockNumHash { + Default::default() + } + + fn pending_blocks(&self) -> (BlockNumber, Vec) { + (0, vec![]) + } + + fn pending_block_num_hash(&self) -> Option { + None + } + + fn pending_block_and_receipts(&self) -> Option<(SealedBlock, Vec)> { + None + } + + fn receipts_by_block_hash(&self, _block_hash: BlockHash) -> Option> { + None + } +} + +impl BlockchainTreePendingStateProvider for NoopBlockchainTree { + fn find_pending_state_provider( + &self, + _block_hash: BlockHash, + ) -> Option> { + None + } +} + +impl CanonStateSubscriptions for NoopBlockchainTree { + fn subscribe_to_canonical_state(&self) -> CanonStateNotifications { + CanonStateNotificationSender::new(1).subscribe() + } +} diff --git a/crates/blockchain-tree/src/shareable.rs b/crates/blockchain-tree/src/shareable.rs index a3f7505d93..d1b777464c 100644 --- a/crates/blockchain-tree/src/shareable.rs +++ b/crates/blockchain-tree/src/shareable.rs @@ -149,6 +149,11 @@ impl BlockchainTreeViewer None } + fn is_canonical(&self, hash: BlockHash) -> RethResult { + trace!(target: "blockchain_tree", ?hash, "Checking if block is canonical"); + self.tree.read().is_block_hash_canonical(&hash) + } + fn lowest_buffered_ancestor(&self, hash: BlockHash) -> Option { trace!(target: "blockchain_tree", ?hash, "Returning lowest buffered ancestor"); self.tree.read().lowest_buffered_ancestor(&hash).cloned() @@ -159,11 +164,6 @@ impl BlockchainTreeViewer self.tree.read().block_indices().canonical_tip() } - fn is_canonical(&self, hash: BlockHash) -> RethResult { - trace!(target: "blockchain_tree", ?hash, "Checking if block is canonical"); - self.tree.read().is_block_hash_canonical(&hash) - } - fn pending_blocks(&self) -> (BlockNumber, Vec) { 
trace!(target: "blockchain_tree", "Returning all pending blocks"); self.tree.read().block_indices().pending_blocks() diff --git a/crates/config/src/lib.rs b/crates/config/src/lib.rs index 5186e309de..643c476cd9 100644 --- a/crates/config/src/lib.rs +++ b/crates/config/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/consensus/auto-seal/src/client.rs b/crates/consensus/auto-seal/src/client.rs index 0658cc969a..7fa13a860d 100644 --- a/crates/consensus/auto-seal/src/client.rs +++ b/crates/consensus/auto-seal/src/client.rs @@ -7,7 +7,7 @@ use reth_interfaces::p2p::{ priority::Priority, }; use reth_primitives::{ - BlockBody, BlockHashOrNumber, Header, HeadersDirection, PeerId, WithPeerId, H256, + BlockBody, BlockHashOrNumber, Header, HeadersDirection, PeerId, WithPeerId, B256, }; use std::fmt::Debug; use tracing::{trace, warn}; @@ -67,7 +67,7 @@ impl AutoSealClient { headers } - async fn fetch_bodies(&self, hashes: Vec) -> Vec { + async fn fetch_bodies(&self, hashes: Vec) -> Vec { trace!(target: "consensus::auto", ?hashes, "received bodies request"); let storage = self.storage.read().await; let mut bodies = Vec::new(); @@ -106,7 +106,7 @@ impl BodiesClient for AutoSealClient { fn get_block_bodies_with_priority( &self, - hashes: Vec, + hashes: Vec, _priority: Priority, ) -> Self::Output { let this = self.clone(); diff --git a/crates/consensus/auto-seal/src/lib.rs b/crates/consensus/auto-seal/src/lib.rs index c2cc8bdfb1..29074cc927 100644 --- a/crates/consensus/auto-seal/src/lib.rs +++ b/crates/consensus/auto-seal/src/lib.rs @@ -10,7 +10,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -24,7 +24,7 @@ use reth_interfaces::{ use reth_primitives::{ constants::{EMPTY_RECEIPTS, EMPTY_TRANSACTIONS, ETHEREUM_BLOCK_GAS_LIMIT}, proofs, Address, Block, BlockBody, BlockHash, BlockHashOrNumber, BlockNumber, Bloom, ChainSpec, - Header, ReceiptWithBloom, SealedBlock, SealedHeader, TransactionSigned, EMPTY_OMMER_ROOT, H256, + Header, ReceiptWithBloom, SealedBlock, SealedHeader, TransactionSigned, B256, EMPTY_OMMER_ROOT, U256, }; use reth_provider::{ @@ -207,7 +207,7 @@ pub(crate) struct StorageInner { /// Tracks best block pub(crate) best_block: u64, /// Tracks hash of best block - pub(crate) best_hash: H256, + pub(crate) best_hash: B256, /// The total difficulty of the chain until this block pub(crate) total_difficulty: U256, } @@ -340,7 +340,7 @@ impl StorageInner { .map(|r| (*r).clone().expect("receipts have not been pruned").into()) .collect::>(); header.logs_bloom = - receipts_with_bloom.iter().fold(Bloom::zero(), |bloom, r| bloom | r.bloom); + receipts_with_bloom.iter().fold(Bloom::ZERO, |bloom, r| bloom | r.bloom); 
proofs::calculate_receipt_root(&receipts_with_bloom) }; diff --git a/crates/consensus/beacon/Cargo.toml b/crates/consensus/beacon/Cargo.toml index 9c552cde15..5e72b1b783 100644 --- a/crates/consensus/beacon/Cargo.toml +++ b/crates/consensus/beacon/Cargo.toml @@ -19,6 +19,7 @@ reth-rpc-types.workspace = true reth-tasks.workspace = true reth-payload-builder.workspace = true reth-prune = { path = "../../prune" } +reth-snapshot = { path = "../../snapshot" } reth-rpc-types-compat.workspace = true # async tokio = { workspace = true, features = ["sync"] } diff --git a/crates/consensus/beacon/src/engine/forkchoice.rs b/crates/consensus/beacon/src/engine/forkchoice.rs index f2858d9417..bab7593357 100644 --- a/crates/consensus/beacon/src/engine/forkchoice.rs +++ b/crates/consensus/beacon/src/engine/forkchoice.rs @@ -1,4 +1,4 @@ -use reth_primitives::H256; +use reth_primitives::B256; use reth_rpc_types::engine::{ForkchoiceState, PayloadStatusEnum}; /// The struct that keeps track of the received forkchoice state and their status. @@ -65,13 +65,13 @@ impl ForkchoiceStateTracker { /// Returns the last valid head hash. #[allow(unused)] - pub(crate) fn last_valid_head(&self) -> Option { + pub(crate) fn last_valid_head(&self) -> Option { self.last_valid.as_ref().map(|s| s.head_block_hash) } /// Returns the head hash of the latest received FCU to which we need to sync. #[allow(unused)] - pub(crate) fn sync_target(&self) -> Option { + pub(crate) fn sync_target(&self) -> Option { self.last_syncing.as_ref().map(|s| s.head_block_hash) } @@ -141,14 +141,14 @@ impl From for ForkchoiceStatus { /// A helper type to check represent hashes of a [ForkchoiceState] #[derive(Clone, Copy, Debug, PartialEq, Eq)] pub(crate) enum ForkchoiceStateHash { - Head(H256), - Safe(H256), - Finalized(H256), + Head(B256), + Safe(B256), + Finalized(B256), } impl ForkchoiceStateHash { /// Tries to find a matching hash in the given [ForkchoiceState]. - pub(crate) fn find(state: &ForkchoiceState, hash: H256) -> Option { + pub(crate) fn find(state: &ForkchoiceState, hash: B256) -> Option { if state.head_block_hash == hash { Some(ForkchoiceStateHash::Head(hash)) } else if state.safe_block_hash == hash { @@ -166,8 +166,8 @@ impl ForkchoiceStateHash { } } -impl AsRef for ForkchoiceStateHash { - fn as_ref(&self) -> &H256 { +impl AsRef for ForkchoiceStateHash { + fn as_ref(&self) -> &B256 { match self { ForkchoiceStateHash::Head(h) => h, ForkchoiceStateHash::Safe(h) => h, diff --git a/crates/consensus/beacon/src/engine/hooks/controller.rs b/crates/consensus/beacon/src/engine/hooks/controller.rs index 30c0a96977..1848211a3d 100644 --- a/crates/consensus/beacon/src/engine/hooks/controller.rs +++ b/crates/consensus/beacon/src/engine/hooks/controller.rs @@ -54,7 +54,7 @@ impl EngineHooksController { ) -> Poll> { let Some(mut hook) = self.running_hook_with_db_write.take() else { return Poll::Pending }; - match hook.poll(cx, args) { + match hook.poll(cx, args)? { Poll::Ready((event, action)) => { let result = PolledHook { event, action, db_access_level: hook.db_access_level() }; @@ -109,7 +109,7 @@ impl EngineHooksController { return Poll::Pending } - if let Poll::Ready((event, action)) = hook.poll(cx, args) { + if let Poll::Ready((event, action)) = hook.poll(cx, args)? 
{ let result = PolledHook { event, action, db_access_level: hook.db_access_level() }; debug!( diff --git a/crates/consensus/beacon/src/engine/hooks/mod.rs b/crates/consensus/beacon/src/engine/hooks/mod.rs index 3185fcb3b2..a4e4feab6f 100644 --- a/crates/consensus/beacon/src/engine/hooks/mod.rs +++ b/crates/consensus/beacon/src/engine/hooks/mod.rs @@ -1,4 +1,4 @@ -use reth_interfaces::RethError; +use reth_interfaces::{RethError, RethResult}; use reth_primitives::BlockNumber; use std::{ fmt, @@ -11,6 +11,9 @@ pub(crate) use controller::{EngineHooksController, PolledHook}; mod prune; pub use prune::PruneHook; +mod snapshot; +pub use snapshot::SnapshotHook; + /// Collection of [engine hooks][`EngineHook`]. #[derive(Default)] pub struct EngineHooks { @@ -47,7 +50,7 @@ pub trait EngineHook: Send + Sync + 'static { &mut self, cx: &mut Context<'_>, ctx: EngineContext, - ) -> Poll<(EngineHookEvent, Option)>; + ) -> Poll)>>; /// Returns [db access level][`EngineHookDBAccessLevel`] the hook needs. fn db_access_level(&self) -> EngineHookDBAccessLevel; @@ -58,6 +61,8 @@ pub trait EngineHook: Send + Sync + 'static { pub struct EngineContext { /// Tip block number. pub tip_block_number: BlockNumber, + /// Finalized block number, if known. + pub finalized_block_number: Option, } /// An event emitted when [hook][`EngineHook`] is polled. diff --git a/crates/consensus/beacon/src/engine/hooks/prune.rs b/crates/consensus/beacon/src/engine/hooks/prune.rs index 35953e71bf..993a16556d 100644 --- a/crates/consensus/beacon/src/engine/hooks/prune.rs +++ b/crates/consensus/beacon/src/engine/hooks/prune.rs @@ -9,7 +9,7 @@ use crate::{ use futures::FutureExt; use metrics::Counter; use reth_db::database::Database; -use reth_interfaces::RethError; +use reth_interfaces::{RethError, RethResult}; use reth_primitives::BlockNumber; use reth_prune::{Pruner, PrunerError, PrunerWithResult}; use reth_tasks::TaskSpawner; @@ -55,7 +55,7 @@ impl PruneHook { fn poll_pruner( &mut self, cx: &mut Context<'_>, - ) -> Poll<(EngineHookEvent, Option)> { + ) -> Poll)>> { let result = match self.pruner_state { PrunerState::Idle(_) => return Poll::Pending, PrunerState::Running(ref mut fut) => { @@ -69,14 +69,7 @@ impl PruneHook { match result { Ok(_) => EngineHookEvent::Finished(Ok(())), - Err(err) => EngineHookEvent::Finished(Err(match err { - PrunerError::PrunePart(_) | PrunerError::InconsistentData(_) => { - EngineHookError::Internal(Box::new(err)) - } - PrunerError::Interface(err) => err.into(), - PrunerError::Database(err) => RethError::Database(err).into(), - PrunerError::Provider(err) => RethError::Provider(err).into(), - })), + Err(err) => EngineHookEvent::Finished(Err(err.into())), } } Err(_) => { @@ -85,14 +78,15 @@ impl PruneHook { } }; - Poll::Ready((event, None)) + Poll::Ready(Ok((event, None))) } /// This will try to spawn the pruner if it is idle: /// 1. Check if pruning is needed through [Pruner::is_pruning_needed]. - /// 2a. If pruning is needed, pass tip block number to the [Pruner::run] and spawn it in a + /// 2. + /// 1. If pruning is needed, pass tip block number to the [Pruner::run] and spawn it in a /// separate task. Set pruner state to [PrunerState::Running]. - /// 2b. If pruning is not needed, set pruner state back to [PrunerState::Idle]. + /// 2. If pruning is not needed, set pruner state back to [PrunerState::Idle]. /// /// If pruner is already running, do nothing. 
fn try_spawn_pruner( @@ -136,11 +130,11 @@ impl EngineHook for PruneHook { &mut self, cx: &mut Context<'_>, ctx: EngineContext, - ) -> Poll<(EngineHookEvent, Option)> { + ) -> Poll)>> { // Try to spawn a pruner match self.try_spawn_pruner(ctx.tip_block_number) { Some((EngineHookEvent::NotReady, _)) => return Poll::Pending, - Some((event, action)) => return Poll::Ready((event, action)), + Some((event, action)) => return Poll::Ready(Ok((event, action))), None => (), } @@ -176,3 +170,16 @@ struct Metrics { /// The number of times the pruner was run. runs: Counter, } + +impl From for EngineHookError { + fn from(err: PrunerError) -> Self { + match err { + PrunerError::PrunePart(_) | PrunerError::InconsistentData(_) => { + EngineHookError::Internal(Box::new(err)) + } + PrunerError::Interface(err) => err.into(), + PrunerError::Database(err) => RethError::Database(err).into(), + PrunerError::Provider(err) => RethError::Provider(err).into(), + } + } +} diff --git a/crates/consensus/beacon/src/engine/hooks/snapshot.rs b/crates/consensus/beacon/src/engine/hooks/snapshot.rs new file mode 100644 index 0000000000..b0dca56599 --- /dev/null +++ b/crates/consensus/beacon/src/engine/hooks/snapshot.rs @@ -0,0 +1,161 @@ +//! Snapshot hook for the engine implementation. + +use crate::{ + engine::hooks::{ + EngineContext, EngineHook, EngineHookAction, EngineHookError, EngineHookEvent, + }, + hooks::EngineHookDBAccessLevel, +}; +use futures::FutureExt; +use reth_db::database::Database; +use reth_interfaces::{RethError, RethResult}; +use reth_primitives::BlockNumber; +use reth_snapshot::{Snapshotter, SnapshotterError, SnapshotterWithResult}; +use reth_tasks::TaskSpawner; +use std::task::{ready, Context, Poll}; +use tokio::sync::oneshot; + +/// Manages snapshotting under the control of the engine. +/// +/// This type controls the [Snapshotter]. +#[derive(Debug)] +pub struct SnapshotHook { + /// The current state of the snapshotter. + state: SnapshotterState, + /// The type that can spawn the snapshotter task. + task_spawner: Box, +} + +impl SnapshotHook { + /// Create a new instance + pub fn new(snapshotter: Snapshotter, task_spawner: Box) -> Self { + Self { state: SnapshotterState::Idle(Some(snapshotter)), task_spawner } + } + + /// Advances the snapshotter state. + /// + /// This checks for the result in the channel, or returns pending if the snapshotter is idle. + fn poll_snapshotter( + &mut self, + cx: &mut Context<'_>, + ) -> Poll)>> { + let result = match self.state { + SnapshotterState::Idle(_) => return Poll::Pending, + SnapshotterState::Running(ref mut fut) => { + ready!(fut.poll_unpin(cx)) + } + }; + + let event = match result { + Ok((snapshotter, result)) => { + self.state = SnapshotterState::Idle(Some(snapshotter)); + + match result { + Ok(_) => EngineHookEvent::Finished(Ok(())), + Err(err) => EngineHookEvent::Finished(Err(err.into())), + } + } + Err(_) => { + // failed to receive the snapshotter + EngineHookEvent::Finished(Err(EngineHookError::ChannelClosed)) + } + }; + + Poll::Ready(Ok((event, None))) + } + + /// This will try to spawn the snapshotter if it is idle: + /// 1. Check if snapshotting is needed through [Snapshotter::get_snapshot_targets] and then + /// [SnapshotTargets::any](reth_snapshot::SnapshotTargets::any). + /// 2. + /// 1. If snapshotting is needed, pass snapshot request to the [Snapshotter::run] and spawn + /// it in a separate task. Set snapshotter state to [SnapshotterState::Running]. + /// 2. 
If snapshotting is not needed, set snapshotter state back to + /// [SnapshotterState::Idle]. + /// + /// If snapshotter is already running, do nothing. + fn try_spawn_snapshotter( + &mut self, + finalized_block_number: BlockNumber, + ) -> RethResult)>> { + Ok(match &mut self.state { + SnapshotterState::Idle(snapshotter) => { + let Some(mut snapshotter) = snapshotter.take() else { return Ok(None) }; + + let targets = snapshotter.get_snapshot_targets(finalized_block_number)?; + + // Check if the snapshotting of any parts has been requested. + if targets.any() { + let (tx, rx) = oneshot::channel(); + self.task_spawner.spawn_critical_blocking( + "snapshotter task", + Box::pin(async move { + let result = snapshotter.run(targets); + let _ = tx.send((snapshotter, result)); + }), + ); + self.state = SnapshotterState::Running(rx); + + Some((EngineHookEvent::Started, None)) + } else { + self.state = SnapshotterState::Idle(Some(snapshotter)); + Some((EngineHookEvent::NotReady, None)) + } + } + SnapshotterState::Running(_) => None, + }) + } +} + +impl EngineHook for SnapshotHook { + fn name(&self) -> &'static str { + "Snapshot" + } + + fn poll( + &mut self, + cx: &mut Context<'_>, + ctx: EngineContext, + ) -> Poll)>> { + let Some(finalized_block_number) = ctx.finalized_block_number else { + return Poll::Ready(Ok((EngineHookEvent::NotReady, None))) + }; + + // Try to spawn a snapshotter + match self.try_spawn_snapshotter(finalized_block_number)? { + Some((EngineHookEvent::NotReady, _)) => return Poll::Pending, + Some((event, action)) => return Poll::Ready(Ok((event, action))), + None => (), + } + + // Poll snapshotter and check its status + self.poll_snapshotter(cx) + } + + fn db_access_level(&self) -> EngineHookDBAccessLevel { + EngineHookDBAccessLevel::ReadOnly + } +} + +/// The possible snapshotter states within the sync controller. +/// +/// [SnapshotterState::Idle] means that the snapshotter is currently idle. +/// [SnapshotterState::Running] means that the snapshotter is currently running. +#[derive(Debug)] +enum SnapshotterState { + /// Snapshotter is idle. + Idle(Option>), + /// Snapshotter is running and waiting for a response + Running(oneshot::Receiver>), +} + +impl From for EngineHookError { + fn from(err: SnapshotterError) -> Self { + match err { + SnapshotterError::InconsistentData(_) => EngineHookError::Internal(Box::new(err)), + SnapshotterError::Interface(err) => err.into(), + SnapshotterError::Database(err) => RethError::Database(err).into(), + SnapshotterError::Provider(err) => RethError::Provider(err).into(), + } + } +} diff --git a/crates/consensus/beacon/src/engine/invalid_headers.rs b/crates/consensus/beacon/src/engine/invalid_headers.rs index 251f8db18c..aaac2956f8 100644 --- a/crates/consensus/beacon/src/engine/invalid_headers.rs +++ b/crates/consensus/beacon/src/engine/invalid_headers.rs @@ -2,7 +2,7 @@ use reth_metrics::{ metrics::{Counter, Gauge}, Metrics, }; -use reth_primitives::{Header, SealedHeader, H256}; +use reth_primitives::{Header, SealedHeader, B256}; use schnellru::{ByLength, LruMap}; use std::sync::Arc; use tracing::warn; @@ -16,7 +16,7 @@ const INVALID_HEADER_HIT_EVICTION_THRESHOLD: u8 = 128; /// Keeps track of invalid headers. pub(crate) struct InvalidHeaderCache { /// This maps a header hash to a reference to its invalid ancestor. - headers: LruMap, + headers: LruMap, /// Metrics for the cache. 
metrics: InvalidHeaderCacheMetrics, } @@ -26,7 +26,7 @@ impl InvalidHeaderCache { Self { headers: LruMap::new(ByLength::new(max_length)), metrics: Default::default() } } - fn insert_entry(&mut self, hash: H256, header: Arc
<Header>) {
+    fn insert_entry(&mut self, hash: B256, header: Arc<Header>) {
         self.headers.insert(hash, HeaderEntry { header, hit_count: 0 });
     }
 
@@ -34,7 +34,7 @@ impl InvalidHeaderCache {
     ///
     /// If this is called, the hit count for the entry is incremented.
     /// If the hit count exceeds the threshold, the entry is evicted and `None` is returned.
-    pub(crate) fn get(&mut self, hash: &H256) -> Option<Arc<Header>> {
+    pub(crate) fn get(&mut self, hash: &B256) -> Option<Arc<Header>> {
         {
             let entry = self.headers.get(hash)?;
             entry.hit_count += 1;
@@ -51,7 +51,7 @@
     /// Inserts an invalid block into the cache, with a given invalid ancestor.
     pub(crate) fn insert_with_invalid_ancestor(
         &mut self,
-        header_hash: H256,
+        header_hash: B256,
         invalid_ancestor: Arc<Header>
, ) { if self.get(&header_hash).is_none() { diff --git a/crates/consensus/beacon/src/engine/mod.rs b/crates/consensus/beacon/src/engine/mod.rs index 013fe3aac4..db3f5d8fe2 100644 --- a/crates/consensus/beacon/src/engine/mod.rs +++ b/crates/consensus/beacon/src/engine/mod.rs @@ -23,7 +23,7 @@ use reth_interfaces::{ use reth_payload_builder::{PayloadBuilderAttributes, PayloadBuilderHandle}; use reth_primitives::{ constants::EPOCH_SLOTS, listener::EventListeners, stage::StageId, BlockNumHash, BlockNumber, - ChainSpec, Head, Header, SealedBlock, SealedHeader, H256, U256, + ChainSpec, Head, Header, SealedBlock, SealedHeader, B256, U256, }; use reth_provider::{ BlockIdReader, BlockReader, BlockSource, CanonChainTracker, ChainSpecProvider, ProviderError, @@ -230,7 +230,7 @@ where max_block: Option, run_pipeline_continuously: bool, payload_builder: PayloadBuilderHandle, - target: Option, + target: Option, pipeline_run_threshold: u64, hooks: EngineHooks, ) -> RethResult<(Self, BeaconConsensusEngineHandle)> { @@ -274,7 +274,7 @@ where max_block: Option, run_pipeline_continuously: bool, payload_builder: PayloadBuilderHandle, - target: Option, + target: Option, pipeline_run_threshold: u64, to_engine: UnboundedSender, rx: UnboundedReceiver, @@ -328,7 +328,7 @@ where /// # Returns /// /// A target block hash if the pipeline is inconsistent, otherwise `None`. - fn check_pipeline_consistency(&self) -> RethResult> { + fn check_pipeline_consistency(&self) -> RethResult> { // If no target was provided, check if the stages are congruent - check if the // checkpoint of the last stage matches the checkpoint of the first. let first_stage_checkpoint = self @@ -388,7 +388,7 @@ where canonical_tip_num: u64, target_block_number: u64, downloaded_block: Option, - ) -> Option { + ) -> Option { let sync_target_state = self.forkchoice_state_tracker.sync_target_state(); // check if the distance exceeds the threshold for pipeline sync @@ -465,12 +465,12 @@ where /// the above conditions. fn latest_valid_hash_for_invalid_payload( &self, - parent_hash: H256, + parent_hash: B256, insert_err: Option<&InsertBlockErrorKind>, - ) -> Option { + ) -> Option { // check pre merge block error if insert_err.map(|err| err.is_block_pre_merge()).unwrap_or_default() { - return Some(H256::zero()) + return Some(B256::ZERO) } // If this is sent from new payload then the parent hash could be in a side chain, and is @@ -485,7 +485,7 @@ where // we need to check if the parent block is the last POW block, if so then the payload is // the first POS. The engine API spec mandates a zero hash to be returned: if parent_header.difficulty != U256::ZERO { - return Some(H256::zero()) + return Some(B256::ZERO) } // parent is canonical POS block @@ -496,12 +496,12 @@ where /// Prepares the invalid payload response for the given hash, checking the /// database for the parent hash and populating the payload status with the latest valid hash /// according to the engine api spec. - fn prepare_invalid_response(&self, mut parent_hash: H256) -> PayloadStatus { + fn prepare_invalid_response(&self, mut parent_hash: B256) -> PayloadStatus { // Edge case: the `latestValid` field is the zero hash if the parent block is the terminal // PoW block, which we need to identify by looking at the parent's block difficulty if let Ok(Some(parent)) = self.blockchain.header_by_hash_or_number(parent_hash.into()) { if parent.difficulty != U256::ZERO { - parent_hash = H256::zero(); + parent_hash = B256::ZERO; } } @@ -518,8 +518,8 @@ where /// be invalid. 
fn check_invalid_ancestor_with_head( &mut self, - check: H256, - head: H256, + check: B256, + head: B256, ) -> Option { // check if the check hash was previously marked as invalid let header = self.invalid_headers.get(&check)?; @@ -535,7 +535,7 @@ where /// Checks if the given `head` points to an invalid header, which requires a specific response /// to a forkchoice update. - fn check_invalid_ancestor(&mut self, head: H256) -> Option { + fn check_invalid_ancestor(&mut self, head: B256) -> Option { let parent_hash = { // check if the head was previously marked as invalid let header = self.invalid_headers.get(&head)?; @@ -879,7 +879,7 @@ where /// /// Returns an error if the block is not found. #[inline] - fn update_safe_block(&self, safe_block_hash: H256) -> RethResult<()> { + fn update_safe_block(&self, safe_block_hash: B256) -> RethResult<()> { if !safe_block_hash.is_zero() { if self.blockchain.safe_block_hash()? == Some(safe_block_hash) { // nothing to update @@ -899,7 +899,7 @@ where /// /// Returns an error if the block is not found. #[inline] - fn update_finalized_block(&self, finalized_block_hash: H256) -> RethResult<()> { + fn update_finalized_block(&self, finalized_block_hash: B256) -> RethResult<()> { if !finalized_block_hash.is_zero() { if self.blockchain.finalized_block_hash()? == Some(finalized_block_hash) { // nothing to update @@ -949,7 +949,7 @@ where return PayloadStatus::from_status(PayloadStatusEnum::Invalid { validation_error: error.to_string(), }) - .with_latest_valid_hash(H256::zero()) + .with_latest_valid_hash(B256::ZERO) } RethError::BlockchainTree(BlockchainTreeError::BlockHashNotFoundInChain { .. }) => { // This just means we couldn't find the block when attempting to make it canonical, @@ -1008,7 +1008,7 @@ where /// /// Returns the parent hash of the block itself if the block is buffered and has no other /// buffered ancestors. - fn lowest_buffered_ancestor_or(&self, hash: H256) -> H256 { + fn lowest_buffered_ancestor_or(&self, hash: B256) -> B256 { self.blockchain .lowest_buffered_ancestor(hash) .map(|block| block.parent_hash) @@ -1030,7 +1030,7 @@ where // client software MUST respond with -38003: `Invalid payload attributes` and MUST NOT // begin a payload build process. In such an event, the forkchoiceState update MUST NOT // be rolled back. - if attrs.timestamp <= head.timestamp.into() { + if attrs.timestamp.to::() <= head.timestamp { return OnForkChoiceUpdated::invalid_payload_attributes() } @@ -1079,7 +1079,7 @@ where payload: ExecutionPayload, cancun_fields: Option, ) -> Result { - let block = match self.ensure_well_formed_payload(payload, cancun_fields)? { + let block = match self.ensure_well_formed_payload(payload, cancun_fields) { Ok(block) => block, Err(status) => return Ok(status), }; @@ -1148,7 +1148,7 @@ where &self, payload: ExecutionPayload, cancun_fields: Option, - ) -> Result, BeaconOnNewPayloadError> { + ) -> Result { let parent_hash = payload.parent_hash(); let block_hash = payload.block_hash(); @@ -1158,15 +1158,13 @@ where ) { Ok(block) => { // make sure there are no blob transactions in the payload if it is pre-cancun - // we perform this check before validating the block hash because INVALID_PARAMS - // must be returned over an INVALID response. 
if !self.chain_spec().is_cancun_active_at_timestamp(block.timestamp) && block.has_blob_transactions() { - return Err(BeaconOnNewPayloadError::PreCancunBlockWithBlobTransactions) + Err(PayloadError::PreCancunBlockWithBlobTransactions) + } else { + validate_block_hash(block_hash, block) } - - validate_block_hash(block_hash, block) } Err(error) => Err(error), }; @@ -1185,7 +1183,7 @@ where } let status = PayloadStatusEnum::from(error); - return Ok(Err(PayloadStatus::new(status, latest_valid_hash))) + return Err(PayloadStatus::new(status, latest_valid_hash)) } }; @@ -1196,13 +1194,9 @@ where .flatten() .collect::>(); - if let Err(status) = - self.validate_versioned_hashes(parent_hash, block_versioned_hashes, cancun_fields) - { - return Ok(Err(status)) - } + self.validate_versioned_hashes(parent_hash, block_versioned_hashes, cancun_fields)?; - Ok(Ok(block)) + Ok(block) } /// Returns the currently configured [ChainSpec]. @@ -1219,8 +1213,8 @@ where /// fn validate_versioned_hashes( &self, - parent_hash: H256, - block_versioned_hashes: Vec<&H256>, + parent_hash: B256, + block_versioned_hashes: Vec<&B256>, cancun_fields: Option, ) -> Result<(), PayloadStatus> { // This validates the following engine API rule: @@ -1371,7 +1365,7 @@ where /// /// If the given block is missing from the database, this will return `false`. Otherwise, `true` /// is returned: the database contains the hash and the tree was updated. - fn update_tree_on_finished_pipeline(&mut self, block_hash: H256) -> RethResult { + fn update_tree_on_finished_pipeline(&mut self, block_hash: B256) -> RethResult { let synced_to_finalized = match self.blockchain.block_number(block_hash)? { Some(number) => { // Attempt to restore the tree. @@ -1749,6 +1743,10 @@ where self.sync_state_updater.update_sync_state(SyncState::Syncing) } EngineHookEvent::Finished(_) => { + // Hook with read-write access to the database has finished running, so engine + // can process new FCU/payload messages from CL again. It's safe to + // return `false` on `eth_syncing` request. + self.sync_state_updater.update_sync_state(SyncState::Idle); // If the hook had read-write access to the database, it means that the engine // may have accumulated some buffered blocks. if let Err(error) = @@ -1792,45 +1790,60 @@ where // Control loop that advances the state 'main: loop { - // Poll a running hook with db write access first, as we will not be able to process - // any engine messages until it's finished. - if let Poll::Ready(result) = this.hooks.poll_running_hook_with_db_write( - cx, - EngineContext { tip_block_number: this.blockchain.canonical_tip().number }, - )? { - this.on_hook_result(result)?; - } + // Poll a running hook with db write access (if any) and CL messages first, draining + // both and then proceeding to polling other parts such as SyncController and hooks. + loop { + // Poll a running hook with db write access first, as we will not be able to process + // any engine messages until it's finished. + if let Poll::Ready(result) = this.hooks.poll_running_hook_with_db_write( + cx, + EngineContext { + tip_block_number: this.blockchain.canonical_tip().number, + finalized_block_number: this.blockchain.finalized_block_number()?, + }, + )? { + this.on_hook_result(result)?; + continue + } - // Process all incoming messages from the CL, these can affect the state of the - // SyncController, hence they are polled first, and they're also time sensitive, hence - // they're always drained first. 
- while let Poll::Ready(Some(msg)) = this.engine_message_rx.poll_next_unpin(cx) { - match msg { - BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx } => { - match this.on_forkchoice_updated(state, payload_attrs, tx) { - OnForkchoiceUpdateOutcome::Processed => {} - OnForkchoiceUpdateOutcome::ReachedMaxBlock => { - // reached the max block, we can terminate the future - return Poll::Ready(Ok(())) - } - OnForkchoiceUpdateOutcome::Fatal(err) => { - // fatal error, we can terminate the future - return Poll::Ready(Err(RethError::Execution(err).into())) + // Process one incoming message from the CL. We don't drain the messages right away, + // because we want to sneak a polling of running hook in between them. + // + // These messages can affect the state of the SyncController and they're also time + // sensitive, hence they are polled first. + if let Poll::Ready(Some(msg)) = this.engine_message_rx.poll_next_unpin(cx) { + match msg { + BeaconEngineMessage::ForkchoiceUpdated { state, payload_attrs, tx } => { + match this.on_forkchoice_updated(state, payload_attrs, tx) { + OnForkchoiceUpdateOutcome::Processed => {} + OnForkchoiceUpdateOutcome::ReachedMaxBlock => { + // reached the max block, we can terminate the future + return Poll::Ready(Ok(())) + } + OnForkchoiceUpdateOutcome::Fatal(err) => { + // fatal error, we can terminate the future + return Poll::Ready(Err(RethError::Execution(err).into())) + } } } + BeaconEngineMessage::NewPayload { payload, cancun_fields, tx } => { + this.metrics.new_payload_messages.increment(1); + let res = this.on_new_payload(payload, cancun_fields); + let _ = tx.send(res); + } + BeaconEngineMessage::TransitionConfigurationExchanged => { + this.blockchain.on_transition_configuration_exchanged(); + } + BeaconEngineMessage::EventListener(tx) => { + this.listeners.push_listener(tx); + } } - BeaconEngineMessage::NewPayload { payload, cancun_fields, tx } => { - this.metrics.new_payload_messages.increment(1); - let res = this.on_new_payload(payload, cancun_fields); - let _ = tx.send(res); - } - BeaconEngineMessage::TransitionConfigurationExchanged => { - this.blockchain.on_transition_configuration_exchanged(); - } - BeaconEngineMessage::EventListener(tx) => { - this.listeners.push_listener(tx); - } + continue } + + // Both running hook with db write access and engine messages are pending, + // proceed to other polls + break } // process sync events if any @@ -1856,7 +1869,10 @@ where if !this.forkchoice_state_tracker.is_latest_invalid() { if let Poll::Ready(result) = this.hooks.poll_next_hook( cx, - EngineContext { tip_block_number: this.blockchain.canonical_tip().number }, + EngineContext { + tip_block_number: this.blockchain.canonical_tip().number, + finalized_block_number: this.blockchain.finalized_block_number()?, + }, this.sync.is_pipeline_active(), )? { this.on_hook_result(result)?; @@ -1893,7 +1909,8 @@ mod tests { BeaconForkChoiceUpdateError, }; use assert_matches::assert_matches; - use reth_primitives::{stage::StageCheckpoint, ChainSpec, ChainSpecBuilder, H256, MAINNET}; + use reth_interfaces::test_utils::generators::{self, Rng}; + use reth_primitives::{stage::StageCheckpoint, ChainSpec, ChainSpecBuilder, B256, MAINNET}; use reth_provider::{BlockWriter, ProviderFactory}; use reth_rpc_types::engine::{ForkchoiceState, ForkchoiceUpdated, PayloadStatus}; use reth_rpc_types_compat::engine::payload::try_block_to_payload_v1; @@ -1904,6 +1921,7 @@ mod tests { // Pipeline error is propagated. 
#[tokio::test] async fn pipeline_error_is_propagated() { + let mut rng = generators::rng(); let chain_spec = Arc::new( ChainSpecBuilder::default() .chain(MAINNET.chain) @@ -1922,7 +1940,7 @@ mod tests { let _ = env .send_forkchoice_updated(ForkchoiceState { - head_block_hash: H256::random(), + head_block_hash: rng.gen(), ..Default::default() }) .await; @@ -1935,6 +1953,7 @@ mod tests { // Test that the consensus engine is idle until first forkchoice updated is received. #[tokio::test] async fn is_idle_until_forkchoice_is_set() { + let mut rng = generators::rng(); let chain_spec = Arc::new( ChainSpecBuilder::default() .chain(MAINNET.chain) @@ -1963,7 +1982,7 @@ mod tests { // consensus engine is still idle because pruning is running let _ = env .send_forkchoice_updated(ForkchoiceState { - head_block_hash: H256::random(), + head_block_hash: rng.gen(), ..Default::default() }) .await; @@ -1983,7 +2002,7 @@ mod tests { Err(TryRecvError::Empty) => { let _ = env .send_forkchoice_updated(ForkchoiceState { - head_block_hash: H256::random(), + head_block_hash: rng.gen(), ..Default::default() }) .await; @@ -1998,6 +2017,7 @@ mod tests { // for the second time. #[tokio::test] async fn runs_pipeline_again_if_tree_not_restored() { + let mut rng = generators::rng(); let chain_spec = Arc::new( ChainSpecBuilder::default() .chain(MAINNET.chain) @@ -2019,7 +2039,7 @@ mod tests { let _ = env .send_forkchoice_updated(ForkchoiceState { - head_block_hash: H256::random(), + head_block_hash: rng.gen(), ..Default::default() }) .await; @@ -2032,6 +2052,7 @@ mod tests { #[tokio::test] async fn terminates_upon_reaching_max_block() { + let mut rng = generators::rng(); let max_block = 1000; let chain_spec = Arc::new( ChainSpecBuilder::default() @@ -2054,7 +2075,7 @@ mod tests { let _ = env .send_forkchoice_updated(ForkchoiceState { - head_block_hash: H256::random(), + head_block_hash: rng.gen(), ..Default::default() }) .await; @@ -2077,8 +2098,9 @@ mod tests { mod fork_choice_updated { use super::*; use reth_db::{tables, transaction::DbTxMut}; - use reth_interfaces::test_utils::{generators, generators::random_block}; + use reth_interfaces::test_utils::generators::random_block; use reth_rpc_types::engine::ForkchoiceUpdateError; + #[tokio::test] async fn empty_head() { let chain_spec = Arc::new( @@ -2235,7 +2257,7 @@ mod tests { let res = env .send_forkchoice_updated(ForkchoiceState { - head_block_hash: H256::random(), + head_block_hash: rng.gen(), finalized_block_hash: block1.hash, ..Default::default() }) @@ -2295,7 +2317,7 @@ mod tests { assert_matches!(res, Ok(result) => { let ForkchoiceUpdated { payload_status, .. } = result; assert_matches!(payload_status.status, PayloadStatusEnum::Invalid { .. }); - assert_eq!(payload_status.latest_valid_hash, Some(H256::zero())); + assert_eq!(payload_status.latest_valid_hash, Some(B256::ZERO)); }); } @@ -2335,7 +2357,7 @@ mod tests { validation_error: BlockValidationError::BlockPreMerge { hash: block1.hash } .to_string(), }) - .with_latest_valid_hash(H256::zero()); + .with_latest_valid_hash(B256::ZERO); assert_matches!(res, Ok(result) => assert_eq!(result, expected_result)); } } @@ -2537,7 +2559,8 @@ mod tests { assert_matches!(res, Ok(ForkchoiceUpdated { payload_status, .. 
}) => assert_eq!(payload_status, expected_result)); // Send new payload - let block = random_block(&mut rng, 2, Some(H256::random()), None, Some(0)); + let parent = rng.gen(); + let block = random_block(&mut rng, 2, Some(parent), None, Some(0)); let res = env.send_new_payload(try_block_to_payload_v1(block), None).await; let expected_result = PayloadStatus::from_status(PayloadStatusEnum::Syncing); assert_matches!(res, Ok(result) => assert_eq!(result, expected_result)); @@ -2596,7 +2619,7 @@ mod tests { validation_error: BlockValidationError::BlockPreMerge { hash: block1.hash } .to_string(), }) - .with_latest_valid_hash(H256::zero()); + .with_latest_valid_hash(B256::ZERO); assert_matches!(res, Ok(ForkchoiceUpdated { payload_status, .. }) => assert_eq!(payload_status, expected_result)); // Send new payload @@ -2609,7 +2632,7 @@ mod tests { validation_error: BlockValidationError::BlockPreMerge { hash: block2.hash } .to_string(), }) - .with_latest_valid_hash(H256::zero()); + .with_latest_valid_hash(B256::ZERO); assert_eq!(result, expected_result); assert_matches!(engine_rx.try_recv(), Err(TryRecvError::Empty)); diff --git a/crates/consensus/beacon/src/engine/sync.rs b/crates/consensus/beacon/src/engine/sync.rs index fa5cfe842e..3db1c569a0 100644 --- a/crates/consensus/beacon/src/engine/sync.rs +++ b/crates/consensus/beacon/src/engine/sync.rs @@ -8,7 +8,7 @@ use reth_interfaces::p2p::{ full_block::{FetchFullBlockFuture, FetchFullBlockRangeFuture, FullBlockClient}, headers::client::HeadersClient, }; -use reth_primitives::{BlockNumber, ChainSpec, SealedBlock, H256}; +use reth_primitives::{BlockNumber, ChainSpec, SealedBlock, B256}; use reth_stages::{ControlFlow, Pipeline, PipelineError, PipelineWithResult}; use reth_tasks::TaskSpawner; use std::{ @@ -40,7 +40,7 @@ where /// The pipeline is used for large ranges. pipeline_state: PipelineState, /// Pending target block for the pipeline to sync - pending_pipeline_target: Option, + pending_pipeline_target: Option, /// In-flight full block requests in progress. inflight_full_block_requests: Vec>, /// In-flight full block _range_ requests in progress. @@ -109,7 +109,7 @@ where } /// Cancels the full block request with the given hash. - pub(crate) fn cancel_full_block_request(&mut self, hash: H256) { + pub(crate) fn cancel_full_block_request(&mut self, hash: B256) { self.inflight_full_block_requests.retain(|req| *req.hash() != hash); self.update_block_download_metrics(); } @@ -136,7 +136,7 @@ where } /// Returns true if there's already a request for the given hash. - pub(crate) fn is_inflight_request(&self, hash: H256) -> bool { + pub(crate) fn is_inflight_request(&self, hash: B256) -> bool { self.inflight_full_block_requests.iter().any(|req| *req.hash() == hash) } @@ -144,7 +144,7 @@ where /// /// If the `count` is 1, this will use the `download_full_block` method instead, because it /// downloads headers and bodies for the block concurrently. - pub(crate) fn download_block_range(&mut self, hash: H256, count: u64) { + pub(crate) fn download_block_range(&mut self, hash: B256, count: u64) { if count == 1 { self.download_full_block(hash); } else { @@ -167,7 +167,7 @@ where /// /// Returns `true` if the request was started, `false` if there's already a request for the /// given hash. - pub(crate) fn download_full_block(&mut self, hash: H256) -> bool { + pub(crate) fn download_full_block(&mut self, hash: B256) -> bool { if self.is_inflight_request(hash) { return false } @@ -185,7 +185,7 @@ where } /// Sets a new target to sync the pipeline to. 
- pub(crate) fn set_pipeline_sync_target(&mut self, target: H256) { + pub(crate) fn set_pipeline_sync_target(&mut self, target: B256) { self.pending_pipeline_target = Some(target); } @@ -349,7 +349,7 @@ pub(crate) enum EngineSyncEvent { /// Pipeline started syncing /// /// This is none if the pipeline is triggered without a specific target. - PipelineStarted(Option), + PipelineStarted(Option), /// Pipeline finished /// /// If this is returned, the pipeline is idle. @@ -457,7 +457,7 @@ mod tests { executor_factory.extend(self.executor_results); // Setup pipeline - let (tip_tx, _tip_rx) = watch::channel(H256::default()); + let (tip_tx, _tip_rx) = watch::channel(B256::default()); let mut pipeline = Pipeline::builder() .add_stages(TestStages::new(self.pipeline_exec_outputs, Default::default())) .with_tip_sender(tip_tx); diff --git a/crates/consensus/beacon/src/engine/test_utils.rs b/crates/consensus/beacon/src/engine/test_utils.rs index 9955837359..4765c3ab92 100644 --- a/crates/consensus/beacon/src/engine/test_utils.rs +++ b/crates/consensus/beacon/src/engine/test_utils.rs @@ -19,7 +19,7 @@ use reth_interfaces::{ test_utils::{NoopFullBlockClient, TestConsensus}, }; use reth_payload_builder::test_utils::spawn_test_payload_service; -use reth_primitives::{BlockNumber, ChainSpec, PruneBatchSizes, PruneModes, H256, U256}; +use reth_primitives::{BlockNumber, ChainSpec, PruneBatchSizes, PruneModes, B256, U256}; use reth_provider::{ providers::BlockchainProvider, test_utils::TestExecutorFactory, BlockExecutor, BundleStateWithReceipts, ExecutorFactory, ProviderFactory, PrunableBlockExecutor, @@ -55,14 +55,14 @@ pub struct TestEnv { pub db: DB, // Keep the tip receiver around, so it's not dropped. #[allow(dead_code)] - tip_rx: watch::Receiver, + tip_rx: watch::Receiver, engine_handle: BeaconConsensusEngineHandle, } impl TestEnv { fn new( db: DB, - tip_rx: watch::Receiver, + tip_rx: watch::Receiver, engine_handle: BeaconConsensusEngineHandle, ) -> Self { Self { db, tip_rx, engine_handle } @@ -468,7 +468,7 @@ where }; // Setup pipeline - let (tip_tx, tip_rx) = watch::channel(H256::default()); + let (tip_tx, tip_rx) = watch::channel(B256::default()); let mut pipeline = match self.base_config.pipeline_config { TestPipelineConfig::Test(outputs) => Pipeline::builder() .add_stages(TestStages::new(outputs, Default::default())) @@ -521,6 +521,7 @@ where 5, PruneModes::none(), PruneBatchSizes::default(), + watch::channel(None).1, ); let mut hooks = EngineHooks::new(); diff --git a/crates/consensus/beacon/src/lib.rs b/crates/consensus/beacon/src/lib.rs index d2d3fd3583..d3904044f7 100644 --- a/crates/consensus/beacon/src/lib.rs +++ b/crates/consensus/beacon/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/consensus/common/src/lib.rs b/crates/consensus/common/src/lib.rs index 31f0f2b15a..f1bb63a039 100644 --- a/crates/consensus/common/src/lib.rs +++ b/crates/consensus/common/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = 
"https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/consensus/common/src/validation.rs b/crates/consensus/common/src/validation.rs index cd7290ac38..715e7333ab 100644 --- a/crates/consensus/common/src/validation.rs +++ b/crates/consensus/common/src/validation.rs @@ -230,21 +230,6 @@ pub fn validate_block_standalone( expected: *header_withdrawals_root, }) } - - // Validate that withdrawal index is monotonically increasing within a block. - if let Some(first) = withdrawals.first() { - let mut prev_index = first.index; - for withdrawal in withdrawals.iter().skip(1) { - let expected = prev_index + 1; - if expected != withdrawal.index { - return Err(ConsensusError::WithdrawalIndexInvalid { - got: withdrawal.index, - expected, - }) - } - prev_index = withdrawal.index; - } - } } // EIP-4844: Shard Blob Transactions @@ -350,7 +335,6 @@ pub fn validate_header_regarding_parent( /// Checks: /// If we already know the block. /// If parent is known -/// If withdrawals are valid /// /// Returns parent block header pub fn validate_block_regarding_chain( @@ -369,33 +353,6 @@ pub fn validate_block_regarding_chain { - if withdrawal.index + 1 != withdrawals.first().unwrap().index { - return Err(ConsensusError::WithdrawalIndexInvalid { - got: withdrawals.first().unwrap().index, - expected: withdrawal.index + 1, - } - .into()) - } - } - None => { - if withdrawals.first().unwrap().index != 0 { - return Err(ConsensusError::WithdrawalIndexInvalid { - got: withdrawals.first().unwrap().index, - expected: 0, - } - .into()) - } - } - } - } - } - // Return parent header. 
Ok(parent.seal(block.parent_hash)) } @@ -501,13 +458,15 @@ pub fn validate_4844_header_standalone(header: &SealedHeader) -> Result<(), Cons #[cfg(test)] mod tests { use super::*; - use assert_matches::assert_matches; use mockall::mock; - use reth_interfaces::{RethError::Consensus, RethResult}; + use reth_interfaces::{ + test_utils::generators::{self, Rng}, + RethResult, + }; use reth_primitives::{ constants::eip4844::DATA_GAS_PER_BLOB, hex_literal::hex, proofs, Account, Address, BlockBody, BlockHash, BlockHashOrNumber, Bytes, ChainSpecBuilder, Header, Signature, - TransactionKind, TransactionSigned, Withdrawal, H256, MAINNET, U256, + TransactionKind, TransactionSigned, Withdrawal, MAINNET, U256, }; use std::ops::RangeBounds; @@ -625,11 +584,12 @@ mod tests { let signature = Signature { odd_y_parity: true, r: U256::default(), s: U256::default() }; let tx = TransactionSigned::from_transaction_and_signature(request, signature); - let signer = Address::zero(); + let signer = Address::ZERO; TransactionSignedEcRecovered::from_signed_transaction(tx, signer) } fn mock_blob_tx(nonce: u64, num_blobs: usize) -> TransactionSigned { + let mut rng = generators::rng(); let request = Transaction::Eip4844(TxEip4844 { chain_id: 1u64, nonce, @@ -641,7 +601,7 @@ mod tests { value: 3, input: Bytes::from(vec![1, 2]), access_list: Default::default(), - blob_versioned_hashes: vec![H256::random(); num_blobs], + blob_versioned_hashes: std::iter::repeat_with(|| rng.gen()).take(num_blobs).collect(), }); let signature = Signature { odd_y_parity: true, r: U256::default(), s: U256::default() }; @@ -795,43 +755,14 @@ mod tests { let block = create_block_with_withdrawals(&[5, 6, 7, 8, 9]); assert_eq!(validate_block_standalone(&block, &chain_spec), Ok(())); - // Invalid withdrawal index - let block = create_block_with_withdrawals(&[100, 102]); - assert_matches!( - validate_block_standalone(&block, &chain_spec), - Err(ConsensusError::WithdrawalIndexInvalid { .. }) - ); - let block = create_block_with_withdrawals(&[5, 6, 7, 9]); - assert_matches!( - validate_block_standalone(&block, &chain_spec), - Err(ConsensusError::WithdrawalIndexInvalid { .. 
}) - ); - let (_, parent) = mock_block(); - let mut provider = Provider::new(Some(parent.clone())); - // Withdrawal index should be 0 if there are no withdrawals in the chain - let block = create_block_with_withdrawals(&[1, 2, 3]); - provider.withdrawals_provider.expect_latest_withdrawal().return_const(Ok(None)); - assert_matches!( - validate_block_regarding_chain(&block, &provider), - Err(Consensus(ConsensusError::WithdrawalIndexInvalid { got: 1, expected: 0 })) - ); + let provider = Provider::new(Some(parent.clone())); let block = create_block_with_withdrawals(&[0, 1, 2]); let res = validate_block_regarding_chain(&block, &provider); assert!(res.is_ok()); // Withdrawal index should be the last withdrawal index + 1 let mut provider = Provider::new(Some(parent)); - let block = create_block_with_withdrawals(&[4, 5, 6]); - provider - .withdrawals_provider - .expect_latest_withdrawal() - .return_const(Ok(Some(Withdrawal { index: 2, ..Default::default() }))); - assert_matches!( - validate_block_regarding_chain(&block, &provider), - Err(Consensus(ConsensusError::WithdrawalIndexInvalid { got: 4, expected: 3 })) - ); - let block = create_block_with_withdrawals(&[3, 4, 5]); provider .withdrawals_provider diff --git a/crates/interfaces/Cargo.toml b/crates/interfaces/Cargo.toml index 158d036116..506efd0bd4 100644 --- a/crates/interfaces/Cargo.toml +++ b/crates/interfaces/Cargo.toml @@ -9,6 +9,7 @@ repository.workspace = true [dependencies] reth-codecs = { path = "../storage/codecs" } +reth-nippy-jar = { path = "../storage/nippy-jar" } reth-primitives.workspace = true reth-rpc-types.workspace = true reth-network-api.workspace = true @@ -45,7 +46,6 @@ reth-db = { workspace = true, features = ["test-utils"] } tokio = { workspace = true, features = ["full"] } tokio-stream = { workspace = true, features = ["sync"] } arbitrary = { workspace = true, features = ["derive"] } -hex-literal.workspace = true secp256k1 = { workspace = true, features = ["alloc", "recovery", "rand"] } [features] diff --git a/crates/interfaces/src/consensus.rs b/crates/interfaces/src/consensus.rs index e151e42c29..4e8c1d8e5e 100644 --- a/crates/interfaces/src/consensus.rs +++ b/crates/interfaces/src/consensus.rs @@ -1,6 +1,6 @@ use async_trait::async_trait; use reth_primitives::{ - BlockHash, BlockNumber, Header, InvalidTransactionError, SealedBlock, SealedHeader, H256, U256, + BlockHash, BlockNumber, Header, InvalidTransactionError, SealedBlock, SealedHeader, B256, U256, }; use std::fmt::Debug; @@ -94,18 +94,18 @@ pub enum ConsensusError { #[error("Block ommer hash ({got:?}) is different from expected: ({expected:?})")] BodyOmmersHashDiff { /// The actual ommer hash. - got: H256, + got: B256, /// The expected ommer hash. - expected: H256, + expected: B256, }, /// Error when the state root in the block is different from the expected state root. #[error("Block state root ({got:?}) is different from expected: ({expected:?})")] BodyStateRootDiff { /// The actual state root. - got: H256, + got: B256, /// The expected state root. - expected: H256, + expected: B256, }, /// Error when the transaction root in the block is different from the expected transaction @@ -113,9 +113,9 @@ pub enum ConsensusError { #[error("Block transaction root ({got:?}) is different from expected ({expected:?})")] BodyTransactionRootDiff { /// The actual transaction root. - got: H256, + got: B256, /// The expected transaction root. 
- expected: H256, + expected: B256, }, /// Error when the withdrawals root in the block is different from the expected withdrawals @@ -123,9 +123,9 @@ pub enum ConsensusError { #[error("Block withdrawals root ({got:?}) is different from expected ({expected:?})")] BodyWithdrawalsRootDiff { /// The actual withdrawals root. - got: H256, + got: B256, /// The expected withdrawals root. - expected: H256, + expected: B256, }, /// Error when a block with a specific hash and number is already known. @@ -161,9 +161,9 @@ pub enum ConsensusError { )] ParentHashMismatch { /// The expected parent hash. - expected_parent_hash: H256, + expected_parent_hash: B256, /// The actual parent hash. - got_parent_hash: H256, + got_parent_hash: B256, }, /// Error when the block timestamp is in the past compared to the parent timestamp. @@ -246,15 +246,6 @@ pub enum ConsensusError { #[error("Unexpected withdrawals root")] WithdrawalsRootUnexpected, - /// Error when the withdrawal index is invalid. - #[error("Withdrawal index #{got} is invalid. Expected: #{expected}.")] - WithdrawalIndexInvalid { - /// The actual withdrawal index. - got: u64, - /// The expected withdrawal index. - expected: u64, - }, - /// Error when withdrawals are missing. #[error("Missing withdrawals")] BodyWithdrawalsMissing, diff --git a/crates/interfaces/src/error.rs b/crates/interfaces/src/error.rs index 32d81e4f98..85d0cdca6f 100644 --- a/crates/interfaces/src/error.rs +++ b/crates/interfaces/src/error.rs @@ -29,3 +29,9 @@ pub enum RethError { #[error("{0}")] Custom(String), } + +impl From for RethError { + fn from(err: reth_nippy_jar::NippyJarError) -> Self { + RethError::Custom(err.to_string()) + } +} diff --git a/crates/interfaces/src/executor.rs b/crates/interfaces/src/executor.rs index fed5b37554..933deb7ef9 100644 --- a/crates/interfaces/src/executor.rs +++ b/crates/interfaces/src/executor.rs @@ -1,4 +1,4 @@ -use reth_primitives::{BlockNumHash, Bloom, PrunePartError, H256}; +use reth_primitives::{BlockNumHash, Bloom, PrunePartError, B256}; use thiserror::Error; /// Transaction validation errors @@ -9,7 +9,7 @@ pub enum BlockValidationError { #[error("EVM reported invalid transaction ({hash:?}): {message}")] EVM { /// The hash of the transaction - hash: H256, + hash: B256, /// Error message message: String, }, @@ -23,9 +23,9 @@ pub enum BlockValidationError { #[error("Receipt root {got:?} is different than expected {expected:?}.")] ReceiptRootDiff { /// The actual receipt root - got: H256, + got: B256, /// The expected receipt root - expected: H256, + expected: B256, }, /// Error when header bloom filter doesn't match expected value #[error("Header bloom filter {got:?} is different than expected {expected:?}.")] @@ -57,10 +57,10 @@ pub enum BlockValidationError { #[error("Block {hash:?} is pre merge")] BlockPreMerge { /// The hash of the block - hash: H256, + hash: B256, }, #[error("Missing total difficulty for block {hash:?}")] - MissingTotalDifficulty { hash: H256 }, + MissingTotalDifficulty { hash: B256 }, /// Error for EIP-4788 when parent beacon block root is missing #[error("EIP-4788 Parent beacon block root missing for active Cancun block")] MissingParentBeaconBlockRoot, diff --git a/crates/interfaces/src/lib.rs b/crates/interfaces/src/lib.rs index 7e27108ff7..c2f4b99d8a 100644 --- a/crates/interfaces/src/lib.rs +++ b/crates/interfaces/src/lib.rs @@ -7,7 +7,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = 
"https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/interfaces/src/p2p/bodies/client.rs b/crates/interfaces/src/p2p/bodies/client.rs index 8104d1afe9..4b7f3366a2 100644 --- a/crates/interfaces/src/p2p/bodies/client.rs +++ b/crates/interfaces/src/p2p/bodies/client.rs @@ -5,7 +5,7 @@ use std::{ use crate::p2p::{download::DownloadClient, error::PeerRequestResult, priority::Priority}; use futures::{Future, FutureExt}; -use reth_primitives::{BlockBody, H256}; +use reth_primitives::{BlockBody, B256}; /// The bodies future type pub type BodiesFut = Pin>> + Send + Sync>>; @@ -17,23 +17,23 @@ pub trait BodiesClient: DownloadClient { type Output: Future>> + Sync + Send + Unpin; /// Fetches the block body for the requested block. - fn get_block_bodies(&self, hashes: Vec) -> Self::Output { + fn get_block_bodies(&self, hashes: Vec) -> Self::Output { self.get_block_bodies_with_priority(hashes, Priority::Normal) } /// Fetches the block body for the requested block with priority - fn get_block_bodies_with_priority(&self, hashes: Vec, priority: Priority) + fn get_block_bodies_with_priority(&self, hashes: Vec, priority: Priority) -> Self::Output; /// Fetches a single block body for the requested hash. - fn get_block_body(&self, hash: H256) -> SingleBodyRequest { + fn get_block_body(&self, hash: B256) -> SingleBodyRequest { self.get_block_body_with_priority(hash, Priority::Normal) } /// Fetches a single block body for the requested hash with priority fn get_block_body_with_priority( &self, - hash: H256, + hash: B256, priority: Priority, ) -> SingleBodyRequest { let fut = self.get_block_bodies_with_priority(vec![hash], priority); diff --git a/crates/interfaces/src/p2p/either.rs b/crates/interfaces/src/p2p/either.rs index 7a742efca8..1a6bd170c2 100644 --- a/crates/interfaces/src/p2p/either.rs +++ b/crates/interfaces/src/p2p/either.rs @@ -5,7 +5,7 @@ use crate::p2p::{ priority::Priority, }; use futures::future::Either; -use reth_primitives::H256; +use reth_primitives::B256; /// A downloader that combines two different downloaders/client implementations that have the same /// associated types. @@ -45,7 +45,7 @@ where fn get_block_bodies_with_priority( &self, - hashes: Vec, + hashes: Vec, priority: Priority, ) -> Self::Output { match self { diff --git a/crates/interfaces/src/p2p/error.rs b/crates/interfaces/src/p2p/error.rs index ac0030c61b..89257e200a 100644 --- a/crates/interfaces/src/p2p/error.rs +++ b/crates/interfaces/src/p2p/error.rs @@ -1,7 +1,7 @@ use super::headers::client::HeadersRequest; use crate::{consensus, db}; use reth_network_api::ReputationChangeKind; -use reth_primitives::{BlockHashOrNumber, BlockNumber, Header, WithPeerId, H256}; +use reth_primitives::{BlockHashOrNumber, BlockNumber, Header, WithPeerId, B256}; use std::ops::RangeInclusive; use thiserror::Error; use tokio::sync::{mpsc, oneshot}; @@ -122,7 +122,7 @@ pub enum DownloadError { #[error("Failed to validate header {hash}. Details: {error}.")] HeaderValidation { /// Hash of header failing validation - hash: H256, + hash: B256, /// The details of validation failure #[source] error: consensus::ConsensusError, @@ -131,9 +131,9 @@ pub enum DownloadError { #[error("Received invalid tip: {received:?}. 
Expected {expected:?}.")] InvalidTip { /// The hash of the received tip - received: H256, + received: B256, /// The hash of the expected tip - expected: H256, + expected: B256, }, /// Received a tip with an invalid tip number #[error("Received invalid tip number: {received:?}. Expected {expected:?}.")] @@ -164,7 +164,7 @@ pub enum DownloadError { #[error("Failed to validate body for header {hash}. Details: {error}.")] BodyValidation { /// Hash of header failing validation - hash: H256, + hash: B256, /// The details of validation failure #[source] error: consensus::ConsensusError, diff --git a/crates/interfaces/src/p2p/full_block.rs b/crates/interfaces/src/p2p/full_block.rs index 1536a7057a..e50e3bbd6a 100644 --- a/crates/interfaces/src/p2p/full_block.rs +++ b/crates/interfaces/src/p2p/full_block.rs @@ -9,7 +9,7 @@ use crate::{ }; use futures::Stream; use reth_primitives::{ - BlockBody, Header, HeadersDirection, SealedBlock, SealedHeader, WithPeerId, H256, + BlockBody, Header, HeadersDirection, SealedBlock, SealedHeader, WithPeerId, B256, }; use std::{ cmp::Reverse, @@ -52,7 +52,7 @@ where /// /// Caution: This does no validation of body (transactions) response but guarantees that the /// [SealedHeader] matches the requested hash. - pub fn get_full_block(&self, hash: H256) -> FetchFullBlockFuture { + pub fn get_full_block(&self, hash: B256) -> FetchFullBlockFuture { let client = self.client.clone(); FetchFullBlockFuture { hash, @@ -77,7 +77,7 @@ where /// The returned future yields bodies in falling order, i.e. with descending block numbers. pub fn get_full_block_range( &self, - hash: H256, + hash: B256, count: u64, ) -> FetchFullBlockRangeFuture { let client = self.client.clone(); @@ -117,7 +117,7 @@ where Client: BodiesClient + HeadersClient, { client: Client, - hash: H256, + hash: B256, request: FullBlockRequest, header: Option, body: Option, @@ -128,7 +128,7 @@ where Client: BodiesClient + HeadersClient, { /// Returns the hash of the block being requested. - pub fn hash(&self) -> &H256 { + pub fn hash(&self) -> &B256 { &self.hash } @@ -364,7 +364,7 @@ where /// The consensus instance used to validate the blocks. consensus: Arc, /// The block hash to start fetching from (inclusive). - start_hash: H256, + start_hash: B256, /// How many blocks to fetch: `len([start_hash, ..]) == count` count: u64, /// Requests for headers and bodies that are in progress. @@ -382,7 +382,7 @@ where Client: BodiesClient + HeadersClient, { /// Returns the block hashes for the given range, if they are available. - pub fn range_block_hashes(&self) -> Option> { + pub fn range_block_hashes(&self) -> Option> { self.headers.as_ref().map(|h| h.iter().map(|h| h.hash()).collect::>()) } @@ -409,7 +409,7 @@ where /// Returns the remaining hashes for the bodies request, based on the headers that still exist /// in the `root_map`. - fn remaining_bodies_hashes(&self) -> Vec { + fn remaining_bodies_hashes(&self) -> Vec { self.pending_headers.iter().map(|h| h.hash()).collect::>() } @@ -521,7 +521,7 @@ where } /// Returns the start hash for the request - pub fn start_hash(&self) -> H256 { + pub fn start_hash(&self) -> B256 { self.start_hash } @@ -605,8 +605,8 @@ where // future, and one which is a bodies range future. // // The headers range future should yield the bodies range future. - // The bodies range future should not have an Option>, it should - // have a populated Vec from the successful headers range future. 
+ // The bodies range future should not have an Option>, it should + // have a populated Vec from the successful headers range future. // // This is optimal because we can not send a bodies request without // first completing the headers request. This way we can get rid of the diff --git a/crates/interfaces/src/p2p/headers/downloader.rs b/crates/interfaces/src/p2p/headers/downloader.rs index e662e56b47..bdee5f060d 100644 --- a/crates/interfaces/src/p2p/headers/downloader.rs +++ b/crates/interfaces/src/p2p/headers/downloader.rs @@ -4,7 +4,7 @@ use crate::{ p2p::error::{DownloadError, DownloadResult}, }; use futures::Stream; -use reth_primitives::{BlockHashOrNumber, SealedHeader, H256}; +use reth_primitives::{BlockHashOrNumber, SealedHeader, B256}; /// A downloader capable of fetching and yielding block headers. /// @@ -42,7 +42,7 @@ pub enum SyncTarget { /// Sync _inclusively_ to the given block hash. /// /// This target specifies the upper end of the sync gap `(head...tip]` - Tip(H256), + Tip(B256), /// This represents a gap missing headers bounded by the given header `h` in the form of /// `(head,..h),h+1,h+2...` /// diff --git a/crates/interfaces/src/provider.rs b/crates/interfaces/src/provider.rs index 5359aa6d72..68439c0cbb 100644 --- a/crates/interfaces/src/provider.rs +++ b/crates/interfaces/src/provider.rs @@ -1,4 +1,4 @@ -use reth_primitives::{Address, BlockHash, BlockHashOrNumber, BlockNumber, TxNumber, H256}; +use reth_primitives::{Address, BlockHash, BlockHashOrNumber, BlockNumber, TxNumber, B256}; /// Bundled errors variants thrown by various providers. #[allow(missing_docs)] @@ -21,7 +21,7 @@ pub enum ProviderError { /// The account address address: Address, /// The storage key - storage_key: H256, + storage_key: B256, }, /// The block number was found for the given address, but the changeset was not found. #[error("Account {address:?} ChangeSet for block #{block_number} does not exist")] @@ -60,10 +60,10 @@ pub enum ProviderError { CacheServiceUnavailable, /// Thrown when we failed to lookup a block for the pending state #[error("Unknown block hash: {0:}")] - UnknownBlockHash(H256), + UnknownBlockHash(B256), /// Thrown when we were unable to find a state for a block hash #[error("No State found for block hash: {0:}")] - StateForHashNotFound(H256), + StateForHashNotFound(B256), /// Unable to compute state root on top of historical block #[error("Unable to compute state root on top of historical block")] StateRootNotAvailableForHistoricalBlock, @@ -74,9 +74,9 @@ pub enum ProviderError { #[error("Merkle trie root mismatch at #{block_number} ({block_hash:?}). Got: {got:?}. Expected: {expected:?}")] StateRootMismatch { /// Expected root - expected: H256, + expected: B256, /// Calculated root - got: H256, + got: B256, /// Block number block_number: BlockNumber, /// Block hash @@ -86,9 +86,9 @@ pub enum ProviderError { #[error("Unwind merkle trie root mismatch at #{block_number} ({block_hash:?}). Got: {got:?}. 
Expected: {expected:?}")] UnwindStateRootMismatch { /// Expected root - expected: H256, + expected: B256, /// Calculated root - got: H256, + got: B256, /// Target block number block_number: BlockNumber, /// Block hash diff --git a/crates/interfaces/src/test_utils/bodies.rs b/crates/interfaces/src/test_utils/bodies.rs index 6a6e9ad85b..3f4daccb5f 100644 --- a/crates/interfaces/src/test_utils/bodies.rs +++ b/crates/interfaces/src/test_utils/bodies.rs @@ -6,7 +6,7 @@ use crate::p2p::{ }; use async_trait::async_trait; use futures::{future, Future, FutureExt}; -use reth_primitives::{BlockBody, WithPeerId, H256}; +use reth_primitives::{BlockBody, WithPeerId, B256}; use std::{ fmt::{Debug, Formatter}, pin::Pin, @@ -37,13 +37,13 @@ impl DownloadClient for TestBodiesClient { impl BodiesClient for TestBodiesClient where - F: Fn(Vec) -> PeerRequestResult> + Send + Sync, + F: Fn(Vec) -> PeerRequestResult> + Send + Sync, { type Output = BodiesFut; fn get_block_bodies_with_priority( &self, - hashes: Vec, + hashes: Vec, _priority: Priority, ) -> Self::Output { let (tx, rx) = oneshot::channel(); diff --git a/crates/interfaces/src/test_utils/full_block.rs b/crates/interfaces/src/test_utils/full_block.rs index 9d1545a0b6..a0e3ed59d2 100644 --- a/crates/interfaces/src/test_utils/full_block.rs +++ b/crates/interfaces/src/test_utils/full_block.rs @@ -8,7 +8,7 @@ use crate::p2p::{ use parking_lot::Mutex; use reth_primitives::{ BlockBody, BlockHashOrNumber, BlockNumHash, Header, HeadersDirection, PeerId, SealedBlock, - SealedHeader, WithPeerId, H256, + SealedHeader, WithPeerId, B256, }; use std::{collections::HashMap, sync::Arc}; @@ -30,7 +30,7 @@ impl BodiesClient for NoopFullBlockClient { fn get_block_bodies_with_priority( &self, - _hashes: Vec, + _hashes: Vec, _priority: Priority, ) -> Self::Output { futures::future::ready(Ok(WithPeerId::new(PeerId::random(), vec![]))) @@ -55,8 +55,8 @@ impl HeadersClient for NoopFullBlockClient { /// This full block client can be [Clone]d and shared between multiple tasks. #[derive(Clone, Debug)] pub struct TestFullBlockClient { - headers: Arc>>, - bodies: Arc>>, + headers: Arc>>, + bodies: Arc>>, // soft response limit, max number of bodies to respond with soft_limit: usize, } @@ -147,7 +147,7 @@ impl BodiesClient for TestFullBlockClient { fn get_block_bodies_with_priority( &self, - hashes: Vec, + hashes: Vec, _priority: Priority, ) -> Self::Output { let bodies = self.bodies.lock(); diff --git a/crates/interfaces/src/test_utils/generators.rs b/crates/interfaces/src/test_utils/generators.rs index ae0c0bb0d4..daaabc52fe 100644 --- a/crates/interfaces/src/test_utils/generators.rs +++ b/crates/interfaces/src/test_utils/generators.rs @@ -5,7 +5,7 @@ use rand::{ use reth_primitives::{ proofs, sign_message, Account, Address, BlockNumber, Bytes, Header, Log, Receipt, SealedBlock, SealedHeader, Signature, StorageEntry, Transaction, TransactionKind, TransactionSigned, - TxLegacy, H160, H256, U256, + TxLegacy, B256, U256, }; use secp256k1::{KeyPair, Message as SecpMessage, Secp256k1, SecretKey, SECP256K1}; use std::{ @@ -40,7 +40,7 @@ pub fn rng() -> StdRng { pub fn random_header_range( rng: &mut R, range: std::ops::Range, - head: H256, + head: B256, ) -> Vec { let mut headers = Vec::with_capacity(range.end.saturating_sub(range.start) as usize); for idx in range { @@ -56,7 +56,7 @@ pub fn random_header_range( /// Generate a random [SealedHeader]. /// /// The header is assumed to not be correct if validated. 
-pub fn random_header(rng: &mut R, number: u64, parent: Option) -> SealedHeader { +pub fn random_header(rng: &mut R, number: u64, parent: Option) -> SealedHeader { let header = reth_primitives::Header { number, nonce: rng.gen(), @@ -79,7 +79,7 @@ pub fn random_tx(rng: &mut R) -> Transaction { nonce: rng.gen::().into(), gas_price: rng.gen::().into(), gas_limit: rng.gen::().into(), - to: TransactionKind::Call(Address::random()), + to: TransactionKind::Call(rng.gen()), value: rng.gen::().into(), input: Bytes::default(), }) @@ -100,7 +100,7 @@ pub fn random_signed_tx(rng: &mut R) -> TransactionSigned { /// Signs the [Transaction] with the given key pair. pub fn sign_tx_with_key_pair(key_pair: KeyPair, tx: Transaction) -> TransactionSigned { let signature = - sign_message(H256::from_slice(&key_pair.secret_bytes()[..]), tx.signature_hash()).unwrap(); + sign_message(B256::from_slice(&key_pair.secret_bytes()[..]), tx.signature_hash()).unwrap(); TransactionSigned::from_transaction_and_signature(tx, signature) } @@ -127,7 +127,7 @@ pub fn generate_keys(rng: &mut R, count: usize) -> Vec { pub fn random_block( rng: &mut R, number: u64, - parent: Option, + parent: Option, tx_count: Option, ommers_count: Option, ) -> SealedBlock { @@ -173,7 +173,7 @@ pub fn random_block( pub fn random_block_range( rng: &mut R, block_numbers: RangeInclusive, - head: H256, + head: B256, tx_count: Range, ) -> Vec { let mut blocks = @@ -237,7 +237,7 @@ where prev_from.balance = prev_from.balance.wrapping_sub(transfer); // deposit in receiving account and update storage - let (prev_to, storage): &mut (Account, BTreeMap) = state.get_mut(&to).unwrap(); + let (prev_to, storage): &mut (Account, BTreeMap) = state.get_mut(&to).unwrap(); let mut old_entries: Vec<_> = new_entries .into_iter() @@ -303,7 +303,12 @@ pub fn random_account_change( /// Generate a random storage change. pub fn random_storage_entry(rng: &mut R, key_range: std::ops::Range) -> StorageEntry { - let key = H256::from_low_u64_be(key_range.sample_single(rng)); + let key = B256::new({ + let n = key_range.sample_single(rng); + let mut m = [0u8; 32]; + m[24..32].copy_from_slice(&n.to_be_bytes()); + m + }); let value = U256::from(rng.gen::()); StorageEntry { key, value } @@ -313,7 +318,7 @@ pub fn random_storage_entry(rng: &mut R, key_range: std::ops::Range pub fn random_eoa_account(rng: &mut R) -> (Address, Account) { let nonce: u64 = rng.gen(); let balance = U256::from(rng.gen::()); - let addr = H160::from(rng.gen::()); + let addr = rng.gen(); (addr, Account { nonce, balance, bytecode_hash: None }) } @@ -338,7 +343,7 @@ pub fn random_contract_account_range( let mut accounts = Vec::with_capacity(acc_range.end.saturating_sub(acc_range.start) as usize); for _ in acc_range { let (address, eoa_account) = random_eoa_account(rng); - let account = Account { bytecode_hash: Some(H256::random()), ..eoa_account }; + let account = Account { bytecode_hash: Some(rng.gen()), ..eoa_account }; accounts.push((address, account)) } accounts @@ -366,25 +371,23 @@ pub fn random_receipt( /// Generate random log pub fn random_log(rng: &mut R, address: Option
, topics_count: Option) -> Log { - let data_byte_count = rng.gen::(); - let topics_count = topics_count.unwrap_or_else(|| rng.gen::()); + let data_byte_count = rng.gen::() as usize; + let topics_count = topics_count.unwrap_or_else(|| rng.gen()) as usize; Log { address: address.unwrap_or_else(|| rng.gen()), - topics: (0..topics_count).map(|_| rng.gen()).collect(), - data: Bytes::from((0..data_byte_count).map(|_| rng.gen::()).collect::>()), + topics: std::iter::repeat_with(|| rng.gen()).take(topics_count).collect(), + data: std::iter::repeat_with(|| rng.gen()).take(data_byte_count).collect::>().into(), } } #[cfg(test)] mod test { - use std::str::FromStr; - use super::*; - use hex_literal::hex; use reth_primitives::{ - keccak256, public_key_to_address, AccessList, Address, TransactionKind, TxEip1559, + hex, keccak256, public_key_to_address, AccessList, Address, TransactionKind, TxEip1559, }; use secp256k1::KeyPair; + use std::str::FromStr; #[test] fn test_sign_message() { @@ -407,7 +410,7 @@ mod test { let key_pair = KeyPair::new(&secp, &mut rand::thread_rng()); let signature = - sign_message(H256::from_slice(&key_pair.secret_bytes()[..]), signature_hash) + sign_message(B256::from_slice(&key_pair.secret_bytes()[..]), signature_hash) .unwrap(); let signed = TransactionSigned::from_transaction_and_signature(tx.clone(), signature); @@ -440,12 +443,12 @@ mod test { let hash = transaction.signature_hash(); let expected = - H256::from_str("daf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53") + B256::from_str("daf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53") .unwrap(); assert_eq!(expected, hash); let secret = - H256::from_str("4646464646464646464646464646464646464646464646464646464646464646") + B256::from_str("4646464646464646464646464646464646464646464646464646464646464646") .unwrap(); let signature = sign_message(secret, hash).unwrap(); diff --git a/crates/interfaces/src/test_utils/headers.rs b/crates/interfaces/src/test_utils/headers.rs index 1fa8be1e44..469a91c62f 100644 --- a/crates/interfaces/src/test_utils/headers.rs +++ b/crates/interfaces/src/test_utils/headers.rs @@ -16,7 +16,7 @@ use futures::{future, Future, FutureExt, Stream, StreamExt}; use reth_eth_wire::BlockHeaders; use reth_primitives::{ BlockHash, BlockNumber, Head, Header, HeadersDirection, PeerId, SealedBlock, SealedHeader, - WithPeerId, H256, U256, + WithPeerId, B256, U256, }; use reth_rpc_types::engine::ForkchoiceState; use std::{ diff --git a/crates/metrics/metrics-derive/src/lib.rs b/crates/metrics/metrics-derive/src/lib.rs index 12d253e363..085c0a1ad5 100644 --- a/crates/metrics/metrics-derive/src/lib.rs +++ b/crates/metrics/metrics-derive/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/metrics/src/lib.rs b/crates/metrics/src/lib.rs index b82146e8d5..71eb722530 100644 --- a/crates/metrics/src/lib.rs +++ b/crates/metrics/src/lib.rs @@ -8,7 +8,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - 
issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/net/common/src/lib.rs b/crates/net/common/src/lib.rs index 87c98bf6b0..f65702aaec 100644 --- a/crates/net/common/src/lib.rs +++ b/crates/net/common/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/net/discv4/Cargo.toml b/crates/net/discv4/Cargo.toml index 2f3c2293e8..95fb6bc92d 100644 --- a/crates/net/discv4/Cargo.toml +++ b/crates/net/discv4/Cargo.toml @@ -13,15 +13,15 @@ Ethereum network discovery [dependencies] # reth reth-primitives.workspace = true -reth-rlp.workspace = true -reth-rlp-derive = { path = "../../rlp/rlp-derive" } reth-net-common = { path = "../common" } reth-net-nat = { path = "../nat" } # ethereum +alloy-rlp = { workspace = true, features = ["derive"] } discv5.workspace = true secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recovery", "serde"] } enr = { workspace = true, default-features = false, features = ["rust-secp256k1"] } +rlp = "0.5" # needed for enr # async/futures tokio = { workspace = true, features = ["io-util", "net", "time"] } @@ -31,7 +31,6 @@ tokio-stream.workspace = true tracing.workspace = true thiserror.workspace = true parking_lot.workspace = true -hex = "0.4" rand = { workspace = true, optional = true } generic-array = "0.14" serde = { workspace = true, optional = true } diff --git a/crates/net/discv4/src/config.rs b/crates/net/discv4/src/config.rs index 2f56835fc6..17ba66d132 100644 --- a/crates/net/discv4/src/config.rs +++ b/crates/net/discv4/src/config.rs @@ -3,13 +3,13 @@ //! This basis of this file has been taken from the discv5 codebase: //! 
+use alloy_rlp::Encodable; use reth_net_common::ban_list::BanList; use reth_net_nat::{NatResolver, ResolveNatInterval}; use reth_primitives::{ bytes::{Bytes, BytesMut}, NodeRecord, }; -use reth_rlp::Encodable; use std::{ collections::{HashMap, HashSet}, time::Duration, diff --git a/crates/net/discv4/src/error.rs b/crates/net/discv4/src/error.rs index 804e084ba5..580f0e23d8 100644 --- a/crates/net/discv4/src/error.rs +++ b/crates/net/discv4/src/error.rs @@ -7,7 +7,7 @@ use tokio::sync::{mpsc::error::SendError, oneshot::error::RecvError}; #[allow(missing_docs)] pub enum DecodePacketError { #[error("Failed to rlp decode: {0:?}")] - Rlp(#[from] reth_rlp::DecodeError), + Rlp(#[from] alloy_rlp::Error), #[error("Received packet len too short.")] PacketTooShort, #[error("Hash of the header not equals to the hash of the data.")] diff --git a/crates/net/discv4/src/lib.rs b/crates/net/discv4/src/lib.rs index b7c0e57096..ebe7175ac2 100644 --- a/crates/net/discv4/src/lib.rs +++ b/crates/net/discv4/src/lib.rs @@ -19,7 +19,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms, unreachable_pub, unused_crate_dependencies)] @@ -29,6 +29,7 @@ use crate::{ error::{DecodePacketError, Discv4Error}, proto::{FindNode, Message, Neighbours, Packet, Ping, Pong}, }; +use alloy_rlp::{RlpDecodable, RlpEncodable}; use discv5::{ kbucket, kbucket::{ @@ -42,9 +43,8 @@ use parking_lot::Mutex; use proto::{EnrRequest, EnrResponse, EnrWrapper}; use reth_primitives::{ bytes::{Bytes, BytesMut}, - ForkId, PeerId, H256, + hex, ForkId, PeerId, B256, }; -use reth_rlp::{RlpDecodable, RlpEncodable}; use secp256k1::SecretKey; use std::{ cell::RefCell, @@ -88,6 +88,11 @@ use reth_net_nat::ResolveNatInterval; /// reexport to get public ip. pub use reth_net_nat::{external_ip, NatResolver}; +/// The default address for discv4 via UDP +/// +/// Note: the default TCP address is the same. +pub const DEFAULT_DISCOVERY_ADDR: Ipv4Addr = Ipv4Addr::UNSPECIFIED; + /// The default port for discv4 via UDP /// /// Note: the default TCP port is the same. @@ -352,7 +357,7 @@ impl Discv4 { /// Sets the pair in the EIP-868 [`Enr`] of the node. /// /// If the key already exists, this will update it. - pub fn set_eip868_rlp(&self, key: Vec, value: impl reth_rlp::Encodable) { + pub fn set_eip868_rlp(&self, key: Vec, value: impl alloy_rlp::Encodable) { let mut buf = BytesMut::new(); value.encode(&mut buf); self.set_eip868_rlp_pair(key, buf.freeze()) @@ -929,7 +934,7 @@ impl Discv4Service { } /// Encodes the packet, sends it and returns the hash. 
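Editor's note: with the migration to alloy-rlp, `set_eip868_rlp` above accepts any value implementing `alloy_rlp::Encodable` and encodes it into a `BytesMut` before freezing it. A minimal sketch of that same pattern (illustrative only; the helper name is made up):

    use alloy_rlp::Encodable;
    use reth_primitives::bytes::{Bytes, BytesMut};

    // Hypothetical helper mirroring the body of `set_eip868_rlp`: encode any
    // RLP-encodable value and hand back the frozen bytes.
    fn encode_rlp_value(value: &impl Encodable) -> Bytes {
        let mut buf = BytesMut::new();
        value.encode(&mut buf);
        buf.freeze()
    }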
- pub(crate) fn send_packet(&mut self, msg: Message, to: SocketAddr) -> H256 { + pub(crate) fn send_packet(&mut self, msg: Message, to: SocketAddr) -> B256 { let (payload, hash) = msg.encode(&self.secret_key); trace!(target : "discv4", r#type=?msg.msg_type(), ?to, ?hash, "sending packet"); let _ = self.egress.try_send((payload, to)).map_err(|err| { @@ -943,7 +948,7 @@ impl Discv4Service { } /// Message handler for an incoming `Ping` - fn on_ping(&mut self, ping: Ping, remote_addr: SocketAddr, remote_id: PeerId, hash: H256) { + fn on_ping(&mut self, ping: Ping, remote_addr: SocketAddr, remote_id: PeerId, hash: B256) { if self.is_expired(ping.expire) { // ping's expiration timestamp is in the past return @@ -1067,7 +1072,7 @@ impl Discv4Service { /// Sends a ping message to the node's UDP address. /// /// Returns the echo hash of the ping message. - pub(crate) fn send_ping(&mut self, node: NodeRecord, reason: PingReason) -> H256 { + pub(crate) fn send_ping(&mut self, node: NodeRecord, reason: PingReason) -> B256 { let remote_addr = node.udp_addr(); let id = node.id; let ping = Ping { @@ -1200,7 +1205,7 @@ impl Discv4Service { msg: EnrRequest, remote_addr: SocketAddr, id: PeerId, - request_hash: H256, + request_hash: B256, ) { if !self.config.enable_eip868 || self.is_expired(msg.expire) { return @@ -1720,7 +1725,7 @@ struct PingRequest { // Node to which the request was sent. node: NodeRecord, // Hash sent in the Ping request - echo_hash: H256, + echo_hash: B256, /// Why this ping was sent. reason: PingReason, } @@ -1929,7 +1934,7 @@ struct EnrRequestState { // Timestamp when the request was sent. sent_at: Instant, // Hash sent in the Ping request - echo_hash: H256, + echo_hash: B256, } /// Stored node info. @@ -2057,9 +2062,9 @@ impl From for EnrForkIdEntry { mod tests { use super::*; use crate::test_utils::{create_discv4, create_discv4_with_config, rng_endpoint, rng_record}; + use alloy_rlp::{Decodable, Encodable}; use rand::{thread_rng, Rng}; - use reth_primitives::{hex_literal::hex, mainnet_nodes, ForkHash}; - use reth_rlp::{Decodable, Encodable}; + use reth_primitives::{hex, mainnet_nodes, ForkHash}; use std::{future::poll_fn, net::Ipv4Addr}; #[tokio::test] @@ -2191,8 +2196,8 @@ mod tests { enr_sq: Some(rng.gen()), }; - let id = PeerId::random(); - service.on_ping(ping, addr, id, H256::random()); + let id = PeerId::random_with(&mut rng); + service.on_ping(ping, addr, id, rng.gen()); let key = kad_key(id); match service.kbuckets.entry(&key) { @@ -2223,8 +2228,8 @@ mod tests { enr_sq: Some(rng.gen()), }; - let id = PeerId::random(); - service.on_ping(ping, addr, id, H256::random()); + let id = PeerId::random_with(&mut rng); + service.on_ping(ping, addr, id, rng.gen()); let key = kad_key(id); match service.kbuckets.entry(&key) { diff --git a/crates/net/discv4/src/node.rs b/crates/net/discv4/src/node.rs index ff154c4bad..2e8dc1773a 100644 --- a/crates/net/discv4/src/node.rs +++ b/crates/net/discv4/src/node.rs @@ -13,8 +13,8 @@ impl From for NodeKey { impl From for discv5::Key { fn from(value: NodeKey) -> Self { - let hash = keccak256(value.0.as_bytes()); - let hash = *GenericArray::from_slice(hash.as_bytes()); + let hash = keccak256(value.0.as_slice()); + let hash = *GenericArray::from_slice(hash.as_slice()); discv5::Key::new_raw(value, hash) } } diff --git a/crates/net/discv4/src/proto.rs b/crates/net/discv4/src/proto.rs index 85cbf69093..a707174c5c 100644 --- a/crates/net/discv4/src/proto.rs +++ b/crates/net/discv4/src/proto.rs @@ -3,15 +3,14 @@ #![allow(missing_docs)] use 
crate::{error::DecodePacketError, EnrForkIdEntry, PeerId, MAX_PACKET_SIZE, MIN_PACKET_SIZE}; +use alloy_rlp::{ + length_of_length, Decodable, Encodable, Error as RlpError, Header, RlpDecodable, RlpEncodable, +}; use enr::{Enr, EnrKey}; use reth_primitives::{ bytes::{Buf, BufMut, Bytes, BytesMut}, - keccak256, - rpc_utils::rlp, - ForkId, NodeRecord, H256, + keccak256, ForkId, NodeRecord, B256, }; -use reth_rlp::{length_of_length, Decodable, DecodeError, Encodable, Header}; -use reth_rlp_derive::{RlpDecodable, RlpEncodable}; use secp256k1::{ ecdsa::{RecoverableSignature, RecoveryId}, SecretKey, SECP256K1, @@ -78,13 +77,13 @@ impl Message { /// /// The datagram is `header || payload` /// where header is `hash || signature || packet-type` - pub fn encode(&self, secret_key: &SecretKey) -> (Bytes, H256) { + pub fn encode(&self, secret_key: &SecretKey) -> (Bytes, B256) { // allocate max packet size let mut datagram = BytesMut::with_capacity(MAX_PACKET_SIZE); // since signature has fixed len, we can split and fill the datagram buffer at fixed // positions, this way we can encode the message directly in the datagram buffer - let mut sig_bytes = datagram.split_off(H256::len_bytes()); + let mut sig_bytes = datagram.split_off(B256::len_bytes()); let mut payload = sig_bytes.split_off(secp256k1::constants::COMPACT_SIGNATURE_SIZE + 1); match self { @@ -126,7 +125,7 @@ impl Message { sig_bytes.unsplit(payload); let hash = keccak256(&sig_bytes); - datagram.extend_from_slice(hash.as_bytes()); + datagram.extend_from_slice(hash.as_slice()); datagram.unsplit(sig_bytes); (datagram.freeze(), hash) @@ -146,7 +145,7 @@ impl Message { // signature = sign(packet-type || packet-data) let header_hash = keccak256(&packet[32..]); - let data_hash = H256::from_slice(&packet[..32]); + let data_hash = B256::from_slice(&packet[..32]); if data_hash != header_hash { return Err(DecodePacketError::HashMismatch) } @@ -156,7 +155,7 @@ impl Message { let recoverable_sig = RecoverableSignature::from_compact(signature, recovery_id)?; // recover the public key - let msg = secp256k1::Message::from_slice(keccak256(&packet[97..]).as_bytes())?; + let msg = secp256k1::Message::from_slice(keccak256(&packet[97..]).as_slice())?; let pk = SECP256K1.recover_ecdsa(&msg, &recoverable_sig)?; let node_id = PeerId::from_slice(&pk.serialize_uncompressed()[1..]); @@ -182,7 +181,7 @@ impl Message { pub struct Packet { pub msg: Message, pub node_id: PeerId, - pub hash: H256, + pub hash: B256, } /// Represents the `from`, `to` fields in the packets @@ -258,24 +257,24 @@ where } impl Decodable for EnrWrapper { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let enr = as rlp::Decodable>::decode(&rlp::Rlp::new(buf)) .map_err(|e| match e { - rlp::DecoderError::RlpIsTooShort => DecodeError::InputTooShort, - rlp::DecoderError::RlpInvalidLength => DecodeError::Overflow, - rlp::DecoderError::RlpExpectedToBeList => DecodeError::UnexpectedString, - rlp::DecoderError::RlpExpectedToBeData => DecodeError::UnexpectedList, + rlp::DecoderError::RlpIsTooShort => RlpError::InputTooShort, + rlp::DecoderError::RlpInvalidLength => RlpError::Overflow, + rlp::DecoderError::RlpExpectedToBeList => RlpError::UnexpectedString, + rlp::DecoderError::RlpExpectedToBeData => RlpError::UnexpectedList, rlp::DecoderError::RlpDataLenWithZeroPrefix | - rlp::DecoderError::RlpListLenWithZeroPrefix => DecodeError::LeadingZero, - rlp::DecoderError::RlpInvalidIndirection => DecodeError::NonCanonicalSize, + rlp::DecoderError::RlpListLenWithZeroPrefix 
=> RlpError::LeadingZero, + rlp::DecoderError::RlpInvalidIndirection => RlpError::NonCanonicalSize, rlp::DecoderError::RlpIncorrectListLen => { - DecodeError::Custom("incorrect list length when decoding rlp") + RlpError::Custom("incorrect list length when decoding rlp") } - rlp::DecoderError::RlpIsTooBig => DecodeError::Custom("rlp is too big"), + rlp::DecoderError::RlpIsTooBig => RlpError::Custom("rlp is too big"), rlp::DecoderError::RlpInconsistentLengthAndData => { - DecodeError::Custom("inconsistent length and data when decoding rlp") + RlpError::Custom("inconsistent length and data when decoding rlp") } - rlp::DecoderError::Custom(s) => DecodeError::Custom(s), + rlp::DecoderError::Custom(s) => RlpError::Custom(s), }) .map(EnrWrapper::new); if enr.is_ok() { @@ -296,7 +295,7 @@ pub struct EnrRequest { /// A [ENRResponse packet](https://github.com/ethereum/devp2p/blob/master/discv4.md#enrresponse-packet-0x06). #[derive(Clone, Debug, Eq, PartialEq, RlpEncodable)] pub struct EnrResponse { - pub request_hash: H256, + pub request_hash: B256, pub enr: EnrWrapper, } @@ -313,22 +312,22 @@ impl EnrResponse { } impl Decodable for EnrResponse { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let b = &mut &**buf; let rlp_head = Header::decode(b)?; if !rlp_head.list { - return Err(DecodeError::UnexpectedString) + return Err(RlpError::UnexpectedString) } // let started_len = b.len(); let this = Self { - request_hash: reth_rlp::Decodable::decode(b)?, + request_hash: alloy_rlp::Decodable::decode(b)?, enr: EnrWrapper::::decode(b)?, }; - // TODO: `Decodable` can be derived once we have native reth_rlp decoding for ENR: + // TODO: `Decodable` can be derived once we have native alloy_rlp decoding for ENR: // Skipping the size check here is fine since the `buf` is the UDP datagram // let consumed = started_len - b.len(); // if consumed != rlp_head.payload_length { - // return Err(reth_rlp::DecodeError::ListLengthMismatch { + // return Err(alloy_rlp::Error::ListLengthMismatch { // expected: rlp_head.payload_length, // got: consumed, // }) @@ -388,11 +387,11 @@ impl Encodable for Ping { } impl Decodable for Ping { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let b = &mut &**buf; let rlp_head = Header::decode(b)?; if !rlp_head.list { - return Err(DecodeError::UnexpectedString) + return Err(RlpError::UnexpectedString) } let started_len = b.len(); let _version = u32::decode(b)?; @@ -410,7 +409,7 @@ impl Decodable for Ping { let consumed = started_len - b.len(); if consumed > rlp_head.payload_length { - return Err(DecodeError::ListLengthMismatch { + return Err(RlpError::ListLengthMismatch { expected: rlp_head.payload_length, got: consumed, }) @@ -426,7 +425,7 @@ impl Decodable for Ping { #[derive(Clone, Debug, Eq, PartialEq)] pub struct Pong { pub to: NodeEndpoint, - pub echo: H256, + pub echo: B256, pub expire: u64, /// Optional enr_seq for pub enr_sq: Option, @@ -437,7 +436,7 @@ impl Encodable for Pong { #[derive(RlpEncodable)] struct PongMessageEIP868<'a> { to: &'a NodeEndpoint, - echo: &'a H256, + echo: &'a B256, expire: u64, enr_seq: u64, } @@ -445,7 +444,7 @@ impl Encodable for Pong { #[derive(RlpEncodable)] struct PongMessage<'a> { to: &'a NodeEndpoint, - echo: &'a H256, + echo: &'a B256, expire: u64, } @@ -459,11 +458,11 @@ impl Encodable for Pong { } impl Decodable for Pong { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let b = &mut &**buf; let rlp_head = 
Header::decode(b)?; if !rlp_head.list { - return Err(DecodeError::UnexpectedString) + return Err(RlpError::UnexpectedString) } let started_len = b.len(); let mut this = Self { @@ -480,7 +479,7 @@ impl Decodable for Pong { let consumed = started_len - b.len(); if consumed > rlp_head.payload_length { - return Err(DecodeError::ListLengthMismatch { + return Err(RlpError::ListLengthMismatch { expected: rlp_head.payload_length, got: consumed, }) @@ -502,7 +501,7 @@ mod tests { }; use enr::{EnrBuilder, EnrPublicKey}; use rand::{thread_rng, Rng, RngCore}; - use reth_primitives::{hex_literal::hex, ForkHash}; + use reth_primitives::{hex, ForkHash}; #[test] fn test_endpoint_ipv_v4() { @@ -594,7 +593,7 @@ mod tests { rng.fill_bytes(&mut ip); let msg = Pong { to: rng_endpoint(&mut rng), - echo: H256::random(), + echo: rng.gen(), expire: rng.gen(), enr_sq: None, }; @@ -615,7 +614,7 @@ mod tests { rng.fill_bytes(&mut ip); let msg = Pong { to: rng_endpoint(&mut rng), - echo: H256::random(), + echo: rng.gen(), expire: rng.gen(), enr_sq: Some(rng.gen()), }; @@ -719,11 +718,12 @@ mod tests { #[test] fn encode_decode_enr_msg() { use self::EnrWrapper; + use alloy_rlp::Decodable; use enr::secp256k1::SecretKey; - use reth_rlp::Decodable; use std::net::Ipv4Addr; - let key = SecretKey::new(&mut rand::rngs::OsRng); + let mut rng = rand::rngs::OsRng; + let key = SecretKey::new(&mut rng); let ip = Ipv4Addr::new(127, 0, 0, 1); let tcp = 3000; @@ -740,7 +740,7 @@ mod tests { EnrWrapper::new(builder.build(&key).unwrap()) }; - let enr_respone = EnrResponse { request_hash: H256::random(), enr }; + let enr_respone = EnrResponse { request_hash: rng.gen(), enr }; let mut buf = Vec::new(); enr_respone.encode(&mut buf); @@ -757,8 +757,8 @@ mod tests { #[test] fn encode_known_rlp_enr() { use self::EnrWrapper; + use alloy_rlp::Decodable; use enr::{secp256k1::SecretKey, EnrPublicKey}; - use reth_rlp::Decodable; use std::net::Ipv4Addr; let valid_record = diff --git a/crates/net/discv4/src/test_utils.rs b/crates/net/discv4/src/test_utils.rs index 4fa3b13fd7..7e7b27d45c 100644 --- a/crates/net/discv4/src/test_utils.rs +++ b/crates/net/discv4/src/test_utils.rs @@ -8,7 +8,7 @@ use crate::{ IngressReceiver, PeerId, SAFE_MAX_DATAGRAM_NEIGHBOUR_RECORDS, }; use rand::{thread_rng, Rng, RngCore}; -use reth_primitives::{hex_literal::hex, ForkHash, ForkId, NodeRecord, H256}; +use reth_primitives::{hex, ForkHash, ForkId, NodeRecord, B256}; use secp256k1::{SecretKey, SECP256K1}; use std::{ collections::{HashMap, HashSet}, @@ -113,7 +113,7 @@ impl MockDiscovery { } /// Encodes the packet, sends it and returns the hash. 
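Editor's note: the `Ping`, `Pong`, and `EnrResponse` decoders above all follow the same manual alloy-rlp pattern after the migration: decode the list header, track how many payload bytes were consumed, and reject mismatches. A minimal sketch with a hypothetical two-field struct (not part of the diff):

    use alloy_rlp::{Decodable, Error as RlpError, Header};

    struct Example {
        a: u64,
        b: u64,
    }

    impl Decodable for Example {
        fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
            let rlp_head = Header::decode(buf)?;
            if !rlp_head.list {
                return Err(RlpError::UnexpectedString)
            }
            let started_len = buf.len();
            let this = Self { a: u64::decode(buf)?, b: u64::decode(buf)? };
            let consumed = started_len - buf.len();
            // Note: the Ping/Pong impls above use `>` instead of `!=` so that extra
            // trailing fields (EIP-8 forward compatibility) are tolerated.
            if consumed != rlp_head.payload_length {
                return Err(RlpError::ListLengthMismatch {
                    expected: rlp_head.payload_length,
                    got: consumed,
                })
            }
            Ok(this)
        }
    }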
- fn send_packet(&mut self, msg: Message, to: SocketAddr) -> H256 { + fn send_packet(&mut self, msg: Message, to: SocketAddr) -> B256 { let (payload, hash) = msg.encode(&self.secret_key); let _ = self.egress.try_send((payload, to)); hash @@ -236,21 +236,21 @@ pub fn rng_endpoint(rng: &mut impl Rng) -> NodeEndpoint { pub fn rng_record(rng: &mut impl RngCore) -> NodeRecord { let NodeEndpoint { address, udp_port, tcp_port } = rng_endpoint(rng); - NodeRecord { address, tcp_port, udp_port, id: PeerId::random() } + NodeRecord { address, tcp_port, udp_port, id: rng.gen() } } pub fn rng_ipv6_record(rng: &mut impl RngCore) -> NodeRecord { let mut ip = [0u8; 16]; rng.fill_bytes(&mut ip); let address = IpAddr::V6(ip.into()); - NodeRecord { address, tcp_port: rng.gen(), udp_port: rng.gen(), id: PeerId::random() } + NodeRecord { address, tcp_port: rng.gen(), udp_port: rng.gen(), id: rng.gen() } } pub fn rng_ipv4_record(rng: &mut impl RngCore) -> NodeRecord { let mut ip = [0u8; 4]; rng.fill_bytes(&mut ip); let address = IpAddr::V4(ip.into()); - NodeRecord { address, tcp_port: rng.gen(), udp_port: rng.gen(), id: PeerId::random() } + NodeRecord { address, tcp_port: rng.gen(), udp_port: rng.gen(), id: rng.gen() } } pub fn rng_message(rng: &mut impl RngCore) -> Message { @@ -263,11 +263,11 @@ pub fn rng_message(rng: &mut impl RngCore) -> Message { }), 2 => Message::Pong(Pong { to: rng_endpoint(rng), - echo: H256::random(), + echo: rng.gen(), expire: rng.gen(), enr_sq: None, }), - 3 => Message::FindNode(FindNode { id: PeerId::random(), expire: rng.gen() }), + 3 => Message::FindNode(FindNode { id: rng.gen(), expire: rng.gen() }), 4 => { let num: usize = rng.gen_range(1..=SAFE_MAX_DATAGRAM_NEIGHBOUR_RECORDS); Message::Neighbours(Neighbours { diff --git a/crates/net/dns/Cargo.toml b/crates/net/dns/Cargo.toml index 012fe4548d..f6ed6faf0f 100644 --- a/crates/net/dns/Cargo.toml +++ b/crates/net/dns/Cargo.toml @@ -12,9 +12,9 @@ description = "Support for EIP-1459 Node Discovery via DNS" # reth reth-primitives.workspace = true reth-net-common = { path = "../common" } -reth-rlp.workspace = true # ethereum +alloy-rlp.workspace = true secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recovery", "serde"] } enr = { workspace = true, default-features = false, features = ["rust-secp256k1"] } diff --git a/crates/net/dns/src/lib.rs b/crates/net/dns/src/lib.rs index e62e105456..439e652986 100644 --- a/crates/net/dns/src/lib.rs +++ b/crates/net/dns/src/lib.rs @@ -8,7 +8,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -389,7 +389,7 @@ pub enum DnsDiscoveryEvent { /// Converts an [Enr] into a [NodeRecord] fn convert_enr_node_record(enr: &Enr) -> Option { - use reth_rlp::Decodable; + use alloy_rlp::Decodable; let node_record = NodeRecord { address: enr.ip4().map(IpAddr::from).or_else(|| enr.ip6().map(IpAddr::from))?, @@ -409,9 +409,9 @@ fn convert_enr_node_record(enr: &Enr) -> Option mod tests { use super::*; use crate::tree::TreeRootEntry; + use alloy_rlp::Encodable; use enr::{EnrBuilder, EnrKey}; use reth_primitives::{Chain, Hardfork, MAINNET}; - use reth_rlp::Encodable; use 
secp256k1::rand::thread_rng; use std::{future::poll_fn, net::Ipv4Addr}; use tokio_stream::StreamExt; diff --git a/crates/net/dns/src/tree.rs b/crates/net/dns/src/tree.rs index d9e0408995..b1b2c263b4 100644 --- a/crates/net/dns/src/tree.rs +++ b/crates/net/dns/src/tree.rs @@ -25,7 +25,7 @@ use crate::error::{ }; use data_encoding::{BASE32_NOPAD, BASE64URL_NOPAD}; use enr::{Enr, EnrError, EnrKey, EnrKeyUnambiguous, EnrPublicKey}; -use reth_primitives::{bytes::Bytes, hex}; +use reth_primitives::{hex, Bytes}; use secp256k1::SecretKey; use std::{ fmt, diff --git a/crates/net/downloaders/Cargo.toml b/crates/net/downloaders/Cargo.toml index dbed752c13..bc399f9d2d 100644 --- a/crates/net/downloaders/Cargo.toml +++ b/crates/net/downloaders/Cargo.toml @@ -33,7 +33,7 @@ rayon.workspace = true thiserror.workspace = true # optional deps for the test-utils feature -reth-rlp = { workspace = true, optional = true } +alloy-rlp = { workspace = true, optional = true } tempfile = { version = "3.3", optional = true } itertools = { workspace = true, optional = true } @@ -44,10 +44,10 @@ reth-tracing = { path = "../../tracing" } assert_matches.workspace = true tokio = { workspace = true, features = ["macros", "rt-multi-thread"] } -reth-rlp.workspace = true +alloy-rlp.workspace = true itertools.workspace = true tempfile = "3.3" [features] -test-utils = ["dep:reth-rlp", "dep:tempfile", "dep:itertools"] +test-utils = ["dep:alloy-rlp", "dep:tempfile", "dep:itertools"] diff --git a/crates/net/downloaders/src/bodies/bodies.rs b/crates/net/downloaders/src/bodies/bodies.rs index e2f51d0c24..e50bb53076 100644 --- a/crates/net/downloaders/src/bodies/bodies.rs +++ b/crates/net/downloaders/src/bodies/bodies.rs @@ -605,7 +605,7 @@ mod tests { use futures_util::stream::StreamExt; use reth_db::test_utils::create_test_rw_db; use reth_interfaces::test_utils::{generators, generators::random_block_range, TestConsensus}; - use reth_primitives::{BlockBody, H256}; + use reth_primitives::{BlockBody, B256}; use std::{collections::HashMap, sync::Arc}; // Check that the blocks are emitted in order of block number, not in order of @@ -642,7 +642,7 @@ mod tests { // Generate some random blocks let db = create_test_rw_db(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, 0..=199, H256::zero(), 1..2); + let blocks = random_block_range(&mut rng, 0..=199, B256::ZERO, 1..2); let headers = blocks.iter().map(|block| block.header.clone()).collect::>(); let bodies = blocks diff --git a/crates/net/downloaders/src/bodies/request.rs b/crates/net/downloaders/src/bodies/request.rs index 8698e881eb..4fd4fb7849 100644 --- a/crates/net/downloaders/src/bodies/request.rs +++ b/crates/net/downloaders/src/bodies/request.rs @@ -8,7 +8,7 @@ use reth_interfaces::{ priority::Priority, }, }; -use reth_primitives::{BlockBody, PeerId, SealedBlock, SealedHeader, WithPeerId, H256}; +use reth_primitives::{BlockBody, PeerId, SealedBlock, SealedHeader, WithPeerId, B256}; use std::{ collections::VecDeque, mem, @@ -98,14 +98,14 @@ where } /// Retrieve header hashes for the next request. - fn next_request(&self) -> Option> { + fn next_request(&self) -> Option> { let mut hashes = self.pending_headers.iter().filter(|h| !h.is_empty()).map(|h| h.hash()).peekable(); hashes.peek().is_some().then(|| hashes.collect()) } /// Submit the request with the given priority. 
- fn submit_request(&mut self, req: Vec, priority: Priority) { + fn submit_request(&mut self, req: Vec, priority: Priority) { tracing::trace!(target: "downloaders::bodies", request_len = req.len(), "Requesting bodies"); let client = Arc::clone(&self.client); self.last_request_len = Some(req.len()); @@ -254,14 +254,14 @@ mod tests { p2p::bodies::response::BlockResponse, test_utils::{generators, generators::random_header_range, TestConsensus}, }; - use reth_primitives::H256; + use reth_primitives::B256; use std::sync::Arc; /// Check if future returns empty bodies without dispathing any requests. #[tokio::test] async fn request_returns_empty_bodies() { let mut rng = generators::rng(); - let headers = random_header_range(&mut rng, 0..20, H256::zero()); + let headers = random_header_range(&mut rng, 0..20, B256::ZERO); let client = Arc::new(TestBodiesClient::default()); let fut = BodiesRequestFuture::new( diff --git a/crates/net/downloaders/src/bodies/test_utils.rs b/crates/net/downloaders/src/bodies/test_utils.rs index 45e5db1e3c..42261a02bf 100644 --- a/crates/net/downloaders/src/bodies/test_utils.rs +++ b/crates/net/downloaders/src/bodies/test_utils.rs @@ -2,12 +2,12 @@ //! Test helper impls for generating bodies use reth_db::{database::Database, tables, transaction::DbTxMut, DatabaseEnv}; use reth_interfaces::{db, p2p::bodies::response::BlockResponse}; -use reth_primitives::{Block, BlockBody, SealedBlock, SealedHeader, H256}; +use reth_primitives::{Block, BlockBody, SealedBlock, SealedHeader, B256}; use std::collections::HashMap; pub(crate) fn zip_blocks<'a>( headers: impl Iterator, - bodies: &mut HashMap, + bodies: &mut HashMap, ) -> Vec { headers .into_iter() @@ -29,7 +29,7 @@ pub(crate) fn zip_blocks<'a>( pub(crate) fn create_raw_bodies<'a>( headers: impl Iterator, - bodies: &mut HashMap, + bodies: &mut HashMap, ) -> Vec { headers .into_iter() diff --git a/crates/net/downloaders/src/headers/reverse_headers.rs b/crates/net/downloaders/src/headers/reverse_headers.rs index 848ab4945b..53fa2cf504 100644 --- a/crates/net/downloaders/src/headers/reverse_headers.rs +++ b/crates/net/downloaders/src/headers/reverse_headers.rs @@ -18,7 +18,7 @@ use reth_interfaces::{ }, }; use reth_primitives::{ - BlockHashOrNumber, BlockNumber, Header, HeadersDirection, PeerId, SealedHeader, H256, + BlockHashOrNumber, BlockNumber, Header, HeadersDirection, PeerId, SealedHeader, B256, }; use reth_tasks::{TaskSpawner, TokioTaskExecutor}; use std::{ @@ -989,13 +989,13 @@ impl std::error::Error for HeadersResponseError {} #[derive(Clone, Debug)] pub enum SyncTargetBlock { /// Block hash of the targeted block - Hash(H256), + Hash(B256), /// Block number of the targeted block Number(u64), /// Both the block hash and number of the targeted block HashAndNumber { /// Block hash of the targeted block - hash: H256, + hash: B256, /// Block number of the targeted block number: u64, }, @@ -1003,7 +1003,7 @@ pub enum SyncTargetBlock { impl SyncTargetBlock { /// Create new instance from hash. - fn from_hash(hash: H256) -> Self { + fn from_hash(hash: B256) -> Self { Self::Hash(hash) } @@ -1013,7 +1013,7 @@ impl SyncTargetBlock { } /// Set the hash for the sync target. - fn with_hash(self, hash: H256) -> Self { + fn with_hash(self, hash: B256) -> Self { match self { Self::Hash(_) => Self::Hash(hash), Self::Number(number) => Self::HashAndNumber { hash, number }, @@ -1054,7 +1054,7 @@ impl SyncTargetBlock { } /// Return the hash of the target block, if it is set. 
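Editor's note: behavior sketch for the `SyncTargetBlock` helpers in this hunk (illustrative only; these are private methods and `tip` is a placeholder value):

    let tip = B256::random();
    // a hash-only target keeps just the hash ...
    let target = SyncTargetBlock::from_hash(tip);
    assert_eq!(target.hash(), Some(tip));
    // ... while `with_hash` upgrades a number-only target to HashAndNumber
    let target = SyncTargetBlock::Number(100).with_hash(tip);
    assert_eq!(target.hash(), Some(tip));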
- fn hash(&self) -> Option { + fn hash(&self) -> Option { match self { Self::Hash(hash) => Some(*hash), Self::Number(_) => None, @@ -1235,7 +1235,7 @@ mod tests { let fixtures = vec![ Fixture { - sync_target_block: SyncTargetBlock::Hash(H256::random()), + sync_target_block: SyncTargetBlock::Hash(B256::random()), // Hash maps to None here, all other variants map to Some sync_target_option: None, replace_number: 1, @@ -1251,7 +1251,7 @@ mod tests { }, Fixture { sync_target_block: SyncTargetBlock::HashAndNumber { - hash: H256::random(), + hash: B256::random(), number: 1, }, sync_target_option: Some(1), @@ -1284,16 +1284,16 @@ mod tests { let mut downloader = ReverseHeadersDownloaderBuilder::default() .build(Arc::clone(&client), Arc::new(TestConsensus::default())); downloader.update_local_head(genesis); - downloader.update_sync_target(SyncTarget::Tip(H256::random())); + downloader.update_sync_target(SyncTarget::Tip(B256::random())); downloader.sync_target_request.take(); - let target = SyncTarget::Tip(H256::random()); + let target = SyncTarget::Tip(B256::random()); downloader.update_sync_target(target); assert!(downloader.sync_target_request.is_some()); downloader.sync_target_request.take(); - let target = SyncTarget::Gap(Header::default().seal(H256::random())); + let target = SyncTarget::Gap(Header::default().seal(B256::random())); downloader.update_sync_target(target); assert!(downloader.sync_target_request.is_none()); assert_matches!( @@ -1312,12 +1312,12 @@ mod tests { let mut downloader = ReverseHeadersDownloaderBuilder::default() .build(Arc::clone(&client), Arc::new(TestConsensus::default())); downloader.update_local_head(header.clone()); - downloader.update_sync_target(SyncTarget::Tip(H256::random())); + downloader.update_sync_target(SyncTarget::Tip(B256::random())); downloader.queued_validated_headers.push(header.clone()); let mut next = header.as_ref().clone(); next.number += 1; - downloader.update_local_head(next.seal(H256::random())); + downloader.update_local_head(next.seal(B256::random())); assert!(downloader.queued_validated_headers.is_empty()); } @@ -1353,7 +1353,7 @@ mod tests { .request_limit(batch_size) .build(Arc::clone(&client), Arc::new(TestConsensus::default())); downloader.update_local_head(genesis); - downloader.update_sync_target(SyncTarget::Tip(H256::random())); + downloader.update_sync_target(SyncTarget::Tip(B256::random())); downloader.next_request_block_number = start; diff --git a/crates/net/downloaders/src/lib.rs b/crates/net/downloaders/src/lib.rs index 92f5936820..f96208657a 100644 --- a/crates/net/downloaders/src/lib.rs +++ b/crates/net/downloaders/src/lib.rs @@ -7,7 +7,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![allow(clippy::result_large_err)] // TODO(danipopes): fix this #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] diff --git a/crates/net/downloaders/src/test_utils/bodies_client.rs b/crates/net/downloaders/src/test_utils/bodies_client.rs index 9f120c0b2b..e2968fc8d1 100644 --- a/crates/net/downloaders/src/test_utils/bodies_client.rs +++ b/crates/net/downloaders/src/test_utils/bodies_client.rs @@ -3,7 +3,7 @@ use reth_interfaces::p2p::{ download::DownloadClient, priority::Priority, }; -use reth_primitives::{BlockBody, PeerId, 
H256}; +use reth_primitives::{BlockBody, PeerId, B256}; use std::{ collections::HashMap, fmt::Debug, @@ -18,14 +18,14 @@ use tokio::sync::Mutex; /// A [BodiesClient] for testing. #[derive(Debug, Default)] pub struct TestBodiesClient { - bodies: Arc>>, + bodies: Arc>>, should_delay: bool, max_batch_size: Option, times_requested: AtomicU64, } impl TestBodiesClient { - pub(crate) fn with_bodies(mut self, bodies: HashMap) -> Self { + pub(crate) fn with_bodies(mut self, bodies: HashMap) -> Self { self.bodies = Arc::new(Mutex::new(bodies)); self } @@ -60,7 +60,7 @@ impl BodiesClient for TestBodiesClient { fn get_block_bodies_with_priority( &self, - hashes: Vec, + hashes: Vec, _priority: Priority, ) -> Self::Output { let should_delay = self.should_delay; @@ -71,7 +71,7 @@ impl BodiesClient for TestBodiesClient { Box::pin(async move { if should_delay { - tokio::time::sleep(Duration::from_millis(hashes[0].to_low_u64_be() % 100)).await; + tokio::time::sleep(Duration::from_millis((hashes[0][0] % 100) as u64)).await; } let bodies = &mut *bodies.lock().await; diff --git a/crates/net/downloaders/src/test_utils/file_client.rs b/crates/net/downloaders/src/test_utils/file_client.rs index 11135264cd..29902d946f 100644 --- a/crates/net/downloaders/src/test_utils/file_client.rs +++ b/crates/net/downloaders/src/test_utils/file_client.rs @@ -1,4 +1,5 @@ use super::file_codec::BlockFileCodec; +use alloy_rlp::{Decodable, Header as RlpHeader}; use itertools::Either; use reth_interfaces::{ p2p::{ @@ -12,9 +13,8 @@ use reth_interfaces::{ }; use reth_primitives::{ Block, BlockBody, BlockHash, BlockHashOrNumber, BlockNumber, Header, HeadersDirection, PeerId, - H256, + B256, }; -use reth_rlp::{Decodable, Header as RlpHeader}; use std::{ collections::HashMap, iter::zip, @@ -65,7 +65,7 @@ pub enum FileClientError { /// An error occurred when decoding blocks, headers, or rlp headers from the file. #[error(transparent)] - Rlp(#[from] reth_rlp::DecodeError), + Rlp(#[from] alloy_rlp::Error), } impl FileClient { @@ -115,7 +115,7 @@ impl FileClient { } /// Get the tip hash of the chain. - pub fn tip(&self) -> Option { + pub fn tip(&self) -> Option { self.headers.get(&(self.headers.len() as u64)).map(|h| h.hash_slow()) } @@ -213,7 +213,7 @@ impl BodiesClient for FileClient { fn get_block_bodies_with_priority( &self, - hashes: Vec, + hashes: Vec, _priority: Priority, ) -> Self::Output { // this just searches the buffer, and fails if it can't find the block @@ -255,6 +255,7 @@ mod tests { headers::{reverse_headers::ReverseHeadersDownloaderBuilder, test_utils::child_header}, test_utils::{generate_bodies, generate_bodies_file}, }; + use alloy_rlp::Encodable; use assert_matches::assert_matches; use futures::SinkExt; use futures_util::stream::StreamExt; @@ -267,7 +268,6 @@ mod tests { test_utils::TestConsensus, }; use reth_primitives::SealedHeader; - use reth_rlp::Encodable; use std::{ io::{Read, Seek, SeekFrom, Write}, sync::Arc, diff --git a/crates/net/downloaders/src/test_utils/file_codec.rs b/crates/net/downloaders/src/test_utils/file_codec.rs index 98388be4ad..443877026b 100644 --- a/crates/net/downloaders/src/test_utils/file_codec.rs +++ b/crates/net/downloaders/src/test_utils/file_codec.rs @@ -1,17 +1,17 @@ //! Codec for reading raw block bodies from a file. use super::FileClientError; +use alloy_rlp::{Decodable, Encodable}; use reth_primitives::{ bytes::{Buf, BytesMut}, Block, }; -use reth_rlp::{Decodable, Encodable}; use tokio_util::codec::{Decoder, Encoder}; /// Codec for reading raw block bodies from a file. 
/// /// If using with [`FramedRead`](tokio_util::codec::FramedRead), the user should make sure the /// framed reader has capacity for the entire block file. Otherwise, the decoder will return -/// [`InputTooShort`](reth_rlp::DecodeError::InputTooShort), because RLP headers can only be +/// [`InputTooShort`](alloy_rlp::Error::InputTooShort), because RLP headers can only be /// decoded if the internal buffer is large enough to contain the entire block body. /// /// Without ensuring the framed reader has capacity for the entire file, a block body is likely to diff --git a/crates/net/downloaders/src/test_utils/mod.rs b/crates/net/downloaders/src/test_utils/mod.rs index 98c850ef1a..4009e7431d 100644 --- a/crates/net/downloaders/src/test_utils/mod.rs +++ b/crates/net/downloaders/src/test_utils/mod.rs @@ -3,7 +3,7 @@ use crate::bodies::test_utils::create_raw_bodies; use futures::SinkExt; use reth_interfaces::test_utils::generators::random_block_range; -use reth_primitives::{BlockBody, SealedHeader, H256}; +use reth_primitives::{BlockBody, SealedHeader, B256}; use std::{collections::HashMap, io::SeekFrom, ops::RangeInclusive}; use tokio::{ fs::File, @@ -26,9 +26,9 @@ pub(crate) const TEST_SCOPE: &str = "downloaders.test"; /// Generate a set of bodies and their corresponding block hashes pub(crate) fn generate_bodies( range: RangeInclusive, -) -> (Vec, HashMap) { +) -> (Vec, HashMap) { let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, range, H256::zero(), 0..2); + let blocks = random_block_range(&mut rng, range, B256::ZERO, 0..2); let headers = blocks.iter().map(|block| block.header.clone()).collect(); let bodies = blocks @@ -52,7 +52,7 @@ pub(crate) fn generate_bodies( /// bodies and corresponding block hashes pub(crate) async fn generate_bodies_file( rng: RangeInclusive, -) -> (tokio::fs::File, Vec, HashMap) { +) -> (tokio::fs::File, Vec, HashMap) { let (headers, mut bodies) = generate_bodies(0..=19); let raw_block_bodies = create_raw_bodies(headers.clone().iter(), &mut bodies.clone()); diff --git a/crates/net/ecies/Cargo.toml b/crates/net/ecies/Cargo.toml index 7b4253d062..615dd03874 100644 --- a/crates/net/ecies/Cargo.toml +++ b/crates/net/ecies/Cargo.toml @@ -8,10 +8,10 @@ homepage.workspace = true repository.workspace = true [dependencies] -reth-rlp = { workspace = true, features = ["derive", "ethereum-types", "std"] } reth-primitives.workspace = true reth-net-common = { path = "../common" } +alloy-rlp = { workspace = true, features = ["derive"] } futures.workspace = true thiserror.workspace = true tokio = { workspace = true, features = ["full"] } @@ -38,6 +38,3 @@ aes = "0.8.1" hmac = "0.12.1" block-padding = "0.3.2" cipher = { version = "0.4.3", features = ["block-padding"] } - -[dev-dependencies] -hex-literal.workspace = true diff --git a/crates/net/ecies/src/algorithm.rs b/crates/net/ecies/src/algorithm.rs index 26f530ab82..66ddd00174 100644 --- a/crates/net/ecies/src/algorithm.rs +++ b/crates/net/ecies/src/algorithm.rs @@ -6,6 +6,7 @@ use crate::{ ECIESError, }; use aes::{cipher::StreamCipher, Aes128, Aes256}; +use alloy_rlp::{Encodable, Rlp, RlpEncodable, RlpMaxEncodedLen}; use byteorder::{BigEndian, ByteOrder, ReadBytesExt}; use ctr::Ctr64BE; use digest::{crypto_common::KeyIvInit, Digest}; @@ -13,9 +14,8 @@ use educe::Educe; use rand::{thread_rng, Rng}; use reth_primitives::{ bytes::{BufMut, Bytes, BytesMut}, - H128, H256, H512 as PeerId, + B128, B256, B512 as PeerId, }; -use reth_rlp::{Encodable, Rlp, RlpEncodable, RlpMaxEncodedLen}; use secp256k1::{ 
ecdsa::{RecoverableSignature, RecoveryId}, PublicKey, SecretKey, SECP256K1, @@ -31,12 +31,12 @@ const PROTOCOL_VERSION: usize = 4; /// /// This uses the given remote public key and local (ephemeral) secret key to [compute a shared /// secp256k1 point](secp256k1::ecdh::shared_secret_point) and slices off the y coordinate from the -/// returned pair, returning only the bytes of the x coordinate as a [`H256`]. -fn ecdh_x(public_key: &PublicKey, secret_key: &SecretKey) -> H256 { - H256::from_slice(&secp256k1::ecdh::shared_secret_point(public_key, secret_key)[..32]) +/// returned pair, returning only the bytes of the x coordinate as a [`B256`]. +fn ecdh_x(public_key: &PublicKey, secret_key: &SecretKey) -> B256 { + B256::from_slice(&secp256k1::ecdh::shared_secret_point(public_key, secret_key)[..32]) } -fn kdf(secret: H256, s1: &[u8], dest: &mut [u8]) { +fn kdf(secret: B256, s1: &[u8], dest: &mut [u8]) { // SEC/ISO/Shoup specify counter size SHOULD be equivalent // to size of hash output, however, it also notes that // the 4 bytes is okay. NIST specifies 4 bytes. @@ -46,7 +46,7 @@ fn kdf(secret: H256, s1: &[u8], dest: &mut [u8]) { let mut hasher = Sha256::default(); let ctrs = [(ctr >> 24) as u8, (ctr >> 16) as u8, (ctr >> 8) as u8, ctr as u8]; hasher.update(ctrs); - hasher.update(secret.as_bytes()); + hasher.update(secret.as_slice()); hasher.update(s1); let d = hasher.finalize(); dest[written..(written + 32)].copy_from_slice(&d); @@ -68,11 +68,11 @@ pub struct ECIES { #[educe(Debug(ignore))] ephemeral_secret_key: SecretKey, ephemeral_public_key: PublicKey, - ephemeral_shared_secret: Option, + ephemeral_shared_secret: Option, remote_ephemeral_public_key: Option, - nonce: H256, - remote_nonce: Option, + nonce: B256, + remote_nonce: Option, #[educe(Debug(ignore))] ingress_aes: Option>, @@ -100,7 +100,7 @@ impl ECIES { fn new_static_client( secret_key: SecretKey, remote_id: PeerId, - nonce: H256, + nonce: B256, ephemeral_secret_key: SecretKey, ) -> Result { let public_key = PublicKey::from_secret_key(SECP256K1, &secret_key); @@ -133,9 +133,9 @@ impl ECIES { /// Create a new ECIES client with the given static secret key and remote peer ID. pub fn new_client(secret_key: SecretKey, remote_id: PeerId) -> Result { - let nonce = H256::random(); - let ephemeral_secret_key = SecretKey::new(&mut secp256k1::rand::thread_rng()); - + let mut rng = thread_rng(); + let nonce = rng.gen(); + let ephemeral_secret_key = SecretKey::new(&mut rng); Self::new_static_client(secret_key, remote_id, nonce, ephemeral_secret_key) } @@ -143,7 +143,7 @@ impl ECIES { /// key. pub fn new_static_server( secret_key: SecretKey, - nonce: H256, + nonce: B256, ephemeral_secret_key: SecretKey, ) -> Result { let public_key = PublicKey::from_secret_key(SECP256K1, &secret_key); @@ -175,9 +175,9 @@ impl ECIES { /// Create a new ECIES server with the given static secret key. 
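Editor's note: usage sketch of `ecdh_x` and `kdf` above, mirroring how `encrypt_message`/`decrypt_message` below split the derived key material (illustrative only; the public/secret key variables are placeholders):

    let x = ecdh_x(&remote_public_key, &local_secret_key); // shared-secret x coordinate as B256
    let mut key = [0u8; 32];
    kdf(x, &[], &mut key);                                  // concatenation KDF over SHA-256
    let enc_key = B128::from_slice(&key[..16]);             // AES-128-CTR key
    let mac_key = sha256(&key[16..32]);                     // key for the HMAC-SHA256 tag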
pub fn new_server(secret_key: SecretKey) -> Result { - let nonce = H256::random(); - let ephemeral_secret_key = SecretKey::new(&mut secp256k1::rand::thread_rng()); - + let mut rng = thread_rng(); + let nonce = rng.gen(); + let ephemeral_secret_key = SecretKey::new(&mut rng); Self::new_static_server(secret_key, nonce, ephemeral_secret_key) } @@ -187,9 +187,11 @@ impl ECIES { } fn encrypt_message(&self, data: &[u8], out: &mut BytesMut) { + let mut rng = thread_rng(); + out.reserve(secp256k1::constants::UNCOMPRESSED_PUBLIC_KEY_SIZE + 16 + data.len() + 32); - let secret_key = SecretKey::new(&mut secp256k1::rand::thread_rng()); + let secret_key = SecretKey::new(&mut rng); out.extend_from_slice( &PublicKey::from_secret_key(SECP256K1, &secret_key).serialize_uncompressed(), ); @@ -198,11 +200,11 @@ impl ECIES { let mut key = [0u8; 32]; kdf(x, &[], &mut key); - let enc_key = H128::from_slice(&key[..16]); + let enc_key = B128::from_slice(&key[..16]); let mac_key = sha256(&key[16..32]); - let iv = H128::random(); - let mut encryptor = Ctr64BE::::new(enc_key.as_ref().into(), iv.as_ref().into()); + let iv: B128 = rng.gen(); + let mut encryptor = Ctr64BE::::new((&enc_key.0).into(), (&iv.0).into()); let mut encrypted = data.to_vec(); encryptor.apply_keystream(&mut encrypted); @@ -210,9 +212,9 @@ impl ECIES { let total_size: u16 = u16::try_from(65 + 16 + data.len() + 32).unwrap(); let tag = - hmac_sha256(mac_key.as_ref(), &[iv.as_bytes(), &encrypted], &total_size.to_be_bytes()); + hmac_sha256(mac_key.as_ref(), &[iv.as_slice(), &encrypted], &total_size.to_be_bytes()); - out.extend_from_slice(iv.as_bytes()); + out.extend_from_slice(iv.as_slice()); out.extend_from_slice(&encrypted); out.extend_from_slice(tag.as_ref()); } @@ -223,12 +225,12 @@ impl ECIES { let public_key = PublicKey::from_slice(pubkey_bytes)?; let (data_iv, tag_bytes) = split_at_mut(encrypted, encrypted.len() - 32)?; let (iv, encrypted_data) = split_at_mut(data_iv, 16)?; - let tag = H256::from_slice(tag_bytes); + let tag = B256::from_slice(tag_bytes); let x = ecdh_x(&public_key, &self.secret_key); let mut key = [0u8; 32]; kdf(x, &[], &mut key); - let enc_key = H128::from_slice(&key[..16]); + let enc_key = B128::from_slice(&key[..16]); let mac_key = sha256(&key[16..32]); let check_tag = hmac_sha256(mac_key.as_ref(), &[iv, encrypted_data], auth_data); @@ -238,7 +240,7 @@ impl ECIES { let decrypted_data = encrypted_data; - let mut decryptor = Ctr64BE::::new(enc_key.as_ref().into(), (*iv).into()); + let mut decryptor = Ctr64BE::::new((&enc_key.0).into(), (*iv).into()); decryptor.apply_keystream(decrypted_data); Ok(decrypted_data) @@ -249,7 +251,7 @@ impl ECIES { let msg = x ^ self.nonce; let (rec_id, sig) = SECP256K1 .sign_ecdsa_recoverable( - &secp256k1::Message::from_slice(msg.as_bytes()).unwrap(), + &secp256k1::Message::from_slice(msg.as_slice()).unwrap(), &self.ephemeral_secret_key, ) .serialize_compact(); @@ -264,7 +266,7 @@ impl ECIES { struct S<'a> { sig_bytes: &'a [u8; 65], id: &'a PeerId, - nonce: &'a H256, + nonce: &'a B256, protocol_version: u8, } @@ -346,11 +348,11 @@ impl ECIES { #[derive(RlpEncodable, RlpMaxEncodedLen)] struct S { id: PeerId, - nonce: H256, + nonce: B256, protocol_version: u8, } - reth_rlp::encode_fixed_size(&S { + alloy_rlp::encode_fixed_size(&S { id: pk2id(&self.ephemeral_public_key), nonce: self.nonce, protocol_version: PROTOCOL_VERSION as u8, @@ -425,32 +427,30 @@ impl ECIES { } { hasher.update(el); } - let h_nonce = H256::from(hasher.finalize().as_ref()); + let h_nonce = B256::from(hasher.finalize().as_ref()); 
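Editor's note: for orientation, a sketch of the fixed per-message overhead implied by the `total_size` computation in `encrypt_message` above (not part of the diff; the constant name is made up):

    // Wire layout: ephemeral pubkey (65) || iv (16) || ciphertext (data.len()) || hmac tag (32)
    const ECIES_MESSAGE_OVERHEAD: usize =
        secp256k1::constants::UNCOMPRESSED_PUBLIC_KEY_SIZE + 16 + 32; // = 113 bytes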
- let iv = H128::default(); - let shared_secret: H256 = { + let iv = B128::default(); + let shared_secret: B256 = { let mut hasher = Keccak256::new(); hasher.update(self.ephemeral_shared_secret.unwrap().0.as_ref()); hasher.update(h_nonce.0.as_ref()); - H256::from(hasher.finalize().as_ref()) + B256::from(hasher.finalize().as_ref()) }; - let aes_secret: H256 = { + let aes_secret: B256 = { let mut hasher = Keccak256::new(); hasher.update(self.ephemeral_shared_secret.unwrap().0.as_ref()); hasher.update(shared_secret.0.as_ref()); - H256::from(hasher.finalize().as_ref()) + B256::from(hasher.finalize().as_ref()) }; - self.ingress_aes = - Some(Ctr64BE::::new(aes_secret.0.as_ref().into(), iv.as_ref().into())); - self.egress_aes = - Some(Ctr64BE::::new(aes_secret.0.as_ref().into(), iv.as_ref().into())); + self.ingress_aes = Some(Ctr64BE::::new((&aes_secret.0).into(), (&iv.0).into())); + self.egress_aes = Some(Ctr64BE::::new((&aes_secret.0).into(), (&iv.0).into())); - let mac_secret: H256 = { + let mac_secret: B256 = { let mut hasher = Keccak256::new(); hasher.update(self.ephemeral_shared_secret.unwrap().0.as_ref()); hasher.update(aes_secret.0.as_ref()); - H256::from(hasher.finalize().as_ref()) + B256::from(hasher.finalize().as_ref()) }; self.ingress_mac = Some(MAC::new(mac_secret)); self.ingress_mac.as_mut().unwrap().update((mac_secret ^ self.nonce).as_ref()); @@ -484,13 +484,13 @@ impl ECIES { out.reserve(ECIES::header_len()); out.extend_from_slice(&header); - out.extend_from_slice(tag.as_bytes()); + out.extend_from_slice(tag.as_slice()); } pub fn read_header(&mut self, data: &mut [u8]) -> Result { let (header_bytes, mac_bytes) = split_at_mut(data, 16)?; let header = HeaderBytes::from_mut_slice(header_bytes); - let mac = H128::from_slice(&mac_bytes[..16]); + let mac = B128::from_slice(&mac_bytes[..16]); self.ingress_mac.as_mut().unwrap().update_header(header); let check_mac = self.ingress_mac.as_mut().unwrap().digest(); @@ -538,12 +538,12 @@ impl ECIES { self.egress_mac.as_mut().unwrap().update_body(encrypted); let tag = self.egress_mac.as_mut().unwrap().digest(); - out.extend_from_slice(tag.as_bytes()); + out.extend_from_slice(tag.as_slice()); } pub fn read_body<'a>(&mut self, data: &'a mut [u8]) -> Result<&'a mut [u8], ECIESError> { let (body, mac_bytes) = split_at_mut(data, data.len() - 16)?; - let mac = H128::from_slice(mac_bytes); + let mac = B128::from_slice(mac_bytes); self.ingress_mac.as_mut().unwrap().update_body(body); let check_mac = self.ingress_mac.as_mut().unwrap().digest(); if check_mac != mac { @@ -561,7 +561,7 @@ impl ECIES { #[cfg(test)] mod tests { use super::*; - use hex_literal::hex; + use reth_primitives::{b256, hex}; #[test] fn ecdh() { @@ -573,15 +573,16 @@ mod tests { assert_eq!( ecdh_x(&remote_public_key, &our_secret_key), - hex!("821ce7e01ea11b111a52b2dafae8a3031a372d83bdf1a78109fa0783c2b9d5d3").into() + hex!("821ce7e01ea11b111a52b2dafae8a3031a372d83bdf1a78109fa0783c2b9d5d3") ) } #[test] fn communicate() { - let server_secret_key = SecretKey::new(&mut secp256k1::rand::thread_rng()); + let mut rng = thread_rng(); + let server_secret_key = SecretKey::new(&mut rng); let server_public_key = PublicKey::from_secret_key(SECP256K1, &server_secret_key); - let client_secret_key = SecretKey::new(&mut secp256k1::rand::thread_rng()); + let client_secret_key = SecretKey::new(&mut rng); let mut server_ecies = ECIES::new_server(server_secret_key).unwrap(); let mut client_ecies = @@ -667,7 +668,7 @@ mod tests { .unwrap(); let client_nonce = - 
H256(hex!("7e968bba13b6c50e2c4cd7f241cc0d64d1ac25c7f5952df231ac6a2bda8ee5d6")); + b256!("7e968bba13b6c50e2c4cd7f241cc0d64d1ac25c7f5952df231ac6a2bda8ee5d6"); let server_id = pk2id(&PublicKey::from_secret_key(SECP256K1, &eip8_test_server_key())); @@ -682,7 +683,7 @@ mod tests { .unwrap(); let server_nonce = - H256(hex!("559aead08264d5795d3909718cdd05abd49572e84fe55590eef31a88a08fdffd")); + b256!("559aead08264d5795d3909718cdd05abd49572e84fe55590eef31a88a08fdffd"); ECIES::new_static_server(eip8_test_server_key(), server_nonce, server_ephemeral_key) .unwrap() diff --git a/crates/net/ecies/src/codec.rs b/crates/net/ecies/src/codec.rs index 33f42b7d02..d7335c5bf9 100644 --- a/crates/net/ecies/src/codec.rs +++ b/crates/net/ecies/src/codec.rs @@ -1,5 +1,5 @@ use crate::{algorithm::ECIES, ECIESError, EgressECIESValue, IngressECIESValue}; -use reth_primitives::{bytes::BytesMut, H512 as PeerId}; +use reth_primitives::{bytes::BytesMut, B512 as PeerId}; use secp256k1::SecretKey; use std::{fmt::Debug, io}; use tokio_util::codec::{Decoder, Encoder}; diff --git a/crates/net/ecies/src/error.rs b/crates/net/ecies/src/error.rs index f4281ffad9..1a2a738c4b 100644 --- a/crates/net/ecies/src/error.rs +++ b/crates/net/ecies/src/error.rs @@ -63,7 +63,7 @@ pub enum ECIESErrorImpl { Secp256k1(secp256k1::Error), /// Error when decoding RLP data #[error(transparent)] - RLPDecoding(reth_rlp::DecodeError), + RLPDecoding(alloy_rlp::Error), /// Error when converting to integer #[error(transparent)] FromInt(std::num::TryFromIntError), @@ -111,8 +111,8 @@ impl From for ECIESError { } } -impl From for ECIESError { - fn from(source: reth_rlp::DecodeError) -> Self { +impl From for ECIESError { + fn from(source: alloy_rlp::Error) -> Self { ECIESErrorImpl::RLPDecoding(source).into() } } diff --git a/crates/net/ecies/src/lib.rs b/crates/net/ecies/src/lib.rs index 92ae43a775..bd2449d03a 100644 --- a/crates/net/ecies/src/lib.rs +++ b/crates/net/ecies/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -21,7 +21,7 @@ mod codec; use reth_primitives::{ bytes::{Bytes, BytesMut}, - H512 as PeerId, + B512 as PeerId, }; /// Raw egress values for an ECIES protocol diff --git a/crates/net/ecies/src/mac.rs b/crates/net/ecies/src/mac.rs index 6593ed9fe8..6768fd7b71 100644 --- a/crates/net/ecies/src/mac.rs +++ b/crates/net/ecies/src/mac.rs @@ -4,7 +4,7 @@ use block_padding::NoPadding; use cipher::BlockEncrypt; use digest::KeyInit; use generic_array::GenericArray; -use reth_primitives::{H128, H256}; +use reth_primitives::{B128, B256}; use sha3::{Digest, Keccak256}; use typenum::U16; @@ -19,13 +19,13 @@ pub type HeaderBytes = GenericArray; /// and is not defined as a general MAC. #[derive(Debug)] pub struct MAC { - secret: H256, + secret: B256, hasher: Keccak256, } impl MAC { /// Initialize the MAC with the given secret - pub fn new(secret: H256) -> Self { + pub fn new(secret: B256) -> Self { Self { secret, hasher: Keccak256::new() } } @@ -37,9 +37,9 @@ impl MAC { /// Accumulate the given [`HeaderBytes`] into the MAC's internal state. 
pub fn update_header(&mut self, data: &HeaderBytes) { let aes = Aes256Enc::new_from_slice(self.secret.as_ref()).unwrap(); - let mut encrypted = self.digest().to_fixed_bytes(); + let mut encrypted = self.digest().0; - aes.encrypt_padded::(&mut encrypted, H128::len_bytes()).unwrap(); + aes.encrypt_padded::(&mut encrypted, B128::len_bytes()).unwrap(); for i in 0..data.len() { encrypted[i] ^= data[i]; } @@ -51,9 +51,9 @@ impl MAC { self.hasher.update(data); let prev = self.digest(); let aes = Aes256Enc::new_from_slice(self.secret.as_ref()).unwrap(); - let mut encrypted = self.digest().to_fixed_bytes(); + let mut encrypted = self.digest().0; - aes.encrypt_padded::(&mut encrypted, H128::len_bytes()).unwrap(); + aes.encrypt_padded::(&mut encrypted, B128::len_bytes()).unwrap(); for i in 0..16 { encrypted[i] ^= prev[i]; } @@ -62,7 +62,7 @@ impl MAC { /// Produce a digest by finalizing the internal keccak256 hasher and returning the first 128 /// bits. - pub fn digest(&self) -> H128 { - H128::from_slice(&self.hasher.clone().finalize()[..16]) + pub fn digest(&self) -> B128 { + B128::from_slice(&self.hasher.clone().finalize()[..16]) } } diff --git a/crates/net/ecies/src/stream.rs b/crates/net/ecies/src/stream.rs index 64d21f0a3b..1113687090 100644 --- a/crates/net/ecies/src/stream.rs +++ b/crates/net/ecies/src/stream.rs @@ -6,7 +6,7 @@ use futures::{ready, Sink, SinkExt}; use reth_net_common::stream::HasRemoteAddr; use reth_primitives::{ bytes::{Bytes, BytesMut}, - H512 as PeerId, + B512 as PeerId, }; use secp256k1::SecretKey; use std::{ diff --git a/crates/net/ecies/src/util.rs b/crates/net/ecies/src/util.rs index 8b09729dfe..8496891883 100644 --- a/crates/net/ecies/src/util.rs +++ b/crates/net/ecies/src/util.rs @@ -1,25 +1,25 @@ //! Utility functions for hashing and encoding. use hmac::{Hmac, Mac}; -use reth_primitives::{H256, H512 as PeerId}; +use reth_primitives::{B256, B512 as PeerId}; use secp256k1::PublicKey; use sha2::{Digest, Sha256}; /// Hashes the input data with SHA256. -pub(crate) fn sha256(data: &[u8]) -> H256 { - H256::from(Sha256::digest(data).as_ref()) +pub(crate) fn sha256(data: &[u8]) -> B256 { + B256::from(Sha256::digest(data).as_ref()) } /// Produces a HMAC_SHA256 digest of the `input_data` and `auth_data` with the given `key`. /// This is done by accumulating each slice in `input_data` into the HMAC state, then accumulating /// the `auth_data` and returning the resulting digest. -pub(crate) fn hmac_sha256(key: &[u8], input: &[&[u8]], auth_data: &[u8]) -> H256 { +pub(crate) fn hmac_sha256(key: &[u8], input: &[&[u8]], auth_data: &[u8]) -> B256 { let mut hmac = Hmac::::new_from_slice(key).unwrap(); for input in input { hmac.update(input); } hmac.update(auth_data); - H256::from_slice(&hmac.finalize().into_bytes()) + B256::from_slice(&hmac.finalize().into_bytes()) } /// Converts a [secp256k1::PublicKey] to a [PeerId] by stripping the @@ -31,13 +31,13 @@ pub fn pk2id(pk: &PublicKey) -> PeerId { /// Converts a [PeerId] to a [secp256k1::PublicKey] by prepending the [PeerId] bytes with the /// SECP256K1_TAG_PUBKEY_UNCOMPRESSED tag. pub(crate) fn id2pk(id: PeerId) -> Result { - // NOTE: H512 is used as a PeerId not because it represents a hash, but because 512 bits is + // NOTE: B512 is used as a PeerId not because it represents a hash, but because 512 bits is // enough to represent an uncompressed public key. 
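// Editor's sketch, assuming only the secp256k1 crate: `pk2id`/`id2pk` above convert between a
// 65-byte uncompressed public key (0x04-tagged) and the 64-byte PeerId by dropping/re-adding the
// tag byte. A minimal round trip with a fixed test key:
use secp256k1::{PublicKey, Secp256k1, SecretKey};

fn main() {
    let secp = Secp256k1::new();
    // any nonzero value below the curve order is a valid test secret key
    let secret_key = SecretKey::from_slice(&[0x01u8; 32]).unwrap();
    let public_key = PublicKey::from_secret_key(&secp, &secret_key);

    // pk2id: strip the SECP256K1_TAG_PUBKEY_UNCOMPRESSED byte
    let uncompressed: [u8; 65] = public_key.serialize_uncompressed();
    assert_eq!(uncompressed[0], 0x04);
    let peer_id: [u8; 64] = uncompressed[1..].try_into().unwrap();

    // id2pk: prepend the tag again and re-parse
    let mut with_tag = [0u8; 65];
    with_tag[0] = 4;
    with_tag[1..].copy_from_slice(&peer_id);
    assert_eq!(PublicKey::from_slice(&with_tag).unwrap(), public_key);
}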
let mut s = [0u8; 65]; // SECP256K1_TAG_PUBKEY_UNCOMPRESSED = 0x04 // see: https://github.com/bitcoin-core/secp256k1/blob/master/include/secp256k1.h#L211 s[0] = 4; - s[1..].copy_from_slice(id.as_bytes()); + s[1..].copy_from_slice(id.as_slice()); PublicKey::from_slice(&s) } diff --git a/crates/net/eth-wire/Cargo.toml b/crates/net/eth-wire/Cargo.toml index bda62bcfdd..636b30c50e 100644 --- a/crates/net/eth-wire/Cargo.toml +++ b/crates/net/eth-wire/Cargo.toml @@ -9,24 +9,20 @@ homepage.workspace = true repository.workspace = true [dependencies] -bytes.workspace = true -thiserror.workspace = true -serde = { workspace = true, optional = true } - # reth reth-codecs = { path = "../../storage/codecs" } reth-primitives.workspace = true reth-ecies = { path = "../ecies" } -reth-rlp = { workspace = true, features = ["alloc", "derive", "std", "ethereum-types", "smol_str"] } +alloy-rlp = { workspace = true, features = ["derive"] } reth-discv4 = { path = "../discv4" } # metrics reth-metrics.workspace = true metrics.workspace = true -# used for Chain and builders -ethers-core = { workspace = true, default-features = false } - +bytes.workspace = true +thiserror.workspace = true +serde = { workspace = true, optional = true } tokio = { workspace = true, features = ["full"] } tokio-util = { workspace = true, features = ["io", "codec"] } futures.workspace = true @@ -34,7 +30,6 @@ tokio-stream.workspace = true pin-project.workspace = true tracing.workspace = true snap = "1.0.5" -smol_str = "0.2" async-trait.workspace = true # arbitrary utils @@ -49,8 +44,6 @@ ethers-core = { workspace = true, default-features = false } test-fuzz = "4" tokio-util = { workspace = true, features = ["io", "codec"] } -hex-literal.workspace = true -hex = "0.4" rand.workspace = true secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recovery"] } @@ -60,7 +53,7 @@ proptest-derive.workspace = true [features] default = ["serde"] -serde = ["dep:serde", "smol_str/serde"] +serde = ["dep:serde"] arbitrary = ["reth-primitives/arbitrary", "dep:arbitrary", "dep:proptest", "dep:proptest-derive"] [[test]] diff --git a/crates/net/eth-wire/src/builder.rs b/crates/net/eth-wire/src/builder.rs index 27b2f17e88..b30b53684c 100644 --- a/crates/net/eth-wire/src/builder.rs +++ b/crates/net/eth-wire/src/builder.rs @@ -4,23 +4,23 @@ use crate::{ capability::Capability, hello::HelloMessage, p2pstream::ProtocolVersion, EthVersion, Status, }; use reth_discv4::DEFAULT_DISCOVERY_PORT; -use reth_primitives::{Chain, ForkId, PeerId, H256, U256}; +use reth_primitives::{Chain, ForkId, PeerId, B256, U256}; /// Builder for [`Status`] messages. /// /// # Example /// ``` /// use reth_eth_wire::EthVersion; -/// use reth_primitives::{Chain, U256, H256, MAINNET_GENESIS, MAINNET, Hardfork}; +/// use reth_primitives::{Chain, U256, B256, MAINNET_GENESIS, MAINNET, Hardfork}; /// use reth_eth_wire::types::Status; /// /// // this is just an example status message! 
/// let status = Status::builder() /// .version(EthVersion::Eth66.into()) -/// .chain(Chain::Named(ethers_core::types::Chain::Mainnet)) +/// .chain(Chain::mainnet()) /// .total_difficulty(U256::from(100)) -/// .blockhash(H256::from(MAINNET_GENESIS)) -/// .genesis(H256::from(MAINNET_GENESIS)) +/// .blockhash(B256::from(MAINNET_GENESIS)) +/// .genesis(B256::from(MAINNET_GENESIS)) /// .forkid(Hardfork::Paris.fork_id(&MAINNET).unwrap()) /// .build(); /// @@ -28,10 +28,10 @@ use reth_primitives::{Chain, ForkId, PeerId, H256, U256}; /// status, /// Status { /// version: EthVersion::Eth66.into(), -/// chain: Chain::Named(ethers_core::types::Chain::Mainnet), +/// chain: Chain::mainnet(), /// total_difficulty: U256::from(100), -/// blockhash: H256::from(MAINNET_GENESIS), -/// genesis: H256::from(MAINNET_GENESIS), +/// blockhash: B256::from(MAINNET_GENESIS), +/// genesis: B256::from(MAINNET_GENESIS), /// forkid: Hardfork::Paris.fork_id(&MAINNET).unwrap(), /// } /// ); @@ -66,13 +66,13 @@ impl StatusBuilder { } /// Sets the block hash. - pub fn blockhash(mut self, blockhash: H256) -> Self { + pub fn blockhash(mut self, blockhash: B256) -> Self { self.status.blockhash = blockhash; self } /// Sets the genesis hash. - pub fn genesis(mut self, genesis: H256) -> Self { + pub fn genesis(mut self, genesis: B256) -> Self { self.status.genesis = genesis; self } diff --git a/crates/net/eth-wire/src/capability.rs b/crates/net/eth-wire/src/capability.rs index fd528d2fe7..f303e238f3 100644 --- a/crates/net/eth-wire/src/capability.rs +++ b/crates/net/eth-wire/src/capability.rs @@ -1,10 +1,9 @@ //! All capability related types use crate::{version::ParseVersionError, EthMessage, EthVersion}; +use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable}; use reth_codecs::add_arbitrary_tests; use reth_primitives::bytes::{BufMut, Bytes}; -use reth_rlp::{Decodable, DecodeError, Encodable, RlpDecodable, RlpEncodable}; -use smol_str::SmolStr; use std::fmt; #[cfg(feature = "serde")] @@ -43,14 +42,14 @@ pub enum CapabilityMessage { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct Capability { /// The name of the subprotocol - pub name: SmolStr, + pub name: String, /// The version of the subprotocol pub version: usize, } impl Capability { /// Create a new `Capability` with the given name and version. - pub fn new(name: SmolStr, version: usize) -> Self { + pub fn new(name: String, version: usize) -> Self { Self { name, version } } @@ -83,7 +82,7 @@ impl fmt::Display for Capability { impl<'a> arbitrary::Arbitrary<'a> for Capability { fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { let version = u.int_in_range(0..=32)?; // TODO: What's the max? - let name: SmolStr = String::arbitrary(u)?.into(); // TODO: what possible values? + let name = String::arbitrary(u)?; // TODO: what possible values? Ok(Self { name, version }) } } @@ -95,7 +94,7 @@ impl proptest::arbitrary::Arbitrary for Capability { any_with::(args) // TODO: what possible values? .prop_flat_map(move |name| { any_with::(()) // TODO: What's the max? 
- .prop_map(move |version| Capability { name: name.clone().into(), version }) + .prop_map(move |version| Capability { name: name.clone(), version }) }) .boxed() } @@ -169,7 +168,7 @@ impl Encodable for Capabilities { } impl Decodable for Capabilities { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let inner = Vec::::decode(buf)?; Ok(Self { @@ -189,7 +188,7 @@ pub enum SharedCapability { Eth { version: EthVersion, offset: u8 }, /// An unknown capability. - UnknownCapability { name: SmolStr, version: u8, offset: u8 }, + UnknownCapability { name: String, version: u8, offset: u8 }, } impl SharedCapability { diff --git a/crates/net/eth-wire/src/disconnect.rs b/crates/net/eth-wire/src/disconnect.rs index a3d0b252a3..8e51cfe4f4 100644 --- a/crates/net/eth-wire/src/disconnect.rs +++ b/crates/net/eth-wire/src/disconnect.rs @@ -1,11 +1,10 @@ //! Disconnect -use bytes::Bytes; +use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header}; use futures::{Sink, SinkExt}; use reth_codecs::derive_arbitrary; use reth_ecies::stream::ECIESStream; use reth_primitives::bytes::{Buf, BufMut}; -use reth_rlp::{Decodable, DecodeError, Encodable, Header}; use std::fmt::Display; use thiserror::Error; use tokio::io::AsyncWrite; @@ -120,11 +119,11 @@ impl Encodable for DisconnectReason { /// The [`Decodable`] implementation for [`DisconnectReason`] supports either a disconnect reason /// encoded a single byte or a RLP list containing the disconnect reason. impl Decodable for DisconnectReason { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { if buf.is_empty() { - return Err(DecodeError::InputTooShort) + return Err(RlpError::InputTooShort) } else if buf.len() > 2 { - return Err(DecodeError::Overflow) + return Err(RlpError::Overflow) } if buf.len() > 1 { @@ -132,7 +131,7 @@ impl Decodable for DisconnectReason { // buf[0] is the first (and only) element of the list. let header = Header::decode(buf)?; if !header.list { - return Err(DecodeError::UnexpectedString) + return Err(RlpError::UnexpectedString) } } @@ -143,7 +142,7 @@ impl Decodable for DisconnectReason { Ok(DisconnectReason::DisconnectRequested) } else { DisconnectReason::try_from(u8::decode(buf)?) 
- .map_err(|_| DecodeError::Custom("unknown disconnect reason")) + .map_err(|_| RlpError::Custom("unknown disconnect reason")) } } } @@ -178,7 +177,7 @@ where } #[async_trait::async_trait] -impl CanDisconnect for ECIESStream +impl CanDisconnect for ECIESStream where S: AsyncWrite + Unpin + Send, { @@ -190,8 +189,8 @@ where #[cfg(test)] mod tests { use crate::{p2pstream::P2PMessage, DisconnectReason}; + use alloy_rlp::{Decodable, Encodable}; use reth_primitives::hex; - use reth_rlp::{Decodable, Encodable}; fn all_reasons() -> Vec { vec![ diff --git a/crates/net/eth-wire/src/errors/eth.rs b/crates/net/eth-wire/src/errors/eth.rs index d87fed4ebd..e120c61ee8 100644 --- a/crates/net/eth-wire/src/errors/eth.rs +++ b/crates/net/eth-wire/src/errors/eth.rs @@ -2,7 +2,7 @@ use crate::{ errors::P2PStreamError, version::ParseVersionError, DisconnectReason, EthMessageID, EthVersion, }; -use reth_primitives::{Chain, ValidationError, H256}; +use reth_primitives::{Chain, ValidationError, B256}; use std::io; /// Errors when sending/receiving messages @@ -50,8 +50,8 @@ impl From for EthStreamError { } } -impl From for EthStreamError { - fn from(err: reth_rlp::DecodeError) -> Self { +impl From for EthStreamError { + fn from(err: alloy_rlp::Error) -> Self { P2PStreamError::from(err).into() } } @@ -69,7 +69,7 @@ pub enum EthHandshakeError { #[error(transparent)] InvalidFork(#[from] ValidationError), #[error("mismatched genesis in Status message. expected: {expected:?}, got: {got:?}")] - MismatchedGenesis { expected: H256, got: H256 }, + MismatchedGenesis { expected: B256, got: B256 }, #[error("mismatched protocol version in Status message. expected: {expected:?}, got: {got:?}")] MismatchedProtocolVersion { expected: u8, got: u8 }, #[error("mismatched chain in Status message. expected: {expected:?}, got: {got:?}")] diff --git a/crates/net/eth-wire/src/errors/p2p.rs b/crates/net/eth-wire/src/errors/p2p.rs index 3d7a2e7f00..f323a47c1c 100644 --- a/crates/net/eth-wire/src/errors/p2p.rs +++ b/crates/net/eth-wire/src/errors/p2p.rs @@ -12,7 +12,7 @@ pub enum P2PStreamError { #[error(transparent)] Io(#[from] io::Error), #[error(transparent)] - Rlp(#[from] reth_rlp::DecodeError), + Rlp(#[from] alloy_rlp::Error), #[error(transparent)] Snap(#[from] snap::Error), #[error(transparent)] @@ -73,7 +73,7 @@ pub enum P2PHandshakeError { #[error("Disconnected by peer: {0}")] Disconnected(DisconnectReason), #[error("error decoding a message during handshake: {0}")] - DecodeError(#[from] reth_rlp::DecodeError), + DecodeError(#[from] alloy_rlp::Error), } /// An error that can occur when interacting with a pinger. 
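// Editor's sketch with stand-in types (DecodeErr/StreamErr/EthErr are hypothetical names): the
// patch keeps the existing error layering, where an RLP decode error is wrapped by the p2p-stream
// error, which is in turn wrapped by the eth-stream error, so `?` converts across layers.
#[derive(Debug)]
struct DecodeErr(&'static str);

#[derive(Debug)]
enum StreamErr {
    Rlp(DecodeErr),
}

#[derive(Debug)]
enum EthErr {
    P2p(StreamErr),
}

impl From<DecodeErr> for StreamErr {
    fn from(e: DecodeErr) -> Self {
        StreamErr::Rlp(e)
    }
}

impl From<StreamErr> for EthErr {
    fn from(e: StreamErr) -> Self {
        EthErr::P2p(e)
    }
}

// mirrors `impl From<alloy_rlp::Error> for EthStreamError`: route through the p2p layer
impl From<DecodeErr> for EthErr {
    fn from(e: DecodeErr) -> Self {
        StreamErr::from(e).into()
    }
}

fn main() {
    let decode = |ok: bool| -> Result<u8, DecodeErr> {
        if ok { Ok(1) } else { Err(DecodeErr("input too short")) }
    };
    let read = |ok: bool| -> Result<u8, EthErr> { Ok(decode(ok)?) };
    assert!(matches!(read(false), Err(EthErr::P2p(StreamErr::Rlp(_)))));
}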
diff --git a/crates/net/eth-wire/src/ethstream.rs b/crates/net/eth-wire/src/ethstream.rs index 47527e0b1a..b09241e9d2 100644 --- a/crates/net/eth-wire/src/ethstream.rs +++ b/crates/net/eth-wire/src/ethstream.rs @@ -4,13 +4,13 @@ use crate::{ types::{EthMessage, ProtocolMessage, Status}, CanDisconnect, DisconnectReason, EthVersion, }; +use alloy_rlp::Encodable; use futures::{ready, Sink, SinkExt, StreamExt}; use pin_project::pin_project; use reth_primitives::{ bytes::{Bytes, BytesMut}, ForkFilter, }; -use reth_rlp::Encodable; use std::{ pin::Pin, task::{Context, Poll}, @@ -322,25 +322,24 @@ mod tests { types::{broadcast::BlockHashNumber, EthMessage, EthVersion, Status}, EthStream, PassthroughCodec, }; - use ethers_core::types::Chain; use futures::{SinkExt, StreamExt}; use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_ecies::{stream::ECIESStream, util::pk2id}; - use reth_primitives::{ForkFilter, Head, H256, U256}; + use reth_primitives::{ForkFilter, Head, NamedChain, B256, U256}; use secp256k1::{SecretKey, SECP256K1}; use tokio::net::{TcpListener, TcpStream}; use tokio_util::codec::Decoder; #[tokio::test] async fn can_handshake() { - let genesis = H256::random(); + let genesis = B256::random(); let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { version: EthVersion::Eth67 as u8, - chain: Chain::Mainnet.into(), + chain: NamedChain::Mainnet.into(), total_difficulty: U256::ZERO, - blockhash: H256::random(), + blockhash: B256::random(), genesis, // Pass the current fork id. forkid: fork_filter.current(), @@ -380,14 +379,14 @@ mod tests { #[tokio::test] async fn pass_handshake_on_low_td_bitlen() { - let genesis = H256::random(); + let genesis = B256::random(); let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { version: EthVersion::Eth67 as u8, - chain: Chain::Mainnet.into(), + chain: NamedChain::Mainnet.into(), total_difficulty: U256::from(2).pow(U256::from(100)) - U256::from(1), - blockhash: H256::random(), + blockhash: B256::random(), genesis, // Pass the current fork id. forkid: fork_filter.current(), @@ -427,14 +426,14 @@ mod tests { #[tokio::test] async fn fail_handshake_on_high_td_bitlen() { - let genesis = H256::random(); + let genesis = B256::random(); let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { version: EthVersion::Eth67 as u8, - chain: Chain::Mainnet.into(), + chain: NamedChain::Mainnet.into(), total_difficulty: U256::from(2).pow(U256::from(100)), - blockhash: H256::random(), + blockhash: B256::random(), genesis, // Pass the current fork id. 
forkid: fork_filter.current(), @@ -485,8 +484,8 @@ mod tests { let local_addr = listener.local_addr().unwrap(); let test_msg = EthMessage::NewBlockHashes( vec![ - BlockHashNumber { hash: H256::random(), number: 5 }, - BlockHashNumber { hash: H256::random(), number: 6 }, + BlockHashNumber { hash: B256::random(), number: 5 }, + BlockHashNumber { hash: B256::random(), number: 6 }, ] .into(), ); @@ -520,8 +519,8 @@ mod tests { let server_key = SecretKey::new(&mut rand::thread_rng()); let test_msg = EthMessage::NewBlockHashes( vec![ - BlockHashNumber { hash: H256::random(), number: 5 }, - BlockHashNumber { hash: H256::random(), number: 6 }, + BlockHashNumber { hash: B256::random(), number: 5 }, + BlockHashNumber { hash: B256::random(), number: 6 }, ] .into(), ); @@ -562,20 +561,20 @@ mod tests { let server_key = SecretKey::new(&mut rand::thread_rng()); let test_msg = EthMessage::NewBlockHashes( vec![ - BlockHashNumber { hash: H256::random(), number: 5 }, - BlockHashNumber { hash: H256::random(), number: 6 }, + BlockHashNumber { hash: B256::random(), number: 5 }, + BlockHashNumber { hash: B256::random(), number: 6 }, ] .into(), ); - let genesis = H256::random(); + let genesis = B256::random(); let fork_filter = ForkFilter::new(Head::default(), genesis, 0, Vec::new()); let status = Status { version: EthVersion::Eth67 as u8, - chain: Chain::Mainnet.into(), + chain: NamedChain::Mainnet.into(), total_difficulty: U256::ZERO, - blockhash: H256::random(), + blockhash: B256::random(), genesis, // Pass the current fork id. forkid: fork_filter.current(), diff --git a/crates/net/eth-wire/src/hello.rs b/crates/net/eth-wire/src/hello.rs index 23cc0551e7..b1acd2c7a2 100644 --- a/crates/net/eth-wire/src/hello.rs +++ b/crates/net/eth-wire/src/hello.rs @@ -1,8 +1,8 @@ use crate::{capability::Capability, EthVersion, ProtocolVersion}; +use alloy_rlp::{RlpDecodable, RlpEncodable}; use reth_codecs::derive_arbitrary; use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_primitives::{constants::RETH_CLIENT_VERSION, PeerId}; -use reth_rlp::{RlpDecodable, RlpEncodable}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -109,9 +109,9 @@ impl HelloMessageBuilder { #[cfg(test)] mod tests { + use alloy_rlp::{Decodable, Encodable, EMPTY_STRING_CODE}; use reth_discv4::DEFAULT_DISCOVERY_PORT; use reth_ecies::util::pk2id; - use reth_rlp::{Decodable, Encodable, EMPTY_STRING_CODE}; use secp256k1::{SecretKey, SECP256K1}; use crate::{ diff --git a/crates/net/eth-wire/src/lib.rs b/crates/net/eth-wire/src/lib.rs index f8b5978b11..92323694b5 100644 --- a/crates/net/eth-wire/src/lib.rs +++ b/crates/net/eth-wire/src/lib.rs @@ -8,7 +8,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/net/eth-wire/src/p2pstream.rs b/crates/net/eth-wire/src/p2pstream.rs index 0b7ffcd5d8..205eaddaee 100644 --- a/crates/net/eth-wire/src/p2pstream.rs +++ b/crates/net/eth-wire/src/p2pstream.rs @@ -6,6 +6,7 @@ use crate::{ pinger::{Pinger, PingerEvent}, DisconnectReason, HelloMessage, }; +use alloy_rlp::{Decodable, Encodable, Error as RlpError, EMPTY_LIST_CODE}; use futures::{Sink, SinkExt, StreamExt}; use 
pin_project::pin_project; use reth_codecs::derive_arbitrary; @@ -14,7 +15,6 @@ use reth_primitives::{ bytes::{Buf, BufMut, Bytes, BytesMut}, hex, }; -use reth_rlp::{Decodable, DecodeError, Encodable, EMPTY_LIST_CODE}; use std::{ collections::{BTreeSet, HashMap, HashSet, VecDeque}, io, @@ -732,14 +732,14 @@ impl Encodable for P2PMessage { /// The [`Decodable`] implementation for [`P2PMessage::Ping`] and [`P2PMessage::Pong`] expects a /// snappy encoded payload, see [`Encodable`] implementation. impl Decodable for P2PMessage { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { /// Removes the snappy prefix from the Ping/Pong buffer - fn advance_snappy_ping_pong_payload(buf: &mut &[u8]) -> Result<(), DecodeError> { + fn advance_snappy_ping_pong_payload(buf: &mut &[u8]) -> alloy_rlp::Result<()> { if buf.len() < 3 { - return Err(DecodeError::InputTooShort) + return Err(RlpError::InputTooShort) } if buf[..3] != [0x01, 0x00, EMPTY_LIST_CODE] { - return Err(DecodeError::Custom("expected snappy payload")) + return Err(RlpError::Custom("expected snappy payload")) } buf.advance(3); Ok(()) @@ -747,7 +747,7 @@ impl Decodable for P2PMessage { let message_id = u8::decode(&mut &buf[..])?; let id = P2PMessageID::try_from(message_id) - .or(Err(DecodeError::Custom("unknown p2p message id")))?; + .or(Err(RlpError::Custom("unknown p2p message id")))?; buf.advance(1); match id { P2PMessageID::Hello => Ok(P2PMessage::Hello(HelloMessage::decode(buf)?)), @@ -828,12 +828,12 @@ impl Encodable for ProtocolVersion { } impl Decodable for ProtocolVersion { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let version = u8::decode(buf)?; match version { 4 => Ok(ProtocolVersion::V4), 5 => Ok(ProtocolVersion::V5), - _ => Err(DecodeError::Custom("unknown p2p protocol version")), + _ => Err(RlpError::Custom("unknown p2p protocol version")), } } } diff --git a/crates/net/eth-wire/src/types/blocks.rs b/crates/net/eth-wire/src/types/blocks.rs index 2b5bfd0207..088b4de4d4 100644 --- a/crates/net/eth-wire/src/types/blocks.rs +++ b/crates/net/eth-wire/src/types/blocks.rs @@ -1,8 +1,8 @@ //! Implements the `GetBlockHeaders`, `GetBlockBodies`, `BlockHeaders`, and `BlockBodies` message //! types. +use alloy_rlp::{RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper}; use reth_codecs::derive_arbitrary; -use reth_primitives::{BlockBody, BlockHashOrNumber, Header, HeadersDirection, H256}; -use reth_rlp::{RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper}; +use reth_primitives::{BlockBody, BlockHashOrNumber, Header, HeadersDirection, B256}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -57,11 +57,11 @@ impl From> for BlockHeaders { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct GetBlockBodies( /// The block hashes to request bodies for. 
- pub Vec, + pub Vec, ); -impl From> for GetBlockBodies { - fn from(hashes: Vec) -> Self { +impl From> for GetBlockBodies { + fn from(hashes: Vec) -> Self { GetBlockBodies(hashes) } } @@ -93,12 +93,11 @@ mod test { use crate::types::{ message::RequestPair, BlockBodies, BlockHeaders, GetBlockBodies, GetBlockHeaders, }; - use hex_literal::hex; + use alloy_rlp::{Decodable, Encodable}; use reth_primitives::{ - BlockHashOrNumber, Header, HeadersDirection, Signature, Transaction, TransactionKind, + hex, BlockHashOrNumber, Header, HeadersDirection, Signature, Transaction, TransactionKind, TransactionSigned, TxLegacy, U256, }; - use reth_rlp::{Decodable, Encodable}; use std::str::FromStr; use super::BlockBody; diff --git a/crates/net/eth-wire/src/types/broadcast.rs b/crates/net/eth-wire/src/types/broadcast.rs index 18da1bd756..983dcc1109 100644 --- a/crates/net/eth-wire/src/types/broadcast.rs +++ b/crates/net/eth-wire/src/types/broadcast.rs @@ -1,11 +1,10 @@ //! Types for broadcasting new data. use crate::{EthMessage, EthVersion}; -use bytes::Bytes; -use reth_codecs::derive_arbitrary; -use reth_primitives::{Block, TransactionSigned, H256, U128}; -use reth_rlp::{ +use alloy_rlp::{ Decodable, Encodable, RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper, }; +use reth_codecs::derive_arbitrary; +use reth_primitives::{Block, Bytes, TransactionSigned, B256, U128}; use std::sync::Arc; #[cfg(feature = "serde")] @@ -41,7 +40,7 @@ impl NewBlockHashes { #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct BlockHashNumber { /// The block hash - pub hash: H256, + pub hash: B256, /// The block number pub number: u64, } @@ -137,7 +136,7 @@ impl NewPooledTransactionHashes { } /// Returns an iterator over all transaction hashes. - pub fn iter_hashes(&self) -> impl Iterator + '_ { + pub fn iter_hashes(&self) -> impl Iterator + '_ { match self { NewPooledTransactionHashes::Eth66(msg) => msg.0.iter(), NewPooledTransactionHashes::Eth68(msg) => msg.hashes.iter(), @@ -145,7 +144,7 @@ impl NewPooledTransactionHashes { } /// Consumes the type and returns all hashes - pub fn into_hashes(self) -> Vec { + pub fn into_hashes(self) -> Vec { match self { NewPooledTransactionHashes::Eth66(msg) => msg.0, NewPooledTransactionHashes::Eth68(msg) => msg.hashes, @@ -153,7 +152,7 @@ impl NewPooledTransactionHashes { } /// Returns an iterator over all transaction hashes. - pub fn into_iter_hashes(self) -> impl Iterator { + pub fn into_iter_hashes(self) -> impl Iterator { match self { NewPooledTransactionHashes::Eth66(msg) => msg.0.into_iter(), NewPooledTransactionHashes::Eth68(msg) => msg.hashes.into_iter(), @@ -220,11 +219,11 @@ pub struct NewPooledTransactionHashes66( /// Transaction hashes for new transactions that have appeared on the network. /// Clients should request the transactions with the given hashes using a /// [`GetPooledTransactions`](crate::GetPooledTransactions) message. 
- pub Vec, + pub Vec, ); -impl From> for NewPooledTransactionHashes66 { - fn from(v: Vec) -> Self { +impl From> for NewPooledTransactionHashes66 { + fn from(v: Vec) -> Self { NewPooledTransactionHashes66(v) } } @@ -243,8 +242,8 @@ pub struct NewPooledTransactionHashes68 { /// the following way: /// * `[type_0: B_1, type_1: B_1, ...]` /// - /// This would make it seem like the [`Encodable`](reth_rlp::Encodable) and - /// [`Decodable`](reth_rlp::Decodable) implementations should directly use a `Vec` for + /// This would make it seem like the [`Encodable`](alloy_rlp::Encodable) and + /// [`Decodable`](alloy_rlp::Decodable) implementations should directly use a `Vec` for /// encoding and decoding, because it looks like this field should be encoded as a _list_ of /// bytes. /// @@ -255,14 +254,14 @@ pub struct NewPooledTransactionHashes68 { /// **not** a RLP list. /// /// Because of this, we do not directly use the `Vec` when encoding and decoding, and - /// instead use the [`Encodable`](reth_rlp::Encodable) and [`Decodable`](reth_rlp::Decodable) + /// instead use the [`Encodable`](alloy_rlp::Encodable) and [`Decodable`](alloy_rlp::Decodable) /// implementations for `&[u8]` instead, which encodes into a RLP string, and expects an RLP /// string when decoding. pub types: Vec, /// Transaction sizes for new transactions that have appeared on the network. pub sizes: Vec, /// Transaction hashes for new transactions that have appeared on the network. - pub hashes: Vec, + pub hashes: Vec, } impl Encodable for NewPooledTransactionHashes68 { @@ -271,7 +270,7 @@ impl Encodable for NewPooledTransactionHashes68 { struct EncodableNewPooledTransactionHashes68<'a> { types: &'a [u8], sizes: &'a Vec, - hashes: &'a Vec, + hashes: &'a Vec, } let encodable = EncodableNewPooledTransactionHashes68 { @@ -287,7 +286,7 @@ impl Encodable for NewPooledTransactionHashes68 { struct EncodableNewPooledTransactionHashes68<'a> { types: &'a [u8], sizes: &'a Vec, - hashes: &'a Vec, + hashes: &'a Vec, } let encodable = EncodableNewPooledTransactionHashes68 { @@ -301,12 +300,12 @@ impl Encodable for NewPooledTransactionHashes68 { } impl Decodable for NewPooledTransactionHashes68 { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { #[derive(RlpDecodable)] struct EncodableNewPooledTransactionHashes68 { types: Bytes, sizes: Vec, - hashes: Vec, + hashes: Vec, } let encodable = EncodableNewPooledTransactionHashes68::decode(buf)?; @@ -316,13 +315,11 @@ impl Decodable for NewPooledTransactionHashes68 { #[cfg(test)] mod tests { - use std::str::FromStr; - - use bytes::BytesMut; - use hex_literal::hex; - use reth_rlp::{Decodable, Encodable}; - use super::*; + use alloy_rlp::{Decodable, Encodable}; + use bytes::BytesMut; + use reth_primitives::hex; + use std::str::FromStr; /// Takes as input a struct / encoded hex message pair, ensuring that we encode to the exact hex /// message, and decode to the exact struct. 
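// Editor's sketch (std-only, hand-rolled RLP for two tiny cases): the doc comment above explains
// why the eth/68 `types` field is encoded as an RLP *string* rather than a list. For the bytes
// [0x01, 0x02] the two forms differ only in the prefix: 0x80 + len for a short string versus
// 0xc0 + len for a short list.
fn rlp_short_string(bytes: &[u8]) -> Vec<u8> {
    assert!(bytes.len() > 1 && bytes.len() <= 55, "short-string form only in this sketch");
    let mut out = vec![0x80 + bytes.len() as u8];
    out.extend_from_slice(bytes);
    out
}

fn rlp_short_list_of_single_bytes(bytes: &[u8]) -> Vec<u8> {
    // each item is below 0x80, so it encodes as itself; the list header is 0xc0 + payload length
    assert!(bytes.iter().all(|b| *b < 0x80) && bytes.len() <= 55);
    let mut out = vec![0xc0 + bytes.len() as u8];
    out.extend_from_slice(bytes);
    out
}

fn main() {
    let types = [0x01u8, 0x02];
    assert_eq!(rlp_short_string(&types), [0x82, 0x01, 0x02]); // what eth/68 expects on the wire
    assert_eq!(rlp_short_list_of_single_bytes(&types), [0xc2, 0x01, 0x02]); // what a Vec<u8>-as-list would give
}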
@@ -341,12 +338,12 @@ mod tests { #[test] fn can_return_latest_block() { - let mut blocks = NewBlockHashes(vec![BlockHashNumber { hash: H256::random(), number: 0 }]); + let mut blocks = NewBlockHashes(vec![BlockHashNumber { hash: B256::random(), number: 0 }]); let latest = blocks.latest().unwrap(); assert_eq!(latest.number, 0); - blocks.0.push(BlockHashNumber { hash: H256::random(), number: 100 }); - blocks.0.push(BlockHashNumber { hash: H256::random(), number: 2 }); + blocks.0.push(BlockHashNumber { hash: B256::random(), number: 100 }); + blocks.0.push(BlockHashNumber { hash: B256::random(), number: 2 }); let latest = blocks.latest().unwrap(); assert_eq!(latest.number, 100); } @@ -362,7 +359,7 @@ mod tests { NewPooledTransactionHashes68 { types: vec![0x00], sizes: vec![0x00], - hashes: vec![H256::from_str( + hashes: vec![B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000000000", ) .unwrap()], @@ -374,11 +371,11 @@ mod tests { types: vec![0x00, 0x00], sizes: vec![0x00, 0x00], hashes: vec![ - H256::from_str( + B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000000000", ) .unwrap(), - H256::from_str( + B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000000000", ) .unwrap(), @@ -390,7 +387,7 @@ mod tests { NewPooledTransactionHashes68 { types: vec![0x02], sizes: vec![0xb6], - hashes: vec![H256::from_str( + hashes: vec![B256::from_str( "0xfecbed04c7b88d8e7221a0a3f5dc33f220212347fc167459ea5cc9c3eb4c1124", ) .unwrap()], @@ -402,11 +399,11 @@ mod tests { types: vec![0xff, 0xff], sizes: vec![0xffffffff, 0xffffffff], hashes: vec![ - H256::from_str( + B256::from_str( "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", ) .unwrap(), - H256::from_str( + B256::from_str( "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff", ) .unwrap(), @@ -419,11 +416,11 @@ mod tests { types: vec![0xff, 0xff], sizes: vec![0xffffffff, 0xffffffff], hashes: vec![ - H256::from_str( + B256::from_str( "0xbeefcafebeefcafebeefcafebeefcafebeefcafebeefcafebeefcafebeefcafe", ) .unwrap(), - H256::from_str( + B256::from_str( "0xbeefcafebeefcafebeefcafebeefcafebeefcafebeefcafebeefcafebeefcafe", ) .unwrap(), @@ -436,11 +433,11 @@ mod tests { types: vec![0x10, 0x10], sizes: vec![0xdeadc0de, 0xdeadc0de], hashes: vec![ - H256::from_str( + B256::from_str( "0x3b9aca00f0671c9a2a1b817a0a78d3fe0c0f776cccb2a8c3c1b412a4f4e4d4e2", ) .unwrap(), - H256::from_str( + B256::from_str( "0x3b9aca00f0671c9a2a1b817a0a78d3fe0c0f776cccb2a8c3c1b412a4f4e4d4e2", ) .unwrap(), @@ -453,11 +450,11 @@ mod tests { types: vec![0x6f, 0x6f], sizes: vec![0x7fffffff, 0x7fffffff], hashes: vec![ - H256::from_str( + B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000000002", ) .unwrap(), - H256::from_str( + B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000000002", ) .unwrap(), diff --git a/crates/net/eth-wire/src/types/message.rs b/crates/net/eth-wire/src/types/message.rs index d37f5033be..17efbeb2df 100644 --- a/crates/net/eth-wire/src/types/message.rs +++ b/crates/net/eth-wire/src/types/message.rs @@ -5,8 +5,8 @@ use super::{ NewPooledTransactionHashes68, NodeData, PooledTransactions, Receipts, Status, Transactions, }; use crate::{errors::EthStreamError, EthVersion, SharedTransactions}; +use alloy_rlp::{length_of_length, Decodable, Encodable, Header}; use reth_primitives::bytes::{Buf, BufMut}; -use reth_rlp::{length_of_length, Decodable, Encodable, Header}; use std::{fmt::Debug, sync::Arc}; #[cfg(feature = 
"serde")] @@ -325,8 +325,8 @@ impl Encodable for EthMessageID { } impl Decodable for EthMessageID { - fn decode(buf: &mut &[u8]) -> Result { - let id = buf.first().ok_or(reth_rlp::DecodeError::InputTooShort)?; + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { + let id = buf.first().ok_or(alloy_rlp::Error::InputTooShort)?; let id = match id { 0x00 => EthMessageID::Status, 0x01 => EthMessageID::NewBlockHashes, @@ -343,7 +343,7 @@ impl Decodable for EthMessageID { 0x0e => EthMessageID::NodeData, 0x0f => EthMessageID::GetReceipts, 0x10 => EthMessageID::Receipts, - _ => return Err(reth_rlp::DecodeError::Custom("Invalid message ID")), + _ => return Err(alloy_rlp::Error::Custom("Invalid message ID")), }; buf.advance(1); Ok(id) @@ -393,7 +393,7 @@ impl Encodable for RequestPair where T: Encodable, { - fn encode(&self, out: &mut dyn reth_rlp::BufMut) { + fn encode(&self, out: &mut dyn alloy_rlp::BufMut) { let header = Header { list: true, payload_length: self.request_id.length() + self.message.length() }; @@ -416,7 +416,7 @@ impl Decodable for RequestPair where T: Decodable, { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let _header = Header::decode(buf)?; Ok(Self { request_id: u64::decode(buf)?, message: T::decode(buf)? }) } @@ -428,8 +428,8 @@ mod test { errors::EthStreamError, types::message::RequestPair, EthMessage, EthMessageID, GetNodeData, NodeData, ProtocolMessage, }; - use hex_literal::hex; - use reth_rlp::{Decodable, Encodable}; + use alloy_rlp::{Decodable, Encodable}; + use reth_primitives::hex; fn encode(value: T) -> Vec { let mut buf = vec![]; diff --git a/crates/net/eth-wire/src/types/receipts.rs b/crates/net/eth-wire/src/types/receipts.rs index f1bb3bc120..27a482ceb7 100644 --- a/crates/net/eth-wire/src/types/receipts.rs +++ b/crates/net/eth-wire/src/types/receipts.rs @@ -1,7 +1,7 @@ //! Implements the `GetReceipts` and `Receipts` message types. +use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; use reth_codecs::derive_arbitrary; -use reth_primitives::{ReceiptWithBloom, H256}; -use reth_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; +use reth_primitives::{ReceiptWithBloom, B256}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize}; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct GetReceipts( /// The block hashes to request receipts for. - pub Vec, + pub Vec, ); /// The response to [`GetReceipts`], containing receipt lists that correspond to each block @@ -37,9 +37,8 @@ mod test { types::{message::RequestPair, GetReceipts}, Receipts, }; - use hex_literal::hex; - use reth_primitives::{Log, Receipt, ReceiptWithBloom, TxType}; - use reth_rlp::{Decodable, Encodable}; + use alloy_rlp::{Decodable, Encodable}; + use reth_primitives::{hex, Log, Receipt, ReceiptWithBloom, TxType}; #[test] fn roundtrip_eip1559() { diff --git a/crates/net/eth-wire/src/types/state.rs b/crates/net/eth-wire/src/types/state.rs index 699e52568c..bad88680fc 100644 --- a/crates/net/eth-wire/src/types/state.rs +++ b/crates/net/eth-wire/src/types/state.rs @@ -1,7 +1,7 @@ //! Implements the `GetNodeData` and `NodeData` message types. 
+use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; use reth_codecs::derive_arbitrary; -use reth_primitives::{Bytes, H256}; -use reth_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; +use reth_primitives::{Bytes, B256}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -12,7 +12,7 @@ use serde::{Deserialize, Serialize}; #[derive_arbitrary(rlp)] #[derive(Clone, Debug, PartialEq, Eq, RlpEncodableWrapper, RlpDecodableWrapper, Default)] #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] -pub struct GetNodeData(pub Vec); +pub struct GetNodeData(pub Vec); /// The response to [`GetNodeData`], containing the state tree nodes or contract bytecode /// corresponding to the requested hashes. @@ -26,10 +26,10 @@ pub struct NodeData(pub Vec); #[cfg(test)] mod test { - use hex_literal::hex; + use reth_primitives::hex; use crate::{message::RequestPair, GetNodeData, NodeData}; - use reth_rlp::{Decodable, Encodable}; + use alloy_rlp::{Decodable, Encodable}; #[test] // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 diff --git a/crates/net/eth-wire/src/types/status.rs b/crates/net/eth-wire/src/types/status.rs index cbb8ca0c16..c112fe58cc 100644 --- a/crates/net/eth-wire/src/types/status.rs +++ b/crates/net/eth-wire/src/types/status.rs @@ -1,10 +1,9 @@ use crate::{EthVersion, StatusBuilder}; - +use alloy_rlp::{RlpDecodable, RlpEncodable}; use reth_codecs::derive_arbitrary; use reth_primitives::{ - hex, Chain, ChainSpec, ForkId, Genesis, Hardfork, Head, H256, MAINNET, U256, + hex, Chain, ChainSpec, ForkId, Genesis, Hardfork, Head, NamedChain, B256, MAINNET, U256, }; -use reth_rlp::{RlpDecodable, RlpEncodable}; use std::fmt::{Debug, Display}; #[cfg(feature = "serde")] @@ -31,10 +30,10 @@ pub struct Status { pub total_difficulty: U256, /// The highest difficulty block hash the peer has seen - pub blockhash: H256, + pub blockhash: B256, /// The genesis hash of the peer's chain. 
- pub genesis: H256, + pub genesis: B256, /// The fork identifier, a [CRC32 /// checksum](https://en.wikipedia.org/wiki/Cyclic_redundancy_check#CRC-32_algorithm) for @@ -134,7 +133,7 @@ impl Default for Status { let mainnet_genesis = MAINNET.genesis_hash(); Status { version: EthVersion::Eth68 as u8, - chain: Chain::Named(ethers_core::types::Chain::Mainnet), + chain: Chain::Named(NamedChain::Mainnet), total_difficulty: U256::from(17_179_869_184u64), blockhash: mainnet_genesis, genesis: mainnet_genesis, @@ -148,13 +147,12 @@ impl Default for Status { #[cfg(test)] mod tests { use crate::types::{EthVersion, Status}; - use ethers_core::types::Chain as NamedChain; - use hex_literal::hex; + use alloy_rlp::{Decodable, Encodable}; use rand::Rng; use reth_primitives::{ - Chain, ChainSpec, ForkCondition, ForkHash, ForkId, Genesis, Hardfork, Head, H256, U256, + hex, Chain, ChainSpec, ForkCondition, ForkHash, ForkId, Genesis, Hardfork, Head, + NamedChain, B256, U256, }; - use reth_rlp::{Decodable, Encodable}; use std::str::FromStr; #[test] @@ -164,11 +162,11 @@ mod tests { version: EthVersion::Eth67 as u8, chain: Chain::Named(NamedChain::Mainnet), total_difficulty: U256::from(36206751599115524359527u128), - blockhash: H256::from_str( + blockhash: B256::from_str( "feb27336ca7923f8fab3bd617fcb6e75841538f71c1bcfc267d7838489d9e13d", ) .unwrap(), - genesis: H256::from_str( + genesis: B256::from_str( "d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3", ) .unwrap(), @@ -187,11 +185,11 @@ mod tests { version: EthVersion::Eth67 as u8, chain: Chain::Named(NamedChain::Mainnet), total_difficulty: U256::from(36206751599115524359527u128), - blockhash: H256::from_str( + blockhash: B256::from_str( "feb27336ca7923f8fab3bd617fcb6e75841538f71c1bcfc267d7838489d9e13d", ) .unwrap(), - genesis: H256::from_str( + genesis: B256::from_str( "d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3", ) .unwrap(), @@ -208,11 +206,11 @@ mod tests { version: EthVersion::Eth66 as u8, chain: Chain::Named(NamedChain::BinanceSmartChain), total_difficulty: U256::from(37851386u64), - blockhash: H256::from_str( + blockhash: B256::from_str( "f8514c4680ef27700751b08f37645309ce65a449616a3ea966bf39dd935bb27b", ) .unwrap(), - genesis: H256::from_str( + genesis: B256::from_str( "0d21840abff46b96c84b2ac9e10e4f5cdaeb5693cb665db62a2f3b02d2d57b5b", ) .unwrap(), @@ -231,11 +229,11 @@ mod tests { version: EthVersion::Eth66 as u8, chain: Chain::Named(NamedChain::BinanceSmartChain), total_difficulty: U256::from(37851386u64), - blockhash: H256::from_str( + blockhash: B256::from_str( "f8514c4680ef27700751b08f37645309ce65a449616a3ea966bf39dd935bb27b", ) .unwrap(), - genesis: H256::from_str( + genesis: B256::from_str( "0d21840abff46b96c84b2ac9e10e4f5cdaeb5693cb665db62a2f3b02d2d57b5b", ) .unwrap(), @@ -255,11 +253,11 @@ mod tests { "0x000000000000000000000000006d68fcffffffffffffffffffffffffdeab81b8", ) .unwrap(), - blockhash: H256::from_str( + blockhash: B256::from_str( "523e8163a6d620a4cc152c547a05f28a03fec91a2a615194cb86df9731372c0c", ) .unwrap(), - genesis: H256::from_str( + genesis: B256::from_str( "6499dccdc7c7def3ebb1ce4c6ee27ec6bd02aee570625ca391919faf77ef27bd", ) .unwrap(), @@ -271,8 +269,8 @@ mod tests { #[test] fn init_custom_status_fields() { - let head_hash = H256::random(); let mut rng = rand::thread_rng(); + let head_hash = rng.gen(); let total_difficulty = U256::from(rng.gen::()); // create a genesis that has a random part, so we can check that the hash is preserved diff --git 
a/crates/net/eth-wire/src/types/transactions.rs b/crates/net/eth-wire/src/types/transactions.rs index 4cb1abab28..46fc30865e 100644 --- a/crates/net/eth-wire/src/types/transactions.rs +++ b/crates/net/eth-wire/src/types/transactions.rs @@ -1,7 +1,7 @@ //! Implements the `GetPooledTransactions` and `PooledTransactions` message types. +use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; use reth_codecs::derive_arbitrary; -use reth_primitives::{PooledTransactionsElement, TransactionSigned, H256}; -use reth_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; +use reth_primitives::{PooledTransactionsElement, TransactionSigned, B256}; #[cfg(feature = "serde")] use serde::{Deserialize, Serialize}; @@ -12,12 +12,12 @@ use serde::{Deserialize, Serialize}; #[cfg_attr(feature = "serde", derive(Serialize, Deserialize))] pub struct GetPooledTransactions( /// The transaction hashes to request transaction bodies for. - pub Vec, + pub Vec, ); impl From> for GetPooledTransactions where - T: Into, + T: Into, { fn from(hashes: Vec) -> Self { GetPooledTransactions(hashes.into_iter().map(|h| h.into()).collect()) @@ -48,11 +48,10 @@ impl From> for PooledTransactions { #[cfg(test)] mod test { use crate::{message::RequestPair, GetPooledTransactions, PooledTransactions}; - use hex_literal::hex; + use alloy_rlp::{Decodable, Encodable}; use reth_primitives::{ hex, Signature, Transaction, TransactionKind, TransactionSigned, TxEip1559, TxLegacy, U256, }; - use reth_rlp::{Decodable, Encodable}; use std::str::FromStr; #[test] diff --git a/crates/net/eth-wire/tests/fuzz_roundtrip.rs b/crates/net/eth-wire/tests/fuzz_roundtrip.rs index 34117e8e13..ed9467d293 100644 --- a/crates/net/eth-wire/tests/fuzz_roundtrip.rs +++ b/crates/net/eth-wire/tests/fuzz_roundtrip.rs @@ -1,10 +1,10 @@ //! Round-trip encoding fuzzing for the `eth-wire` crate. -use reth_rlp::{Decodable, Encodable}; +use alloy_rlp::{Decodable, Encodable}; use serde::Serialize; use std::fmt::Debug; -/// Creates a fuzz test for a type that should be [`Encodable`](reth_rlp::Encodable) and -/// [`Decodable`](reth_rlp::Decodable). +/// Creates a fuzz test for a type that should be [`Encodable`](alloy_rlp::Encodable) and +/// [`Decodable`](alloy_rlp::Decodable). /// /// The test will create a random instance of the type, encode it, and then decode it. fn roundtrip_encoding(thing: T) @@ -48,6 +48,7 @@ macro_rules! fuzz_type_and_name { #[cfg(any(test, feature = "bench"))] pub mod fuzz_rlp { use crate::roundtrip_encoding; + use alloy_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; use reth_codecs::derive_arbitrary; use reth_eth_wire::{ BlockBodies, BlockHeaders, DisconnectReason, GetBlockBodies, GetBlockHeaders, GetNodeData, @@ -56,7 +57,6 @@ pub mod fuzz_rlp { PooledTransactions, Receipts, Status, Transactions, }; use reth_primitives::{BlockHashOrNumber, TransactionSigned}; - use reth_rlp::{RlpDecodableWrapper, RlpEncodableWrapper}; use serde::{Deserialize, Serialize}; use test_fuzz::test_fuzz; diff --git a/crates/net/eth-wire/tests/new_block.rs b/crates/net/eth-wire/tests/new_block.rs index 9e011bd8d4..cc22a46973 100644 --- a/crates/net/eth-wire/tests/new_block.rs +++ b/crates/net/eth-wire/tests/new_block.rs @@ -1,7 +1,7 @@ //! 
Decoding tests for [`NewBlock`] +use alloy_rlp::Decodable; use reth_eth_wire::NewBlock; use reth_primitives::hex; -use reth_rlp::Decodable; use std::{fs, path::PathBuf}; #[test] diff --git a/crates/net/eth-wire/tests/new_pooled_transactions.rs b/crates/net/eth-wire/tests/new_pooled_transactions.rs index 9f767c2573..0ed63d6029 100644 --- a/crates/net/eth-wire/tests/new_pooled_transactions.rs +++ b/crates/net/eth-wire/tests/new_pooled_transactions.rs @@ -1,7 +1,7 @@ //! Decoding tests for [`NewPooledTransactions`] +use alloy_rlp::Decodable; use reth_eth_wire::NewPooledTransactionHashes66; use reth_primitives::hex; -use reth_rlp::Decodable; use std::{fs, path::PathBuf}; #[test] diff --git a/crates/net/eth-wire/tests/pooled_transactions.rs b/crates/net/eth-wire/tests/pooled_transactions.rs index a204a93e8c..268b6a9c34 100644 --- a/crates/net/eth-wire/tests/pooled_transactions.rs +++ b/crates/net/eth-wire/tests/pooled_transactions.rs @@ -1,7 +1,7 @@ //! Decoding tests for [`PooledTransactions`] +use alloy_rlp::Decodable; use reth_eth_wire::PooledTransactions; use reth_primitives::{hex, Bytes, PooledTransactionsElement}; -use reth_rlp::Decodable; use std::{fs, path::PathBuf}; #[test] diff --git a/crates/net/nat/src/lib.rs b/crates/net/nat/src/lib.rs index 65382c7fc7..080319eef2 100644 --- a/crates/net/nat/src/lib.rs +++ b/crates/net/nat/src/lib.rs @@ -7,7 +7,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/net/network-api/src/lib.rs b/crates/net/network-api/src/lib.rs index 1b9f76e046..4cb69d1187 100644 --- a/crates/net/network-api/src/lib.rs +++ b/crates/net/network-api/src/lib.rs @@ -9,7 +9,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/net/network/Cargo.toml b/crates/net/network/Cargo.toml index 714c4477cf..971e38afa3 100644 --- a/crates/net/network/Cargo.toml +++ b/crates/net/network/Cargo.toml @@ -26,13 +26,13 @@ reth-discv4 = { path = "../discv4" } reth-dns-discovery = { path = "../dns" } reth-eth-wire = { path = "../eth-wire" } reth-ecies = { path = "../ecies" } -reth-rlp.workspace = true -reth-rlp-derive = { path = "../../rlp/rlp-derive" } reth-tasks.workspace = true reth-transaction-pool.workspace = true reth-provider.workspace = true reth-rpc-types.workspace = true +alloy-rlp.workspace = true + # async/futures futures.workspace = true pin-project.workspace = true @@ -63,7 +63,6 @@ rand.workspace = true secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recovery"] } enr = { workspace = true, features = ["rust-secp256k1"], optional = true } -ethers-core = { workspace = true, default-features = false, optional = true } tempfile = { version = "3.3", optional = true } [dev-dependencies] @@ -88,12 +87,11 @@ 
ethers-middleware = { workspace = true, default-features = false } enr = { workspace = true, features = ["serde", "rust-secp256k1"] } # misc -hex = "0.4" serial_test.workspace = true tempfile = "3.3" [features] default = ["serde"] serde = ["dep:serde", "dep:humantime-serde", "secp256k1/serde", "enr?/serde", "dep:serde_json"] -test-utils = ["reth-provider/test-utils", "dep:enr", "dep:ethers-core", "dep:tempfile"] +test-utils = ["reth-provider/test-utils", "dep:enr", "dep:tempfile"] geth-tests = [] diff --git a/crates/net/network/src/cache.rs b/crates/net/network/src/cache.rs index 80a8972b8b..4acd66e14b 100644 --- a/crates/net/network/src/cache.rs +++ b/crates/net/network/src/cache.rs @@ -52,6 +52,11 @@ impl LruCache { { self.inner.contains(value) } + + /// Returns an iterator over all cached entries + pub fn iter(&self) -> impl Iterator + '_ { + self.inner.iter() + } } impl Extend for LruCache diff --git a/crates/net/network/src/fetch/client.rs b/crates/net/network/src/fetch/client.rs index 9136ae4df9..a9febae7ec 100644 --- a/crates/net/network/src/fetch/client.rs +++ b/crates/net/network/src/fetch/client.rs @@ -11,15 +11,12 @@ use reth_interfaces::p2p::{ priority::Priority, }; use reth_network_api::ReputationChangeKind; -use reth_primitives::{Header, PeerId, H256}; +use reth_primitives::{Header, PeerId, B256}; use std::sync::{ atomic::{AtomicUsize, Ordering}, Arc, }; -use tokio::sync::{ - mpsc::UnboundedSender, - oneshot::{self}, -}; +use tokio::sync::{mpsc::UnboundedSender, oneshot}; /// Front-end API for fetching data from the network. /// @@ -111,7 +108,7 @@ impl BodiesClient for FetchClient { /// Sends a `GetBlockBodies` request to an available peer. fn get_block_bodies_with_priority( &self, - request: Vec, + request: Vec, priority: Priority, ) -> Self::Output { let (response, rx) = oneshot::channel(); diff --git a/crates/net/network/src/fetch/mod.rs b/crates/net/network/src/fetch/mod.rs index 3a1a8f7675..cda61fc230 100644 --- a/crates/net/network/src/fetch/mod.rs +++ b/crates/net/network/src/fetch/mod.rs @@ -9,7 +9,7 @@ use reth_interfaces::p2p::{ priority::Priority, }; use reth_network_api::ReputationChangeKind; -use reth_primitives::{BlockBody, Header, PeerId, H256}; +use reth_primitives::{BlockBody, Header, PeerId, B256}; use std::{ collections::{HashMap, VecDeque}, sync::{ @@ -37,7 +37,7 @@ pub struct StateFetcher { HashMap>>>, /// Currently active [`GetBlockBodies`] requests inflight_bodies_requests: - HashMap, PeerRequestResult>>>, + HashMap, PeerRequestResult>>>, /// The list of _available_ peers for requests. peers: HashMap, /// The handle to the peers manager @@ -73,7 +73,7 @@ impl StateFetcher { pub(crate) fn new_active_peer( &mut self, peer_id: PeerId, - best_hash: H256, + best_hash: B256, best_number: u64, timeout: Arc, ) { @@ -100,7 +100,7 @@ impl StateFetcher { /// Updates the block information for the peer. /// /// Returns `true` if this a newer block - pub(crate) fn update_peer_block(&mut self, peer_id: &PeerId, hash: H256, number: u64) -> bool { + pub(crate) fn update_peer_block(&mut self, peer_id: &PeerId, hash: B256, number: u64) -> bool { if let Some(peer) = self.peers.get_mut(peer_id) { if number > peer.best_number { peer.best_hash = hash; @@ -302,7 +302,7 @@ struct Peer { /// The state this peer currently resides in. state: PeerState, /// Best known hash that the peer has - best_hash: H256, + best_hash: B256, /// Tracks the best number of the peer. best_number: u64, /// Tracks the current timeout value we use for the peer. 
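// Editor's sketch (tokio channels, stand-in request type): the fetch client above sends a
// `DownloadRequest` that carries a oneshot sender, and the fetcher answers on that channel. The
// same request/response shape in miniature:
use tokio::sync::{mpsc, oneshot};

struct GetBodies {
    hashes: Vec<[u8; 32]>,
    response: oneshot::Sender<Vec<&'static str>>,
}

#[tokio::main(flavor = "current_thread")]
async fn main() {
    let (to_fetcher, mut requests) = mpsc::unbounded_channel::<GetBodies>();

    // "fetcher" task: receives the request and answers on its embedded oneshot channel
    tokio::spawn(async move {
        while let Some(req) = requests.recv().await {
            let bodies = vec!["body"; req.hashes.len()];
            let _ = req.response.send(bodies);
        }
    });

    // "client" side, mirroring get_block_bodies_with_priority: send the request, await the oneshot
    let (response, rx) = oneshot::channel();
    to_fetcher.send(GetBodies { hashes: vec![[0u8; 32]], response }).unwrap();
    assert_eq!(rx.await.unwrap().len(), 1);
}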
@@ -372,7 +372,7 @@ pub(crate) enum DownloadRequest { }, /// Download the requested headers and send response through channel GetBlockBodies { - request: Vec, + request: Vec, response: oneshot::Sender>>, priority: Priority, }, @@ -429,7 +429,7 @@ pub(crate) enum BlockResponseOutcome { mod tests { use super::*; use crate::{peers::PeersManager, PeersConfig}; - use reth_primitives::{SealedHeader, H256, H512}; + use reth_primitives::{SealedHeader, B256, B512}; use std::future::poll_fn; #[tokio::test(flavor = "multi_thread")] @@ -457,10 +457,10 @@ mod tests { let manager = PeersManager::new(PeersConfig::default()); let mut fetcher = StateFetcher::new(manager.handle(), Default::default()); // Add a few random peers - let peer1 = H512::random(); - let peer2 = H512::random(); - fetcher.new_active_peer(peer1, H256::random(), 1, Arc::new(AtomicU64::new(1))); - fetcher.new_active_peer(peer2, H256::random(), 2, Arc::new(AtomicU64::new(1))); + let peer1 = B512::random(); + let peer2 = B512::random(); + fetcher.new_active_peer(peer1, B256::random(), 1, Arc::new(AtomicU64::new(1))); + fetcher.new_active_peer(peer2, B256::random(), 2, Arc::new(AtomicU64::new(1))); let first_peer = fetcher.next_peer().unwrap(); assert!(first_peer == peer1 || first_peer == peer2); @@ -480,15 +480,15 @@ mod tests { let manager = PeersManager::new(PeersConfig::default()); let mut fetcher = StateFetcher::new(manager.handle(), Default::default()); // Add a few random peers - let peer1 = H512::random(); - let peer2 = H512::random(); - let peer3 = H512::random(); + let peer1 = B512::random(); + let peer2 = B512::random(); + let peer3 = B512::random(); let peer2_timeout = Arc::new(AtomicU64::new(300)); - fetcher.new_active_peer(peer1, H256::random(), 1, Arc::new(AtomicU64::new(30))); - fetcher.new_active_peer(peer2, H256::random(), 2, Arc::clone(&peer2_timeout)); - fetcher.new_active_peer(peer3, H256::random(), 3, Arc::new(AtomicU64::new(50))); + fetcher.new_active_peer(peer1, B256::random(), 1, Arc::new(AtomicU64::new(30))); + fetcher.new_active_peer(peer2, B256::random(), 2, Arc::clone(&peer2_timeout)); + fetcher.new_active_peer(peer3, B256::random(), 3, Arc::new(AtomicU64::new(50))); // Must always get peer1 (lowest timeout) assert_eq!(fetcher.next_peer(), Some(peer1)); @@ -504,7 +504,7 @@ mod tests { async fn test_on_block_headers_response() { let manager = PeersManager::new(PeersConfig::default()); let mut fetcher = StateFetcher::new(manager.handle(), Default::default()); - let peer_id = H512::random(); + let peer_id = B512::random(); assert_eq!(fetcher.on_block_headers_response(peer_id, Ok(vec![Header::default()])), None); @@ -534,7 +534,7 @@ mod tests { async fn test_header_response_outcome() { let manager = PeersManager::new(PeersConfig::default()); let mut fetcher = StateFetcher::new(manager.handle(), Default::default()); - let peer_id = H512::random(); + let peer_id = B512::random(); let request_pair = || { let (tx, _rx) = oneshot::channel(); diff --git a/crates/net/network/src/lib.rs b/crates/net/network/src/lib.rs index 0978811ccb..f4b7f8f117 100644 --- a/crates/net/network/src/lib.rs +++ b/crates/net/network/src/lib.rs @@ -108,7 +108,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, 
rustdoc::all)] // TODO(danipopes): unreachable_pub #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/net/network/src/manager.rs b/crates/net/network/src/manager.rs index c076c7fba8..22454d8516 100644 --- a/crates/net/network/src/manager.rs +++ b/crates/net/network/src/manager.rs @@ -41,7 +41,7 @@ use reth_eth_wire::{ use reth_metrics::common::mpsc::UnboundedMeteredSender; use reth_net_common::bandwidth_meter::BandwidthMeter; use reth_network_api::ReputationChangeKind; -use reth_primitives::{listener::EventListeners, ForkId, NodeRecord, PeerId, H256}; +use reth_primitives::{listener::EventListeners, ForkId, NodeRecord, PeerId, B256}; use reth_provider::{BlockNumReader, BlockReader}; use reth_rpc_types::{EthProtocolInfo, NetworkStatus}; use std::{ @@ -301,7 +301,7 @@ where } /// Returns the configured genesis hash - pub fn genesis_hash(&self) -> H256 { + pub fn genesis_hash(&self) -> B256 { self.swarm.state().genesis_hash() } diff --git a/crates/net/network/src/message.rs b/crates/net/network/src/message.rs index 12b4c4980a..79ef8737e7 100644 --- a/crates/net/network/src/message.rs +++ b/crates/net/network/src/message.rs @@ -12,7 +12,7 @@ use reth_eth_wire::{ }; use reth_interfaces::p2p::error::{RequestError, RequestResult}; use reth_primitives::{ - BlockBody, Bytes, Header, PeerId, PooledTransactionsElement, ReceiptWithBloom, H256, + BlockBody, Bytes, Header, PeerId, PooledTransactionsElement, ReceiptWithBloom, B256, }; use std::{ fmt, @@ -25,7 +25,7 @@ use tokio::sync::{mpsc, mpsc::error::TrySendError, oneshot}; #[derive(Debug, Clone)] pub struct NewBlockMessage { /// Hash of the block - pub hash: H256, + pub hash: B256, /// Raw received message pub block: Arc, } diff --git a/crates/net/network/src/network.rs b/crates/net/network/src/network.rs index aa6d287215..360a0a0ac3 100644 --- a/crates/net/network/src/network.rs +++ b/crates/net/network/src/network.rs @@ -11,7 +11,7 @@ use reth_network_api::{ NetworkError, NetworkInfo, PeerInfo, PeerKind, Peers, PeersInfo, Reputation, ReputationChangeKind, }; -use reth_primitives::{Head, NodeRecord, PeerId, TransactionSigned, H256}; +use reth_primitives::{Head, NodeRecord, PeerId, TransactionSigned, B256}; use reth_rpc_types::NetworkStatus; use std::{ net::SocketAddr, @@ -135,7 +135,7 @@ impl NetworkHandle { /// Caution: in PoS this is a noop, since new blocks are no longer announced over devp2p, but are /// instead sent to the node by the CL. However, they can still be requested over devp2p, but /// broadcasting them is considered a protocol violation. - pub fn announce_block(&self, block: NewBlock, hash: H256) { + pub fn announce_block(&self, block: NewBlock, hash: B256) { self.send_message(NetworkHandleMessage::AnnounceBlock(block, hash)) } @@ -332,7 +332,7 @@ pub(crate) enum NetworkHandleMessage { /// Add a new listener for [`NetworkEvent`]. EventListener(UnboundedSender), /// Broadcast event to announce a new block to all nodes. - AnnounceBlock(NewBlock, H256), + AnnounceBlock(NewBlock, B256), /// Sends the list of transactions to the given peer. SendTransaction { peer_id: PeerId, msg: SharedTransactions }, /// Sends the list of transaction hashes to the given peer.
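`NetworkHandle::announce_block` above does no networking work itself: it only places a `NetworkHandleMessage::AnnounceBlock` command onto a channel that the network manager task drains. The snippet below sketches that handle/command pattern in isolation, using hypothetical `Handle`/`HandleMessage` names and a std `mpsc` channel where the real code uses Tokio's unbounded sender; it is an illustration of the pattern, not the crate's API:

```rust
use std::sync::mpsc;

/// Stand-in for `reth_primitives::B256` (a 32-byte hash).
type B256 = [u8; 32];

/// Illustrative command enum; the real type also carries the block itself.
#[derive(Debug)]
enum HandleMessage {
    AnnounceBlock { hash: B256 },
}

/// Cheap, cloneable front-end that only forwards commands to the manager task.
#[derive(Clone)]
struct Handle {
    to_manager: mpsc::Sender<HandleMessage>,
}

impl Handle {
    fn announce_block(&self, hash: B256) {
        // fire-and-forget: a send error only means the manager task is gone
        let _ = self.to_manager.send(HandleMessage::AnnounceBlock { hash });
    }
}

fn main() {
    let (tx, rx) = mpsc::channel();
    let handle = Handle { to_manager: tx };
    handle.announce_block([0u8; 32]);

    // the manager side would loop over incoming commands and react to them
    if let Ok(HandleMessage::AnnounceBlock { hash }) = rx.recv() {
        println!("announce block {:02x?}", hash);
    }
}
```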
diff --git a/crates/net/network/src/peers/manager.rs b/crates/net/network/src/peers/manager.rs index 6e4b62c7b7..3dc69012f4 100644 --- a/crates/net/network/src/peers/manager.rs +++ b/crates/net/network/src/peers/manager.rs @@ -1349,7 +1349,7 @@ mod test { }; use reth_net_common::ban_list::BanList; use reth_network_api::ReputationChangeKind; - use reth_primitives::{PeerId, H512}; + use reth_primitives::{PeerId, B512}; use std::{ collections::HashSet, future::{poll_fn, Future}, @@ -2002,7 +2002,7 @@ mod test { let ban_list = BanList::new(HashSet::new(), vec![ip]); let config = PeersConfig::default().with_ban_list(ban_list); let mut peer_manager = PeersManager::new(config); - peer_manager.add_peer(H512::default(), socket_addr, None); + peer_manager.add_peer(B512::default(), socket_addr, None); assert!(peer_manager.peers.is_empty()); } @@ -2031,7 +2031,7 @@ mod test { async fn test_on_active_inbound_ban_list() { let ip = IpAddr::V4(Ipv4Addr::new(127, 0, 1, 2)); let socket_addr = SocketAddr::new(ip, 8008); - let given_peer_id: PeerId = H512::from_low_u64_ne(123403423412); + let given_peer_id = PeerId::random(); let ban_list = BanList::new(vec![given_peer_id], HashSet::new()); let config = PeersConfig::default().with_ban_list(ban_list); let mut peer_manager = PeersManager::new(config); diff --git a/crates/net/network/src/state.rs b/crates/net/network/src/state.rs index d2828e522d..d2b94f49a6 100644 --- a/crates/net/network/src/state.rs +++ b/crates/net/network/src/state.rs @@ -16,7 +16,7 @@ use reth_eth_wire::{ capability::Capabilities, BlockHashNumber, DisconnectReason, NewBlockHashes, Status, }; use reth_network_api::PeerKind; -use reth_primitives::{ForkId, PeerId, H256}; +use reth_primitives::{ForkId, PeerId, B256}; use reth_provider::BlockNumReader; use std::{ collections::{HashMap, VecDeque}, @@ -60,7 +60,7 @@ pub struct NetworkState { /// Network discovery. discovery: Discovery, /// The genesis hash of the network we're on - genesis_hash: H256, + genesis_hash: B256, /// The type that handles requests. /// /// The fetcher streams RLPx related requests on a per-peer basis to this type. This type will @@ -77,7 +77,7 @@ where client: C, discovery: Discovery, peers_manager: PeersManager, - genesis_hash: H256, + genesis_hash: B256, num_active_peers: Arc, ) -> Self { let state_fetcher = StateFetcher::new(peers_manager.handle(), num_active_peers); @@ -113,7 +113,7 @@ where } /// Configured genesis hash. - pub fn genesis_hash(&self) -> H256 { + pub fn genesis_hash(&self) -> B256 { self.genesis_hash } @@ -227,7 +227,7 @@ where } /// Updates the block information for the peer. - pub(crate) fn update_peer_block(&mut self, peer_id: &PeerId, hash: H256, number: u64) { + pub(crate) fn update_peer_block(&mut self, peer_id: &PeerId, hash: B256, number: u64) { if let Some(peer) = self.active_peers.get_mut(peer_id) { peer.best_hash = hash; } @@ -242,7 +242,7 @@ where /// Invoked after a `NewBlock` message was received by the peer. /// /// This will keep track of blocks we know a peer has - pub(crate) fn on_new_block(&mut self, peer_id: PeerId, hash: H256) { + pub(crate) fn on_new_block(&mut self, peer_id: PeerId, hash: B256) { // Mark the blocks as seen if let Some(peer) = self.active_peers.get_mut(&peer_id) { peer.blocks.insert(hash); @@ -469,7 +469,7 @@ where #[derive(Debug)] pub(crate) struct ActivePeer { /// Best block of the peer. - pub(crate) best_hash: H256, + pub(crate) best_hash: B256, /// The capabilities of the remote peer. 
#[allow(unused)] pub(crate) capabilities: Arc, @@ -478,7 +478,7 @@ pub(crate) struct ActivePeer { /// The response receiver for a currently active request to that peer. pub(crate) pending_response: Option, /// Blocks we know the peer has. - pub(crate) blocks: LruCache, + pub(crate) blocks: LruCache, } /// Message variants triggered by the [`NetworkState`] @@ -530,7 +530,7 @@ mod tests { BlockBodies, EthVersion, Status, }; use reth_interfaces::p2p::{bodies::client::BodiesClient, error::RequestError}; - use reth_primitives::{BlockBody, Header, PeerId, H256}; + use reth_primitives::{BlockBody, Header, PeerId, B256}; use reth_provider::test_utils::NoopProvider; use std::{ future::poll_fn, @@ -606,11 +606,11 @@ mod tests { }); // send requests to the state via the client - let (peer, bodies) = client.get_block_bodies(vec![H256::random()]).await.unwrap().split(); + let (peer, bodies) = client.get_block_bodies(vec![B256::random()]).await.unwrap().split(); assert_eq!(peer, peer_id); assert_eq!(bodies, vec![body]); - let resp = client.get_block_bodies(vec![H256::random()]).await; + let resp = client.get_block_bodies(vec![B256::random()]).await; assert!(resp.is_err()); assert_eq!(resp.unwrap_err(), RequestError::ConnectionDropped); } diff --git a/crates/net/network/src/transactions.rs b/crates/net/network/src/transactions.rs index fade4be1a5..0db1d2318c 100644 --- a/crates/net/network/src/transactions.rs +++ b/crates/net/network/src/transactions.rs @@ -20,14 +20,14 @@ use reth_metrics::common::mpsc::UnboundedMeteredReceiver; use reth_network_api::{Peers, ReputationChangeKind}; use reth_primitives::{ FromRecoveredPooledTransaction, IntoRecoveredTransaction, PeerId, PooledTransactionsElement, - TransactionSigned, TxHash, H256, + TransactionSigned, TxHash, B256, }; use reth_transaction_pool::{ error::PoolResult, GetPooledTransactionLimit, PoolTransaction, PropagateKind, PropagatedTransactions, TransactionPool, ValidPoolTransaction, }; use std::{ - collections::{hash_map::Entry, HashMap}, + collections::{hash_map::Entry, HashMap, HashSet}, num::NonZeroUsize, pin::Pin, sync::Arc, @@ -91,8 +91,10 @@ impl TransactionsHandle { } /// Request the active peer IDs from the [`TransactionsManager`]. - pub fn get_active_peers(&self) { - self.send(TransactionsCommand::GetActivePeers) + pub async fn get_active_peers(&self) -> Result, RecvError> { + let (tx, rx) = oneshot::channel(); + self.send(TransactionsCommand::GetActivePeers(tx)); + rx.await } /// Manually propagate full transactions to a specific peer. @@ -101,13 +103,22 @@ impl TransactionsHandle { } /// Request the transaction hashes known by specific peers. - pub fn get_transaction_hashes(&self, peers: Vec) { - self.send(TransactionsCommand::GetTransactionHashes(peers)) + pub async fn get_transaction_hashes( + &self, + peers: Vec, + ) -> Result>, RecvError> { + let (tx, rx) = oneshot::channel(); + self.send(TransactionsCommand::GetTransactionHashes { peers, tx }); + rx.await } /// Request the transaction hashes known by a specific peer. 
- pub fn get_peer_transaction_hashes(&self, peer: PeerId) { - self.send(TransactionsCommand::GetPeerTransactionHashes(peer)) + pub async fn get_peer_transaction_hashes( + &self, + peer: PeerId, + ) -> Result, RecvError> { + let res = self.get_transaction_hashes(vec![peer]).await?; + Ok(res.into_values().next().unwrap_or_default()) } } @@ -346,6 +357,53 @@ where propagated } + /// Propagate the full transactions to a specific peer + /// + /// Returns the propagated transactions + fn propagate_full_transactions_to_peer( + &mut self, + txs: Vec, + peer_id: PeerId, + ) -> Option { + let peer = self.peers.get_mut(&peer_id)?; + let mut propagated = PropagatedTransactions::default(); + trace!(target: "net::tx", ?peer_id, "Propagating transactions to peer"); + + // filter all transactions unknown to the peer + let mut full_transactions = FullTransactionsBuilder::default(); + + let to_propagate = self + .pool + .get_all(txs) + .into_iter() + .filter(|tx| !tx.transaction.is_eip4844()) + .map(PropagateTransaction::new); + + // Iterate through the transactions to propagate and fill the hashes and full transaction + for tx in to_propagate { + if peer.transactions.insert(tx.hash()) { + full_transactions.push(&tx); + } + } + + if full_transactions.transactions.is_empty() { + // nothing to propagate + return None + } + + let new_full_transactions = full_transactions.build(); + for tx in new_full_transactions.iter() { + propagated.0.entry(tx.hash()).or_default().push(PropagateKind::Full(peer_id)); + } + // send full transactions + self.network.send_transactions(peer_id, new_full_transactions); + + // Update propagated transactions metrics + self.metrics.propagated_transactions.increment(propagated.0.len() as u64); + + Some(propagated) + } + /// Propagate the transaction hashes to the given peer /// /// Note: This will only send the hashes for transactions that exist in the pool. @@ -493,10 +551,27 @@ where TransactionsCommand::PropagateHashesTo(hashes, peer) => { self.propagate_hashes_to(hashes, peer) } - TransactionsCommand::GetActivePeers => todo!(), - TransactionsCommand::PropagateTransactionsTo(_txs, _peer) => todo!(), - TransactionsCommand::GetTransactionHashes(_peers) => todo!(), - TransactionsCommand::GetPeerTransactionHashes(_peer) => todo!(), + TransactionsCommand::GetActivePeers(tx) => { + let peers = self.peers.keys().copied().collect::>(); + tx.send(peers).ok(); + } + TransactionsCommand::PropagateTransactionsTo(_txs, _peer) => { + if let Some(propagated) = self.propagate_full_transactions_to_peer(_txs, _peer) { + self.pool.on_propagated(propagated); + } + } + TransactionsCommand::GetTransactionHashes { peers, tx } => { + let mut res = HashMap::with_capacity(peers.len()); + for peer_id in peers { + let hashes = self + .peers + .get(&peer_id) + .map(|peer| peer.transactions.iter().copied().collect::>()) + .unwrap_or_default(); + res.insert(peer_id, hashes); + } + tx.send(res).ok(); + } } } @@ -911,7 +986,7 @@ impl Future for GetPooledTxRequestFut { #[derive(Debug)] struct Peer { /// Keeps track of transactions that we know the peer has seen. - transactions: LruCache, + transactions: LruCache, /// A communication channel directly to the peer's session task. request_tx: PeerRequestSender, /// negotiated version of the session. @@ -925,17 +1000,18 @@ struct Peer { #[derive(Debug)] enum TransactionsCommand { /// Propagate a transaction hash to the network. - PropagateHash(H256), + PropagateHash(B256), /// Propagate transaction hashes to a specific peer. 
- PropagateHashesTo(Vec, PeerId), + PropagateHashesTo(Vec, PeerId), /// Request the list of active peer IDs from the [`TransactionsManager`]. - GetActivePeers, + GetActivePeers(oneshot::Sender>), /// Propagate a collection of full transactions to a specific peer. PropagateTransactionsTo(Vec, PeerId), /// Request transaction hashes known by specific peers from the [`TransactionsManager`]. - GetTransactionHashes(Vec), - /// Request transaction hashes known by a specific peer from the [`TransactionsManager`]. - GetPeerTransactionHashes(PeerId), + GetTransactionHashes { + peers: Vec, + tx: oneshot::Sender>>, + }, } /// All events related to transactions emitted by the network. @@ -960,10 +1036,11 @@ pub enum NetworkTransactionEvent { mod tests { use super::*; use crate::{test_utils::Testnet, NetworkConfigBuilder, NetworkManager}; + use alloy_rlp::Decodable; use reth_interfaces::sync::{NetworkSyncUpdater, SyncState}; use reth_network_api::NetworkInfo; + use reth_primitives::hex; use reth_provider::test_utils::NoopProvider; - use reth_rlp::Decodable; use reth_transaction_pool::test_utils::{testing_pool, MockTransaction}; use secp256k1::SecretKey; use std::future::poll_fn; @@ -1036,7 +1113,7 @@ mod tests { } } // random tx: - let input = hex::decode("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76").unwrap(); + let input = hex!("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76"); let signed_tx = TransactionSigned::decode(&mut &input[..]).unwrap(); transactions.on_network_tx_event(NetworkTransactionEvent::IncomingTransactions { peer_id: *handle1.peer_id(), @@ -1122,7 +1199,7 @@ mod tests { } } // random tx: - let input = hex::decode("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76").unwrap(); + let input = hex!("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76"); let signed_tx = TransactionSigned::decode(&mut &input[..]).unwrap(); transactions.on_network_tx_event(NetworkTransactionEvent::IncomingTransactions { peer_id: *handle1.peer_id(), @@ -1205,7 +1282,7 @@ mod tests { } } // random tx: - let input = hex::decode("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76").unwrap(); + let input = hex!("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76"); let signed_tx = TransactionSigned::decode(&mut &input[..]).unwrap(); transactions.on_network_tx_event(NetworkTransactionEvent::IncomingTransactions { peer_id: *handle1.peer_id(), diff --git a/crates/net/network/tests/it/clique/clique_middleware.rs 
b/crates/net/network/tests/it/clique/clique_middleware.rs index ac06c7b911..e79d80e964 100644 --- a/crates/net/network/tests/it/clique/clique_middleware.rs +++ b/crates/net/network/tests/it/clique/clique_middleware.rs @@ -10,7 +10,7 @@ use ethers_middleware::SignerMiddleware; use ethers_providers::Middleware; use ethers_signers::Signer; use reth_network::test_utils::enr_to_peer_id; -use reth_primitives::PeerId; +use reth_primitives::{hex, PeerId}; use thiserror::Error; use tracing::trace; diff --git a/crates/net/network/tests/it/geth.rs b/crates/net/network/tests/it/geth.rs index a21637ecf0..83fefb9bc0 100644 --- a/crates/net/network/tests/it/geth.rs +++ b/crates/net/network/tests/it/geth.rs @@ -1,6 +1,6 @@ use crate::clique::{CliqueGethInstance, CliqueMiddleware}; use ethers_core::{ - types::{transaction::eip2718::TypedTransaction, Eip1559TransactionRequest, H160, U64}, + types::{transaction::eip2718::TypedTransaction, Address, Eip1559TransactionRequest}, utils::Geth, }; use ethers_providers::Middleware; @@ -113,7 +113,7 @@ async fn init_geth() -> (CliqueGethInstance, Arc) { let txs = nonces.map(|nonce| { // create a tx that just sends to the zero addr TypedTransaction::Eip1559( - Eip1559TransactionRequest::new().to(H160::zero()).value(1u64).nonce(nonce), + Eip1559TransactionRequest::new().to(Address::zero()).value(1u64).nonce(nonce), ) }); tracing::info!("generated transactions for blocks"); @@ -122,7 +122,7 @@ async fn init_geth() -> (CliqueGethInstance, Arc) { clique.provider.send_requests(txs).await.unwrap(); let block = clique.provider.get_block_number().await.unwrap(); - assert!(block > U64::zero()); + assert!(block.as_u64() > 0); (clique, Arc::new(chainspec)) } diff --git a/crates/net/network/tests/it/requests.rs b/crates/net/network/tests/it/requests.rs index 01bb7fb676..cb35e93b9b 100644 --- a/crates/net/network/tests/it/requests.rs +++ b/crates/net/network/tests/it/requests.rs @@ -9,7 +9,7 @@ use reth_network::test_utils::{NetworkEventStream, Testnet}; use reth_network_api::{NetworkInfo, Peers}; use reth_primitives::{ Block, BlockBody, Bytes, Header, HeadersDirection, Signature, Transaction, TransactionKind, - TransactionSigned, TxEip2930, H256, U256, + TransactionSigned, TxEip2930, U256, }; use reth_provider::test_utils::MockEthProvider; use std::sync::Arc; @@ -58,7 +58,7 @@ async fn test_get_body() { // request some blocks for _ in 0..100 { // Set a new random block to the mock storage and request it via the network - let block_hash = H256::random(); + let block_hash = rng.gen(); let mut block = Block::default(); block.body.push(rng_transaction(&mut rng)); @@ -100,12 +100,12 @@ async fn test_get_header() { assert_eq!(connected, *handle1.peer_id()); let start: u64 = rng.gen(); - let mut hash = H256::random(); + let mut hash = rng.gen(); // request some headers for idx in 0..100 { // Set a new random header to the mock storage and request it via the network let header = Header { number: start + idx, parent_hash: hash, ..Default::default() }; - hash = H256::random(); + hash = rng.gen(); mock_provider.add_header(hash, header.clone()); diff --git a/crates/payload/basic/Cargo.toml b/crates/payload/basic/Cargo.toml index c6d505b6d0..5ae986c40c 100644 --- a/crates/payload/basic/Cargo.toml +++ b/crates/payload/basic/Cargo.toml @@ -13,13 +13,13 @@ description = "A basic payload builder for reth that uses the txpool API to buil reth-primitives.workspace = true reth-revm = { path = "../../revm" } reth-transaction-pool.workspace = true -reth-rlp.workspace = true reth-provider.workspace = 
true reth-payload-builder.workspace = true reth-tasks.workspace = true reth-interfaces.workspace = true ## ethereum +alloy-rlp.workspace = true revm.workspace = true ## async diff --git a/crates/payload/basic/src/lib.rs b/crates/payload/basic/src/lib.rs index e7d7abfe1a..c347134f75 100644 --- a/crates/payload/basic/src/lib.rs +++ b/crates/payload/basic/src/lib.rs @@ -3,13 +3,14 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] use crate::metrics::PayloadBuilderMetrics; +use alloy_rlp::Encodable; use futures_core::ready; use futures_util::FutureExt; use reth_interfaces::{RethError, RethResult}; @@ -18,14 +19,14 @@ use reth_payload_builder::{ PayloadBuilderAttributes, PayloadJob, PayloadJobGenerator, }; use reth_primitives::{ - bytes::{Bytes, BytesMut}, + bytes::BytesMut, calculate_excess_blob_gas, constants::{ eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE, EMPTY_RECEIPTS, EMPTY_TRANSACTIONS, EMPTY_WITHDRAWALS, ETHEREUM_BLOCK_GAS_LIMIT, RETH_CLIENT_VERSION, SLOT_DURATION, }, - proofs, Block, BlockNumberOrTag, ChainSpec, Header, IntoRecoveredTransaction, Receipt, - SealedBlock, Withdrawal, EMPTY_OMMER_ROOT, H256, U256, + proofs, Block, BlockNumberOrTag, Bytes, ChainSpec, Header, IntoRecoveredTransaction, Receipt, + Receipts, SealedBlock, Withdrawal, B256, EMPTY_OMMER_ROOT, U256, }; use reth_provider::{BlockReaderIdExt, BlockSource, BundleStateWithReceipts, StateProviderFactory}; use reth_revm::{ @@ -34,7 +35,6 @@ use reth_revm::{ into_reth_log, state_change::{apply_beacon_root_contract_call, post_block_withdrawals_balance_increments}, }; -use reth_rlp::Encodable; use reth_tasks::TaskSpawner; use reth_transaction_pool::TransactionPool; use revm::{ @@ -256,7 +256,7 @@ impl Default for BasicPayloadJobGeneratorConfig { let mut extradata = BytesMut::new(); RETH_CLIENT_VERSION.as_bytes().encode(&mut extradata); Self { - extradata: extradata.freeze(), + extradata: extradata.freeze().into(), max_gas_limit: ETHEREUM_BLOCK_GAS_LIMIT, interval: Duration::from_secs(1), // 12s slot time @@ -787,7 +787,11 @@ where // 4788 contract call db.merge_transitions(BundleRetention::PlainState); - let bundle = BundleStateWithReceipts::new(db.take_bundle(), vec![receipts], block_number); + let bundle = BundleStateWithReceipts::new( + db.take_bundle(), + Receipts::from_vec(vec![receipts]), + block_number, + ); let receipts_root = bundle.receipts_root_slow(block_number).expect("Number is in range"); let logs_bloom = bundle.block_logs_bloom(block_number).expect("Number is in range"); @@ -839,7 +843,7 @@ where gas_limit: block_gas_limit, difficulty: U256::ZERO, gas_used: cumulative_gas_used, - extra_data: extra_data.into(), + extra_data, parent_beacon_block_root: attributes.parent_beacon_block_root, blob_gas_used, excess_blob_gas, @@ -907,7 +911,8 @@ where db.merge_transitions(BundleRetention::PlainState); // calculate the state root - let bundle_state = BundleStateWithReceipts::new(db.take_bundle(), vec![], block_number); + let bundle_state = + BundleStateWithReceipts::new(db.take_bundle(), Receipts::new(), block_number); let state_root = 
state.state_root(&bundle_state)?; let header = Header { @@ -929,7 +934,7 @@ where gas_used: 0, blob_gas_used: None, excess_blob_gas: None, - extra_data: extra_data.into(), + extra_data, parent_beacon_block_root: attributes.parent_beacon_block_root, }; @@ -944,7 +949,7 @@ where #[derive(Default)] struct WithdrawalsOutcome { withdrawals: Option>, - withdrawals_root: Option, + withdrawals_root: Option, } impl WithdrawalsOutcome { diff --git a/crates/payload/builder/Cargo.toml b/crates/payload/builder/Cargo.toml index b4dd6b1d73..05c0abe016 100644 --- a/crates/payload/builder/Cargo.toml +++ b/crates/payload/builder/Cargo.toml @@ -12,13 +12,13 @@ description = "reth payload builder" ## reth reth-primitives.workspace = true reth-rpc-types.workspace = true -reth-rlp.workspace = true reth-transaction-pool.workspace = true reth-interfaces.workspace = true reth-revm-primitives = { path = "../../revm/revm-primitives" } reth-rpc-types-compat.workspace = true ## ethereum +alloy-rlp.workspace = true revm-primitives.workspace = true ## async @@ -35,5 +35,8 @@ thiserror.workspace = true sha2 = { version = "0.10", default-features = false } tracing.workspace = true +[dev-dependencies] +revm.workspace = true + [features] test-utils = [] diff --git a/crates/payload/builder/src/database.rs b/crates/payload/builder/src/database.rs index 0de967c57d..8cf0a458b4 100644 --- a/crates/payload/builder/src/database.rs +++ b/crates/payload/builder/src/database.rs @@ -12,15 +12,14 @@ use std::{ /// A container type that caches reads from an underlying [DatabaseRef]. /// -/// This is intended to be used in conjunction with [State](reth_revm_primitives::db::State) +/// This is intended to be used in conjunction with `revm::db::State` /// during payload building which repeatedly accesses the same data. /// /// # Example /// /// ``` /// use reth_payload_builder::database::CachedReads; -/// use reth_revm_primitives::db::State; -/// use revm_primitives::db::DatabaseRef; +/// use revm::db::{DatabaseRef, State}; /// /// fn build_payload(db: DB) { /// let mut cached_reads = CachedReads::default(); @@ -119,7 +118,7 @@ impl<'a, DB: DatabaseRef> Database for CachedReadsDbMut<'a, DB> { /// A [DatabaseRef] that caches reads inside [CachedReads]. /// /// This is intended to be used as the [DatabaseRef] for -/// [State](reth_revm_primitives::db::State) for repeated payload build jobs. +/// `revm::db::State` for repeated payload build jobs. #[derive(Debug)] pub struct CachedReadsDBRef<'a, DB> { inner: RefCell>, diff --git a/crates/payload/builder/src/error.rs b/crates/payload/builder/src/error.rs index d709ba2286..bbaf2856f5 100644 --- a/crates/payload/builder/src/error.rs +++ b/crates/payload/builder/src/error.rs @@ -1,7 +1,7 @@ //! Error types emitted by types or implementations of this crate. use reth_interfaces::RethError; -use reth_primitives::H256; +use reth_primitives::B256; use reth_transaction_pool::BlobStoreError; use revm_primitives::EVMError; use tokio::sync::oneshot; @@ -11,7 +11,7 @@ use tokio::sync::oneshot; pub enum PayloadBuilderError { /// Thrown when the parent block is missing. #[error("missing parent block {0:?}")] - MissingParentBlock(H256), + MissingParentBlock(B256), /// A oneshot channel has been closed.
#[error("sender has been dropped")] ChannelClosed, diff --git a/crates/payload/builder/src/lib.rs b/crates/payload/builder/src/lib.rs index 1180ce8e08..a743553b99 100644 --- a/crates/payload/builder/src/lib.rs +++ b/crates/payload/builder/src/lib.rs @@ -95,7 +95,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/payload/builder/src/payload.rs b/crates/payload/builder/src/payload.rs index b7573588d7..f80f9d3952 100644 --- a/crates/payload/builder/src/payload.rs +++ b/crates/payload/builder/src/payload.rs @@ -1,10 +1,10 @@ //! Contains types required for building a payload. +use alloy_rlp::Encodable; use reth_primitives::{ - Address, BlobTransactionSidecar, ChainSpec, Header, SealedBlock, Withdrawal, H256, U256, + Address, BlobTransactionSidecar, ChainSpec, Header, SealedBlock, Withdrawal, B256, U256, }; use reth_revm_primitives::config::revm_spec_by_timestamp_after_merge; -use reth_rlp::Encodable; use reth_rpc_types::engine::{ ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, ExecutionPayloadV1, PayloadAttributes, PayloadId, @@ -122,17 +122,17 @@ pub struct PayloadBuilderAttributes { /// Id of the payload pub id: PayloadId, /// Parent block to build the payload on top - pub parent: H256, + pub parent: B256, /// Timestamp for the generated payload pub timestamp: u64, /// Address of the recipient for collecting transaction fee pub suggested_fee_recipient: Address, /// Randomness value for the generated payload - pub prev_randao: H256, + pub prev_randao: B256, /// Withdrawals for the generated payload pub withdrawals: Vec, /// Root of the parent beacon block - pub parent_beacon_block_root: Option, + pub parent_beacon_block_root: Option, } // === impl PayloadBuilderAttributes === @@ -141,7 +141,7 @@ impl PayloadBuilderAttributes { /// Creates a new payload builder for the given parent block and the attributes. /// /// Derives the unique [PayloadId] for the given parent and attributes - pub fn new(parent: H256, attributes: PayloadAttributes) -> Self { + pub fn new(parent: B256, attributes: PayloadAttributes) -> Self { let id = payload_id(&parent, &attributes); let withdraw = attributes.withdrawals.map( @@ -156,7 +156,7 @@ impl PayloadBuilderAttributes { Self { id, parent, - timestamp: attributes.timestamp.as_u64(), + timestamp: attributes.timestamp.to(), suggested_fee_recipient: attributes.suggested_fee_recipient, prev_randao: attributes.prev_randao, withdrawals: withdraw.unwrap_or_default(), @@ -225,13 +225,13 @@ impl PayloadBuilderAttributes { /// Generates the payload id for the configured payload /// /// Returns an 8-byte identifier by hashing the payload components with sha256 hash. 
-pub(crate) fn payload_id(parent: &H256, attributes: &PayloadAttributes) -> PayloadId { +pub(crate) fn payload_id(parent: &B256, attributes: &PayloadAttributes) -> PayloadId { use sha2::Digest; let mut hasher = sha2::Sha256::new(); - hasher.update(parent.as_bytes()); - hasher.update(&attributes.timestamp.as_u64().to_be_bytes()[..]); - hasher.update(attributes.prev_randao.as_bytes()); - hasher.update(attributes.suggested_fee_recipient.as_bytes()); + hasher.update(parent.as_slice()); + hasher.update(&attributes.timestamp.to::().to_be_bytes()[..]); + hasher.update(attributes.prev_randao.as_slice()); + hasher.update(attributes.suggested_fee_recipient.as_slice()); if let Some(withdrawals) = &attributes.withdrawals { let mut buf = Vec::new(); withdrawals.encode(&mut buf); diff --git a/crates/primitives/Cargo.toml b/crates/primitives/Cargo.toml index a89431e431..82c40d7604 100644 --- a/crates/primitives/Cargo.toml +++ b/crates/primitives/Cargo.toml @@ -10,26 +10,17 @@ description = "Commonly used types in reth." [dependencies] # reth -reth-rlp = { workspace = true, features = ["std", "derive", "ethereum-types"] } -reth-rlp-derive = { path = "../rlp/rlp-derive" } reth-codecs = { path = "../storage/codecs" } revm-primitives = { workspace = true, features = ["serde"] } # ethereum -ethers-core = { workspace = true, default-features = false } -tiny-keccak = { version = "2.0", features = ["keccak"] } -crunchy = { version = "0.2.2", default-features = false, features = ["limit_256"] } -ruint = { version = "1.9.0", features = ["primitive-types", "rlp"] } - -# Bloom -fixed-hash = { version = "0.8", default-features = false, features = ["rustc-hex"] } +alloy-primitives = { workspace = true, features = ["rand", "rlp"] } +alloy-rlp = { workspace = true, features = ["arrayvec"] } +alloy-sol-types.workspace = true +ethers-core = { workspace = true, default-features = false, optional = true } # crypto -secp256k1 = { workspace = true, default-features = false, features = [ - "global-context", - "alloc", - "recovery", -] } +secp256k1 = { workspace = true, features = ["global-context", "recovery"] } # for eip-4844 c-kzg = { workspace = true, features = ["serde"] } @@ -46,17 +37,15 @@ tokio-stream.workspace = true # misc bytes.workspace = true +byteorder = "1" serde.workspace = true serde_json.workspace = true serde_with = "3.3.0" thiserror.workspace = true sucds = "~0.6" -hex = "0.4" -hex-literal.workspace = true modular-bitfield = "0.11.2" derive_more = "0.99" url = "2.3" -impl-serde = "0.4.0" once_cell = "1.17.0" zstd = { version = "0.12", features = ["experimental"] } paste = "1.0" @@ -64,6 +53,7 @@ rayon = "1.7" tempfile = "3.3" sha2 = "0.10.7" itertools = "0.11" +num_enum = "0.7" # `test-utils` feature plain_hasher = { version = "0.2", optional = true } @@ -77,7 +67,6 @@ strum = { workspace = true, features = ["derive"] } [dev-dependencies] serde_json.workspace = true -hex-literal.workspace = true test-fuzz = "4" rand.workspace = true revm-primitives = { workspace = true, features = ["arbitrary"] } @@ -100,7 +89,7 @@ pprof = { version = "0.12", features = ["flamegraph", "frame-pointer", "criterio [features] default = [] arbitrary = ["revm-primitives/arbitrary", "dep:arbitrary", "dep:proptest", "dep:proptest-derive"] -test-utils = ["dep:plain_hasher", "dep:hash-db"] +test-utils = ["dep:plain_hasher", "dep:hash-db", "dep:ethers-core"] [[bench]] name = "recover_ecdsa_crit" diff --git a/crates/primitives/benches/recover_ecdsa_crit.rs b/crates/primitives/benches/recover_ecdsa_crit.rs index 7706b68a67..57893faa9f 
100644 --- a/crates/primitives/benches/recover_ecdsa_crit.rs +++ b/crates/primitives/benches/recover_ecdsa_crit.rs @@ -1,8 +1,7 @@ +use alloy_rlp::Decodable; use criterion::{criterion_group, criterion_main, Criterion}; -use hex_literal::hex; use pprof::criterion::{Output, PProfProfiler}; -use reth_primitives::TransactionSigned; -use reth_rlp::Decodable; +use reth_primitives::{hex_literal::hex, TransactionSigned}; /// Benchmarks the recovery of the public key from the ECDSA message using criterion. pub fn criterion_benchmark(c: &mut Criterion) { diff --git a/crates/primitives/benches/trie_root.rs b/crates/primitives/benches/trie_root.rs index 63afc32941..6baf543194 100644 --- a/crates/primitives/benches/trie_root.rs +++ b/crates/primitives/benches/trie_root.rs @@ -4,7 +4,7 @@ use proptest::{ strategy::{Strategy, ValueTree}, test_runner::TestRunner, }; -use reth_primitives::{proofs::triehash::KeccakHasher, ReceiptWithBloom, H256}; +use reth_primitives::{proofs::triehash::KeccakHasher, ReceiptWithBloom, B256}; /// Benchmarks different implementations of the root calculation. pub fn trie_root_benchmark(c: &mut Criterion) { @@ -14,34 +14,24 @@ pub fn trie_root_benchmark(c: &mut Criterion) { let group_name = |description: &str| format!("receipts root | size: {size} | {description}"); - let (test_data, expected) = generate_test_data(size); - use implementations::*; + let receipts = &generate_test_data(size)[..]; + assert_eq!(trie_hash_ordered_trie_root(receipts), hash_builder_root(receipts)); group.bench_function(group_name("triehash::ordered_trie_root"), |b| { - b.iter(|| { - let receipts = test_data.clone(); - let result = black_box(trie_hash_ordered_trie_root(receipts.into_iter())); - assert_eq!(result, expected); - }); + b.iter(|| trie_hash_ordered_trie_root(black_box(receipts))); }); group.bench_function(group_name("HashBuilder"), |b| { - b.iter(|| { - let receipts = test_data.clone(); - let result = black_box(hash_builder_root(receipts)); - assert_eq!(result, expected); - }); + b.iter(|| hash_builder_root(black_box(receipts))); }); } } -fn generate_test_data(size: usize) -> (Vec, H256) { - let receipts = prop::collection::vec(any::(), size) +fn generate_test_data(size: usize) -> Vec { + prop::collection::vec(any::(), size) .new_tree(&mut TestRunner::new(ProptestConfig::default())) .unwrap() - .current(); - let root = implementations::hash_builder_root(receipts.clone()); - (receipts, root) + .current() } criterion_group! 
{ @@ -53,23 +43,22 @@ criterion_main!(benches); mod implementations { use super::*; + use alloy_rlp::Encodable; use bytes::BytesMut; use reth_primitives::{ proofs::adjust_index_for_rlp, trie::{HashBuilder, Nibbles}, }; - use reth_rlp::Encodable; - use std::vec::IntoIter; - pub fn trie_hash_ordered_trie_root(receipts: IntoIter) -> H256 { - triehash::ordered_trie_root::(receipts.map(|receipt| { + pub fn trie_hash_ordered_trie_root(receipts: &[ReceiptWithBloom]) -> B256 { + triehash::ordered_trie_root::(receipts.iter().map(|receipt| { let mut receipt_rlp = Vec::new(); receipt.encode_inner(&mut receipt_rlp, false); receipt_rlp })) } - pub fn hash_builder_root(receipts: Vec) -> H256 { + pub fn hash_builder_root(receipts: &[ReceiptWithBloom]) -> B256 { let mut index_buffer = BytesMut::new(); let mut value_buffer = BytesMut::new(); @@ -90,3 +79,4 @@ mod implementations { hb.root() } } +use implementations::*; diff --git a/crates/primitives/src/abi.rs b/crates/primitives/src/abi.rs deleted file mode 100644 index fe89ccc0c6..0000000000 --- a/crates/primitives/src/abi.rs +++ /dev/null @@ -1,16 +0,0 @@ -//! Eth ABI helpers. -use crate::constants::SELECTOR_LEN; - -/// Returns the revert reason from the given output data, if it's an abi encoded String. Returns -/// `None` if the output is not long enough to contain a function selector or the content is not a -/// valid abi encoded String. -/// -/// **Note:** it's assumed the `out` buffer starts with the call's signature -pub fn decode_revert_reason(out: impl AsRef<[u8]>) -> Option { - use ethers_core::abi::AbiDecode; - let out = out.as_ref(); - if out.len() < SELECTOR_LEN { - return None - } - String::decode(&out[SELECTOR_LEN..]).ok() -} diff --git a/crates/primitives/src/account.rs b/crates/primitives/src/account.rs index 876c8e30c3..390663b0c0 100644 --- a/crates/primitives/src/account.rs +++ b/crates/primitives/src/account.rs @@ -1,8 +1,8 @@ -use crate::{H256, KECCAK_EMPTY, U256}; -use bytes::{Buf, Bytes}; -use fixed_hash::byteorder::{BigEndian, ReadBytesExt}; +use crate::{B256, KECCAK_EMPTY, U256}; +use byteorder::{BigEndian, ReadBytesExt}; +use bytes::Buf; use reth_codecs::{main_codec, Compact}; -use revm_primitives::{Bytecode as RevmBytecode, BytecodeState, JumpMap}; +use revm_primitives::{Bytecode as RevmBytecode, BytecodeState, Bytes, JumpMap}; use serde::{Deserialize, Serialize}; use std::ops::Deref; @@ -15,7 +15,7 @@ pub struct Account { /// Account balance. pub balance: U256, /// Hash of the account's bytecode. - pub bytecode_hash: Option, + pub bytecode_hash: Option, } impl Account { @@ -37,7 +37,7 @@ impl Account { /// Returns an account bytecode's hash. /// In case of no bytecode, returns [`KECCAK_EMPTY`]. 
- pub fn get_bytecode_hash(&self) -> H256 { + pub fn get_bytecode_hash(&self) -> B256 { match self.bytecode_hash { Some(hash) => hash, None => KECCAK_EMPTY, @@ -101,7 +101,7 @@ impl Compact for Bytecode { Self: Sized, { let len = buf.read_u32::().expect("could not read bytecode length"); - let bytes = buf.copy_to_bytes(len as usize); + let bytes = Bytes::from(buf.copy_to_bytes(len as usize)); let variant = buf.read_u8().expect("could not read bytecode variant"); let decoded = match variant { 0 => Bytecode(RevmBytecode::new_raw(bytes)), @@ -124,7 +124,7 @@ impl Compact for Bytecode { #[cfg(test)] mod tests { use super::*; - use hex_literal::hex; + use crate::hex_literal::hex; #[test] fn test_account() { diff --git a/crates/primitives/src/bits.rs b/crates/primitives/src/bits.rs deleted file mode 100644 index daa377b471..0000000000 --- a/crates/primitives/src/bits.rs +++ /dev/null @@ -1,76 +0,0 @@ -//! Fixed hash types -#![allow(clippy::non_canonical_clone_impl)] - -use bytes::Buf; -use derive_more::{AsRef, Deref}; -use fixed_hash::construct_fixed_hash; -use impl_serde::impl_fixed_hash_serde; -use reth_codecs::{impl_hash_compact, Compact}; -use reth_rlp::{RlpDecodableWrapper, RlpEncodableWrapper, RlpMaxEncodedLen}; - -/// Implements a fixed hash type (eg. H512) with `serde`, `Arbitrary`, `proptest::Arbitrary` and -/// `Compact` support. -#[macro_export] -macro_rules! impl_fixed_hash_type { - ($(($name:tt, $size:expr)),+) => { - - #[cfg(any(test, feature = "arbitrary"))] - use proptest::{ - arbitrary::{any_with, ParamsFor}, - strategy::{BoxedStrategy, Strategy}, - }; - - #[cfg(any(test, feature = "arbitrary"))] - use arbitrary::Arbitrary; - - $( - construct_fixed_hash! { - #[cfg_attr(any(test, feature = "arbitrary"), derive(Arbitrary))] - #[derive(AsRef, Deref, RlpEncodableWrapper, RlpDecodableWrapper, RlpMaxEncodedLen)] - #[doc = concat!(stringify!($name), " fixed hash type.")] - pub struct $name($size); - } - - impl_hash_compact!($name); - - impl_fixed_hash_serde!($name, $size); - - #[cfg(any(test, feature = "arbitrary"))] - impl proptest::arbitrary::Arbitrary for $name { - type Parameters = ParamsFor; - type Strategy = BoxedStrategy<$name>; - - fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { - proptest::collection::vec(any_with::(args), $size) - .prop_map(move |vec| $name::from_slice(&vec)) - .boxed() - } - } - )+ - - #[cfg(test)] - mod hash_tests { - use super::*; - - #[test] - fn arbitrary() { - $( - proptest::proptest!(|(field: $name)| { - let mut buf = vec![]; - field.to_compact(&mut buf); - - // Add noise. We want to make sure that only $size bytes get consumed. 
- buf.push(1); - - let (decoded, remaining_buf) = $name::from_compact(&buf, buf.len()); - - assert!(field == decoded); - assert!(remaining_buf.len() == 1); - }); - )+ - } - } - }; -} - -impl_fixed_hash_type!((H64, 8), (H512, 64)); diff --git a/crates/primitives/src/block.rs b/crates/primitives/src/block.rs index c9be7194ad..36cfdceaf1 100644 --- a/crates/primitives/src/block.rs +++ b/crates/primitives/src/block.rs @@ -1,9 +1,8 @@ use crate::{ - Address, BlockHash, BlockNumber, Header, SealedHeader, TransactionSigned, Withdrawal, H256, U64, + Address, BlockHash, BlockNumber, Header, SealedHeader, TransactionSigned, Withdrawal, B256, U64, }; -use fixed_hash::rustc_hex::FromHexError; +use alloy_rlp::{Decodable, Encodable, Error as RlpError, RlpDecodable, RlpEncodable}; use reth_codecs::derive_arbitrary; -use reth_rlp::{Decodable, DecodeError, Encodable, RlpDecodable, RlpEncodable}; use serde::{ de::{MapAccess, Visitor}, ser::SerializeStruct, @@ -44,7 +43,7 @@ impl Block { /// Seal the block with a known hash. /// /// WARNING: This method does not perform validation whether the hash is correct. - pub fn seal(self, hash: H256) -> SealedBlock { + pub fn seal(self, hash: B256) -> SealedBlock { SealedBlock { header: self.header.seal(hash), body: self.body, @@ -145,7 +144,7 @@ impl SealedBlock { } /// Header hash. - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { self.header.hash() } @@ -275,7 +274,7 @@ impl std::ops::DerefMut for SealedBlockWithSenders { #[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum BlockHashOrNumber { /// A block hash - Hash(H256), + Hash(B256), /// A block number Number(u64), } @@ -293,8 +292,8 @@ impl BlockHashOrNumber { } } -impl From for BlockHashOrNumber { - fn from(value: H256) -> Self { +impl From for BlockHashOrNumber { + fn from(value: B256) -> Self { BlockHashOrNumber::Hash(value) } } @@ -323,15 +322,15 @@ impl Encodable for BlockHashOrNumber { /// Allows for RLP decoding of a block hash or block number impl Decodable for BlockHashOrNumber { - fn decode(buf: &mut &[u8]) -> Result { - let header: u8 = *buf.first().ok_or(DecodeError::InputTooShort)?; + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { + let header: u8 = *buf.first().ok_or(RlpError::InputTooShort)?; // if the byte string is exactly 32 bytes, decode it into a Hash // 0xa0 = 0x80 (start of string) + 0x20 (32, length of string) if header == 0xa0 { // strip the first byte, parsing the rest of the string. // If the rest of the string fails to decode into 32 bytes, we'll bubble up the // decoding error. 
- let hash = H256::decode(buf)?; + let hash = B256::decode(buf)?; Ok(Self::Hash(hash)) } else { // a block number when encoded as bytes ranges from 0 to any number of bytes - we're @@ -348,7 +347,7 @@ impl Decodable for BlockHashOrNumber { pub struct ParseBlockHashOrNumberError { input: String, pares_int_error: ParseIntError, - hex_error: FromHexError, + hex_error: crate::hex::FromHexError, } impl FromStr for BlockHashOrNumber { @@ -357,7 +356,7 @@ impl FromStr for BlockHashOrNumber { fn from_str(s: &str) -> Result { match u64::from_str(s) { Ok(val) => Ok(val.into()), - Err(pares_int_error) => match H256::from_str(s) { + Err(pares_int_error) => match B256::from_str(s) { Ok(val) => Ok(val.into()), Err(hex_error) => Err(ParseBlockHashOrNumberError { input: s.to_string(), @@ -383,7 +382,7 @@ pub enum BlockId { impl BlockId { /// Returns the block hash if it is [BlockId::Hash] - pub fn as_block_hash(&self) -> Option { + pub fn as_block_hash(&self) -> Option { match self { BlockId::Hash(hash) => Some(hash.block_hash), BlockId::Number(_) => None, @@ -413,14 +412,14 @@ impl From for BlockId { } } -impl From for BlockId { - fn from(block_hash: H256) -> Self { +impl From for BlockId { + fn from(block_hash: B256) -> Self { BlockId::Hash(RpcBlockHash { block_hash, require_canonical: None }) } } -impl From<(H256, Option)> for BlockId { - fn from(hash_can: (H256, Option)) -> Self { +impl From<(B256, Option)> for BlockId { + fn from(hash_can: (B256, Option)) -> Self { BlockId::Hash(RpcBlockHash { block_hash: hash_can.0, require_canonical: hash_can.1 }) } } @@ -434,7 +433,7 @@ impl From for BlockId { impl From for BlockId { fn from(value: BlockHashOrNumber) -> Self { match value { - BlockHashOrNumber::Hash(hash) => H256::from(hash.0).into(), + BlockHashOrNumber::Hash(hash) => B256::from(hash.0).into(), BlockHashOrNumber::Number(number) => number.into(), } } @@ -480,7 +479,7 @@ impl<'de> Deserialize<'de> for BlockId { // Since there is no way to clearly distinguish between a DATA parameter and a QUANTITY parameter. 
A str is therefor deserialized into a Block Number: // However, since the hex string should be a QUANTITY, we can safely assume that if the len is 66 bytes, it is in fact a hash, ref if v.len() == 66 { - Ok(BlockId::Hash(v.parse::().map_err(serde::de::Error::custom)?.into())) + Ok(BlockId::Hash(v.parse::().map_err(serde::de::Error::custom)?.into())) } else { // quantity hex string or tag Ok(BlockId::Number(v.parse().map_err(serde::de::Error::custom)?)) @@ -512,7 +511,7 @@ impl<'de> Deserialize<'de> for BlockId { return Err(serde::de::Error::duplicate_field("blockHash")) } - block_hash = Some(map.next_value::()?); + block_hash = Some(map.next_value::()?); } "requireCanonical" => { if number.is_some() || require_canonical.is_some() { @@ -612,7 +611,7 @@ impl From for BlockNumberOrTag { impl From for BlockNumberOrTag { fn from(num: U64) -> Self { - num.as_u64().into() + num.into_limbs()[0].into() } } @@ -705,31 +704,31 @@ pub struct HexStringMissingPrefixError; #[derive(Copy, Clone, Debug, PartialEq, Eq, Serialize)] pub struct RpcBlockHash { /// A block hash - pub block_hash: H256, + pub block_hash: B256, /// Whether the block must be a canonical block pub require_canonical: Option, } impl RpcBlockHash { - pub fn from_hash(block_hash: H256, require_canonical: Option) -> Self { + pub fn from_hash(block_hash: B256, require_canonical: Option) -> Self { RpcBlockHash { block_hash, require_canonical } } } -impl From for RpcBlockHash { - fn from(value: H256) -> Self { +impl From for RpcBlockHash { + fn from(value: B256) -> Self { Self::from_hash(value, None) } } -impl From for H256 { +impl From for B256 { fn from(value: RpcBlockHash) -> Self { value.block_hash } } -impl AsRef for RpcBlockHash { - fn as_ref(&self) -> &H256 { +impl AsRef for RpcBlockHash { + fn as_ref(&self) -> &B256 { &self.block_hash } } @@ -831,18 +830,18 @@ impl BlockBody { } /// Calculate the transaction root for the block body. - pub fn calculate_tx_root(&self) -> H256 { + pub fn calculate_tx_root(&self) -> B256 { crate::proofs::calculate_transaction_root(&self.transactions) } /// Calculate the ommers root for the block body. - pub fn calculate_ommers_root(&self) -> H256 { + pub fn calculate_ommers_root(&self) -> B256 { crate::proofs::calculate_ommers_root(&self.ommers) } /// Calculate the withdrawals root for the block body, if withdrawals exist. If there are no /// withdrawals, this will return `None`. - pub fn calculate_withdrawals_root(&self) -> Option { + pub fn calculate_withdrawals_root(&self) -> Option { self.withdrawals.as_ref().map(|w| crate::proofs::calculate_withdrawals_root(w)) } @@ -877,16 +876,17 @@ impl BlockBody { #[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize, Hash)] pub struct BlockBodyRoots { /// The transaction root for the block body. - pub tx_root: H256, + pub tx_root: B256, /// The ommers hash for the block body. - pub ommers_hash: H256, + pub ommers_hash: B256, /// The withdrawals root for the block body, if withdrawals exist. - pub withdrawals_root: Option, + pub withdrawals_root: Option, } #[cfg(test)] mod test { use super::{BlockId, BlockNumberOrTag::*, *}; + use crate::hex_literal::hex; /// Check parsing according to EIP-1898. 
#[test] @@ -901,7 +901,7 @@ mod test { #[test] fn can_parse_block_hash() { let block_hash = - H256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3") + B256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3") .unwrap(); let block_hash_json = serde_json::json!( { "blockHash": "0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"} @@ -912,7 +912,7 @@ mod test { #[test] fn can_parse_block_hash_with_canonical() { let block_hash = - H256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3") + B256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3") .unwrap(); let block_id = BlockId::Hash(RpcBlockHash::from_hash(block_hash, Some(true))); let block_hash_json = serde_json::json!( @@ -960,7 +960,7 @@ mod test { #[test] fn serde_blockid_hash() { - let block_id = BlockId::from(H256::default()); + let block_id = BlockId::from(B256::default()); let serialized = serde_json::to_string(&block_id).unwrap(); let deserialized: BlockId = serde_json::from_str(&serialized).unwrap(); assert_eq!(deserialized, block_id) @@ -969,7 +969,7 @@ mod test { #[test] fn serde_blockid_hash_from_str() { let val = "\"0x898753d8fdd8d92c1907ca21e68c7970abd290c647a202091181deec3f30a0b2\""; - let block_hash: H256 = serde_json::from_str(val).unwrap(); + let block_hash: B256 = serde_json::from_str(val).unwrap(); let block_id: BlockId = serde_json::from_str(val).unwrap(); assert_eq!(block_id, BlockId::Hash(block_hash.into())); } @@ -989,7 +989,7 @@ mod test { let block_id_param = value.pointer("/params/1").unwrap().to_string(); let block_id: BlockId = serde_json::from_str::(&block_id_param).unwrap(); let hash = - H256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3") + B256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3") .unwrap(); assert_eq!(BlockId::from(hash), block_id); let serialized = serde_json::to_string(&BlockId::from(hash)).unwrap(); @@ -1017,7 +1017,7 @@ mod test { let payload = r#"{"blockHash": "0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"}"#; let parsed = serde_json::from_str::(payload).unwrap(); let expected = BlockId::from( - H256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3") + B256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3") .unwrap(), ); assert_eq!(parsed, expected); @@ -1025,13 +1025,12 @@ mod test { #[test] fn encode_decode_raw_block() { - let block = 
"0xf90288f90218a0fe21bb173f43067a9f90cfc59bbb6830a7a2929b5de4a61f372a9db28e87f9aea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a061effbbcca94f0d3e02e5bd22e986ad57142acabf0cb3d129a6ad8d0f8752e94a0d911c25e97e27898680d242b7780b6faef30995c355a2d5de92e6b9a7212ad3aa0056b23fbba480696b65fe5a59b8f2148a1299103c4f57df839233af2cf4ca2d2b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008003834c4b408252081e80a00000000000000000000000000000000000000000000000000000000000000000880000000000000000842806be9da056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421f869f86702842806be9e82520894658bdf435d810c91414ec09147daa6db624063798203e880820a95a040ce7918eeb045ebf8c8b1887ca139d076bda00fa828a07881d442a72626c42da0156576a68e456e295e4c9cf67cf9f53151f329438916e0f24fc69d6bbb7fbacfc0c0"; - let bytes = hex::decode(&block[2..]).unwrap(); + let bytes = hex!("f90288f90218a0fe21bb173f43067a9f90cfc59bbb6830a7a2929b5de4a61f372a9db28e87f9aea01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347940000000000000000000000000000000000000000a061effbbcca94f0d3e02e5bd22e986ad57142acabf0cb3d129a6ad8d0f8752e94a0d911c25e97e27898680d242b7780b6faef30995c355a2d5de92e6b9a7212ad3aa0056b23fbba480696b65fe5a59b8f2148a1299103c4f57df839233af2cf4ca2d2b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008003834c4b408252081e80a00000000000000000000000000000000000000000000000000000000000000000880000000000000000842806be9da056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421f869f86702842806be9e82520894658bdf435d810c91414ec09147daa6db624063798203e880820a95a040ce7918eeb045ebf8c8b1887ca139d076bda00fa828a07881d442a72626c42da0156576a68e456e295e4c9cf67cf9f53151f329438916e0f24fc69d6bbb7fbacfc0c0"); let bytes_buf = &mut bytes.as_ref(); let block = Block::decode(bytes_buf).unwrap(); let mut encoded_buf = Vec::new(); block.encode(&mut encoded_buf); - assert_eq!(bytes, encoded_buf); + assert_eq!(bytes[..], encoded_buf); } #[test] diff --git a/crates/primitives/src/bloom.rs b/crates/primitives/src/bloom.rs deleted file mode 100644 index 947a2a1286..0000000000 --- a/crates/primitives/src/bloom.rs +++ /dev/null @@ -1,259 +0,0 @@ -//! Bloom type. -//! -//! 
Adapted from -#![allow(missing_docs)] -#![allow(clippy::non_canonical_clone_impl)] - -use crate::{impl_fixed_hash_type, keccak256, Log}; -use bytes::Buf; -use core::{mem, ops}; -use crunchy::unroll; -use derive_more::{AsRef, Deref}; -use fixed_hash::*; -use impl_serde::impl_fixed_hash_serde; -use reth_codecs::{impl_hash_compact, Compact}; -use reth_rlp::{RlpDecodableWrapper, RlpEncodableWrapper, RlpMaxEncodedLen}; - -/// Length of bloom filter used for Ethereum. -pub const BLOOM_BITS: u32 = 3; -pub const BLOOM_SIZE: usize = 256; - -impl_fixed_hash_type!((Bloom, BLOOM_SIZE)); - -/// Returns log2. -fn log2(x: usize) -> u32 { - if x <= 1 { - return 0 - } - - let n = x.leading_zeros(); - mem::size_of::() as u32 * 8 - n -} - -#[derive(Debug)] -pub enum Input<'a> { - Raw(&'a [u8]), - Hash(&'a [u8; 32]), -} - -enum Hash<'a> { - Ref(&'a [u8; 32]), - Owned([u8; 32]), -} - -impl<'a> From> for Hash<'a> { - fn from(input: Input<'a>) -> Self { - match input { - Input::Raw(raw) => Hash::Owned(keccak256(raw).0), - Input::Hash(hash) => Hash::Ref(hash), - } - } -} - -impl<'a> ops::Index for Hash<'a> { - type Output = u8; - - fn index(&self, index: usize) -> &u8 { - match *self { - Hash::Ref(r) => &r[index], - Hash::Owned(ref hash) => &hash[index], - } - } -} - -impl<'a> Hash<'a> { - fn len(&self) -> usize { - match *self { - Hash::Ref(r) => r.len(), - Hash::Owned(ref hash) => hash.len(), - } - } -} - -impl<'a> PartialEq> for Bloom { - fn eq(&self, other: &BloomRef<'a>) -> bool { - let s_ref: &[u8] = &self.0; - let o_ref: &[u8] = other.0; - s_ref.eq(o_ref) - } -} - -impl<'a> From> for Bloom { - fn from(input: Input<'a>) -> Bloom { - let mut bloom = Bloom::default(); - bloom.accrue(input); - bloom - } -} - -impl Bloom { - pub fn contains_bloom<'a, B>(&self, bloom: B) -> bool - where - BloomRef<'a>: From, - { - let bloom_ref: BloomRef<'_> = bloom.into(); - self.contains_bloom_ref(bloom_ref) - } - - fn contains_bloom_ref(&self, bloom: BloomRef<'_>) -> bool { - let self_ref: BloomRef<'_> = self.into(); - self_ref.contains_bloom(bloom) - } - - pub fn accrue(&mut self, input: Input<'_>) { - let p = BLOOM_BITS; - - let m = self.0.len(); - let bloom_bits = m * 8; - let mask = bloom_bits - 1; - let bloom_bytes = (log2(bloom_bits) + 7) / 8; - - let hash: Hash<'_> = input.into(); - - // must be a power of 2 - assert_eq!(m & (m - 1), 0); - // out of range - assert!(p * bloom_bytes <= hash.len() as u32); - - let mut ptr = 0; - - assert_eq!(BLOOM_BITS, 3); - unroll! 
{ - for i in 0..3 { - let _ = i; - let mut index = 0_usize; - for _ in 0..bloom_bytes { - index = (index << 8) | hash[ptr] as usize; - ptr += 1; - } - index &= mask; - self.0[m - 1 - index / 8] |= 1 << (index % 8); - } - } - } - - pub fn accrue_bloom<'a, B>(&mut self, bloom: B) - where - BloomRef<'a>: From, - { - let bloom_ref: BloomRef<'_> = bloom.into(); - assert_eq!(self.0.len(), BLOOM_SIZE); - assert_eq!(bloom_ref.0.len(), BLOOM_SIZE); - for i in 0..BLOOM_SIZE { - self.0[i] |= bloom_ref.0[i]; - } - } - - pub fn data(&self) -> &[u8; BLOOM_SIZE] { - &self.0 - } -} - -#[derive(Clone, Copy, Debug)] -pub struct BloomRef<'a>(&'a [u8; BLOOM_SIZE]); - -impl<'a> BloomRef<'a> { - /// Returns `true` if bloom only consists of `0` - pub fn is_empty(&self) -> bool { - self.0.iter().all(|x| *x == 0) - } - - pub fn contains_bloom<'b, B>(&self, bloom: B) -> bool - where - BloomRef<'b>: From, - { - let bloom_ref: BloomRef<'_> = bloom.into(); - assert_eq!(self.0.len(), BLOOM_SIZE); - assert_eq!(bloom_ref.0.len(), BLOOM_SIZE); - for i in 0..BLOOM_SIZE { - let a = self.0[i]; - let b = bloom_ref.0[i]; - if (a & b) != b { - return false - } - } - true - } - - pub fn data(&self) -> &'a [u8; BLOOM_SIZE] { - self.0 - } -} - -impl<'a> From<&'a [u8; BLOOM_SIZE]> for BloomRef<'a> { - fn from(data: &'a [u8; BLOOM_SIZE]) -> Self { - BloomRef(data) - } -} - -impl<'a> From<&'a Bloom> for BloomRef<'a> { - fn from(bloom: &'a Bloom) -> Self { - BloomRef(&bloom.0) - } -} - -// See Section 4.3.1 "Transaction Receipt" of the Yellow Paper -fn m3_2048(bloom: &mut Bloom, x: &[u8]) { - let hash = keccak256(x); - let h: &[u8; 32] = hash.as_ref(); - for i in [0, 2, 4] { - let bit = (h[i + 1] as usize + ((h[i] as usize) << 8)) & 0x7FF; - bloom.0[BLOOM_SIZE - 1 - bit / 8] |= 1 << (bit % 8); - } -} - -/// Calculate receipt logs bloom. 
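For reference, the `m3_2048` routine above (used by `logs_bloom` just below, both part of the module removed here) is the Yellow Paper's M3:2048 scheme: each item sets three bits of the 256-byte filter, with every bit index taken as 11 bits from the first six bytes of the item's Keccak-256 hash. A standalone sketch over a plain byte array; the `keccak256` parameter is only a stand-in for any Keccak-256 implementation and is not part of this diff:

fn bloom_insert(bloom: &mut [u8; 256], keccak256: impl Fn(&[u8]) -> [u8; 32], item: &[u8]) {
    let hash = keccak256(item);
    // Bit indices come from byte pairs (0,1), (2,3) and (4,5), each masked to 11 bits (0..2048).
    for i in [0usize, 2, 4] {
        let bit = (hash[i + 1] as usize + ((hash[i] as usize) << 8)) & 0x7FF;
        bloom[255 - bit / 8] |= 1 << (bit % 8);
    }
}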
-pub fn logs_bloom<'a, It>(logs: It) -> Bloom -where - It: IntoIterator, -{ - let mut bloom = Bloom::zero(); - for log in logs { - m3_2048(&mut bloom, log.address.as_bytes()); - for topic in &log.topics { - m3_2048(&mut bloom, topic.as_bytes()); - } - } - bloom -} - -#[cfg(test)] -mod tests { - use super::*; - use crate::hex_literal::hex; - - #[test] - fn hardcoded_bloom() { - let logs = vec![ - Log { - address: hex!("22341ae42d6dd7384bc8584e50419ea3ac75b83f").into(), - topics: vec![hex!( - "04491edcd115127caedbd478e2e7895ed80c7847e903431f94f9cfa579cad47f" - ) - .into()], - data: vec![].into(), - }, - Log { - address: hex!("e7fb22dfef11920312e4989a3a2b81e2ebf05986").into(), - topics: vec![ - hex!("7f1fef85c4b037150d3675218e0cdb7cf38fea354759471e309f3354918a442f").into(), - hex!("d85629c7eaae9ea4a10234fed31bc0aeda29b2683ebe0c1882499d272621f6b6").into(), - ], - data: hex::decode("2d690516512020171c1ec870f6ff45398cc8609250326be89915fb538e7b") - .unwrap() - .into(), - }, - ]; - assert_eq!( - logs_bloom(&logs), - Bloom::from(hex!( - "000000000000000000810000000000000000000000000000000000020000000000000000000000000000008000" - "000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000" - "000000000000000000000000000000000000000200000000000000000000000000000000000000000000000000" - "000000000000000000000000000000000000000000000000000000280000000000400000800000004000000000" - "000000000000000000000000000000000000000000000000000000000000100000100000000000000000000000" - "00000000001400000000000000008000000000000000000000000000000000" - )) - ); - } -} diff --git a/crates/primitives/src/chain/info.rs b/crates/primitives/src/chain/info.rs index 7a7d4b0ee4..2084c23274 100644 --- a/crates/primitives/src/chain/info.rs +++ b/crates/primitives/src/chain/info.rs @@ -1,10 +1,10 @@ -use crate::{BlockNumber, H256}; +use crate::{BlockNumber, B256}; /// Current status of the blockchain's head. #[derive(Default, Clone, Debug, Eq, PartialEq)] pub struct ChainInfo { /// The block hash of the highest fully synced block. - pub best_hash: H256, + pub best_hash: B256, /// The block number of the highest fully synced block. pub best_number: BlockNumber, } diff --git a/crates/primitives/src/chain/mod.rs b/crates/primitives/src/chain/mod.rs index 5ffdf9c2da..f2867256c8 100644 --- a/crates/primitives/src/chain/mod.rs +++ b/crates/primitives/src/chain/mod.rs @@ -3,10 +3,12 @@ use crate::{ net::{goerli_nodes, mainnet_nodes, sepolia_nodes}, NodeRecord, U256, U64, }; +use alloy_rlp::{Decodable, Encodable}; +use num_enum::TryFromPrimitive; use reth_codecs::add_arbitrary_tests; -use reth_rlp::{Decodable, Encodable}; use serde::{Deserialize, Serialize}; use std::{fmt, str::FromStr}; +use strum::{AsRefStr, EnumCount, EnumIter, EnumString, EnumVariantNames}; // The chain spec module. mod spec; @@ -19,12 +21,76 @@ pub use spec::{ mod info; pub use info::ChainInfo; +/// An Ethereum EIP-155 chain. 
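The `NamedChain` enum introduced just below replaces the `ethers_core::types::Chain` dependency, and its derives provide the string and numeric conversions the rest of this hunk relies on. A rough illustration, assuming `NamedChain` is in scope; the values follow the discriminants and tests shown further down:

use std::str::FromStr;

fn named_chain_conversions() {
    assert_eq!(NamedChain::from_str("goerli").unwrap(), NamedChain::Goerli); // EnumString
    assert_eq!(u64::from(NamedChain::Goerli), 5); // manual From<NamedChain> for u64 below
    assert_eq!(NamedChain::try_from(10u64).unwrap(), NamedChain::Optimism); // TryFromPrimitive
    assert_eq!(NamedChain::Mainnet.to_string(), "mainnet"); // Display via AsRefStr
}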
+#[derive( + Clone, + Copy, + Debug, + PartialEq, + Eq, + PartialOrd, + Ord, + Hash, + AsRefStr, // AsRef, fmt::Display + EnumVariantNames, // Chain::VARIANTS + EnumString, // FromStr, TryFrom<&str> + EnumIter, // Chain::iter + EnumCount, // Chain::COUNT + TryFromPrimitive, // TryFrom + Deserialize, + Serialize, +)] +#[serde(rename_all = "snake_case")] +#[strum(serialize_all = "snake_case")] +#[repr(u64)] +#[allow(missing_docs)] +pub enum NamedChain { + Mainnet = 1, + Morden = 2, + Ropsten = 3, + Rinkeby = 4, + Goerli = 5, + Kovan = 42, + Holesky = 17000, + Sepolia = 11155111, + + Optimism = 10, + OptimismKovan = 69, + OptimismGoerli = 420, + + Arbitrum = 42161, + ArbitrumTestnet = 421611, + ArbitrumGoerli = 421613, + ArbitrumNova = 42170, + + #[serde(alias = "bsc")] + #[strum(to_string = "bsc")] + BinanceSmartChain = 56, + #[serde(alias = "bsc_testnet")] + #[strum(to_string = "bsc_testnet")] + BinanceSmartChainTestnet = 97, + + Dev = 1337, +} + +impl From for u64 { + fn from(value: NamedChain) -> Self { + value as u64 + } +} + +impl fmt::Display for NamedChain { + fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { + self.as_ref().fmt(f) + } +} + /// Either a named or chain id or the actual id value #[add_arbitrary_tests(rlp)] #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Serialize, Deserialize)] pub enum Chain { /// Contains a known chain - Named(ethers_core::types::Chain), + Named(NamedChain), /// Contains the id of a chain Id(u64), } @@ -32,27 +98,27 @@ pub enum Chain { impl Chain { /// Returns the mainnet chain. pub const fn mainnet() -> Self { - Chain::Named(ethers_core::types::Chain::Mainnet) + Chain::Named(NamedChain::Mainnet) } /// Returns the goerli chain. pub const fn goerli() -> Self { - Chain::Named(ethers_core::types::Chain::Goerli) + Chain::Named(NamedChain::Goerli) } /// Returns the sepolia chain. pub const fn sepolia() -> Self { - Chain::Named(ethers_core::types::Chain::Sepolia) + Chain::Named(NamedChain::Sepolia) } /// Returns the holesky chain. pub const fn holesky() -> Self { - Chain::Named(ethers_core::types::Chain::Holesky) + Chain::Named(NamedChain::Holesky) } /// Returns the dev chain. pub const fn dev() -> Self { - Chain::Named(ethers_core::types::Chain::Dev) + Chain::Named(NamedChain::Dev) } /// The id of the chain @@ -63,25 +129,16 @@ impl Chain { } } - /// Helper function for checking if a chainid corresponds to a legacy chainid - /// without eip1559 - pub fn is_legacy(&self) -> bool { - match self { - Chain::Named(c) => c.is_legacy(), - Chain::Id(_) => false, - } - } - /// Returns the address of the public DNS node list for the given chain. /// /// See also pub fn public_dns_network_protocol(self) -> Option { - use ethers_core::types::Chain::*; + use NamedChain as C; const DNS_PREFIX: &str = "enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@"; - let named: ethers_core::types::Chain = self.try_into().ok()?; + let named: NamedChain = self.try_into().ok()?; - if matches!(named, Mainnet | Goerli | Sepolia | Ropsten | Rinkeby) { + if matches!(named, C::Mainnet | C::Goerli | C::Sepolia | C::Ropsten | C::Rinkeby) { return Some(format!("{DNS_PREFIX}all.{}.ethdisco.net", named.as_ref().to_lowercase())) } None @@ -89,12 +146,12 @@ impl Chain { /// Returns bootnodes for the given chain. pub fn bootnodes(self) -> Option> { - use ethers_core::types::Chain::*; + use NamedChain as C; match self.try_into().ok()? 
{ - Mainnet => Some(mainnet_nodes()), - Goerli => Some(goerli_nodes()), - Sepolia => Some(sepolia_nodes()), - Holesky => Some(holesky_nodes()), + C::Mainnet => Some(mainnet_nodes()), + C::Goerli => Some(goerli_nodes()), + C::Sepolia => Some(sepolia_nodes()), + C::Holesky => Some(holesky_nodes()), _ => None, } } @@ -105,7 +162,7 @@ impl fmt::Display for Chain { match self { Chain::Named(chain) => chain.fmt(f), Chain::Id(id) => { - if let Ok(chain) = ethers_core::types::Chain::try_from(*id) { + if let Ok(chain) = NamedChain::try_from(*id) { chain.fmt(f) } else { id.fmt(f) @@ -115,15 +172,15 @@ impl fmt::Display for Chain { } } -impl From for Chain { - fn from(id: ethers_core::types::Chain) -> Self { +impl From for Chain { + fn from(id: NamedChain) -> Self { Chain::Named(id) } } impl From for Chain { fn from(id: u64) -> Self { - ethers_core::types::Chain::try_from(id).map(Chain::Named).unwrap_or_else(|_| Chain::Id(id)) + NamedChain::try_from(id).map(Chain::Named).unwrap_or_else(|_| Chain::Id(id)) } } @@ -144,7 +201,7 @@ impl From for u64 { impl From for U64 { fn from(c: Chain) -> Self { - u64::from(c).into() + U64::from(u64::from(c)) } } @@ -154,8 +211,8 @@ impl From for U256 { } } -impl TryFrom for ethers_core::types::Chain { - type Error = >::Error; +impl TryFrom for NamedChain { + type Error = >::Error; fn try_from(chain: Chain) -> Result { match chain { @@ -169,7 +226,7 @@ impl FromStr for Chain { type Err = String; fn from_str(s: &str) -> Result { - if let Ok(chain) = ethers_core::types::Chain::from_str(s) { + if let Ok(chain) = NamedChain::from_str(s) { Ok(Chain::Named(chain)) } else { s.parse::() @@ -180,7 +237,7 @@ impl FromStr for Chain { } impl Encodable for Chain { - fn encode(&self, out: &mut dyn reth_rlp::BufMut) { + fn encode(&self, out: &mut dyn alloy_rlp::BufMut) { match self { Self::Named(chain) => u64::from(*chain).encode(out), Self::Id(id) => id.encode(out), @@ -195,14 +252,14 @@ impl Encodable for Chain { } impl Decodable for Chain { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { Ok(u64::decode(buf)?.into()) } } impl Default for Chain { fn default() -> Self { - ethers_core::types::Chain::Mainnet.into() + NamedChain::Mainnet.into() } } @@ -210,9 +267,9 @@ impl Default for Chain { impl<'a> arbitrary::Arbitrary<'a> for Chain { fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { if u.ratio(1, 2)? 
{ - let chain = u.int_in_range(0..=(ethers_core::types::Chain::COUNT - 1))?; + let chain = u.int_in_range(0..=(NamedChain::COUNT - 1))?; - return Ok(Chain::Named(ethers_core::types::Chain::iter().nth(chain).expect("in range"))) + return Ok(Chain::Named(NamedChain::iter().nth(chain).expect("in range"))) } Ok(Self::Id(u64::arbitrary(u)?)) @@ -220,7 +277,7 @@ impl<'a> arbitrary::Arbitrary<'a> for Chain { } #[cfg(any(test, feature = "arbitrary"))] -use strum::{EnumCount, IntoEnumIterator}; +use strum::IntoEnumIterator; #[cfg(any(test, feature = "arbitrary"))] use proptest::{ @@ -234,8 +291,8 @@ use proptest::{ impl proptest::arbitrary::Arbitrary for Chain { type Parameters = ParamsFor; fn arbitrary_with(_: Self::Parameters) -> Self::Strategy { - let named = any::() - .prop_map(move |sel| Chain::Named(sel.select(ethers_core::types::Chain::iter()))); + let named = + any::().prop_map(move |sel| Chain::Named(sel.select(NamedChain::iter()))); let id = any::().prop_map(Chain::from); proptest::strategy::Union::new_weighted(vec![(50, named.boxed()), (50, id.boxed())]).boxed() } @@ -255,37 +312,13 @@ mod tests { #[test] fn test_named_id() { - let chain = Chain::Named(ethers_core::types::Chain::Goerli); + let chain = Chain::Named(NamedChain::Goerli); assert_eq!(chain.id(), 5); } - #[test] - fn test_optimism_chain() { - let chain = Chain::Named(ethers_core::types::Chain::Optimism); - assert!(!chain.is_legacy()); - } - - #[test] - fn test_legacy_named_chain() { - let chain = Chain::Named(ethers_core::types::Chain::BinanceSmartChain); - assert!(chain.is_legacy()); - } - - #[test] - fn test_not_legacy_named_chain() { - let chain = Chain::Named(ethers_core::types::Chain::Mainnet); - assert!(!chain.is_legacy()); - } - - #[test] - fn test_not_legacy_id_chain() { - let chain = Chain::Id(1234); - assert!(!chain.is_legacy()); - } - #[test] fn test_display_named_chain() { - let chain = Chain::Named(ethers_core::types::Chain::Mainnet); + let chain = Chain::Named(NamedChain::Mainnet); assert_eq!(format!("{chain}"), "mainnet"); } @@ -306,7 +339,7 @@ mod tests { #[test] fn test_into_u256() { - let chain = Chain::Named(ethers_core::types::Chain::Goerli); + let chain = Chain::Named(NamedChain::Goerli); let n: U256 = chain.into(); let expected = U256::from(5); @@ -316,7 +349,7 @@ mod tests { #[test] #[allow(non_snake_case)] fn test_into_U64() { - let chain = Chain::Named(ethers_core::types::Chain::Goerli); + let chain = Chain::Named(NamedChain::Goerli); let n: U64 = chain.into(); let expected = U64::from(5); @@ -326,7 +359,7 @@ mod tests { #[test] fn test_from_str_named_chain() { let result = Chain::from_str("mainnet"); - let expected = Chain::Named(ethers_core::types::Chain::Mainnet); + let expected = Chain::Named(NamedChain::Mainnet); assert!(result.is_ok()); assert_eq!(result.unwrap(), expected); @@ -351,7 +384,7 @@ mod tests { #[test] fn test_default() { let default = Chain::default(); - let expected = Chain::Named(ethers_core::types::Chain::Mainnet); + let expected = Chain::Named(NamedChain::Mainnet); assert_eq!(default, expected); } @@ -366,7 +399,7 @@ mod tests { #[test] fn test_dns_network() { let s = "enrtree://AKA3AM6LPBYEUDMVNU3BSVQJ5AD45Y7YPOHJLEF6W26QOE4VTUDPE@all.mainnet.ethdisco.net"; - let chain: Chain = ethers_core::types::Chain::Mainnet.into(); + let chain: Chain = NamedChain::Mainnet.into(); assert_eq!(s, chain.public_dns_network_protocol().unwrap().as_str()); } } diff --git a/crates/primitives/src/chain/spec.rs b/crates/primitives/src/chain/spec.rs index 794b3fee47..37def84b6f 100644 --- 
a/crates/primitives/src/chain/spec.rs +++ b/crates/primitives/src/chain/spec.rs @@ -7,10 +7,10 @@ use crate::{ header::Head, proofs::genesis_state_root, Address, BlockNumber, Chain, ForkFilter, ForkHash, ForkId, Genesis, Hardfork, Header, - PruneBatchSizes, SealedHeader, EMPTY_OMMER_ROOT, H160, H256, U256, + PruneBatchSizes, SealedHeader, B256, EMPTY_OMMER_ROOT, U256, }; -use hex_literal::hex; use once_cell::sync::Lazy; +use revm_primitives::{address, b256}; use serde::{Deserialize, Serialize}; use std::{ collections::BTreeMap, @@ -24,9 +24,9 @@ pub static MAINNET: Lazy> = Lazy::new(|| { chain: Chain::mainnet(), genesis: serde_json::from_str(include_str!("../../res/genesis/mainnet.json")) .expect("Can't deserialize Mainnet genesis json"), - genesis_hash: Some(H256(hex!( + genesis_hash: Some(b256!( "d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3" - ))), + )), // paris_block_and_final_difficulty: Some(( 15537394, @@ -59,12 +59,13 @@ pub static MAINNET: Lazy> = Lazy::new(|| { ]), // https://etherscan.io/tx/0xe75fb554e433e03763a1560646ee22dcb74e5274b34c5ad644e7c0f619a7e1d0 deposit_contract: Some(DepositContract::new( - H160(hex!("00000000219ab540356cbb839cbe05303d7705fa")), + address!("00000000219ab540356cbb839cbe05303d7705fa"), 11052984, - H256(hex!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5")), + b256!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5"), )), base_fee_params: BaseFeeParams::ethereum(), prune_batch_sizes: PruneBatchSizes::mainnet(), + snapshot_block_interval: 500_000, } .into() }); @@ -75,9 +76,9 @@ pub static GOERLI: Lazy> = Lazy::new(|| { chain: Chain::goerli(), genesis: serde_json::from_str(include_str!("../../res/genesis/goerli.json")) .expect("Can't deserialize Goerli genesis json"), - genesis_hash: Some(H256(hex!( + genesis_hash: Some(b256!( "bf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a" - ))), + )), // paris_block_and_final_difficulty: Some((7382818, U256::from(10_790_000))), fork_timestamps: ForkTimestamps::default().shanghai(1678832736), @@ -101,12 +102,13 @@ pub static GOERLI: Lazy> = Lazy::new(|| { ]), // https://goerli.etherscan.io/tx/0xa3c07dc59bfdb1bfc2d50920fed2ef2c1c4e0a09fe2325dbc14e07702f965a78 deposit_contract: Some(DepositContract::new( - H160(hex!("ff50ed3d0ec03ac01d4c79aad74928bff48a7b2b")), + address!("ff50ed3d0ec03ac01d4c79aad74928bff48a7b2b"), 4367322, - H256(hex!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5")), + b256!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5"), )), base_fee_params: BaseFeeParams::ethereum(), prune_batch_sizes: PruneBatchSizes::testnet(), + snapshot_block_interval: 1_000_000, } .into() }); @@ -117,9 +119,9 @@ pub static SEPOLIA: Lazy> = Lazy::new(|| { chain: Chain::sepolia(), genesis: serde_json::from_str(include_str!("../../res/genesis/sepolia.json")) .expect("Can't deserialize Sepolia genesis json"), - genesis_hash: Some(H256(hex!( + genesis_hash: Some(b256!( "25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9" - ))), + )), // paris_block_and_final_difficulty: Some((1450409, U256::from(17_000_018_015_853_232u128))), fork_timestamps: ForkTimestamps::default().shanghai(1677557088), @@ -147,12 +149,13 @@ pub static SEPOLIA: Lazy> = Lazy::new(|| { ]), // https://sepolia.etherscan.io/tx/0x025ecbf81a2f1220da6285d1701dc89fb5a956b62562ee922e1a9efd73eb4b14 deposit_contract: Some(DepositContract::new( - H160(hex!("7f02c3e3c98b133055b8b348b2ac625669ed295d")), + 
address!("7f02c3e3c98b133055b8b348b2ac625669ed295d"), 1273020, - H256(hex!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5")), + b256!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5"), )), base_fee_params: BaseFeeParams::ethereum(), prune_batch_sizes: PruneBatchSizes::testnet(), + snapshot_block_interval: 1_000_000, } .into() }); @@ -163,9 +166,9 @@ pub static HOLESKY: Lazy> = Lazy::new(|| { chain: Chain::holesky(), genesis: serde_json::from_str(include_str!("../../res/genesis/holesky.json")) .expect("Can't deserialize Holesky genesis json"), - genesis_hash: Some(H256(hex!( + genesis_hash: Some(b256!( "b5f7f912443c940f21fd611f12828d75b534364ed9e95ca4e307729a4661bde4" - ))), + )), paris_block_and_final_difficulty: Some((0, U256::from(1))), fork_timestamps: ForkTimestamps::default().shanghai(1696000704), hardforks: BTreeMap::from([ @@ -188,12 +191,13 @@ pub static HOLESKY: Lazy> = Lazy::new(|| { (Hardfork::Shanghai, ForkCondition::Timestamp(1696000704)), ]), deposit_contract: Some(DepositContract::new( - H160(hex!("4242424242424242424242424242424242424242")), + address!("4242424242424242424242424242424242424242"), 0, - H256(hex!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5")), + b256!("649bbc62d0e31342afea4e5cd82d4049e7e1ee912fc0889aa790803be39038c5"), )), base_fee_params: BaseFeeParams::ethereum(), prune_batch_sizes: PruneBatchSizes::testnet(), + snapshot_block_interval: 1_000_000, } .into() }); @@ -207,9 +211,9 @@ pub static DEV: Lazy> = Lazy::new(|| { chain: Chain::dev(), genesis: serde_json::from_str(include_str!("../../res/genesis/dev.json")) .expect("Can't deserialize Dev testnet genesis json"), - genesis_hash: Some(H256(hex!( + genesis_hash: Some(b256!( "2f980576711e3617a5e4d83dd539548ec0f7792007d505a3d2e9674833af2d7c" - ))), + )), paris_block_and_final_difficulty: Some((0, U256::from(0))), fork_timestamps: ForkTimestamps::default().shanghai(0), hardforks: BTreeMap::from([ @@ -273,7 +277,7 @@ pub struct ChainSpec { /// This acts as a small cache for known chains. If the chain is known, then the genesis hash /// is also known ahead of time, and this will be `Some`. #[serde(skip, default)] - pub genesis_hash: Option, + pub genesis_hash: Option, /// The genesis block pub genesis: Genesis, @@ -303,6 +307,9 @@ pub struct ChainSpec { /// data coming in. #[serde(default)] pub prune_batch_sizes: PruneBatchSizes, + + /// The block interval for creating snapshots. Each snapshot will have that much blocks in it. + pub snapshot_block_interval: u64, } impl Default for ChainSpec { @@ -317,6 +324,7 @@ impl Default for ChainSpec { deposit_contract: Default::default(), base_fee_params: BaseFeeParams::ethereum(), prune_batch_sizes: Default::default(), + snapshot_block_interval: Default::default(), } } } @@ -353,13 +361,13 @@ impl ChainSpec { if self.fork(Hardfork::Cancun).active_at_timestamp(self.genesis.timestamp) { let blob_gas_used = self.genesis.blob_gas_used.unwrap_or(0); let excess_blob_gas = self.genesis.excess_blob_gas.unwrap_or(0); - (Some(H256::zero()), Some(blob_gas_used), Some(excess_blob_gas)) + (Some(B256::ZERO), Some(blob_gas_used), Some(excess_blob_gas)) } else { (None, None, None) }; Header { - parent_hash: H256::zero(), + parent_hash: B256::ZERO, number: 0, transactions_root: EMPTY_TRANSACTIONS, ommers_hash: EMPTY_OMMER_ROOT, @@ -397,7 +405,7 @@ impl ChainSpec { } /// Get the hash of the genesis block. 
- pub fn genesis_hash(&self) -> H256 { + pub fn genesis_hash(&self) -> B256 { if let Some(hash) = self.genesis_hash { hash } else { @@ -1115,25 +1123,21 @@ pub struct DepositContract { /// Deployment Block pub block: BlockNumber, /// `DepositEvent` event signature - pub topic: H256, + pub topic: B256, } impl DepositContract { - fn new(address: Address, block: BlockNumber, topic: H256) -> Self { + fn new(address: Address, block: BlockNumber, topic: B256) -> Self { DepositContract { address, block, topic } } } #[cfg(test)] mod tests { - use crate::{ - constants::EMPTY_WITHDRAWALS, Address, AllGenesisFormats, Chain, ChainSpec, - ChainSpecBuilder, DisplayHardforks, ForkCondition, ForkHash, ForkId, Genesis, Hardfork, - Head, DEV, GOERLI, H256, HOLESKY, MAINNET, SEPOLIA, U256, - }; + use super::*; + use crate::{b256, hex, NamedChain, B256, DEV, GOERLI, HOLESKY, MAINNET, SEPOLIA, U256}; + use alloy_rlp::Encodable; use bytes::BytesMut; - use ethers_core::types as EtherType; - use reth_rlp::Encodable; use std::str::FromStr; fn test_fork_ids(spec: &ChainSpec, cases: &[(Head, ForkId)]) { @@ -1151,7 +1155,7 @@ mod tests { fn test_hardfork_list_display_mainnet() { assert_eq!( DisplayHardforks::from(MAINNET.hardforks().clone()).to_string(), - r##"Pre-merge hard forks (block based): + "Pre-merge hard forks (block based): - Frontier @0 - Homestead @1150000 - Dao @1920000 @@ -1171,7 +1175,7 @@ Merge hard forks: Post-merge hard forks (timestamp based): - Shanghai @1681338455 -"## +" ); } @@ -1185,9 +1189,9 @@ Post-merge hard forks (timestamp based): .build(); assert_eq!( DisplayHardforks::from(spec.hardforks().clone()).to_string(), - r##"Pre-merge hard forks (block based): + "Pre-merge hard forks (block based): - Frontier @0 -"## +" ); } @@ -1762,32 +1766,29 @@ Post-merge hard forks (timestamp based): // alloc key -> expected rlp mapping let key_rlp = vec![ - (hex_literal::hex!("658bdf435d810c91414ec09147daa6db62406379"), "f84d8089487a9a304539440000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"), - (hex_literal::hex!("aa00000000000000000000000000000000000000"), "f8440101a08afc95b7d18a226944b9c2070b6bda1c3a36afcc3730429d47579c94b9fe5850a0ce92c756baff35fa740c3557c1a971fd24d2d35b7c8e067880d50cd86bb0bc99"), - (hex_literal::hex!("bb00000000000000000000000000000000000000"), "f8440102a08afc95b7d18a226944b9c2070b6bda1c3a36afcc3730429d47579c94b9fe5850a0e25a53cbb501cec2976b393719c63d832423dd70a458731a0b64e4847bbca7d2"), + (hex!("658bdf435d810c91414ec09147daa6db62406379"), &hex!("f84d8089487a9a304539440000a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470")[..]), + (hex!("aa00000000000000000000000000000000000000"), &hex!("f8440101a08afc95b7d18a226944b9c2070b6bda1c3a36afcc3730429d47579c94b9fe5850a0ce92c756baff35fa740c3557c1a971fd24d2d35b7c8e067880d50cd86bb0bc99")[..]), + (hex!("bb00000000000000000000000000000000000000"), &hex!("f8440102a08afc95b7d18a226944b9c2070b6bda1c3a36afcc3730429d47579c94b9fe5850a0e25a53cbb501cec2976b393719c63d832423dd70a458731a0b64e4847bbca7d2")[..]), ]; for (key, expected_rlp) in key_rlp { - let account = chainspec.genesis.alloc.get(&key.into()).expect("account should exist"); + let account = chainspec.genesis.alloc.get(&key).expect("account should exist"); let mut account_rlp = BytesMut::new(); account.encode(&mut account_rlp); - assert_eq!(hex::encode(account_rlp), expected_rlp) + assert_eq!(account_rlp, expected_rlp) } 
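// Aside on the `hex!` migration in the table above (illustration only, not part of this
// change): `hex_literal::hex!` expands to a fixed-size `[u8; N]` at compile time, whereas
// the old `hex::decode` built a `Vec<u8>` at runtime; that is why the expected values are
// taken as `&hex!(..)[..]` slices and compared against `account_rlp` directly.
{
    let compile_time: [u8; 4] = hex!("deadbeef");
    let runtime: Vec<u8> = hex::decode("deadbeef").expect("valid hex");
    assert_eq!(&compile_time[..], &runtime[..]);
}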
assert_eq!(chainspec.genesis_hash, None); - let expected_state_root: H256 = - hex_literal::hex!("078dc6061b1d8eaa8493384b59c9c65ceb917201221d08b80c4de6770b6ec7e7") - .into(); + let expected_state_root: B256 = + hex!("078dc6061b1d8eaa8493384b59c9c65ceb917201221d08b80c4de6770b6ec7e7").into(); assert_eq!(chainspec.genesis_header().state_root, expected_state_root); - let expected_withdrawals_hash: H256 = - hex_literal::hex!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") - .into(); + let expected_withdrawals_hash: B256 = + hex!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421").into(); assert_eq!(chainspec.genesis_header().withdrawals_root, Some(expected_withdrawals_hash)); - let expected_hash: H256 = - hex_literal::hex!("1fc027d65f820d3eef441ebeec139ebe09e471cf98516dce7b5643ccb27f418c") - .into(); + let expected_hash: B256 = + hex!("1fc027d65f820d3eef441ebeec139ebe09e471cf98516dce7b5643ccb27f418c").into(); let hash = chainspec.genesis_hash(); assert_eq!(hash, expected_hash); } @@ -1854,10 +1855,9 @@ Post-merge hard forks (timestamp based): let genesis = serde_json::from_str::(hive_json).unwrap(); let chainspec: ChainSpec = genesis.into(); assert_eq!(chainspec.genesis_hash, None); - assert_eq!(Chain::Named(EtherType::Chain::Optimism), chainspec.chain); - let expected_state_root: H256 = - hex_literal::hex!("9a6049ac535e3dc7436c189eaa81c73f35abd7f282ab67c32944ff0301d63360") - .into(); + assert_eq!(chainspec.chain, Chain::Named(NamedChain::Optimism)); + let expected_state_root: B256 = + hex!("9a6049ac535e3dc7436c189eaa81c73f35abd7f282ab67c32944ff0301d63360").into(); assert_eq!(chainspec.genesis_header().state_root, expected_state_root); let hard_forks = vec![ Hardfork::Byzantium, @@ -1870,9 +1870,8 @@ Post-merge hard forks (timestamp based): assert_eq!(chainspec.hardforks.get(fork).unwrap(), &ForkCondition::Block(0)); } - let expected_hash: H256 = - hex_literal::hex!("5ae31c6522bd5856129f66be3d582b842e4e9faaa87f21cce547128339a9db3c") - .into(); + let expected_hash: B256 = + hex!("5ae31c6522bd5856129f66be3d582b842e4e9faaa87f21cce547128339a9db3c").into(); let hash = chainspec.genesis_header().hash_slow(); assert_eq!(hash, expected_hash); } @@ -1915,7 +1914,7 @@ Post-merge hard forks (timestamp based): // set the state root to the same as in the hive test the hash was pulled from header.state_root = - H256::from_str("0x62e2595e017f0ca23e08d17221010721a71c3ae932f4ea3cb12117786bb392d4") + B256::from_str("0x62e2595e017f0ca23e08d17221010721a71c3ae932f4ea3cb12117786bb392d4") .unwrap(); // shanghai is activated so we should have a withdrawals root @@ -1923,20 +1922,19 @@ Post-merge hard forks (timestamp based): // cancun is activated so we should have a zero parent beacon block root, zero blob gas // used, and zero excess blob gas - assert_eq!(header.parent_beacon_block_root, Some(H256::zero())); + assert_eq!(header.parent_beacon_block_root, Some(B256::ZERO)); assert_eq!(header.blob_gas_used, Some(0)); assert_eq!(header.excess_blob_gas, Some(0)); println!("header: {:?}", header); // check the genesis hash let genesis_hash = header.hash_slow(); - let expected_hash = H256::from(hex_literal::hex!( - "16bb7c59613a5bad3f7c04a852fd056545ade2483968d9a25a1abb05af0c4d37" - )); + let expected_hash = + b256!("16bb7c59613a5bad3f7c04a852fd056545ade2483968d9a25a1abb05af0c4d37"); assert_eq!(genesis_hash, expected_hash); // check that the forkhash is correct - let expected_forkhash = ForkHash(hex_literal::hex!("8062457a")); + let expected_forkhash = ForkHash(hex!("8062457a")); 
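// Where the hard-coded value comes from (sketch, not part of this change): per EIP-2124 the
// initial fork hash is the CRC32 checksum of the genesis hash, which is what the
// `From<B256> for ForkHash` impl further down in forkid.rs computes, so the assertion below
// could equally be written as:
{
    let ForkHash(crc_bytes) = ForkHash::from(genesis_hash);
    assert_eq!(crc_bytes, hex!("8062457a"));
}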
assert_eq!(ForkHash::from(genesis_hash), expected_forkhash); } diff --git a/crates/primitives/src/constants/mod.rs b/crates/primitives/src/constants/mod.rs index 3e499a1cae..93708ed651 100644 --- a/crates/primitives/src/constants/mod.rs +++ b/crates/primitives/src/constants/mod.rs @@ -1,7 +1,7 @@ //! Ethereum protocol-related constants -use crate::{H160, H256, U256}; -use hex_literal::hex; +use crate::{Address, B256, U256}; +use revm_primitives::{address, b256}; use std::time::Duration; /// [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844#parameters) constants. @@ -73,45 +73,45 @@ pub const ETH_TO_WEI: u128 = FINNEY_TO_WEI * 1000; pub const MGAS_TO_GAS: u64 = 1_000_000u64; /// The Ethereum mainnet genesis hash. -pub const MAINNET_GENESIS: H256 = - H256(hex!("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3")); +pub const MAINNET_GENESIS: B256 = + b256!("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"); /// Goerli genesis hash. -pub const GOERLI_GENESIS: H256 = - H256(hex!("bf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a")); +pub const GOERLI_GENESIS: B256 = + b256!("bf7e331f7f7c1dd2e05159666b3bf8bc7a8a3a9eb1d518969eab529dd9b88c1a"); /// Sepolia genesis hash. -pub const SEPOLIA_GENESIS: H256 = - H256(hex!("25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9")); +pub const SEPOLIA_GENESIS: B256 = + b256!("25a5cc106eea7138acab33231d7160d69cb777ee0c2c553fcddf5138993e6dd9"); /// Holesky genesis hash. -pub const HOLESKY_GENESIS: H256 = - H256(hex!("ff9006519a8ce843ac9c28549d24211420b546e12ce2d170c77a8cca7964f23d")); +pub const HOLESKY_GENESIS: B256 = + b256!("ff9006519a8ce843ac9c28549d24211420b546e12ce2d170c77a8cca7964f23d"); /// Testnet genesis hash. -pub const DEV_GENESIS: H256 = - H256(hex!("2f980576711e3617a5e4d83dd539548ec0f7792007d505a3d2e9674833af2d7c")); +pub const DEV_GENESIS: B256 = + b256!("2f980576711e3617a5e4d83dd539548ec0f7792007d505a3d2e9674833af2d7c"); /// Keccak256 over empty array. -pub const KECCAK_EMPTY: H256 = - H256(hex!("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470")); +pub const KECCAK_EMPTY: B256 = + b256!("c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"); /// Ommer root of empty list. -pub const EMPTY_OMMER_ROOT: H256 = - H256(hex!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347")); +pub const EMPTY_OMMER_ROOT: B256 = + b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"); /// hash of an empty set `keccak256(rlp([]))` -const EMPTY_SET_HASH: H256 = - H256(hex!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")); +const EMPTY_SET_HASH: B256 = + b256!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"); /// Transactions root of empty receipts set. -pub const EMPTY_RECEIPTS: H256 = EMPTY_SET_HASH; +pub const EMPTY_RECEIPTS: B256 = EMPTY_SET_HASH; /// Transactions root of empty transactions set. -pub const EMPTY_TRANSACTIONS: H256 = EMPTY_SET_HASH; +pub const EMPTY_TRANSACTIONS: B256 = EMPTY_SET_HASH; /// Withdrawals root of empty withdrawals set. -pub const EMPTY_WITHDRAWALS: H256 = EMPTY_SET_HASH; +pub const EMPTY_WITHDRAWALS: B256 = EMPTY_SET_HASH; /// The number of blocks to unwind during a reorg that already became a part of canonical chain. /// @@ -133,11 +133,11 @@ pub const BEACON_CONSENSUS_REORG_UNWIND_DEPTH: u64 = 3; pub const ALLOWED_FUTURE_BLOCK_TIME_SECONDS: u64 = 15; /// The address for the beacon roots contract defined in EIP-4788. 
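The constants in this file move from `H256(hex!(..))` to the `b256!` and `address!` macros imported from revm_primitives; both parse their hex argument at compile time, so they stay usable in `const` items while matching runtime-parsed values. A small sketch of that equivalence (the hash reuses the mainnet genesis value above; the names are illustrative only):

use revm_primitives::{b256, B256};
use std::str::FromStr;

const EXAMPLE_HASH: B256 =
    b256!("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3");

fn const_macro_matches_runtime_parse() {
    let parsed =
        B256::from_str("0xd4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3")
            .unwrap();
    assert_eq!(EXAMPLE_HASH, parsed);
}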
-pub const BEACON_ROOTS_ADDRESS: H160 = H160(hex!("000F3df6D732807Ef1319fB7B8bB8522d0Beac02")); +pub const BEACON_ROOTS_ADDRESS: Address = address!("000F3df6D732807Ef1319fB7B8bB8522d0Beac02"); /// The caller to be used when calling the EIP-4788 beacon roots contract at the beginning of the /// block. -pub const SYSTEM_ADDRESS: H160 = H160(hex!("fffffffffffffffffffffffffffffffffffffffe")); +pub const SYSTEM_ADDRESS: Address = address!("fffffffffffffffffffffffffffffffffffffffe"); #[cfg(test)] mod tests { diff --git a/crates/primitives/src/contract.rs b/crates/primitives/src/contract.rs index 43ce75b53f..0464bc96af 100644 --- a/crates/primitives/src/contract.rs +++ b/crates/primitives/src/contract.rs @@ -22,6 +22,7 @@ pub fn create2_address_from_code( #[cfg(test)] mod tests { use super::*; + use crate::hex; #[test] fn contract_address() { @@ -41,8 +42,8 @@ mod tests { } } - #[test] // Test vectors from https://github.com/ethereum/EIPs/blob/master/EIPS/eip-1014.md#examples + #[test] fn test_create2_address() { for (from, salt, init_code, expected) in &[ ( @@ -94,7 +95,7 @@ mod tests { let salt = U256::try_from_be_slice(&salt).unwrap(); let init_code = hex::decode(init_code).unwrap(); let expected = expected.parse::<Address>
().unwrap(); - assert_eq!(expected, create2_address_from_code(from, init_code.clone(),salt )); + assert_eq!(expected, create2_address_from_code(from, init_code.clone(), salt)); // get_create2_address_from_hash() let init_code_hash = keccak256(init_code); diff --git a/crates/primitives/src/eip4844.rs b/crates/primitives/src/eip4844.rs index ea91d4f11e..bf84da3f75 100644 --- a/crates/primitives/src/eip4844.rs +++ b/crates/primitives/src/eip4844.rs @@ -2,7 +2,7 @@ use crate::{ constants::eip4844::{TARGET_DATA_GAS_PER_BLOCK, VERSIONED_HASH_VERSION_KZG}, kzg::KzgCommitment, - H256, + B256, }; use sha2::{Digest, Sha256}; @@ -12,10 +12,10 @@ pub use revm_primitives::calc_blob_gasprice; /// Calculates the versioned hash for a KzgCommitment /// /// Specified in [EIP-4844](https://eips.ethereum.org/EIPS/eip-4844#header-extension) -pub fn kzg_to_versioned_hash(commitment: KzgCommitment) -> H256 { +pub fn kzg_to_versioned_hash(commitment: KzgCommitment) -> B256 { let mut res = Sha256::digest(commitment.as_slice()); res[0] = VERSIONED_HASH_VERSION_KZG; - H256::from_slice(&res) + B256::new(res.into()) } /// Calculates the excess data gas for the next block, after applying the current set of blobs on diff --git a/crates/primitives/src/forkid.rs b/crates/primitives/src/forkid.rs index 5132a084e9..3de97bf3c2 100644 --- a/crates/primitives/src/forkid.rs +++ b/crates/primitives/src/forkid.rs @@ -4,10 +4,10 @@ #![deny(missing_docs)] -use crate::{BlockNumber, Head, H256}; +use crate::{hex, BlockNumber, Head, B256}; +use alloy_rlp::*; use crc::*; use reth_codecs::derive_arbitrary; -use reth_rlp::*; use serde::{Deserialize, Serialize}; use std::{ cmp::Ordering, @@ -41,8 +41,8 @@ impl fmt::Debug for ForkHash { } } -impl From for ForkHash { - fn from(genesis: H256) -> Self { +impl From for ForkHash { + fn from(genesis: B256) -> Self { Self(CRC_32_IEEE.checksum(&genesis[..]).to_be_bytes()) } } @@ -174,7 +174,7 @@ pub struct ForkFilter { impl ForkFilter { /// Create the filter from provided head, genesis block hash, past forks and expected future /// forks. - pub fn new(head: Head, genesis_hash: H256, genesis_timestamp: u64, forks: F) -> Self + pub fn new(head: Head, genesis_hash: B256, genesis_timestamp: u64, forks: F) -> Self where F: IntoIterator, { @@ -379,9 +379,11 @@ impl Cache { #[cfg(test)] mod tests { use super::*; - use hex_literal::hex; - const GENESIS_HASH: H256 = - H256(hex!("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3")); + use crate::hex_literal::hex; + use revm_primitives::b256; + + const GENESIS_HASH: B256 = + b256!("d4e56740f876aef8c010b86a40d5f56745a118d0906a34e69aec8c0db1cb8fa3"); // EIP test vectors. 
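A few hunks up, `kzg_to_versioned_hash` now returns `B256`, but the EIP-4844 rule itself is unchanged: hash the commitment with SHA-256 and overwrite the first byte with the KZG version byte. A minimal standalone sketch of that rule (not the crate's API):

use sha2::{Digest, Sha256};

fn versioned_hash(commitment_bytes: &[u8]) -> [u8; 32] {
    let mut out: [u8; 32] = Sha256::digest(commitment_bytes).into();
    out[0] = 0x01; // VERSIONED_HASH_VERSION_KZG
    out
}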
#[test] diff --git a/crates/primitives/src/genesis.rs b/crates/primitives/src/genesis.rs index 131dbd31b8..2dea83ad47 100644 --- a/crates/primitives/src/genesis.rs +++ b/crates/primitives/src/genesis.rs @@ -1,14 +1,15 @@ use crate::{ keccak256, proofs::EMPTY_ROOT, - serde_helper::{deserialize_json_u256, deserialize_json_u256_opt, deserialize_storage_map}, + serde_helper::{ + deserialize_json_u256, deserialize_json_u256_opt, deserialize_storage_map, + num::{u64_hex_or_decimal, u64_hex_or_decimal_opt}, + }, trie::{HashBuilder, Nibbles}, - utils::serde_helpers::{deserialize_stringified_u64, deserialize_stringified_u64_opt}, - Account, Address, Bytes, H256, KECCAK_EMPTY, U256, + Account, Address, Bytes, B256, KECCAK_EMPTY, U256, }; +use alloy_rlp::{encode_fixed_size, length_of_length, Encodable, Header as RlpHeader}; use itertools::Itertools; -use reth_rlp::{encode_fixed_size, length_of_length, Encodable, Header as RlpHeader}; -use revm_primitives::B160; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -20,21 +21,21 @@ pub struct Genesis { #[serde(default)] pub config: ChainConfig, /// The genesis header nonce. - #[serde(deserialize_with = "deserialize_stringified_u64")] + #[serde(with = "u64_hex_or_decimal")] pub nonce: u64, /// The genesis header timestamp. - #[serde(deserialize_with = "deserialize_stringified_u64")] + #[serde(with = "u64_hex_or_decimal")] pub timestamp: u64, /// The genesis header extra data. pub extra_data: Bytes, /// The genesis header gas limit. - #[serde(deserialize_with = "deserialize_stringified_u64")] + #[serde(with = "u64_hex_or_decimal")] pub gas_limit: u64, /// The genesis header difficulty. #[serde(deserialize_with = "deserialize_json_u256")] pub difficulty: U256, /// The genesis header mix hash. - pub mix_hash: H256, + pub mix_hash: B256, /// The genesis header coinbase address. pub coinbase: Address, /// The initial state of accounts in the genesis block. @@ -46,22 +47,13 @@ pub struct Genesis { // should NOT be set in a real genesis file, but are included here for compatibility with // consensus tests, which have genesis files with these fields populated. /// The genesis header base fee - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub base_fee_per_gas: Option, /// The genesis header excess blob gas - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub excess_blob_gas: Option, /// The genesis header blob gas used - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub blob_gas_used: Option, } @@ -97,7 +89,7 @@ impl Genesis { } /// Set the mix hash of the header. - pub fn with_mix_hash(mut self, mix_hash: H256) -> Self { + pub fn with_mix_hash(mut self, mix_hash: B256) -> Self { self.mix_hash = mix_hash; self } @@ -142,11 +134,7 @@ impl Genesis { #[serde(deny_unknown_fields)] pub struct GenesisAccount { /// The nonce of the account at genesis. 
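The `u64_hex_or_decimal` family of helpers replacing `deserialize_stringified_u64` throughout this file accepts, as the name suggests, either a quoted hex string or a plain decimal JSON number. A sketch against a throwaway wrapper type; the wrapper itself is hypothetical, only the helper path comes from this change:

use serde::Deserialize;

#[derive(Deserialize)]
struct Wrapper {
    #[serde(with = "crate::serde_helper::num::u64_hex_or_decimal")]
    value: u64,
}

fn both_forms_deserialize() {
    let hex_form: Wrapper = serde_json::from_str(r#"{ "value": "0x1388" }"#).unwrap();
    let num_form: Wrapper = serde_json::from_str(r#"{ "value": 5000 }"#).unwrap();
    assert_eq!(hex_form.value, num_form.value); // both parse to 5000
}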
- #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt", - default - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt", default)] pub nonce: Option, /// The balance of the account at genesis. #[serde(deserialize_with = "deserialize_json_u256")] @@ -160,7 +148,7 @@ pub struct GenesisAccount { skip_serializing_if = "Option::is_none", deserialize_with = "deserialize_storage_map" )] - pub storage: Option>, + pub storage: Option>, } impl GenesisAccount { @@ -196,7 +184,7 @@ impl GenesisAccount { } /// Set the storage. - pub fn with_storage(mut self, storage: Option>) -> Self { + pub fn with_storage(mut self, storage: Option>) -> Self { self.storage = storage; self } @@ -218,7 +206,7 @@ impl Encodable for GenesisAccount { let storage_with_sorted_hashed_keys = storage .iter() - .filter(|(_k, &v)| v != H256::zero()) + .filter(|(_k, &v)| v != B256::ZERO) .map(|(slot, value)| (keccak256(slot), value)) .sorted_by_key(|(key, _)| *key); @@ -265,129 +253,78 @@ pub struct ChainConfig { pub chain_id: u64, /// The homestead switch block (None = no fork, 0 = already homestead). - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub homestead_block: Option, /// The DAO fork switch block (None = no fork). - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub dao_fork_block: Option, /// Whether or not the node supports the DAO hard-fork. pub dao_fork_support: bool, /// The EIP-150 hard fork block (None = no fork). - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub eip150_block: Option, /// The EIP-150 hard fork hash. #[serde(skip_serializing_if = "Option::is_none")] - pub eip150_hash: Option, + pub eip150_hash: Option, /// The EIP-155 hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub eip155_block: Option, /// The EIP-158 hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub eip158_block: Option, /// The Byzantium hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub byzantium_block: Option, /// The Constantinople hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub constantinople_block: Option, /// The Petersburg hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub petersburg_block: Option, /// The Istanbul hard fork block. 
- #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub istanbul_block: Option, /// The Muir Glacier hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub muir_glacier_block: Option, /// The Berlin hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub berlin_block: Option, /// The London hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub london_block: Option, /// The Arrow Glacier hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub arrow_glacier_block: Option, /// The Gray Glacier hard fork block. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub gray_glacier_block: Option, /// Virtual fork after the merge to use as a network splitter. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub merge_netsplit_block: Option, /// Shanghai switch time. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub shanghai_time: Option, /// Cancun switch time. - #[serde( - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub cancun_time: Option, /// Total difficulty reached that triggers the merge consensus upgrade. @@ -424,19 +361,11 @@ pub struct EthashConfig {} #[derive(Clone, Debug, Deserialize, Serialize, PartialEq, Eq)] pub struct CliqueConfig { /// Number of seconds between blocks to enforce. - #[serde( - default, - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(default, skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub period: Option, /// Epoch length to reset votes and checkpoints. 
- #[serde( - default, - skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_stringified_u64_opt" - )] + #[serde(default, skip_serializing_if = "Option::is_none", with = "u64_hex_or_decimal_opt")] pub epoch: Option, } @@ -454,14 +383,14 @@ mod ethers_compat { .alloc .iter() .map(|(addr, account)| (addr.0.into(), account.clone().into())) - .collect::>(); + .collect::>(); Genesis { config: genesis.config.into(), nonce: genesis.nonce.as_u64(), timestamp: genesis.timestamp.as_u64(), gas_limit: genesis.gas_limit.as_u64(), - difficulty: genesis.difficulty.into(), + difficulty: U256::from_limbs(genesis.difficulty.0), mix_hash: genesis.mix_hash.0.into(), coinbase: genesis.coinbase.0.into(), extra_data: genesis.extra_data.0.into(), @@ -477,7 +406,7 @@ mod ethers_compat { impl From for GenesisAccount { fn from(genesis_account: EthersGenesisAccount) -> Self { Self { - balance: genesis_account.balance.into(), + balance: U256::from_limbs(genesis_account.balance.0), nonce: genesis_account.nonce, code: genesis_account.code.as_ref().map(|code| code.0.clone().into()), storage: genesis_account.storage.as_ref().map(|storage| { @@ -522,7 +451,7 @@ mod ethers_compat { dao_fork_block, dao_fork_support, eip150_block, - eip150_hash: eip150_hash.map(Into::into), + eip150_hash: eip150_hash.map(|x| x.0.into()), eip155_block, eip158_block, byzantium_block, @@ -537,7 +466,7 @@ mod ethers_compat { merge_netsplit_block, shanghai_time, cancun_time, - terminal_total_difficulty: terminal_total_difficulty.map(Into::into), + terminal_total_difficulty: terminal_total_difficulty.map(|x| U256::from_limbs(x.0)), terminal_total_difficulty_passed, ethash: ethash.map(Into::into), clique: clique.map(Into::into), @@ -562,8 +491,7 @@ mod ethers_compat { #[cfg(test)] mod tests { use super::*; - use crate::{Address, Bytes, U256}; - use hex_literal::hex; + use crate::{hex_literal::hex, Address, Bytes, U256}; use std::{collections::HashMap, str::FromStr}; #[test] @@ -1080,21 +1008,21 @@ mod tests { let storage = alloc_entry.storage.as_ref().expect("missing storage for parsed genesis"); let expected_storage = HashMap::from_iter(vec![ ( - H256::from_str( + B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000000000", ) .unwrap(), - H256::from_str( + B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000001234", ) .unwrap(), ), ( - H256::from_str( + B256::from_str( "0x6661e9d6d8b923d5bbaab1b96e1dd51ff6ea2a93520fdc9eb75d059238b8c5e9", ) .unwrap(), - H256::from_str( + B256::from_str( "0x0000000000000000000000000000000000000000000000000000000000000001", ) .unwrap(), @@ -1174,7 +1102,7 @@ mod tests { Genesis { nonce: 0x0000000000000042, difficulty: U256::from(0x2123456), - mix_hash: H256::from_str( + mix_hash: B256::from_str( "0x123456789abcdef123456789abcdef123456789abcdef123456789abcdef1234", ) .unwrap(), @@ -1214,9 +1142,9 @@ mod tests { storage: Some(HashMap::from_iter(vec![ ( - H256::from_str("0x0000000000000000000000000000000000000000000000000000000000000001"). + B256::from_str("0x0000000000000000000000000000000000000000000000000000000000000001"). unwrap(), - H256::from_str("0x0000000000000000000000000000000000000000000000000000000000000022"). + B256::from_str("0x0000000000000000000000000000000000000000000000000000000000000022"). 
unwrap(), ), ])), }, diff --git a/crates/primitives/src/header.rs b/crates/primitives/src/header.rs index 0235ba8bb9..c3ccc3c80b 100644 --- a/crates/primitives/src/header.rs +++ b/crates/primitives/src/header.rs @@ -3,12 +3,12 @@ use crate::{ eip4844::{calc_blob_gasprice, calculate_excess_blob_gas}, keccak256, proofs::{EMPTY_LIST_HASH, EMPTY_ROOT}, - BaseFeeParams, BlockBodyRoots, BlockHash, BlockNumHash, BlockNumber, Bloom, Bytes, H160, H256, - H64, U256, + Address, BaseFeeParams, BlockBodyRoots, BlockHash, BlockNumHash, BlockNumber, Bloom, Bytes, + B256, B64, U256, }; +use alloy_rlp::{length_of_length, Decodable, Encodable, EMPTY_LIST_CODE, EMPTY_STRING_CODE}; use bytes::{Buf, BufMut, BytesMut}; use reth_codecs::{add_arbitrary_tests, derive_arbitrary, main_codec, Compact}; -use reth_rlp::{length_of_length, Decodable, Encodable, EMPTY_LIST_CODE, EMPTY_STRING_CODE}; use serde::{Deserialize, Serialize}; use std::{ mem, @@ -28,7 +28,7 @@ pub struct Head { /// The number of the head block. pub number: BlockNumber, /// The hash of the head block. - pub hash: H256, + pub hash: B256, /// The difficulty of the head block. pub difficulty: U256, /// The total difficulty at the head block. @@ -43,24 +43,24 @@ pub struct Head { pub struct Header { /// The Keccak 256-bit hash of the parent /// block’s header, in its entirety; formally Hp. - pub parent_hash: H256, + pub parent_hash: B256, /// The Keccak 256-bit hash of the ommers list portion of this block; formally Ho. - pub ommers_hash: H256, + pub ommers_hash: B256, /// The 160-bit address to which all fees collected from the successful mining of this block /// be transferred; formally Hc. - pub beneficiary: H160, + pub beneficiary: Address, /// The Keccak 256-bit hash of the root node of the state trie, after all transactions are /// executed and finalisations applied; formally Hr. - pub state_root: H256, + pub state_root: B256, /// The Keccak 256-bit hash of the root node of the trie structure populated with each /// transaction in the transactions list portion of the block; formally Ht. - pub transactions_root: H256, + pub transactions_root: B256, /// The Keccak 256-bit hash of the root node of the trie structure populated with the receipts /// of each transaction in the transactions list portion of the block; formally He. - pub receipts_root: H256, + pub receipts_root: B256, /// The Keccak 256-bit hash of the withdrawals list portion of this block. /// - pub withdrawals_root: Option, + pub withdrawals_root: Option, /// The Bloom filter composed from indexable information (logger address and log topics) /// contained in each log entry from the receipt of each transaction in the transactions list; /// formally Hb. @@ -81,7 +81,7 @@ pub struct Header { /// A 256-bit hash which, combined with the /// nonce, proves that a sufficient amount of computation has been carried out on this block; /// formally Hm. - pub mix_hash: H256, + pub mix_hash: B256, /// A 64-bit value which, combined with the mixhash, proves that a sufficient amount of /// computation has been carried out on this block; formally Hn. pub nonce: u64, @@ -106,7 +106,7 @@ pub struct Header { /// and more. /// /// The beacon roots contract handles root storage, enhancing Ethereum's functionalities. - pub parent_beacon_block_root: Option, + pub parent_beacon_block_root: Option, /// An arbitrary byte array containing data relevant to this block. This must be 32 bytes or /// fewer; formally Hx. 
pub extra_data: Bytes, @@ -147,7 +147,7 @@ impl Header { /// Heavy function that will calculate hash of data and will *not* save the change to metadata. /// Use [`Header::seal`], [`SealedHeader`] and unlock if you need hash to be persistent. - pub fn hash_slow(&self) -> H256 { + pub fn hash_slow(&self) -> B256 { let mut out = BytesMut::new(); self.encode(&mut out); keccak256(&out) @@ -220,7 +220,7 @@ impl Header { /// Seal the header with a known hash. /// /// WARNING: This method does not perform validation whether the hash is correct. - pub fn seal(self, hash: H256) -> SealedHeader { + pub fn seal(self, hash: B256) -> SealedHeader { SealedHeader { header: self, hash } } @@ -233,25 +233,25 @@ impl Header { /// Calculate a heuristic for the in-memory size of the [Header]. #[inline] pub fn size(&self) -> usize { - mem::size_of::() + // parent hash - mem::size_of::() + // ommers hash - mem::size_of::() + // beneficiary - mem::size_of::() + // state root - mem::size_of::() + // transactions root - mem::size_of::() + // receipts root - mem::size_of::>() + // withdrawals root + mem::size_of::() + // parent hash + mem::size_of::() + // ommers hash + mem::size_of::
() + // beneficiary + mem::size_of::() + // state root + mem::size_of::() + // transactions root + mem::size_of::() + // receipts root + mem::size_of::>() + // withdrawals root mem::size_of::() + // logs bloom mem::size_of::() + // difficulty mem::size_of::() + // number mem::size_of::() + // gas limit mem::size_of::() + // gas used mem::size_of::() + // timestamp - mem::size_of::() + // mix hash + mem::size_of::() + // mix hash mem::size_of::() + // nonce mem::size_of::>() + // base fee per gas mem::size_of::>() + // blob gas used mem::size_of::>() + // excess blob gas - mem::size_of::>() + // parent beacon block root + mem::size_of::>() + // parent beacon block root self.extra_data.len() // extra data } @@ -271,7 +271,7 @@ impl Header { length += self.timestamp.length(); length += self.extra_data.length(); length += self.mix_hash.length(); - length += H64::from_low_u64_be(self.nonce).length(); + length += B64::new(self.nonce.to_be_bytes()).length(); if let Some(base_fee) = self.base_fee_per_gas { length += U256::from(base_fee).length(); @@ -322,7 +322,7 @@ impl Header { impl Encodable for Header { fn encode(&self, out: &mut dyn BufMut) { let list_header = - reth_rlp::Header { list: true, payload_length: self.header_payload_length() }; + alloy_rlp::Header { list: true, payload_length: self.header_payload_length() }; list_header.encode(out); self.parent_hash.encode(out); self.ommers_hash.encode(out); @@ -338,7 +338,7 @@ impl Encodable for Header { self.timestamp.encode(out); self.extra_data.encode(out); self.mix_hash.encode(out); - H64::from_low_u64_be(self.nonce).encode(out); + B64::new(self.nonce.to_be_bytes()).encode(out); // Encode base fee. Put empty list if base fee is missing, // but withdrawals root is present. @@ -400,10 +400,10 @@ impl Encodable for Header { } impl Decodable for Header { - fn decode(buf: &mut &[u8]) -> Result { - let rlp_head = reth_rlp::Header::decode(buf)?; + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { + let rlp_head = alloy_rlp::Header::decode(buf)?; if !rlp_head.list { - return Err(reth_rlp::DecodeError::UnexpectedString) + return Err(alloy_rlp::Error::UnexpectedString) } let started_len = buf.len(); let mut this = Self { @@ -421,7 +421,7 @@ impl Decodable for Header { timestamp: Decodable::decode(buf)?, extra_data: Decodable::decode(buf)?, mix_hash: Decodable::decode(buf)?, - nonce: H64::decode(buf)?.to_low_u64_be(), + nonce: u64::from_be_bytes(B64::decode(buf)?.0), base_fee_per_gas: None, withdrawals_root: None, blob_gas_used: None, @@ -471,12 +471,12 @@ impl Decodable for Header { // post-London, so this is technically not valid. However, a tool like proptest would // generate a block like this. 
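// Aside on the nonce representation used in `decode` above (sketch, not part of this change):
// the header stores its nonce as a plain u64 and moves it through RLP as an 8-byte big-endian
// value, previously via `H64::from_low_u64_be` / `to_low_u64_be` and now via `B64`:
{
    let nonce: u64 = 0x0102030405060708;
    let wire = B64::new(nonce.to_be_bytes());
    assert_eq!(u64::from_be_bytes(wire.0), nonce);
}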
if started_len - buf.len() < rlp_head.payload_length { - this.parent_beacon_block_root = Some(H256::decode(buf)?); + this.parent_beacon_block_root = Some(B256::decode(buf)?); } let consumed = started_len - buf.len(); if consumed != rlp_head.payload_length { - return Err(reth_rlp::DecodeError::ListLengthMismatch { + return Err(alloy_rlp::Error::ListLengthMismatch { expected: rlp_head.payload_length, got: consumed, }) @@ -556,7 +556,7 @@ impl Encodable for SealedHeader { } impl Decodable for SealedHeader { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let b = &mut &**buf; let started_len = buf.len(); @@ -651,7 +651,7 @@ impl Encodable for HeadersDirection { } impl Decodable for HeadersDirection { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let value: bool = Decodable::decode(buf)?; Ok(value.into()) } @@ -675,15 +675,15 @@ impl From for bool { #[cfg(feature = "test-utils")] mod ethers_compat { use super::*; - use ethers_core::types::{Block, H256 as EthersH256}; + use ethers_core::types::{Block, H256}; - impl From<&Block> for Header { - fn from(block: &Block) -> Self { + impl From<&Block> for Header { + fn from(block: &Block) -> Self { Header { parent_hash: block.parent_hash.0.into(), number: block.number.unwrap().as_u64(), gas_limit: block.gas_limit.as_u64(), - difficulty: block.difficulty.into(), + difficulty: U256::from_limbs(block.difficulty.0), nonce: block.nonce.unwrap().to_low_u64_be(), extra_data: block.extra_data.0.clone().into(), state_root: block.state_root.0.into(), @@ -704,8 +704,8 @@ mod ethers_compat { } } - impl From<&Block> for SealedHeader { - fn from(block: &Block) -> Self { + impl From<&Block> for SealedHeader { + fn from(block: &Block) -> Self { let header = Header::from(block); match block.hash { Some(hash) => header.seal(hash.0.into()), @@ -717,15 +717,14 @@ mod ethers_compat { #[cfg(test)] mod tests { - use super::{Bytes, Decodable, Encodable, Header, H256}; - use crate::{Address, HeadersDirection, U256}; - use ethers_core::utils::hex::{self, FromHex}; + use super::{Bytes, Decodable, Encodable, Header, B256}; + use crate::{address, b256, bloom, bytes, hex, Address, HeadersDirection, U256}; use std::str::FromStr; // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 #[test] fn test_encode_block_header() { - let expected = hex::decode("f901f9a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008208ae820d0582115c8215b3821a0a827788a00000000000000000000000000000000000000000000000000000000000000000880000000000000000").unwrap(); + let expected = 
hex!("f901f9a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008208ae820d0582115c8215b3821a0a827788a00000000000000000000000000000000000000000000000000000000000000000880000000000000000"); let header = Header { difficulty: U256::from(0x8ae_u64), number: 0xd05_u64, @@ -733,10 +732,10 @@ mod tests { gas_used: 0x15b3_u64, timestamp: 0x1a0a_u64, extra_data: Bytes::from_str("7788").unwrap(), - ommers_hash: H256::zero(), - state_root: H256::zero(), - transactions_root: H256::zero(), - receipts_root: H256::zero(), + ommers_hash: B256::ZERO, + state_root: B256::ZERO, + transactions_root: B256::ZERO, + receipts_root: B256::ZERO, ..Default::default() }; let mut data = vec![]; @@ -749,23 +748,23 @@ mod tests { #[test] fn test_eip1559_block_header_hash() { let expected_hash = - H256::from_str("6a251c7c3c5dca7b42407a3752ff48f3bbca1fab7f9868371d9918daf1988d1f") + B256::from_str("6a251c7c3c5dca7b42407a3752ff48f3bbca1fab7f9868371d9918daf1988d1f") .unwrap(); let header = Header { - parent_hash: H256::from_str("e0a94a7a3c9617401586b1a27025d2d9671332d22d540e0af72b069170380f2a").unwrap(), - ommers_hash: H256::from_str("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347").unwrap(), - beneficiary: Address::from_str("ba5e000000000000000000000000000000000000").unwrap(), - state_root: H256::from_str("ec3c94b18b8a1cff7d60f8d258ec723312932928626b4c9355eb4ab3568ec7f7").unwrap(), - transactions_root: H256::from_str("50f738580ed699f0469702c7ccc63ed2e51bc034be9479b7bff4e68dee84accf").unwrap(), - receipts_root: H256::from_str("29b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9").unwrap(), - logs_bloom: <[u8; 256]>::from_hex("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000").unwrap().into(), + parent_hash: b256!("e0a94a7a3c9617401586b1a27025d2d9671332d22d540e0af72b069170380f2a"), + ommers_hash: b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"), + beneficiary: address!("ba5e000000000000000000000000000000000000"), + state_root: b256!("ec3c94b18b8a1cff7d60f8d258ec723312932928626b4c9355eb4ab3568ec7f7"), + transactions_root: b256!("50f738580ed699f0469702c7ccc63ed2e51bc034be9479b7bff4e68dee84accf"), + receipts_root: b256!("29b0562f7140574dd0d50dee8a271b22e1a0a7b78fca58f7c60370d8317ba2a9"), + 
logs_bloom: bloom!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000"), difficulty: U256::from(0x020000), number: 0x01_u64, gas_limit: 0x016345785d8a0000_u64, gas_used: 0x015534_u64, timestamp: 0x079e, - extra_data: Bytes::from_str("42").unwrap(), - mix_hash: H256::from_str("0000000000000000000000000000000000000000000000000000000000000000").unwrap(), + extra_data: bytes!("42"), + mix_hash: b256!("0000000000000000000000000000000000000000000000000000000000000000"), nonce: 0, base_fee_per_gas: Some(0x036b_u64), withdrawals_root: None, @@ -779,7 +778,7 @@ mod tests { // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 #[test] fn test_decode_block_header() { - let data = hex::decode("f901f9a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008208ae820d0582115c8215b3821a0a827788a00000000000000000000000000000000000000000000000000000000000000000880000000000000000").unwrap(); + let data = hex!("f901f9a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000940000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008208ae820d0582115c8215b3821a0a827788a00000000000000000000000000000000000000000000000000000000000000000880000000000000000"); let expected = Header { difficulty: U256::from(0x8aeu64), number: 0xd05u64, @@ -787,10 +786,10 @@ mod tests { gas_used: 0x15b3u64, timestamp: 0x1a0au64, extra_data: Bytes::from_str("7788").unwrap(), - ommers_hash: H256::zero(), - state_root: H256::zero(), - transactions_root: H256::zero(), - receipts_root: H256::zero(), + 
ommers_hash: B256::ZERO, + state_root: B256::ZERO, + transactions_root: B256::ZERO, + receipts_root: B256::ZERO, ..Default::default() }; let header =
::decode(&mut data.as_slice()).unwrap(); @@ -798,7 +797,7 @@ mod tests { // make sure the hash matches let expected_hash = - H256::from_str("8c2f2af15b7b563b6ab1e09bed0e9caade7ed730aec98b70a993597a797579a9") + B256::from_str("8c2f2af15b7b563b6ab1e09bed0e9caade7ed730aec98b70a993597a797579a9") .unwrap(); assert_eq!(header.hash_slow(), expected_hash); } @@ -806,22 +805,22 @@ mod tests { // Test vector from: https://github.com/ethereum/tests/blob/970503935aeb76f59adfa3b3224aabf25e77b83d/BlockchainTests/ValidBlocks/bcExample/shanghaiExample.json#L15-L34 #[test] fn test_decode_block_header_with_withdrawals() { - let data = hex::decode("f9021ca018db39e19931515b30b16b3a92c292398039e31d6c267111529c3f2ba0a26c17a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa095efce3d6972874ca8b531b233b7a1d1ff0a56f08b20c8f1b89bef1b001194a5a071e515dd89e8a7973402c2e11646081b4e2209b2d3a1550df5095289dabcb3fba0ed9c51ea52c968e552e370a77a41dac98606e98b915092fb5f949d6452fce1c4b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008001887fffffffffffffff830125b882079e42a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b42188000000000000000009a027f166f1d7c789251299535cb176ba34116e44894476a7886fe5d73d9be5c973").unwrap(); + let data = hex!("f9021ca018db39e19931515b30b16b3a92c292398039e31d6c267111529c3f2ba0a26c17a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa095efce3d6972874ca8b531b233b7a1d1ff0a56f08b20c8f1b89bef1b001194a5a071e515dd89e8a7973402c2e11646081b4e2209b2d3a1550df5095289dabcb3fba0ed9c51ea52c968e552e370a77a41dac98606e98b915092fb5f949d6452fce1c4b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008001887fffffffffffffff830125b882079e42a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b42188000000000000000009a027f166f1d7c789251299535cb176ba34116e44894476a7886fe5d73d9be5c973"); let expected = Header { - parent_hash: H256::from_str( + parent_hash: B256::from_str( "18db39e19931515b30b16b3a92c292398039e31d6c267111529c3f2ba0a26c17", ) .unwrap(), beneficiary: Address::from_str("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba").unwrap(), - state_root: H256::from_str( + state_root: B256::from_str( "95efce3d6972874ca8b531b233b7a1d1ff0a56f08b20c8f1b89bef1b001194a5", ) .unwrap(), - transactions_root: H256::from_str( + transactions_root: B256::from_str( "71e515dd89e8a7973402c2e11646081b4e2209b2d3a1550df5095289dabcb3fb", ) .unwrap(), - receipts_root: H256::from_str( + receipts_root: B256::from_str( "ed9c51ea52c968e552e370a77a41dac98606e98b915092fb5f949d6452fce1c4", ) .unwrap(), @@ -830,13 
+829,13 @@ mod tests { gas_used: 0x0125b8, timestamp: 0x079e, extra_data: Bytes::from_str("42").unwrap(), - mix_hash: H256::from_str( + mix_hash: B256::from_str( "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", ) .unwrap(), base_fee_per_gas: Some(0x09), withdrawals_root: Some( - H256::from_str("27f166f1d7c789251299535cb176ba34116e44894476a7886fe5d73d9be5c973") + B256::from_str("27f166f1d7c789251299535cb176ba34116e44894476a7886fe5d73d9be5c973") .unwrap(), ), ..Default::default() @@ -845,7 +844,7 @@ mod tests { assert_eq!(header, expected); let expected_hash = - H256::from_str("85fdec94c534fa0a1534720f167b899d1fc268925c71c0cbf5aaa213483f5a69") + B256::from_str("85fdec94c534fa0a1534720f167b899d1fc268925c71c0cbf5aaa213483f5a69") .unwrap(); assert_eq!(header.hash_slow(), expected_hash); } @@ -853,26 +852,26 @@ mod tests { // Test vector from: https://github.com/ethereum/tests/blob/7e9e0940c0fcdbead8af3078ede70f969109bd85/BlockchainTests/ValidBlocks/bcExample/cancunExample.json #[test] fn test_decode_block_header_with_blob_fields_ef_tests() { - let data = hex::decode("f90221a03a9b485972e7353edd9152712492f0c58d89ef80623686b6bf947a4a6dce6cb6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa03c837fc158e3e93eafcaf2e658a02f5d8f99abc9f1c4c66cdea96c0ca26406aea04409cc4b699384ba5f8248d92b784713610c5ff9c1de51e9239da0dac76de9cea046cab26abf1047b5b119ecc2dda1296b071766c8b1307e1381fcecc90d513d86b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008001887fffffffffffffff8302a86582079e42a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b42188000000000000000009a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b4218302000080").unwrap(); + let data = hex!("f90221a03a9b485972e7353edd9152712492f0c58d89ef80623686b6bf947a4a6dce6cb6a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa03c837fc158e3e93eafcaf2e658a02f5d8f99abc9f1c4c66cdea96c0ca26406aea04409cc4b699384ba5f8248d92b784713610c5ff9c1de51e9239da0dac76de9cea046cab26abf1047b5b119ecc2dda1296b071766c8b1307e1381fcecc90d513d86b90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008001887fffffffffffffff8302a86582079e42a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b42188000000000000000009a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b4218302000080"); let expected = Header { - parent_hash: H256::from_str( + parent_hash: B256::from_str( "3a9b485972e7353edd9152712492f0c58d89ef80623686b6bf947a4a6dce6cb6", ) .unwrap(), - ommers_hash: H256::from_str( + ommers_hash: B256::from_str( 
"1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", ) .unwrap(), beneficiary: Address::from_str("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba").unwrap(), - state_root: H256::from_str( + state_root: B256::from_str( "3c837fc158e3e93eafcaf2e658a02f5d8f99abc9f1c4c66cdea96c0ca26406ae", ) .unwrap(), - transactions_root: H256::from_str( + transactions_root: B256::from_str( "4409cc4b699384ba5f8248d92b784713610c5ff9c1de51e9239da0dac76de9ce", ) .unwrap(), - receipts_root: H256::from_str( + receipts_root: B256::from_str( "46cab26abf1047b5b119ecc2dda1296b071766c8b1307e1381fcecc90d513d86", ) .unwrap(), @@ -883,14 +882,14 @@ mod tests { gas_used: 0x02a865, timestamp: 0x079e, extra_data: Bytes::from(vec![0x42]), - mix_hash: H256::from_str( + mix_hash: B256::from_str( "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", ) .unwrap(), nonce: 0, base_fee_per_gas: Some(9), withdrawals_root: Some( - H256::from_str("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") + B256::from_str("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") .unwrap(), ), blob_gas_used: Some(0x020000), @@ -902,7 +901,7 @@ mod tests { assert_eq!(header, expected); let expected_hash = - H256::from_str("0x10aca3ebb4cf6ddd9e945a5db19385f9c105ede7374380c50d56384c3d233785") + B256::from_str("0x10aca3ebb4cf6ddd9e945a5db19385f9c105ede7374380c50d56384c3d233785") .unwrap(); assert_eq!(header.hash_slow(), expected_hash); } @@ -910,48 +909,34 @@ mod tests { #[test] fn test_decode_block_header_with_blob_fields() { // Block from devnet-7 - let data = hex::decode("f90239a013a7ec98912f917b3e804654e37c9866092043c13eb8eab94eb64818e886cff5a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794f97e180c050e5ab072211ad2c213eb5aee4df134a0ec229dbe85b0d3643ad0f471e6ec1a36bbc87deffbbd970762d22a53b35d068aa056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080830305988401c9c380808464c40d5499d883010c01846765746888676f312e32302e35856c696e7578a070ccadc40b16e2094954b1064749cc6fbac783c1712f1b271a8aac3eda2f232588000000000000000007a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421808401600000").unwrap(); + let data = 
hex!("f90239a013a7ec98912f917b3e804654e37c9866092043c13eb8eab94eb64818e886cff5a01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d4934794f97e180c050e5ab072211ad2c213eb5aee4df134a0ec229dbe85b0d3643ad0f471e6ec1a36bbc87deffbbd970762d22a53b35d068aa056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421b901000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000080830305988401c9c380808464c40d5499d883010c01846765746888676f312e32302e35856c696e7578a070ccadc40b16e2094954b1064749cc6fbac783c1712f1b271a8aac3eda2f232588000000000000000007a056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421808401600000"); let expected = Header { - parent_hash: H256::from_str( + parent_hash: B256::from_str( "13a7ec98912f917b3e804654e37c9866092043c13eb8eab94eb64818e886cff5", ) .unwrap(), - ommers_hash: H256::from_str( - "1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347", - ) - .unwrap(), - beneficiary: Address::from_str("f97e180c050e5ab072211ad2c213eb5aee4df134").unwrap(), - state_root: H256::from_str( - "ec229dbe85b0d3643ad0f471e6ec1a36bbc87deffbbd970762d22a53b35d068a", - ) - .unwrap(), - transactions_root: H256::from_str( - "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - ) - .unwrap(), - receipts_root: H256::from_str( - "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421", - ) - .unwrap(), + ommers_hash: b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"), + beneficiary: address!("f97e180c050e5ab072211ad2c213eb5aee4df134"), + state_root: b256!("ec229dbe85b0d3643ad0f471e6ec1a36bbc87deffbbd970762d22a53b35d068a"), + transactions_root: b256!( + "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" + ), + receipts_root: b256!( + "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" + ), logs_bloom: Default::default(), difficulty: U256::from(0), number: 0x30598, gas_limit: 0x1c9c380, gas_used: 0, timestamp: 0x64c40d54, - extra_data: Bytes::from( - hex::decode("d883010c01846765746888676f312e32302e35856c696e7578").unwrap(), - ), - mix_hash: H256::from_str( - "70ccadc40b16e2094954b1064749cc6fbac783c1712f1b271a8aac3eda2f2325", - ) - .unwrap(), + extra_data: bytes!("d883010c01846765746888676f312e32302e35856c696e7578"), + mix_hash: b256!("70ccadc40b16e2094954b1064749cc6fbac783c1712f1b271a8aac3eda2f2325"), nonce: 0, base_fee_per_gas: Some(7), - withdrawals_root: Some( - H256::from_str("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421") - .unwrap(), - ), + withdrawals_root: Some(b256!( + "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" + )), parent_beacon_block_root: None, blob_gas_used: Some(0), excess_blob_gas: Some(0x1600000), @@ -961,8 +946,7 @@ mod tests { assert_eq!(header, expected); let expected_hash = - H256::from_str("0x539c9ea0a3ca49808799d3964b8b6607037227de26bc51073c6926963127087b") - .unwrap(); + b256!("539c9ea0a3ca49808799d3964b8b6607037227de26bc51073c6926963127087b"); assert_eq!(header.hash_slow(), expected_hash); } diff --git 
a/crates/primitives/src/hex_bytes.rs b/crates/primitives/src/hex_bytes.rs deleted file mode 100644 index 0e2db39e55..0000000000 --- a/crates/primitives/src/hex_bytes.rs +++ /dev/null @@ -1,358 +0,0 @@ -use crate::serde_helper::hex_bytes; -use bytes::Buf; -use reth_codecs::Compact; -use reth_rlp::{Decodable, DecodeError, Encodable}; -use serde::{Deserialize, Serialize}; -use std::{ - borrow::Borrow, - clone::Clone, - fmt::{Debug, Display, Formatter, LowerHex, Result as FmtResult}, - ops::Deref, - str::FromStr, -}; -use thiserror::Error; - -/// Wrapper type around Bytes to deserialize/serialize "0x" prefixed ethereum hex strings -#[derive(Clone, Default, PartialEq, Eq, Hash, Ord, PartialOrd, Serialize, Deserialize)] -pub struct Bytes(#[serde(with = "hex_bytes")] pub bytes::Bytes); - -fn bytes_to_hex(b: &Bytes) -> String { - hex::encode(b.0.as_ref()) -} - -impl Debug for Bytes { - fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { - write!(f, "Bytes(0x{})", bytes_to_hex(self)) - } -} - -impl Display for Bytes { - fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { - write!(f, "0x{}", bytes_to_hex(self)) - } -} - -impl LowerHex for Bytes { - fn fmt(&self, f: &mut Formatter<'_>) -> FmtResult { - write!(f, "0x{}", bytes_to_hex(self)) - } -} - -impl Bytes { - /// Return bytes as [`Vec::`] - pub fn to_vec(&self) -> Vec { - self.as_ref().to_vec() - } -} - -impl Deref for Bytes { - type Target = [u8]; - - #[inline] - fn deref(&self) -> &[u8] { - self.as_ref() - } -} - -impl AsRef<[u8]> for Bytes { - fn as_ref(&self) -> &[u8] { - self.0.as_ref() - } -} - -impl Borrow<[u8]> for Bytes { - fn borrow(&self) -> &[u8] { - self.as_ref() - } -} - -impl IntoIterator for Bytes { - type Item = u8; - type IntoIter = bytes::buf::IntoIter; - - fn into_iter(self) -> Self::IntoIter { - self.0.into_iter() - } -} - -impl<'a> IntoIterator for &'a Bytes { - type Item = &'a u8; - type IntoIter = core::slice::Iter<'a, u8>; - - fn into_iter(self) -> Self::IntoIter { - self.as_ref().iter() - } -} - -impl From<&[u8]> for Bytes { - fn from(src: &[u8]) -> Self { - Self(bytes::Bytes::copy_from_slice(src)) - } -} - -impl From for Bytes { - fn from(src: bytes::Bytes) -> Self { - Self(src) - } -} - -impl From for bytes::Bytes { - fn from(src: Bytes) -> Self { - src.0 - } -} - -impl From> for Bytes { - fn from(src: Vec) -> Self { - Self(src.into()) - } -} - -impl From<[u8; N]> for Bytes { - fn from(src: [u8; N]) -> Self { - src.to_vec().into() - } -} - -impl<'a, const N: usize> From<&'a [u8; N]> for Bytes { - fn from(src: &'a [u8; N]) -> Self { - src.to_vec().into() - } -} - -impl PartialEq<[u8]> for Bytes { - fn eq(&self, other: &[u8]) -> bool { - self.as_ref() == other - } -} - -impl PartialEq for [u8] { - fn eq(&self, other: &Bytes) -> bool { - *other == *self - } -} - -impl PartialEq> for Bytes { - fn eq(&self, other: &Vec) -> bool { - self.as_ref() == &other[..] 
- } -} - -impl PartialEq for Vec { - fn eq(&self, other: &Bytes) -> bool { - *other == *self - } -} - -impl PartialEq for Bytes { - fn eq(&self, other: &bytes::Bytes) -> bool { - other == self.as_ref() - } -} - -impl Encodable for Bytes { - fn encode(&self, out: &mut dyn bytes::BufMut) { - self.0.encode(out) - } - fn length(&self) -> usize { - self.0.length() - } -} - -impl Decodable for Bytes { - fn decode(buf: &mut &[u8]) -> Result { - Ok(Self(bytes::Bytes::decode(buf)?)) - } -} - -#[derive(Debug, Clone, Error)] -#[error("Failed to parse bytes: {0}")] -pub struct ParseBytesError(String); - -impl FromStr for Bytes { - type Err = ParseBytesError; - - fn from_str(value: &str) -> Result { - if let Some(value) = value.strip_prefix("0x") { - hex::decode(value) - } else { - hex::decode(value) - } - .map(Into::into) - .map_err(|e| ParseBytesError(format!("Invalid hex: {e}"))) - } -} - -impl Compact for Bytes { - fn to_compact(self, buf: &mut B) -> usize - where - B: bytes::BufMut + AsMut<[u8]>, - { - let len = self.len(); - buf.put(self.0); - len - } - fn from_compact(mut buf: &[u8], len: usize) -> (Self, &[u8]) { - (buf.copy_to_bytes(len).into(), buf) - } -} - -#[cfg(any(test, feature = "arbitrary"))] -use proptest::strategy::Strategy; -#[cfg(any(test, feature = "arbitrary"))] -impl proptest::prelude::Arbitrary for Bytes { - type Parameters = proptest::arbitrary::ParamsFor; - fn arbitrary_with(args: Self::Parameters) -> Self::Strategy { - proptest::collection::vec(proptest::arbitrary::any_with::(args), 0..80) - .prop_map(move |vec| bytes::Bytes::from(vec).into()) - .boxed() - } - - type Strategy = proptest::prelude::BoxedStrategy; -} - -#[cfg(any(test, feature = "arbitrary"))] -impl<'a> arbitrary::Arbitrary<'a> for Bytes { - fn arbitrary(u: &mut arbitrary::Unstructured<'a>) -> arbitrary::Result { - let size = u.int_in_range(0..=80)?; - Ok(Self(bytes::Bytes::copy_from_slice(u.bytes(size)?))) - } -} - -#[cfg(test)] -mod tests { - use super::*; - - #[test] - fn test_from_bytes() { - let b = bytes::Bytes::from("0123456789abcdef"); - let wrapped_b = Bytes::from(b.clone()); - let expected = Bytes(b); - - assert_eq!(wrapped_b, expected); - } - - #[test] - fn test_from_slice() { - let arr = [1, 35, 69, 103, 137, 171, 205, 239]; - let b = Bytes::from(&arr); - let expected = Bytes(bytes::Bytes::from(arr.to_vec())); - - assert_eq!(b, expected); - } - - #[test] - fn hex_formatting() { - let b = Bytes::from(vec![1, 35, 69, 103, 137, 171, 205, 239]); - let expected = String::from("0x0123456789abcdef"); - assert_eq!(format!("{b:x}"), expected); - assert_eq!(format!("{b}"), expected); - } - - #[test] - fn test_from_str() { - let b = Bytes::from_str("0x1213"); - assert!(b.is_ok()); - let b = b.unwrap(); - assert_eq!(b.as_ref(), hex::decode("1213").unwrap()); - - let b = Bytes::from_str("1213"); - let b = b.unwrap(); - assert_eq!(b.as_ref(), hex::decode("1213").unwrap()); - } - - #[test] - fn test_debug_formatting() { - let b = Bytes::from(vec![1, 35, 69, 103, 137, 171, 205, 239]); - assert_eq!(format!("{b:?}"), "Bytes(0x0123456789abcdef)"); - assert_eq!(format!("{b:#?}"), "Bytes(0x0123456789abcdef)"); - } - - #[test] - fn test_to_vec() { - let vec = vec![1, 35, 69, 103, 137, 171, 205, 239]; - let b = Bytes::from(vec.clone()); - - assert_eq!(b.to_vec(), vec); - } - - #[test] - fn test_encodable_length_lt_56() { - let b = Bytes::from(vec![1, 35, 69, 103, 137, 171, 205, 239]); - // since the payload length is less than 56, this should give the length - // of the array + 1 = 9 - assert_eq!(b.length(), 9); - } - - 
#[test] - fn test_encodable_length_gt_56() { - let b = Bytes::from(vec![255; 57]); - // since the payload length is greater than 56, this should give the length - // of the array + (1 + 8 - payload_length.leading_zeros() as usize / 8) = 59 - assert_eq!(b.length(), 59); - } - - #[test] - fn test_encodable_encode() { - let b = Bytes::from(vec![1, 35, 69, 103, 137, 171, 205, 239]); - let mut buf = Vec::new(); - b.encode(&mut buf); - let expected: Vec = vec![136, 1, 35, 69, 103, 137, 171, 205, 239]; - assert_eq!(buf, expected); - } - - #[test] - fn test_decodable_decode() { - let buf: Vec = vec![136, 1, 35, 69, 103, 137, 171, 205, 239]; - let b = Bytes::decode(&mut &buf[..]).unwrap(); - let expected = Bytes::from(vec![1, 35, 69, 103, 137, 171, 205, 239]); - assert_eq!(b, expected); - } - - #[test] - fn test_vec_partialeq() { - let vec = vec![1, 35, 69, 103, 137, 171, 205, 239]; - let b = Bytes::from(vec.clone()); - assert_eq!(b, vec); - assert_eq!(vec, b); - - let wrong_vec = vec![1, 3, 52, 137]; - assert_ne!(b, wrong_vec); - assert_ne!(wrong_vec, b); - } - - #[test] - fn test_slice_partialeq() { - let vec = vec![1, 35, 69, 103, 137, 171, 205, 239]; - let b = Bytes::from(vec.clone()); - assert_eq!(b, vec[..]); - assert_eq!(vec[..], b); - - let wrong_vec = [1, 3, 52, 137]; - assert_ne!(b, wrong_vec[..]); - assert_ne!(wrong_vec[..], b); - } - - #[test] - fn test_bytes_partialeq() { - let b = bytes::Bytes::from("0123456789abcdef"); - let wrapped_b = Bytes::from(b.clone()); - assert_eq!(wrapped_b, b); - - let wrong_b = bytes::Bytes::from("0123absd"); - assert_ne!(wrong_b, b); - } - - #[test] - fn arbitrary() { - proptest::proptest!(|(bytes: Bytes)| { - let mut buf = vec![]; - bytes.clone().to_compact(&mut buf); - - let (decoded, remaining_buf) = Bytes::from_compact(&buf, buf.len()); - - assert!(bytes == decoded); - assert!(remaining_buf.is_empty()); - }); - } -} diff --git a/crates/primitives/src/lib.rs b/crates/primitives/src/lib.rs index bc1340cb8a..a6244451d7 100644 --- a/crates/primitives/src/lib.rs +++ b/crates/primitives/src/lib.rs @@ -10,19 +10,16 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] #![allow(clippy::non_canonical_clone_impl)] -pub mod abi; mod account; pub mod basefee; -mod bits; mod block; -pub mod bloom; mod chain; mod compression; pub mod constants; @@ -33,33 +30,31 @@ pub mod fs; mod genesis; mod hardfork; mod header; -mod hex_bytes; mod integer_list; pub mod listener; mod log; mod net; mod peer; +pub mod proofs; mod prune; mod receipt; +pub mod serde_helper; pub mod stage; mod storage; mod transaction; pub mod trie; mod withdrawal; -/// Helper function for calculating Merkle proofs and hashes -pub mod proofs; - pub use account::{Account, Bytecode}; -pub use bits::H512; pub use block::{ Block, BlockBody, BlockBodyRoots, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumberOrTag, BlockWithSenders, ForkBlock, SealedBlock, SealedBlockWithSenders, }; -pub use bloom::Bloom; +pub use bytes::{Buf, BufMut, BytesMut}; pub use chain::{ AllGenesisFormats, BaseFeeParams, Chain, ChainInfo, ChainSpec, ChainSpecBuilder, - 
DisplayHardforks, ForkCondition, ForkTimestamps, DEV, GOERLI, HOLESKY, MAINNET, SEPOLIA, + DisplayHardforks, ForkCondition, ForkTimestamps, NamedChain, DEV, GOERLI, HOLESKY, MAINNET, + SEPOLIA, }; pub use compression::*; pub use constants::{ @@ -71,9 +66,8 @@ pub use forkid::{ForkFilter, ForkHash, ForkId, ForkTransition, ValidationError}; pub use genesis::{Genesis, GenesisAccount}; pub use hardfork::Hardfork; pub use header::{Head, Header, HeadersDirection, SealedHeader}; -pub use hex_bytes::Bytes; pub use integer_list::IntegerList; -pub use log::Log; +pub use log::{logs_bloom, Log}; pub use net::{ goerli_nodes, holesky_nodes, mainnet_nodes, sepolia_nodes, NodeRecord, GOERLI_BOOTNODES, HOLESKY_BOOTNODES, MAINNET_BOOTNODES, SEPOLIA_BOOTNODES, @@ -83,8 +77,7 @@ pub use prune::{ PruneBatchSizes, PruneCheckpoint, PruneMode, PruneModes, PrunePart, PrunePartError, ReceiptsLogPruneConfig, MINIMUM_PRUNING_DISTANCE, }; -pub use receipt::{Receipt, ReceiptWithBloom, ReceiptWithBloomRef}; -pub use revm_primitives::JumpMap; +pub use receipt::{Receipt, ReceiptWithBloom, ReceiptWithBloomRef, Receipts}; pub use serde_helper::JsonU256; pub use storage::StorageEntry; pub use transaction::{ @@ -99,73 +92,35 @@ pub use transaction::{ }; pub use withdrawal::Withdrawal; -/// A block hash. -pub type BlockHash = H256; -/// A block number. -pub type BlockNumber = u64; -/// An Ethereum address. -pub type Address = H160; -/// A transaction hash is a kecack hash of an RLP encoded signed transaction. -pub type TxHash = H256; -/// The sequence number of all existing transactions. -pub type TxNumber = u64; -/// The index of transaction in a block. -pub type TxIndex = u64; -/// Chain identifier type (introduced in EIP-155). -pub type ChainId = u64; -/// An account storage key. -pub type StorageKey = H256; -/// An account storage value. -pub type StorageValue = U256; -/// Solidity contract functions are addressed using the first four byte of the Keccak-256 hash of -/// their signature -pub type Selector = [u8; 4]; - -pub use ethers_core::{ - types::{BigEndianHash, H128, H64, U64}, - utils as rpc_utils, -}; -pub use revm_primitives::{B160 as H160, B256 as H256, U256}; -pub use ruint::{ - aliases::{U128, U8}, - UintTryTo, +// Re-exports +pub use self::ruint::UintTryTo; +pub use alloy_primitives::{ + self, address, b256, bloom, bytes, eip191_hash_message, hex, hex_literal, keccak256, ruint, + Address, BlockHash, BlockNumber, Bloom, BloomInput, Bytes, ChainId, Selector, StorageKey, + StorageValue, TxHash, TxIndex, TxNumber, B128, B256, B512, B64, U128, U256, U64, U8, }; +pub use revm_primitives::{self, JumpMap}; #[doc(hidden)] -mod __reexport { - pub use bytes; - pub use hex; - pub use hex_literal; - pub use tiny_keccak; -} +#[deprecated = "use B128 instead"] +pub type H128 = B128; -// Useful reexports -pub use __reexport::*; +#[doc(hidden)] +#[deprecated = "use B256 instead"] +pub type H256 = B256; -/// Various utilities -pub mod utils { - pub use ethers_core::types::serde_helpers; -} +#[doc(hidden)] +#[deprecated = "use B512 instead"] +pub type H512 = B512; + +#[doc(hidden)] +#[deprecated = "use B64 instead"] +pub type H64 = B64; + +#[cfg(any(test, feature = "arbitrary"))] +pub use arbitrary; /// EIP-4844 + KZG helpers pub mod kzg { pub use c_kzg::*; } - -/// Helpers for working with serde -pub mod serde_helper; - -/// Returns the keccak256 hash for the given data. 
-#[inline] -pub fn keccak256(data: impl AsRef<[u8]>) -> H256 { - use tiny_keccak::{Hasher, Keccak}; - - let mut buf = [0u8; 32]; - let mut hasher = Keccak::v256(); - hasher.update(data.as_ref()); - hasher.finalize(&mut buf); - buf.into() -} - -#[cfg(any(test, feature = "arbitrary"))] -pub use arbitrary; diff --git a/crates/primitives/src/log.rs b/crates/primitives/src/log.rs index 3b72967b68..0ac8109b05 100644 --- a/crates/primitives/src/log.rs +++ b/crates/primitives/src/log.rs @@ -1,6 +1,6 @@ -use crate::{Address, Bytes, H256}; +use crate::{Address, Bloom, Bytes, B256}; +use alloy_rlp::{RlpDecodable, RlpEncodable}; use reth_codecs::{main_codec, Compact}; -use reth_rlp::{RlpDecodable, RlpEncodable}; /// Ethereum Log #[main_codec(rlp)] @@ -12,10 +12,25 @@ pub struct Log { #[cfg_attr( any(test, feature = "arbitrary"), proptest( - strategy = "proptest::collection::vec(proptest::arbitrary::any::(), 0..=5)" + strategy = "proptest::collection::vec(proptest::arbitrary::any::(), 0..=5)" ) )] - pub topics: Vec, + pub topics: Vec, /// Arbitrary length data. pub data: Bytes, } + +/// Calculate receipt logs bloom. +pub fn logs_bloom<'a, It>(logs: It) -> Bloom +where + It: IntoIterator, +{ + let mut bloom = Bloom::ZERO; + for log in logs { + bloom.m3_2048(log.address.as_slice()); + for topic in &log.topics { + bloom.m3_2048(topic.as_slice()); + } + } + bloom +} diff --git a/crates/primitives/src/net.rs b/crates/primitives/src/net.rs index b6b3d4f37f..eeec5abae0 100644 --- a/crates/primitives/src/net.rs +++ b/crates/primitives/src/net.rs @@ -1,6 +1,5 @@ use crate::PeerId; -use reth_rlp::RlpDecodable; -use reth_rlp_derive::RlpEncodable; +use alloy_rlp::{RlpDecodable, RlpEncodable}; use secp256k1::{SecretKey, SECP256K1}; use serde_with::{DeserializeFromStr, SerializeDisplay}; use std::{ @@ -91,7 +90,7 @@ impl NodeRecord { impl fmt::Display for NodeRecord { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { f.write_str("enode://")?; - hex::encode(self.id.as_bytes()).fmt(f)?; + crate::hex::encode(self.id.as_slice()).fmt(f)?; f.write_char('@')?; match self.address { IpAddr::V4(ip) => { @@ -232,9 +231,9 @@ impl FromStr for NodeRecord { #[cfg(test)] mod tests { use super::*; + use alloy_rlp::{Decodable, Encodable}; use bytes::BytesMut; use rand::{thread_rng, Rng, RngCore}; - use reth_rlp::{Decodable, Encodable}; #[test] fn test_mapped_ipv6() { @@ -247,7 +246,7 @@ mod tests { address: v6.into(), tcp_port: rng.gen(), udp_port: rng.gen(), - id: PeerId::random(), + id: rng.gen(), }; assert!(record.clone().convert_ipv4_mapped()); @@ -263,7 +262,7 @@ mod tests { address: v4.into(), tcp_port: rng.gen(), udp_port: rng.gen(), - id: PeerId::random(), + id: rng.gen(), }; assert!(!record.clone().convert_ipv4_mapped()); @@ -280,7 +279,7 @@ mod tests { address: IpAddr::V4(ip.into()), tcp_port: rng.gen(), udp_port: rng.gen(), - id: PeerId::random(), + id: rng.gen(), }; let mut buf = BytesMut::new(); @@ -301,7 +300,7 @@ mod tests { address: IpAddr::V6(ip.into()), tcp_port: rng.gen(), udp_port: rng.gen(), - id: PeerId::random(), + id: rng.gen(), }; let mut buf = BytesMut::new(); diff --git a/crates/primitives/src/peer.rs b/crates/primitives/src/peer.rs index 531d16a2d3..af2f6c9d83 100644 --- a/crates/primitives/src/peer.rs +++ b/crates/primitives/src/peer.rs @@ -1,10 +1,10 @@ -use crate::H512; +use crate::B512; // TODO: should we use `PublicKey` for this? Even when dealing with public keys we should try to // prevent misuse /// This represents an uncompressed secp256k1 public key. 
/// This encodes the concatenation of the x and y components of the affine point in bytes. -pub type PeerId = H512; +pub type PeerId = B512; /// Generic wrapper with peer id #[derive(Debug)] diff --git a/crates/primitives/src/proofs.rs b/crates/primitives/src/proofs.rs index a46fd3b570..809052c273 100644 --- a/crates/primitives/src/proofs.rs +++ b/crates/primitives/src/proofs.rs @@ -1,22 +1,23 @@ +//! Helper function for calculating Merkle proofs and hashes. + use crate::{ - keccak256, + b256, keccak256, trie::{HashBuilder, Nibbles}, Address, GenesisAccount, Header, Log, ReceiptWithBloom, ReceiptWithBloomRef, TransactionSigned, - Withdrawal, H256, + Withdrawal, B256, }; +use alloy_rlp::Encodable; use bytes::{BufMut, BytesMut}; -use hex_literal::hex; use itertools::Itertools; -use reth_rlp::Encodable; use std::collections::HashMap; /// Keccak-256 hash of the RLP of an empty list, KEC("\xc0"). -pub const EMPTY_LIST_HASH: H256 = - H256(hex!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347")); +pub const EMPTY_LIST_HASH: B256 = + b256!("1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"); /// Root hash of an empty trie. -pub const EMPTY_ROOT: H256 = - H256(hex!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421")); +pub const EMPTY_ROOT: B256 = + b256!("56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"); /// Adjust the index of an item for rlp encoding. pub const fn adjust_index_for_rlp(i: usize, len: usize) -> usize { @@ -30,12 +31,12 @@ pub const fn adjust_index_for_rlp(i: usize, len: usize) -> usize { } /// Compute a trie root of the collection of rlp encodable items. -pub fn ordered_trie_root(items: &[T]) -> H256 { +pub fn ordered_trie_root(items: &[T]) -> B256 { ordered_trie_root_with_encoder(items, |item, buf| item.encode(buf)) } /// Compute a trie root of the collection of items with a custom encoder. -pub fn ordered_trie_root_with_encoder(items: &[T], mut encode: F) -> H256 +pub fn ordered_trie_root_with_encoder(items: &[T], mut encode: F) -> B256 where F: FnMut(&T, &mut dyn BufMut), { @@ -62,7 +63,7 @@ where /// Calculate a transaction root. /// /// `(rlp(index), encoded(tx))` pairs. -pub fn calculate_transaction_root(transactions: &[T]) -> H256 +pub fn calculate_transaction_root(transactions: &[T]) -> B256 where T: AsRef, { @@ -70,19 +71,19 @@ where } /// Calculates the root hash of the withdrawals. -pub fn calculate_withdrawals_root(withdrawals: &[Withdrawal]) -> H256 { +pub fn calculate_withdrawals_root(withdrawals: &[Withdrawal]) -> B256 { ordered_trie_root(withdrawals) } /// Calculates the receipt root for a header. -pub fn calculate_receipt_root(receipts: &[ReceiptWithBloom]) -> H256 { +pub fn calculate_receipt_root(receipts: &[ReceiptWithBloom]) -> B256 { ordered_trie_root_with_encoder(receipts, |r, buf| r.encode_inner(buf, false)) } /// Calculates the receipt root for a header for the reference type of [ReceiptWithBloom]. /// /// NOTE: Prefer [calculate_receipt_root] if you have log blooms memoized. -pub fn calculate_receipt_root_ref(receipts: &[&T]) -> H256 +pub fn calculate_receipt_root_ref(receipts: &[&T]) -> B256 where for<'a> ReceiptWithBloomRef<'a>: From<&'a T>, { @@ -92,24 +93,24 @@ where } /// Calculates the log root for headers. 
-pub fn calculate_log_root(logs: &[Log]) -> H256 { +pub fn calculate_log_root(logs: &[Log]) -> B256 { //https://github.com/ethereum/go-ethereum/blob/356bbe343a30789e77bb38f25983c8f2f2bfbb47/cmd/evm/internal/t8ntool/execution.go#L255 let mut logs_rlp = Vec::new(); - reth_rlp::encode_list(logs, &mut logs_rlp); + alloy_rlp::encode_list(logs, &mut logs_rlp); keccak256(logs_rlp) } /// Calculates the root hash for ommer/uncle headers. -pub fn calculate_ommers_root(ommers: &[Header]) -> H256 { +pub fn calculate_ommers_root(ommers: &[Header]) -> B256 { // RLP Encode let mut ommers_rlp = Vec::new(); - reth_rlp::encode_list(ommers, &mut ommers_rlp); + alloy_rlp::encode_list(ommers, &mut ommers_rlp); keccak256(ommers_rlp) } /// Calculates the root hash for the state, this corresponds to [geth's /// `deriveHash`](https://github.com/ethereum/go-ethereum/blob/6c149fd4ad063f7c24d726a73bc0546badd1bc73/core/genesis.go#L119). -pub fn genesis_state_root(genesis_alloc: &HashMap) -> H256 { +pub fn genesis_state_root(genesis_alloc: &HashMap) -> B256 { let accounts_with_sorted_hashed_keys = genesis_alloc .iter() .map(|(address, account)| (keccak256(address), account)) @@ -130,7 +131,7 @@ pub fn genesis_state_root(genesis_alloc: &HashMap) -> H /// for compatibility with `triehash` crate. #[cfg(any(test, feature = "test-utils"))] pub mod triehash { - use super::{keccak256, H256}; + use super::{keccak256, B256}; use hash_db::Hasher; use plain_hasher::PlainHasher; @@ -141,7 +142,7 @@ pub mod triehash { #[cfg(any(test, feature = "test-utils"))] impl Hasher for KeccakHasher { - type Out = H256; + type Out = B256; type StdHasher = PlainHasher; const LENGTH: usize = 32; @@ -154,15 +155,11 @@ pub mod triehash { #[cfg(test)] mod tests { - use super::{calculate_withdrawals_root, EMPTY_ROOT}; + use super::*; use crate::{ - hex_literal::hex, - proofs::{calculate_receipt_root, calculate_transaction_root, genesis_state_root}, - Address, Block, Bloom, GenesisAccount, Log, Receipt, ReceiptWithBloom, TxType, GOERLI, - H160, H256, HOLESKY, MAINNET, SEPOLIA, U256, + bloom, hex, Block, Receipt, TxType, B256, GOERLI, HOLESKY, MAINNET, SEPOLIA, U256, }; - use reth_rlp::Decodable; - use std::collections::HashMap; + use alloy_rlp::Decodable; #[test] fn check_transaction_root() { @@ -176,8 +173,8 @@ mod tests { #[test] fn check_receipt_root() { - let logs = vec![Log { address: H160::zero(), topics: vec![], data: Default::default() }]; - let bloom = Bloom(hex!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001")); + let logs = vec![Log { address: Address::ZERO, topics: vec![], data: Default::default() }]; + let bloom = 
bloom!("00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000001"); let receipt = ReceiptWithBloom { receipt: Receipt { tx_type: TxType::EIP2930, @@ -189,10 +186,7 @@ mod tests { }; let receipt = vec![receipt]; let root = calculate_receipt_root(&receipt); - assert_eq!( - root, - H256(hex!("fe70ae4a136d98944951b2123859698d59ad251a381abc9960fa81cae3d0d4a0")) - ); + assert_eq!(root, b256!("fe70ae4a136d98944951b2123859698d59ad251a381abc9960fa81cae3d0d4a0")); } #[test] @@ -231,7 +225,7 @@ mod tests { // with a maximum balance, and is the only account in the state. // these test cases are generated by using geth with a custom genesis.json (with a single // account that has max balance) - let fixtures: Vec<(Address, H256)> = vec![ + let fixtures: Vec<(Address, B256)> = vec![ ( hex!("9fe4abd71ad081f091bd06dd1c16f7e92927561e").into(), hex!("4b35be4231841d212ce2fa43aedbddeadd6eb7d420195664f9f0d55629db8c32").into(), @@ -257,7 +251,7 @@ mod tests { #[test] fn test_chain_state_roots() { let expected_mainnet_state_root = - H256::from(hex!("d7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544")); + b256!("d7f8974fb5ac78d9ac099b9ad5018bedc2ce0a72dad1827a1709da30580f0544"); let calculated_mainnet_state_root = genesis_state_root(&MAINNET.genesis.alloc); assert_eq!( expected_mainnet_state_root, calculated_mainnet_state_root, @@ -265,7 +259,7 @@ mod tests { ); let expected_goerli_state_root = - H256::from(hex!("5d6cded585e73c4e322c30c2f782a336316f17dd85a4863b9d838d2d4b8b3008")); + b256!("5d6cded585e73c4e322c30c2f782a336316f17dd85a4863b9d838d2d4b8b3008"); let calculated_goerli_state_root = genesis_state_root(&GOERLI.genesis.alloc); assert_eq!( expected_goerli_state_root, calculated_goerli_state_root, @@ -273,7 +267,7 @@ mod tests { ); let expected_sepolia_state_root = - H256::from(hex!("5eb6e371a698b8d68f665192350ffcecbbbf322916f4b51bd79bb6887da3f494")); + b256!("5eb6e371a698b8d68f665192350ffcecbbbf322916f4b51bd79bb6887da3f494"); let calculated_sepolia_state_root = genesis_state_root(&SEPOLIA.genesis.alloc); assert_eq!( expected_sepolia_state_root, calculated_sepolia_state_root, @@ -281,7 +275,7 @@ mod tests { ); let expected_holesky_state_root = - H256::from(hex!("69d8c9d72f6fa4ad42d4702b433707212f90db395eb54dc20bc85de253788783")); + b256!("69d8c9d72f6fa4ad42d4702b433707212f90db395eb54dc20bc85de253788783"); let calculated_holesky_state_root = genesis_state_root(&HOLESKY.genesis.alloc); assert_eq!( expected_holesky_state_root, calculated_holesky_state_root, diff --git a/crates/primitives/src/prune/mode.rs b/crates/primitives/src/prune/mode.rs index a8bd046a76..e31f519fd5 100644 --- a/crates/primitives/src/prune/mode.rs +++ b/crates/primitives/src/prune/mode.rs @@ -71,7 +71,7 @@ mod tests { #[test] fn test_prune_target_block() { - let tip = 1000; + let tip = 20000; let min_blocks = MINIMUM_PRUNING_DISTANCE; let prune_part = PrunePart::Receipts; @@ -91,7 +91,6 @@ mod tests { PruneMode::Before(tip - MINIMUM_PRUNING_DISTANCE - 1), Ok(Some(tip - MINIMUM_PRUNING_DISTANCE - 2)), ), - // MINIMUM_PRUNING_DISTANCE is 128 (PruneMode::Before(tip - 1), 
Err(PrunePartError::Configuration(prune_part))), ]; @@ -113,7 +112,7 @@ mod tests { #[test] fn test_should_prune() { - let tip = 1000; + let tip = 20000; let should_prune = true; let tests = vec![ diff --git a/crates/primitives/src/prune/part.rs b/crates/primitives/src/prune/part.rs index 7d1139c257..d1feb8995a 100644 --- a/crates/primitives/src/prune/part.rs +++ b/crates/primitives/src/prune/part.rs @@ -26,6 +26,9 @@ pub enum PrunePartError { /// Invalid configuration of a prune part. #[error("The configuration provided for {0} is invalid.")] Configuration(PrunePart), + /// Receipts have been pruned + #[error("Receipts have been pruned")] + ReceiptsPruned, } #[cfg(test)] diff --git a/crates/primitives/src/prune/target.rs b/crates/primitives/src/prune/target.rs index e715c94ed7..590b505d16 100644 --- a/crates/primitives/src/prune/target.rs +++ b/crates/primitives/src/prune/target.rs @@ -5,8 +5,12 @@ use crate::{ use paste::paste; use serde::{Deserialize, Serialize}; -/// Minimum distance necessary from the tip so blockchain tree can work correctly. -pub const MINIMUM_PRUNING_DISTANCE: u64 = 128; +/// Minimum distance from the tip necessary for the node to work correctly: +/// 1. Minimum 2 epochs (32 blocks per epoch) required to handle any reorg according to the +/// consensus protocol. +/// 2. Another 10k blocks to have a room for maneuver in case when things go wrong and a manual +/// unwind is required. +pub const MINIMUM_PRUNING_DISTANCE: u64 = 32 * 2 + 10_000; /// Pruning configuration for every part of the data that can be pruned. #[derive(Debug, Clone, Default, Deserialize, Eq, PartialEq, Serialize)] @@ -22,19 +26,19 @@ pub struct PruneModes { /// and offers improved performance. #[serde( skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_opt_prune_mode_with_min_blocks::<64, _>" + deserialize_with = "deserialize_opt_prune_mode_with_min_blocks::" )] pub receipts: Option, /// Account History pruning configuration. #[serde( skip_serializing_if = "Option::is_none", - deserialize_with = "deserialize_opt_prune_mode_with_min_blocks::<64, _>" + deserialize_with = "deserialize_opt_prune_mode_with_min_blocks::" )] pub account_history: Option, /// Storage History pruning configuration. 
#[serde(
skip_serializing_if = "Option::is_none",
- deserialize_with = "deserialize_opt_prune_mode_with_min_blocks::<64, _>"
+ deserialize_with = "deserialize_opt_prune_mode_with_min_blocks::<MINIMUM_PRUNING_DISTANCE, _>"
)]
pub storage_history: Option<PruneMode>,
/// Receipts pruning configuration by retaining only those receipts that contain logs emitted
@@ -101,8 +105,8 @@ impl PruneModes {
impl_prune_parts!(
(sender_recovery, SenderRecovery, None),
(transaction_lookup, TransactionLookup, None),
- (receipts, Receipts, Some(64)),
- (account_history, AccountHistory, Some(64)),
- (storage_history, StorageHistory, Some(64))
+ (receipts, Receipts, Some(MINIMUM_PRUNING_DISTANCE)),
+ (account_history, AccountHistory, Some(MINIMUM_PRUNING_DISTANCE)),
+ (storage_history, StorageHistory, Some(MINIMUM_PRUNING_DISTANCE))
);
}
diff --git a/crates/primitives/src/receipt.rs b/crates/primitives/src/receipt.rs
index 57cbdcc6d0..5908fff344 100644
--- a/crates/primitives/src/receipt.rs
+++ b/crates/primitives/src/receipt.rs
@@ -1,12 +1,16 @@
use crate::{
- bloom::logs_bloom,
compression::{RECEIPT_COMPRESSOR, RECEIPT_DECOMPRESSOR},
- Bloom, Log, TxType,
+ logs_bloom,
+ proofs::calculate_receipt_root_ref,
+ Bloom, Log, PrunePartError, TxType, B256,
};
+use alloy_rlp::{length_of_length, Decodable, Encodable};
use bytes::{Buf, BufMut, BytesMut};
use reth_codecs::{main_codec, Compact, CompactZstd};
-use reth_rlp::{length_of_length, Decodable, Encodable};
-use std::cmp::Ordering;
+use std::{
+ cmp::Ordering,
+ ops::{Deref, DerefMut},
+};
/// Receipt containing result of transaction execution.
#[main_codec(zstd)]
@@ -44,6 +48,100 @@ impl Receipt {
}
}
+/// A collection of receipts organized as a two-dimensional vector.
+#[derive(Clone, Debug, PartialEq, Eq, Default)]
+pub struct Receipts {
+    /// A two-dimensional vector of optional `Receipt` instances.
+    pub receipt_vec: Vec<Vec<Option<Receipt>>>,
+}
+
+impl Receipts {
+    /// Create a new `Receipts` instance with an empty vector.
+    pub fn new() -> Self {
+        Self { receipt_vec: vec![] }
+    }
+
+    /// Create a new `Receipts` instance from an existing vector.
+    pub fn from_vec(vec: Vec<Vec<Option<Receipt>>>) -> Self {
+        Self { receipt_vec: vec }
+    }
+
+    /// Create a new `Receipts` instance from a single block receipt.
+    pub fn from_block_receipt(block_receipts: Vec<Receipt>) -> Self {
+        Self { receipt_vec: vec![block_receipts.into_iter().map(Option::Some).collect()] }
+    }
+
+    /// Returns the length of the `Receipts` vector.
+    pub fn len(&self) -> usize {
+        self.receipt_vec.len()
+    }
+
+    /// Returns `true` if the `Receipts` vector is empty.
+    pub fn is_empty(&self) -> bool {
+        self.receipt_vec.is_empty()
+    }
+
+    /// Push a new vector of receipts into the `Receipts` collection.
+    pub fn push(&mut self, receipts: Vec<Option<Receipt>>) {
+        self.receipt_vec.push(receipts);
+    }
+
+    /// Retrieves the receipt root for all recorded receipts from index.
+    pub fn root_slow(&self, index: usize) -> Option<B256> {
+        Some(calculate_receipt_root_ref(
+            &self.receipt_vec[index].iter().map(Option::as_ref).collect::<Option<Vec<_>>>()?,
+        ))
+    }
+
+    /// Retrieves gas spent by transactions as a vector of tuples (transaction index, gas used).
+    pub fn gas_spent_by_tx(&self) -> Result<Vec<(u64, u64)>, PrunePartError> {
+        self.last()
+            .map(|block_r| {
+                block_r
+                    .iter()
+                    .enumerate()
+                    .map(|(id, tx_r)| {
+                        if let Some(receipt) = tx_r.as_ref() {
+                            Ok((id as u64, receipt.cumulative_gas_used))
+                        } else {
+                            Err(PrunePartError::ReceiptsPruned)
+                        }
+                    })
+                    .collect::<Result<Vec<_>, PrunePartError>>()
+            })
+            .unwrap_or(Ok(vec![]))
+    }
+}
+
+impl Deref for Receipts {
+    type Target = Vec<Vec<Option<Receipt>>>;
+
+    fn deref(&self) -> &Self::Target {
+        &self.receipt_vec
+    }
+}
+
+impl DerefMut for Receipts {
+    fn deref_mut(&mut self) -> &mut Self::Target {
+        &mut self.receipt_vec
+    }
+}
+
+impl IntoIterator for Receipts {
+    type Item = Vec<Option<Receipt>>;
+    type IntoIter = std::vec::IntoIter<Self::Item>;
+
+    fn into_iter(self) -> Self::IntoIter {
+        self.receipt_vec.into_iter()
+    }
+}
+
+impl FromIterator<Vec<Option<Receipt>>> for Receipts {
+    fn from_iter<I: IntoIterator<Item = Vec<Option<Receipt>>>>(iter: I) -> Self {
+        Self::from_vec(iter.into_iter().collect())
+    }
+}
+
impl From<Receipt> for ReceiptWithBloom {
fn from(receipt: Receipt) -> Self {
let bloom = receipt.bloom_slow();
@@ -90,23 +188,23 @@ impl ReceiptWithBloom {
}
/// Decodes the receipt payload
- fn decode_receipt(buf: &mut &[u8], tx_type: TxType) -> Result<Self, reth_rlp::DecodeError> {
+ fn decode_receipt(buf: &mut &[u8], tx_type: TxType) -> alloy_rlp::Result<Self> {
let b = &mut &**buf;
- let rlp_head = reth_rlp::Header::decode(b)?;
+ let rlp_head = alloy_rlp::Header::decode(b)?;
if !rlp_head.list {
- return Err(reth_rlp::DecodeError::UnexpectedString)
+ return Err(alloy_rlp::Error::UnexpectedString)
}
let started_len = b.len();
- let success = reth_rlp::Decodable::decode(b)?;
- let cumulative_gas_used = reth_rlp::Decodable::decode(b)?;
+ let success = alloy_rlp::Decodable::decode(b)?;
+ let cumulative_gas_used = alloy_rlp::Decodable::decode(b)?;
let bloom = Decodable::decode(b)?;
- let logs = reth_rlp::Decodable::decode(b)?;
+ let logs = alloy_rlp::Decodable::decode(b)?;
let this = Self { receipt: Receipt { tx_type, success, cumulative_gas_used, logs }, bloom };
let consumed = started_len - b.len();
if consumed != rlp_head.payload_length {
- return Err(reth_rlp::DecodeError::ListLengthMismatch {
+ return Err(alloy_rlp::Error::ListLengthMismatch {
expected: rlp_head.payload_length,
got: consumed,
})
@@ -126,19 +224,19 @@ impl Encodable for ReceiptWithBloom {
}
impl Decodable for ReceiptWithBloom {
- fn decode(buf: &mut &[u8]) -> Result<Self, reth_rlp::DecodeError> {
+ fn decode(buf: &mut &[u8]) -> alloy_rlp::Result<Self> {
// a receipt is either encoded as a string (non legacy) or a list (legacy).
// We should not consume the buffer if we are decoding a legacy receipt, so let's
// check if the first byte is between 0x80 and 0xbf.
let rlp_type = *buf .first() - .ok_or(reth_rlp::DecodeError::Custom("cannot decode a receipt from empty bytes"))?; + .ok_or(alloy_rlp::Error::Custom("cannot decode a receipt from empty bytes"))?; - match rlp_type.cmp(&reth_rlp::EMPTY_LIST_CODE) { + match rlp_type.cmp(&alloy_rlp::EMPTY_LIST_CODE) { Ordering::Less => { // strip out the string header - let _header = reth_rlp::Header::decode(buf)?; - let receipt_type = *buf.first().ok_or(reth_rlp::DecodeError::Custom( + let _header = alloy_rlp::Header::decode(buf)?; + let receipt_type = *buf.first().ok_or(alloy_rlp::Error::Custom( "typed receipt cannot be decoded from an empty slice", ))?; if receipt_type == 0x01 { @@ -151,11 +249,11 @@ impl Decodable for ReceiptWithBloom { buf.advance(1); Self::decode_receipt(buf, TxType::EIP4844) } else { - Err(reth_rlp::DecodeError::Custom("invalid receipt type")) + Err(alloy_rlp::Error::Custom("invalid receipt type")) } } Ordering::Equal => { - Err(reth_rlp::DecodeError::Custom("an empty list is not a valid receipt encoding")) + Err(alloy_rlp::Error::Custom("an empty list is not a valid receipt encoding")) } Ordering::Greater => Self::decode_receipt(buf, TxType::Legacy), } @@ -211,8 +309,8 @@ struct ReceiptWithBloomEncoder<'a> { impl<'a> ReceiptWithBloomEncoder<'a> { /// Returns the rlp header for the receipt payload. - fn receipt_rlp_header(&self) -> reth_rlp::Header { - let mut rlp_head = reth_rlp::Header { list: true, payload_length: 0 }; + fn receipt_rlp_header(&self) -> alloy_rlp::Header { + let mut rlp_head = alloy_rlp::Header { list: true, payload_length: 0 }; rlp_head.payload_length += self.receipt.success.length(); rlp_head.payload_length += self.receipt.cumulative_gas_used.length(); @@ -243,7 +341,7 @@ impl<'a> ReceiptWithBloomEncoder<'a> { if with_header { let payload_length = payload.len() + 1; - let header = reth_rlp::Header { list: false, payload_length }; + let header = alloy_rlp::Header { list: false, payload_length }; header.encode(out); } @@ -289,13 +387,12 @@ impl<'a> Encodable for ReceiptWithBloomEncoder<'a> { #[cfg(test)] mod tests { use super::*; - use crate::{hex_literal::hex, Address, H256}; - use ethers_core::types::Bytes; - use reth_rlp::{Decodable, Encodable}; - use std::str::FromStr; + use crate::hex_literal::hex; + use alloy_primitives::{address, b256, bytes, Bytes}; + use alloy_rlp::{Decodable, Encodable}; - #[test] // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + #[test] fn encode_legacy_receipt() { let expected = hex!("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff"); @@ -305,18 +402,12 @@ mod tests { tx_type: TxType::Legacy, cumulative_gas_used: 0x1u64, logs: vec![Log { - address: Address::from_str("0000000000000000000000000000000000000011").unwrap(), + address: address!("0000000000000000000000000000000000000011"), topics: vec![ - H256::from_str( - 
"000000000000000000000000000000000000000000000000000000000000dead", - ) - .unwrap(), - H256::from_str( - "000000000000000000000000000000000000000000000000000000000000beef", - ) - .unwrap(), + b256!("000000000000000000000000000000000000000000000000000000000000dead"), + b256!("000000000000000000000000000000000000000000000000000000000000beef"), ], - data: Bytes::from_str("0100ff").unwrap().0.into(), + data: bytes!("0100ff"), }], success: false, }, @@ -330,8 +421,8 @@ mod tests { assert_eq!(data, expected); } - #[test] // Test vector from: https://eips.ethereum.org/EIPS/eip-2481 + #[test] fn decode_legacy_receipt() { let data = hex!("f901668001b9010000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000f85ff85d940000000000000000000000000000000000000011f842a0000000000000000000000000000000000000000000000000000000000000deada0000000000000000000000000000000000000000000000000000000000000beef830100ff"); @@ -341,18 +432,12 @@ mod tests { tx_type: TxType::Legacy, cumulative_gas_used: 0x1u64, logs: vec![Log { - address: Address::from_str("0000000000000000000000000000000000000011").unwrap(), + address: address!("0000000000000000000000000000000000000011"), topics: vec![ - H256::from_str( - "000000000000000000000000000000000000000000000000000000000000dead", - ) - .unwrap(), - H256::from_str( - "000000000000000000000000000000000000000000000000000000000000beef", - ) - .unwrap(), + b256!("000000000000000000000000000000000000000000000000000000000000dead"), + b256!("000000000000000000000000000000000000000000000000000000000000beef"), ], - data: Bytes::from_str("0100ff").unwrap().0.into(), + data: bytes!("0100ff"), }], success: false, }, @@ -371,22 +456,18 @@ mod tests { tx_type: TxType::Legacy, logs: vec![ Log { - address: Address::from_str("0x4bf56695415f725e43c3e04354b604bcfb6dfb6e") - .unwrap(), - topics: vec![H256::from_str( - "0xc69dc3d7ebff79e41f525be431d5cd3cc08f80eaf0f7819054a726eeb7086eb9", - ) - .unwrap()], - data: crate::Bytes::from(vec![1; 0xffffff]), + address: address!("4bf56695415f725e43c3e04354b604bcfb6dfb6e"), + topics: vec![b256!( + "c69dc3d7ebff79e41f525be431d5cd3cc08f80eaf0f7819054a726eeb7086eb9" + )], + data: Bytes::from(vec![1; 0xffffff]), }, Log { - address: Address::from_str("0xfaca325c86bf9c2d5b413cd7b90b209be92229c2") - .unwrap(), - topics: vec![H256::from_str( - "0x8cca58667b1e9ffa004720ac99a3d61a138181963b294d270d91c53d36402ae2", - ) - .unwrap()], - data: crate::Bytes::from(vec![1; 0xffffff]), + address: address!("faca325c86bf9c2d5b413cd7b90b209be92229c2"), + topics: vec![b256!( + "8cca58667b1e9ffa004720ac99a3d61a138181963b294d270d91c53d36402ae2" + )], + data: Bytes::from(vec![1; 0xffffff]), }, ], }; diff --git a/crates/primitives/src/serde_helper/mod.rs b/crates/primitives/src/serde_helper/mod.rs index 23db0bde83..e561ef8bdc 100644 --- a/crates/primitives/src/serde_helper/mod.rs +++ b/crates/primitives/src/serde_helper/mod.rs @@ -1,29 +1,30 @@ -//! Various serde utilities +//! [serde] utilities. 
+ +use crate::{B256, U64}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; mod storage; -use serde::Serializer; pub use storage::*; mod jsonu256; -use crate::H256; pub use jsonu256::*; pub mod num; + mod prune; pub use prune::deserialize_opt_prune_mode_with_min_blocks; -/// serde functions for handling primitive `u64` as [U64](crate::U64) +/// serde functions for handling primitive `u64` as [`U64`]. pub mod u64_hex { - use crate::U64; - use serde::{Deserialize, Deserializer, Serialize, Serializer}; + use super::*; /// Deserializes an `u64` from [U64] accepting a hex quantity string with optional 0x prefix pub fn deserialize<'de, D>(deserializer: D) -> Result where D: Deserializer<'de>, { - U64::deserialize(deserializer).map(|val| val.as_u64()) + U64::deserialize(deserializer).map(|val| val.to()) } /// Serializes u64 as hex string @@ -32,56 +33,23 @@ pub mod u64_hex { } } -/// serde functions for handling bytes as hex strings, such as [bytes::Bytes] -pub mod hex_bytes { - use serde::{Deserialize, Deserializer, Serializer}; - - /// Serialize a byte vec as a hex string with 0x prefix - pub fn serialize(x: T, s: S) -> Result - where - S: Serializer, - T: AsRef<[u8]>, - { - s.serialize_str(&format!("0x{}", hex::encode(x.as_ref()))) - } - - /// Deserialize a hex string into a byte vec - /// Accepts a hex string with optional 0x prefix - pub fn deserialize<'de, T, D>(d: D) -> Result - where - D: Deserializer<'de>, - T: From>, - { - let value = String::deserialize(d)?; - if let Some(value) = value.strip_prefix("0x") { - hex::decode(value) - } else { - hex::decode(&value) - } - .map(Into::into) - .map_err(|e| serde::de::Error::custom(e.to_string())) - } -} - -/// Serialize a byte vec as a hex string _without_ 0x prefix. +/// Serialize a byte vec as a hex string _without_ the "0x" prefix. /// -/// This behaves exactly as [hex::encode] +/// This behaves the same as [`hex::encode`](crate::hex::encode). pub fn serialize_hex_string_no_prefix(x: T, s: S) -> Result where S: Serializer, T: AsRef<[u8]>, { - s.serialize_str(&hex::encode(x.as_ref())) + s.serialize_str(&crate::hex::encode(x.as_ref())) } -/// Serialize a byte vec as a hex string _without_ 0x prefix -pub fn serialize_h256_hex_string_no_prefix(x: &H256, s: S) -> Result +/// Serialize a [B256] as a hex string _without_ the "0x" prefix. 
+pub fn serialize_b256_hex_string_no_prefix(x: &B256, s: S) -> Result where S: Serializer, { - let val = format!("{:?}", x); - // skip the 0x prefix - s.serialize_str(&val.as_str()[2..]) + s.serialize_str(&format!("{x:x}")) } #[cfg(test)] diff --git a/crates/primitives/src/serde_helper/num.rs b/crates/primitives/src/serde_helper/num.rs index 02544ee0f0..e2262ccca7 100644 --- a/crates/primitives/src/serde_helper/num.rs +++ b/crates/primitives/src/serde_helper/num.rs @@ -15,15 +15,15 @@ use std::str::FromStr; /// let number: U64HexOrNumber = serde_json::from_str(number_json).unwrap(); /// let hex: U64HexOrNumber = serde_json::from_str(hex_json).unwrap(); /// assert_eq!(number, hex); -/// assert_eq!(hex.as_u64(), 100); +/// assert_eq!(hex.to(), 100); /// ``` #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize)] pub struct U64HexOrNumber(U64); impl U64HexOrNumber { /// Returns the wrapped u64 - pub fn as_u64(self) -> u64 { - self.0.as_u64() + pub fn to(self) -> u64 { + self.0.to() } } @@ -41,7 +41,7 @@ impl From for U64HexOrNumber { impl From for u64 { fn from(value: U64HexOrNumber) -> Self { - value.as_u64() + value.to() } } diff --git a/crates/primitives/src/serde_helper/storage.rs b/crates/primitives/src/serde_helper/storage.rs index aa5e012874..7d0b5045f4 100644 --- a/crates/primitives/src/serde_helper/storage.rs +++ b/crates/primitives/src/serde_helper/storage.rs @@ -1,4 +1,4 @@ -use crate::{Bytes, H256, U256}; +use crate::{Bytes, B256, U256}; use serde::{Deserialize, Deserializer, Serialize}; use std::{collections::HashMap, fmt::Write}; @@ -9,9 +9,9 @@ use std::{collections::HashMap, fmt::Write}; /// storage keys. /// /// In `eth_getStorageAt`, this is used for deserialization of the `index` field. Internally, the -/// index is a [H256], but in `eth_getStorageAt` requests, its serialization can be _up to_ 32 +/// index is a [B256], but in `eth_getStorageAt` requests, its serialization can be _up to_ 32 /// bytes. To support this, the storage key is deserialized first as a U256, and converted to a -/// H256 for use internally. +/// B256 for use internally. /// /// `eth_getProof` also takes storage keys up to 32 bytes as input, so the `keys` field is /// similarly deserialized. However, geth populates the storage proof `key` fields in the response @@ -19,22 +19,22 @@ use std::{collections::HashMap, fmt::Write}; /// * See how `storageKey`s (the input) are populated in the `StorageResult` (the output): /// /// -/// The contained [H256] and From implementation for String are used to preserve the input and +/// The contained [B256] and From implementation for String are used to preserve the input and /// implement this behavior from geth. 
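`serialize_hex_string_no_prefix` and `serialize_b256_hex_string_no_prefix` above both emit lowercase hex without the "0x" prefix (for `B256`, `format!("{x:x}")` does exactly that). A rough equivalent written against plain byte slices, assuming serde with its derive feature; the helper name and the `Key` struct are illustrative, not part of the crate:

use serde::Serializer;

/// Serialize bytes as a bare lowercase hex string, e.g. [0xde, 0xad] -> "dead".
fn serialize_bytes_no_prefix<S: Serializer>(bytes: &[u8], s: S) -> Result<S::Ok, S::Error> {
    let hex: String = bytes.iter().map(|b| format!("{b:02x}")).collect();
    s.serialize_str(&hex)
}

#[derive(serde::Serialize)]
struct Key {
    #[serde(serialize_with = "serialize_bytes_no_prefix")]
    raw: Vec<u8>,
}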
#[derive(Clone, Debug, Default, Eq, PartialEq, Serialize, Deserialize)] #[serde(from = "U256", into = "String")] -pub struct JsonStorageKey(pub H256); +pub struct JsonStorageKey(pub B256); impl From for JsonStorageKey { fn from(value: U256) -> Self { - // SAFETY: Address (H256) and U256 have the same number of bytes - JsonStorageKey(H256::from(value.to_be_bytes())) + // SAFETY: Address (B256) and U256 have the same number of bytes + JsonStorageKey(B256::from(value.to_be_bytes())) } } impl From for String { fn from(value: JsonStorageKey) -> Self { - // SAFETY: Address (H256) and U256 have the same number of bytes + // SAFETY: Address (B256) and U256 have the same number of bytes let uint = U256::from_be_bytes(value.0 .0); // serialize byte by byte @@ -54,25 +54,25 @@ impl From for String { } } -/// Converts a Bytes value into a H256, accepting inputs that are less than 32 bytes long. These +/// Converts a Bytes value into a B256, accepting inputs that are less than 32 bytes long. These /// inputs will be left padded with zeros. -pub fn from_bytes_to_h256<'de, D>(bytes: Bytes) -> Result +pub fn from_bytes_to_b256<'de, D>(bytes: Bytes) -> Result where D: Deserializer<'de>, { if bytes.0.len() > 32 { - return Err(serde::de::Error::custom("input too long to be a H256")) + return Err(serde::de::Error::custom("input too long to be a B256")) } // left pad with zeros to 32 bytes let mut padded = [0u8; 32]; padded[32 - bytes.0.len()..].copy_from_slice(&bytes.0); - // then convert to H256 without a panic - Ok(H256::from_slice(&padded)) + // then convert to B256 without a panic + Ok(B256::from_slice(&padded)) } -/// Deserializes the input into an Option>, using [from_bytes_to_h256] which +/// Deserializes the input into an Option>, using [from_bytes_to_b256] which /// allows cropped values: /// /// ```json @@ -82,7 +82,7 @@ where /// ``` pub fn deserialize_storage_map<'de, D>( deserializer: D, -) -> Result>, D::Error> +) -> Result>, D::Error> where D: Deserializer<'de>, { @@ -91,8 +91,8 @@ where Some(mut map) => { let mut res_map = HashMap::with_capacity(map.len()); for (k, v) in map.drain() { - let k_deserialized = from_bytes_to_h256::<'de, D>(k)?; - let v_deserialized = from_bytes_to_h256::<'de, D>(v)?; + let k_deserialized = from_bytes_to_b256::<'de, D>(k)?; + let v_deserialized = from_bytes_to_b256::<'de, D>(v)?; res_map.insert(k_deserialized, v_deserialized); } Ok(Some(res_map)) diff --git a/crates/primitives/src/stage/checkpoints.rs b/crates/primitives/src/stage/checkpoints.rs index 4d7311dd06..fa5725df55 100644 --- a/crates/primitives/src/stage/checkpoints.rs +++ b/crates/primitives/src/stage/checkpoints.rs @@ -1,6 +1,6 @@ use crate::{ trie::{hash_builder::HashBuilderState, StoredSubNode}, - Address, BlockNumber, H256, + Address, BlockNumber, B256, }; use bytes::{Buf, BufMut}; use reth_codecs::{derive_arbitrary, main_codec, Compact}; @@ -16,7 +16,7 @@ pub struct MerkleCheckpoint { /// The target block number. pub target_block: BlockNumber, /// The last hashed account key processed. - pub last_account_key: H256, + pub last_account_key: B256, /// The last walker key processed. pub last_walker_key: Vec, /// Previously recorded walker stack. @@ -29,7 +29,7 @@ impl MerkleCheckpoint { /// Creates a new Merkle checkpoint. 
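`from_bytes_to_b256` above accepts storage keys shorter than 32 bytes and left-pads them with zeros (rejecting anything longer). The padding rule in isolation, with a plain array standing in for `B256`:

/// Left-pad a big-endian byte slice to 32 bytes, the convention used for
/// `eth_getStorageAt` / `eth_getProof` storage keys.
fn left_pad_32(bytes: &[u8]) -> Result<[u8; 32], &'static str> {
    if bytes.len() > 32 {
        return Err("input too long to be a B256");
    }
    let mut padded = [0u8; 32];
    padded[32 - bytes.len()..].copy_from_slice(bytes);
    Ok(padded)
}

fn main() {
    // 0x1234 becomes 0x0000...1234
    let padded = left_pad_32(&[0x12, 0x34]).unwrap();
    assert!(padded[..30].iter().all(|b| *b == 0));
    assert_eq!(&padded[30..], &[0x12u8, 0x34][..]);
}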
pub fn new( target_block: BlockNumber, - last_account_key: H256, + last_account_key: B256, last_walker_key: Vec, walker_stack: Vec, state: HashBuilderState, @@ -71,7 +71,7 @@ impl Compact for MerkleCheckpoint { { let target_block = buf.get_u64(); - let last_account_key = H256::from_slice(&buf[..32]); + let last_account_key = B256::from_slice(&buf[..32]); buf.advance(32); let last_walker_key_len = buf.get_u16() as usize; @@ -119,7 +119,7 @@ pub struct StorageHashingCheckpoint { /// The next account to start hashing from. pub address: Option
, /// The next storage slot to start hashing from. - pub storage: Option, + pub storage: Option, /// Block range which this checkpoint is valid for. pub block_range: CheckpointBlockRange, /// Progress measured in storage slots. @@ -395,13 +395,13 @@ mod tests { let mut rng = rand::thread_rng(); let checkpoint = MerkleCheckpoint { target_block: rng.gen(), - last_account_key: H256::from_low_u64_be(rng.gen()), - last_walker_key: H256::from_low_u64_be(rng.gen()).to_vec(), - walker_stack: Vec::from([StoredSubNode { - key: H256::from_low_u64_be(rng.gen()).to_vec(), + last_account_key: rng.gen(), + last_walker_key: B256::random_with(&mut rng).to_vec(), + walker_stack: vec![StoredSubNode { + key: B256::random_with(&mut rng).to_vec(), nibble: Some(rng.gen()), node: None, - }]), + }], state: HashBuilderState::default(), }; @@ -416,7 +416,7 @@ mod tests { let mut rng = rand::thread_rng(); let checkpoints = vec![ StageUnitCheckpoint::Account(AccountHashingCheckpoint { - address: Some(Address::from_low_u64_be(rng.gen())), + address: Some(rng.gen()), block_range: CheckpointBlockRange { from: rng.gen(), to: rng.gen() }, progress: EntitiesCheckpoint { processed: rng.gen::() as u64, @@ -424,8 +424,8 @@ mod tests { }, }), StageUnitCheckpoint::Storage(StorageHashingCheckpoint { - address: Some(Address::from_low_u64_be(rng.gen())), - storage: Some(H256::from_low_u64_be(rng.gen())), + address: Some(rng.gen()), + storage: Some(rng.gen()), block_range: CheckpointBlockRange { from: rng.gen(), to: rng.gen() }, progress: EntitiesCheckpoint { processed: rng.gen::() as u64, diff --git a/crates/primitives/src/storage.rs b/crates/primitives/src/storage.rs index 89879aa49e..91bdce4704 100644 --- a/crates/primitives/src/storage.rs +++ b/crates/primitives/src/storage.rs @@ -1,4 +1,4 @@ -use super::{H256, U256}; +use super::{B256, U256}; use reth_codecs::{derive_arbitrary, Compact}; use serde::{Deserialize, Serialize}; @@ -9,20 +9,20 @@ use serde::{Deserialize, Serialize}; #[derive(Debug, Default, Copy, Clone, PartialEq, Eq, Serialize, Deserialize, PartialOrd, Ord)] pub struct StorageEntry { /// Storage key. - pub key: H256, + pub key: B256, /// Value on storage key. pub value: U256, } impl StorageEntry { /// Create a new StorageEntry with given key and value. - pub fn new(key: H256, value: U256) -> Self { + pub fn new(key: B256, value: U256) -> Self { Self { key, value } } } -impl From<(H256, U256)> for StorageEntry { - fn from((key, value): (H256, U256)) -> Self { +impl From<(B256, U256)> for StorageEntry { + fn from((key, value): (B256, U256)) -> Self { StorageEntry { key, value } } } @@ -36,7 +36,7 @@ impl Compact for StorageEntry { B: bytes::BufMut + AsMut<[u8]>, { // for now put full bytes and later compress it. 
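The `Compact` implementations above write fixed-width data first and variable data after it, returning how many bytes they produced and handing the remaining buffer back on decode. A simplified, dependency-free sketch of that layout for a (32-byte key, value) pair; the real `StorageEntry` impl compresses the `U256` value instead of writing it fixed-width:

/// Append the key, then the value; return the number of bytes written.
fn to_compact(key: &[u8; 32], value: u64, buf: &mut Vec<u8>) -> usize {
    buf.extend_from_slice(key);
    buf.extend_from_slice(&value.to_be_bytes());
    32 + 8
}

/// Read the key and value back, returning the untouched remainder of the buffer.
fn from_compact(buf: &[u8]) -> ([u8; 32], u64, &[u8]) {
    let mut key = [0u8; 32];
    key.copy_from_slice(&buf[..32]);
    let value = u64::from_be_bytes(buf[32..40].try_into().unwrap());
    (key, value, &buf[40..])
}

fn main() {
    let mut buf = Vec::new();
    let key = [0xab; 32];
    let written = to_compact(&key, 7, &mut buf);
    let (k, v, rest) = from_compact(&buf);
    assert_eq!((written, k, v, rest.len()), (40, key, 7, 0));
}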
- buf.put_slice(&self.key.to_fixed_bytes()[..]); + buf.put_slice(&self.key[..]); self.value.to_compact(buf) + 32 } @@ -44,7 +44,7 @@ impl Compact for StorageEntry { where Self: Sized, { - let key = H256::from_slice(&buf[..32]); + let key = B256::from_slice(&buf[..32]); let (value, out) = U256::from_compact(&buf[32..], len - 32); (Self { key, value }, out) } diff --git a/crates/primitives/src/transaction/access_list.rs b/crates/primitives/src/transaction/access_list.rs index 11b119898a..6a5abd7d79 100644 --- a/crates/primitives/src/transaction/access_list.rs +++ b/crates/primitives/src/transaction/access_list.rs @@ -1,8 +1,8 @@ use std::mem; -use crate::{Address, H256}; +use crate::{Address, B256}; +use alloy_rlp::{RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper}; use reth_codecs::{main_codec, Compact}; -use reth_rlp::{RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper}; use revm_primitives::U256; use serde::{Deserialize, Serialize}; @@ -18,17 +18,17 @@ pub struct AccessListItem { #[cfg_attr( any(test, feature = "arbitrary"), proptest( - strategy = "proptest::collection::vec(proptest::arbitrary::any::(), 0..=20)" + strategy = "proptest::collection::vec(proptest::arbitrary::any::(), 0..=20)" ) )] - pub storage_keys: Vec, + pub storage_keys: Vec, } impl AccessListItem { /// Calculates a heuristic for the in-memory size of the [AccessListItem]. #[inline] pub fn size(&self) -> usize { - mem::size_of::
() + self.storage_keys.capacity() * mem::size_of::() + mem::size_of::
() + self.storage_keys.capacity() * mem::size_of::() } } diff --git a/crates/primitives/src/transaction/eip1559.rs b/crates/primitives/src/transaction/eip1559.rs index 4bc5cc4754..5a9e07fa74 100644 --- a/crates/primitives/src/transaction/eip1559.rs +++ b/crates/primitives/src/transaction/eip1559.rs @@ -1,8 +1,8 @@ use super::access_list::AccessList; -use crate::{keccak256, Bytes, ChainId, Signature, TransactionKind, TxType, H256}; +use crate::{keccak256, Bytes, ChainId, Signature, TransactionKind, TxType, B256}; +use alloy_rlp::{length_of_length, Decodable, Encodable, Header}; use bytes::BytesMut; use reth_codecs::{main_codec, Compact}; -use reth_rlp::{length_of_length, Decodable, DecodeError, Encodable, Header}; use std::mem; /// A transaction with a priority fee ([EIP-1559](https://eips.ethereum.org/EIPS/eip-1559)). @@ -98,7 +98,7 @@ impl TxEip1559 { /// - `value` /// - `data` (`input`) /// - `access_list` - pub(crate) fn decode_inner(buf: &mut &[u8]) -> Result { + pub(crate) fn decode_inner(buf: &mut &[u8]) -> alloy_rlp::Result { Ok(Self { chain_id: Decodable::decode(buf)?, nonce: Decodable::decode(buf)?, @@ -107,7 +107,7 @@ impl TxEip1559 { gas_limit: Decodable::decode(buf)?, to: Decodable::decode(buf)?, value: Decodable::decode(buf)?, - input: Bytes(Decodable::decode(buf)?), + input: Decodable::decode(buf)?, access_list: Decodable::decode(buf)?, }) } @@ -206,7 +206,7 @@ impl TxEip1559 { /// Outputs the signature hash of the transaction by first encoding without a signature, then /// hashing. - pub(crate) fn signature_hash(&self) -> H256 { + pub(crate) fn signature_hash(&self) -> B256 { let mut buf = BytesMut::with_capacity(self.payload_len_for_signature()); self.encode_for_signing(&mut buf); keccak256(&buf) @@ -218,7 +218,7 @@ mod tests { use super::TxEip1559; use crate::{ transaction::{signature::Signature, TransactionKind}, - AccessList, Address, Transaction, TransactionSigned, H256, U256, + AccessList, Address, Transaction, TransactionSigned, B256, U256, }; use std::str::FromStr; @@ -227,7 +227,7 @@ mod tests { use crate::hex_literal::hex; let signer: Address = hex!("dd6b8b3dc6b7ad97db52f08a275ff4483e024cea").into(); - let hash: H256 = + let hash: B256 = hex!("0ec0b6a2df4d87424e5f6ad2a654e27aaeb7dac20ae9e8385cc09087ad532ee0").into(); let tx = Transaction::Eip1559( TxEip1559 { diff --git a/crates/primitives/src/transaction/eip2930.rs b/crates/primitives/src/transaction/eip2930.rs index 78e1878891..afafa0de00 100644 --- a/crates/primitives/src/transaction/eip2930.rs +++ b/crates/primitives/src/transaction/eip2930.rs @@ -1,8 +1,8 @@ use super::access_list::AccessList; -use crate::{keccak256, Bytes, ChainId, Signature, TransactionKind, TxType, H256}; +use crate::{keccak256, Bytes, ChainId, Signature, TransactionKind, TxType, B256}; +use alloy_rlp::{length_of_length, Decodable, Encodable, Header}; use bytes::BytesMut; use reth_codecs::{main_codec, Compact}; -use reth_rlp::{length_of_length, Decodable, DecodeError, Encodable, Header}; use std::mem; /// Transaction with an [`AccessList`] ([EIP-2930](https://eips.ethereum.org/EIPS/eip-2930)). 
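The `size` heuristic above sums the fixed size of the address with the allocated capacity of the storage-key vector. Spelled out in full, since the generic arguments are hard to read above; the `Address`/`B256` aliases are stand-ins for the 20- and 32-byte alloy-primitives types:

use std::mem;

type Address = [u8; 20];
type B256 = [u8; 32];

#[allow(dead_code)]
struct AccessListItem {
    address: Address,
    storage_keys: Vec<B256>,
}

impl AccessListItem {
    /// Heuristic in-memory size: capacity (not len) is used so the allocated
    /// but unused part of the vector is counted too.
    fn size(&self) -> usize {
        mem::size_of::<Address>() + self.storage_keys.capacity() * mem::size_of::<B256>()
    }
}

fn main() {
    let item = AccessListItem { address: [0; 20], storage_keys: Vec::with_capacity(2) };
    assert_eq!(item.size(), 20 + item.storage_keys.capacity() * 32);
}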
@@ -80,7 +80,7 @@ impl TxEip2930 { /// - `value` /// - `data` (`input`) /// - `access_list` - pub(crate) fn decode_inner(buf: &mut &[u8]) -> Result { + pub(crate) fn decode_inner(buf: &mut &[u8]) -> alloy_rlp::Result { Ok(Self { chain_id: Decodable::decode(buf)?, nonce: Decodable::decode(buf)?, @@ -88,7 +88,7 @@ impl TxEip2930 { gas_limit: Decodable::decode(buf)?, to: Decodable::decode(buf)?, value: Decodable::decode(buf)?, - input: Bytes(Decodable::decode(buf)?), + input: Decodable::decode(buf)?, access_list: Decodable::decode(buf)?, }) } @@ -171,7 +171,7 @@ impl TxEip2930 { /// Outputs the signature hash of the transaction by first encoding without a signature, then /// hashing. - pub(crate) fn signature_hash(&self) -> H256 { + pub(crate) fn signature_hash(&self) -> B256 { let mut buf = BytesMut::with_capacity(self.payload_len_for_signature()); self.encode_for_signing(&mut buf); keccak256(&buf) @@ -185,8 +185,8 @@ mod tests { transaction::{signature::Signature, TransactionKind}, Address, Bytes, Transaction, TransactionSigned, U256, }; + use alloy_rlp::{Decodable, Encodable}; use bytes::BytesMut; - use reth_rlp::{Decodable, Encodable}; #[test] fn test_decode_create() { diff --git a/crates/primitives/src/transaction/eip4844.rs b/crates/primitives/src/transaction/eip4844.rs index 27c5c1d36c..f7ee8c0d4e 100644 --- a/crates/primitives/src/transaction/eip4844.rs +++ b/crates/primitives/src/transaction/eip4844.rs @@ -7,11 +7,11 @@ use crate::{ BYTES_PER_COMMITMENT, BYTES_PER_PROOF, }, kzg_to_versioned_hash, Bytes, ChainId, Signature, Transaction, TransactionKind, - TransactionSigned, TxHash, TxType, EIP4844_TX_TYPE_ID, H256, + TransactionSigned, TxHash, TxType, B256, EIP4844_TX_TYPE_ID, }; +use alloy_rlp::{length_of_length, Decodable, Encodable, Error as RlpError, Header}; use bytes::BytesMut; use reth_codecs::{main_codec, Compact}; -use reth_rlp::{length_of_length, Decodable, DecodeError, Encodable, Header}; use serde::{Deserialize, Serialize}; use std::{mem, ops::Deref}; @@ -71,7 +71,7 @@ pub struct TxEip4844 { pub access_list: AccessList, /// It contains a vector of fixed size hash(32 bytes) - pub blob_versioned_hashes: Vec, + pub blob_versioned_hashes: Vec, /// Max fee per data gas /// @@ -190,7 +190,7 @@ impl TxEip4844 { /// - `access_list` /// - `max_fee_per_blob_gas` /// - `blob_versioned_hashes` - pub fn decode_inner(buf: &mut &[u8]) -> Result { + pub fn decode_inner(buf: &mut &[u8]) -> alloy_rlp::Result { Ok(Self { chain_id: Decodable::decode(buf)?, nonce: Decodable::decode(buf)?, @@ -199,7 +199,7 @@ impl TxEip4844 { gas_limit: Decodable::decode(buf)?, to: Decodable::decode(buf)?, value: Decodable::decode(buf)?, - input: Bytes(Decodable::decode(buf)?), + input: Decodable::decode(buf)?, access_list: Decodable::decode(buf)?, max_fee_per_blob_gas: Decodable::decode(buf)?, blob_versioned_hashes: Decodable::decode(buf)?, @@ -250,7 +250,7 @@ impl TxEip4844 { mem::size_of::() + // value self.access_list.size() + // access_list self.input.len() + // input - self.blob_versioned_hashes.capacity() * mem::size_of::() + // blob hashes size + self.blob_versioned_hashes.capacity() * mem::size_of::() + // blob hashes size mem::size_of::() // max_fee_per_data_gas } @@ -306,7 +306,7 @@ impl TxEip4844 { /// Outputs the signature hash of the transaction by first encoding without a signature, then /// hashing. 
- pub(crate) fn signature_hash(&self) -> H256 { + pub(crate) fn signature_hash(&self) -> B256 { let mut buf = BytesMut::with_capacity(self.payload_len_for_signature()); self.encode_for_signing(&mut buf); keccak256(&buf) @@ -517,11 +517,11 @@ impl BlobTransaction { /// /// Note: this should be used only when implementing other RLP decoding methods, and does not /// represent the full RLP decoding of the `PooledTransactionsElement` type. - pub(crate) fn decode_inner(data: &mut &[u8]) -> Result { + pub(crate) fn decode_inner(data: &mut &[u8]) -> alloy_rlp::Result { // decode the _first_ list header for the rest of the transaction let header = Header::decode(data)?; if !header.list { - return Err(DecodeError::Custom("PooledTransactions blob tx must be encoded as a list")) + return Err(RlpError::Custom("PooledTransactions blob tx must be encoded as a list")) } // Now we need to decode the inner 4844 transaction and its signature: @@ -529,7 +529,7 @@ impl BlobTransaction { // `[chain_id, nonce, max_priority_fee_per_gas, ..., y_parity, r, s]` let header = Header::decode(data)?; if !header.list { - return Err(DecodeError::Custom( + return Err(RlpError::Custom( "PooledTransactions inner blob tx must be encoded as a list", )) } @@ -569,6 +569,7 @@ impl BlobTransaction { /// This represents a set of blobs, and its corresponding commitments and proofs. #[derive(Clone, Debug, PartialEq, Eq, Default, Serialize, Deserialize)] +#[repr(C)] pub struct BlobTransactionSidecar { /// The blob data. pub blobs: Vec, @@ -586,10 +587,7 @@ impl BlobTransactionSidecar { /// - `commitments` /// - `proofs` pub(crate) fn encode_inner(&self, out: &mut dyn bytes::BufMut) { - // Encode the blobs, commitments, and proofs - self.blobs.encode(out); - self.commitments.encode(out); - self.proofs.encode(out); + BlobTransactionSidecarRlp::wrap_ref(self).encode(out); } /// Outputs the RLP length of the [BlobTransactionSidecar] fields, without a RLP header. @@ -603,12 +601,8 @@ impl BlobTransactionSidecar { /// - `blobs` /// - `commitments` /// - `proofs` - pub(crate) fn decode_inner(buf: &mut &[u8]) -> Result { - Ok(Self { - blobs: Decodable::decode(buf)?, - commitments: Decodable::decode(buf)?, - proofs: Decodable::decode(buf)?, - }) + pub(crate) fn decode_inner(buf: &mut &[u8]) -> alloy_rlp::Result { + Ok(BlobTransactionSidecarRlp::decode(buf)?.unwrap()) } /// Calculates a size heuristic for the in-memory size of the [BlobTransactionSidecar]. 
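`decode_inner` for the typed transactions above decodes each field in declaration order with `Decodable::decode`, after the caller has consumed the type byte and list header. For simple structs the same pattern can be generated by the alloy-rlp derive macros (already used for the access-list types in this diff); a round-trip sketch with a made-up two-field struct, assuming the `alloy-rlp` crate with its `derive` feature:

use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable};

#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable)]
struct Demo {
    nonce: u64,
    gas_limit: u64,
}

fn main() {
    let demo = Demo { nonce: 1, gas_limit: 21_000 };
    let mut buf = Vec::new();
    demo.encode(&mut buf);
    let decoded = Demo::decode(&mut buf.as_slice()).unwrap();
    assert_eq!(demo, decoded);
}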
@@ -619,3 +613,41 @@ impl BlobTransactionSidecar { self.proofs.len() * BYTES_PER_PROOF // proofs } } + +// Wrapper for c-kzg rlp +#[repr(C)] +struct BlobTransactionSidecarRlp { + blobs: Vec<[u8; c_kzg::BYTES_PER_BLOB]>, + commitments: Vec<[u8; 48]>, + proofs: Vec<[u8; 48]>, +} + +const _: [(); std::mem::size_of::()] = + [(); std::mem::size_of::()]; + +impl BlobTransactionSidecarRlp { + fn wrap_ref(other: &BlobTransactionSidecar) -> &Self { + // SAFETY: Same repr and size + unsafe { &*(other as *const BlobTransactionSidecar).cast::() } + } + + fn unwrap(self) -> BlobTransactionSidecar { + // SAFETY: Same repr and size + unsafe { std::mem::transmute(self) } + } + + fn encode(&self, out: &mut dyn bytes::BufMut) { + // Encode the blobs, commitments, and proofs + self.blobs.encode(out); + self.commitments.encode(out); + self.proofs.encode(out); + } + + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { + Ok(Self { + blobs: Decodable::decode(buf)?, + commitments: Decodable::decode(buf)?, + proofs: Decodable::decode(buf)?, + }) + } +} diff --git a/crates/primitives/src/transaction/legacy.rs b/crates/primitives/src/transaction/legacy.rs index ec61490a54..90bce9128c 100644 --- a/crates/primitives/src/transaction/legacy.rs +++ b/crates/primitives/src/transaction/legacy.rs @@ -1,7 +1,7 @@ -use crate::{keccak256, Bytes, ChainId, Signature, TransactionKind, TxType, H256}; +use crate::{keccak256, Bytes, ChainId, Signature, TransactionKind, TxType, B256}; +use alloy_rlp::{length_of_length, Encodable, Header}; use bytes::BytesMut; use reth_codecs::{main_codec, Compact}; -use reth_rlp::{length_of_length, Encodable, Header}; use std::mem; /// Legacy transaction. @@ -154,7 +154,7 @@ impl TxLegacy { /// hashing. /// /// See [Self::encode_for_signing] for more information on the encoding format. - pub(crate) fn signature_hash(&self) -> H256 { + pub(crate) fn signature_hash(&self) -> B256 { let mut buf = BytesMut::with_capacity(self.payload_len_for_signature()); self.encode_for_signing(&mut buf); keccak256(&buf) @@ -166,7 +166,7 @@ mod tests { use super::TxLegacy; use crate::{ transaction::{signature::Signature, TransactionKind}, - Address, Transaction, TransactionSigned, H256, U256, + Address, Transaction, TransactionSigned, B256, U256, }; #[test] @@ -174,7 +174,7 @@ mod tests { use crate::hex_literal::hex; let signer: Address = hex!("398137383b3d25c92898c656696e41950e47316b").into(); - let hash: H256 = + let hash: B256 = hex!("bb3a336e3f823ec18197f1e13ee875700f08f03e2cab75f0d0b118dabb44cba0").into(); let tx = Transaction::Legacy(TxLegacy { diff --git a/crates/primitives/src/transaction/meta.rs b/crates/primitives/src/transaction/meta.rs index 10199d827e..77c4afda32 100644 --- a/crates/primitives/src/transaction/meta.rs +++ b/crates/primitives/src/transaction/meta.rs @@ -1,14 +1,14 @@ -use crate::H256; +use crate::B256; /// Additional fields in the context of a block that contains this transaction. #[derive(Debug, Clone, Copy, Default, Eq, PartialEq)] pub struct TransactionMeta { /// Hash of the transaction. - pub tx_hash: H256, + pub tx_hash: B256, /// Index of the transaction in the block pub index: u64, /// Hash of the block. - pub block_hash: H256, + pub block_hash: B256, /// Number of the block. pub block_number: u64, /// Base fee of the block. 
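The `BlobTransactionSidecarRlp` wrapper above leans on two layout guarantees: a compile-time check that the public and wire types have the same size, and a reference cast that is sound only because both types are `#[repr(C)]` with identical fields in the same order. A reduced sketch of both tricks with stand-in single-field types:

#[repr(C)]
struct Public {
    data: Vec<u8>,
}

#[repr(C)]
struct Wire {
    data: Vec<u8>,
}

// Compilation fails if the two types ever diverge in size.
const _: [(); std::mem::size_of::<Public>()] = [(); std::mem::size_of::<Wire>()];

fn wrap_ref(p: &Public) -> &Wire {
    // SAFETY: both types are #[repr(C)] with the same fields in the same order.
    unsafe { &*(p as *const Public).cast::<Wire>() }
}

fn main() {
    let p = Public { data: vec![1u8, 2, 3] };
    assert_eq!(wrap_ref(&p).data, [1u8, 2, 3]);
}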
diff --git a/crates/primitives/src/transaction/mod.rs b/crates/primitives/src/transaction/mod.rs index 3f6aa1005b..f111ee0ff9 100644 --- a/crates/primitives/src/transaction/mod.rs +++ b/crates/primitives/src/transaction/mod.rs @@ -1,10 +1,19 @@ use crate::{ compression::{TRANSACTION_COMPRESSOR, TRANSACTION_DECOMPRESSOR}, - keccak256, Address, Bytes, TxHash, H256, + keccak256, Address, Bytes, TxHash, B256, +}; +use alloy_rlp::{ + Decodable, Encodable, Error as RlpError, Header, EMPTY_LIST_CODE, EMPTY_STRING_CODE, }; -pub use access_list::{AccessList, AccessListItem, AccessListWithGasUsed}; use bytes::{Buf, BytesMut}; use derive_more::{AsRef, Deref}; +use once_cell::sync::Lazy; +use rayon::prelude::{IntoParallelIterator, ParallelIterator}; +use reth_codecs::{add_arbitrary_tests, derive_arbitrary, Compact}; +use serde::{Deserialize, Serialize}; +use std::mem; + +pub use access_list::{AccessList, AccessListItem, AccessListWithGasUsed}; pub use eip1559::TxEip1559; pub use eip2930::TxEip2930; pub use eip4844::{ @@ -13,14 +22,8 @@ pub use eip4844::{ pub use error::InvalidTransactionError; pub use legacy::TxLegacy; pub use meta::TransactionMeta; -use once_cell::sync::Lazy; pub use pooled::{PooledTransactionsElement, PooledTransactionsElementEcRecovered}; -use rayon::prelude::{IntoParallelIterator, ParallelIterator}; -use reth_codecs::{add_arbitrary_tests, derive_arbitrary, Compact}; -use reth_rlp::{Decodable, DecodeError, Encodable, Header, EMPTY_LIST_CODE, EMPTY_STRING_CODE}; -use serde::{Deserialize, Serialize}; pub use signature::Signature; -use std::mem; pub use tx_type::{ TxType, EIP1559_TX_TYPE_ID, EIP2930_TX_TYPE_ID, EIP4844_TX_TYPE_ID, LEGACY_TX_TYPE_ID, }; @@ -98,7 +101,7 @@ pub enum Transaction { impl Transaction { /// Heavy operation that return signature hash over rlp encoded transaction. /// It is only for signature signing or signer recovery. - pub fn signature_hash(&self) -> H256 { + pub fn signature_hash(&self) -> B256 { match self { Transaction::Legacy(tx) => tx.signature_hash(), Transaction::Eip2930(tx) => tx.signature_hash(), @@ -594,7 +597,7 @@ impl Compact for TransactionKind { } impl Encodable for TransactionKind { - fn encode(&self, out: &mut dyn reth_rlp::BufMut) { + fn encode(&self, out: &mut dyn alloy_rlp::BufMut) { match self { TransactionKind::Call(to) => to.encode(out), TransactionKind::Create => out.put_u8(EMPTY_STRING_CODE), @@ -609,7 +612,7 @@ impl Encodable for TransactionKind { } impl Decodable for TransactionKind { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { if let Some(&first) = buf.first() { if first == EMPTY_STRING_CODE { buf.advance(1); @@ -619,7 +622,7 @@ impl Decodable for TransactionKind { Ok(TransactionKind::Call(addr)) } } else { - Err(DecodeError::InputTooShort) + Err(RlpError::InputTooShort) } } } @@ -641,7 +644,7 @@ pub struct TransactionSignedNoHash { impl TransactionSignedNoHash { /// Calculates the transaction hash. If used more than once, it's better to convert it to /// [`TransactionSigned`] first. - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { let mut buf = Vec::new(); self.transaction.encode_with_signature(&self.signature, &mut buf, false); keccak256(&buf) @@ -836,10 +839,10 @@ impl TransactionSigned { /// Returns the enveloped encoded transactions. 
/// /// See also [TransactionSigned::encode_enveloped] - pub fn envelope_encoded(&self) -> bytes::Bytes { + pub fn envelope_encoded(&self) -> Bytes { let mut buf = BytesMut::new(); self.encode_enveloped(&mut buf); - buf.freeze() + buf.freeze().into() } /// Encodes the transaction into the "raw" format (e.g. `eth_sendRawTransaction`). @@ -875,7 +878,7 @@ impl TransactionSigned { /// Calculate transaction hash, eip2728 transaction does not contain rlp header and start with /// tx type. - pub fn recalculate_hash(&self) -> H256 { + pub fn recalculate_hash(&self) -> B256 { let mut buf = Vec::new(); self.encode_inner(&mut buf, false); keccak256(&buf) @@ -902,7 +905,7 @@ impl TransactionSigned { // so decoding methods do not need to manually advance the buffer pub(crate) fn decode_rlp_legacy_transaction_tuple( data: &mut &[u8], - ) -> Result<(TxLegacy, TxHash, Signature), DecodeError> { + ) -> alloy_rlp::Result<(TxLegacy, TxHash, Signature)> { // keep this around, so we can use it to calculate the hash let original_encoding = *data; @@ -914,7 +917,7 @@ impl TransactionSigned { gas_limit: Decodable::decode(data)?, to: Decodable::decode(data)?, value: Decodable::decode(data)?, - input: Bytes(Decodable::decode(data)?), + input: Decodable::decode(data)?, chain_id: None, }; let (signature, extracted_id) = Signature::decode_with_eip155_chain_id(data)?; @@ -930,9 +933,7 @@ impl TransactionSigned { /// This expects `rlp(legacy_tx)` // TODO: make buf advancement semantics consistent with `decode_enveloped_typed_transaction`, // so decoding methods do not need to manually advance the buffer - pub fn decode_rlp_legacy_transaction( - data: &mut &[u8], - ) -> Result { + pub fn decode_rlp_legacy_transaction(data: &mut &[u8]) -> alloy_rlp::Result { let (transaction, hash, signature) = TransactionSigned::decode_rlp_legacy_transaction_tuple(data)?; let signed = @@ -945,17 +946,17 @@ impl TransactionSigned { /// CAUTION: this expects that `data` is `[id, rlp(tx)]` pub fn decode_enveloped_typed_transaction( data: &mut &[u8], - ) -> Result { + ) -> alloy_rlp::Result { // keep this around so we can use it to calculate the hash let original_encoding = *data; - let tx_type = *data.first().ok_or(DecodeError::InputTooShort)?; + let tx_type = *data.first().ok_or(RlpError::InputTooShort)?; data.advance(1); // decode the list header for the rest of the transaction let header = Header::decode(data)?; if !header.list { - return Err(DecodeError::Custom("typed tx fields must be encoded as a list")) + return Err(RlpError::Custom("typed tx fields must be encoded as a list")) } // length of tx encoding = tx type byte (size = 1) + length of header + payload length @@ -966,7 +967,7 @@ impl TransactionSigned { 1 => Transaction::Eip2930(TxEip2930::decode_inner(data)?), 2 => Transaction::Eip1559(TxEip1559::decode_inner(data)?), 3 => Transaction::Eip4844(TxEip4844::decode_inner(data)?), - _ => return Err(DecodeError::Custom("unsupported typed transaction type")), + _ => return Err(RlpError::Custom("unsupported typed transaction type")), }; let signature = Signature::decode(data)?; @@ -982,11 +983,11 @@ impl TransactionSigned { /// For legacy transactions, the format is encoded as: `rlp(tx)` /// For EIP-2718 typed transaction, the format is encoded as the type of the transaction /// followed by the rlp of the transaction: `type` + `rlp(tx)` - pub fn decode_enveloped(tx: Bytes) -> Result { + pub fn decode_enveloped(tx: Bytes) -> alloy_rlp::Result { let mut data = tx.as_ref(); if data.is_empty() { - return Err(DecodeError::InputTooShort) + 
return Err(RlpError::InputTooShort) } // Check if the tx is a list @@ -1020,7 +1021,7 @@ impl Encodable for TransactionSigned { /// /// CAUTION: this expects that the given buf contains rlp impl Decodable for TransactionSigned { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { // decode header let mut original_encoding = *buf; let header = Header::decode(buf)?; @@ -1129,11 +1130,11 @@ impl Encodable for TransactionSignedEcRecovered { } impl Decodable for TransactionSignedEcRecovered { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { let signed_transaction = TransactionSigned::decode(buf)?; let signer = signed_transaction .recover_signer() - .ok_or(DecodeError::Custom("Unable to recover decoded transaction signer."))?; + .ok_or(RlpError::Custom("Unable to recover decoded transaction signer."))?; Ok(TransactionSignedEcRecovered { signer, signed_transaction }) } } @@ -1184,16 +1185,16 @@ impl IntoRecoveredTransaction for TransactionSignedEcRecovered { #[cfg(test)] mod tests { use crate::{ - sign_message, + hex, sign_message, transaction::{ signature::Signature, TransactionKind, TxEip1559, TxLegacy, PARALLEL_SENDER_RECOVERY_THRESHOLD, }, - Address, Bytes, Transaction, TransactionSigned, TransactionSignedEcRecovered, H256, U256, + Address, Bytes, Transaction, TransactionSigned, TransactionSignedEcRecovered, B256, U256, }; + use alloy_primitives::{b256, bytes}; + use alloy_rlp::{Decodable, Encodable, Error as RlpError}; use bytes::BytesMut; - use ethers_core::utils::hex; - use reth_rlp::{Decodable, DecodeError, Encodable}; use secp256k1::{KeyPair, Secp256k1}; use std::str::FromStr; @@ -1201,15 +1202,13 @@ mod tests { fn test_decode_empty_typed_tx() { let input = [0x80u8]; let res = TransactionSigned::decode(&mut &input[..]).unwrap_err(); - assert_eq!(DecodeError::InputTooShort, res); + assert_eq!(RlpError::InputTooShort, res); } #[test] fn test_decode_create_goerli() { // test that an example create tx from goerli decodes properly - let tx_bytes = - hex::decode("b901f202f901ee05228459682f008459682f11830209bf8080b90195608060405234801561001057600080fd5b50610175806100206000396000f3fe608060405234801561001057600080fd5b506004361061002b5760003560e01c80630c49c36c14610030575b600080fd5b61003861004e565b604051610045919061011d565b60405180910390f35b60606020600052600f6020527f68656c6c6f2073746174656d696e64000000000000000000000000000000000060405260406000f35b600081519050919050565b600082825260208201905092915050565b60005b838110156100be5780820151818401526020810190506100a3565b838111156100cd576000848401525b50505050565b6000601f19601f8301169050919050565b60006100ef82610084565b6100f9818561008f565b93506101098185602086016100a0565b610112816100d3565b840191505092915050565b6000602082019050818103600083015261013781846100e4565b90509291505056fea264697066735822122051449585839a4ea5ac23cae4552ef8a96b64ff59d0668f76bfac3796b2bdbb3664736f6c63430008090033c080a0136ebffaa8fc8b9fda9124de9ccb0b1f64e90fbd44251b4c4ac2501e60b104f9a07eb2999eec6d185ef57e91ed099afb0a926c5b536f0155dd67e537c7476e1471") - .unwrap(); + let tx_bytes = 
hex!("b901f202f901ee05228459682f008459682f11830209bf8080b90195608060405234801561001057600080fd5b50610175806100206000396000f3fe608060405234801561001057600080fd5b506004361061002b5760003560e01c80630c49c36c14610030575b600080fd5b61003861004e565b604051610045919061011d565b60405180910390f35b60606020600052600f6020527f68656c6c6f2073746174656d696e64000000000000000000000000000000000060405260406000f35b600081519050919050565b600082825260208201905092915050565b60005b838110156100be5780820151818401526020810190506100a3565b838111156100cd576000848401525b50505050565b6000601f19601f8301169050919050565b60006100ef82610084565b6100f9818561008f565b93506101098185602086016100a0565b610112816100d3565b840191505092915050565b6000602082019050818103600083015261013781846100e4565b90509291505056fea264697066735822122051449585839a4ea5ac23cae4552ef8a96b64ff59d0668f76bfac3796b2bdbb3664736f6c63430008090033c080a0136ebffaa8fc8b9fda9124de9ccb0b1f64e90fbd44251b4c4ac2501e60b104f9a07eb2999eec6d185ef57e91ed099afb0a926c5b536f0155dd67e537c7476e1471"); let decoded = TransactionSigned::decode(&mut &tx_bytes[..]).unwrap(); assert_eq!(tx_bytes.len(), decoded.length()); @@ -1217,12 +1216,12 @@ mod tests { let mut encoded = BytesMut::new(); decoded.encode(&mut encoded); - assert_eq!(tx_bytes, encoded); + assert_eq!(tx_bytes, encoded[..]); } #[test] fn decode_transaction_consumes_buffer() { - let bytes = &mut &hex::decode("b87502f872041a8459682f008459682f0d8252089461815774383099e24810ab832a5b2a5425c154d58829a2241af62c000080c001a059e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafda0016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469").unwrap()[..]; + let bytes = &mut &hex!("b87502f872041a8459682f008459682f0d8252089461815774383099e24810ab832a5b2a5425c154d58829a2241af62c000080c001a059e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafda0016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469")[..]; let _transaction_res = TransactionSigned::decode(bytes).unwrap(); assert_eq!( bytes.len(), @@ -1234,7 +1233,7 @@ mod tests { #[test] fn decode_multiple_network_txs() { - let bytes = hex::decode("f86b02843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba00eb96ca19e8a77102767a41fc85a36afd5c61ccb09911cec5d3e86e193d9c5aea03a456401896b1b6055311536bf00a718568c744d8c1f9df59879e8350220ca18").unwrap(); + let bytes = hex!("f86b02843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba00eb96ca19e8a77102767a41fc85a36afd5c61ccb09911cec5d3e86e193d9c5aea03a456401896b1b6055311536bf00a718568c744d8c1f9df59879e8350220ca18"); let transaction = Transaction::Legacy(TxLegacy { chain_id: Some(4u64), nonce: 2, @@ -1253,19 +1252,17 @@ mod tests { s: U256::from_str("0x3a456401896b1b6055311536bf00a718568c744d8c1f9df59879e8350220ca18") .unwrap(), }; - let hash = - H256::from_str("0xa517b206d2223278f860ea017d3626cacad4f52ff51030dc9a96b432f17f8d34") - .ok(); - test_decode_and_encode(bytes, transaction, signature, hash); + let hash = b256!("a517b206d2223278f860ea017d3626cacad4f52ff51030dc9a96b432f17f8d34"); + test_decode_and_encode(&bytes, transaction, signature, Some(hash)); - let bytes = hex::decode("f86b01843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac3960468702769bb01b2a00802ba0e24d8bd32ad906d6f8b8d7741e08d1959df021698b19ee232feba15361587d0aa05406ad177223213df262cb66ccbb2f46bfdccfdfbbb5ffdda9e2c02d977631da").unwrap(); + let bytes = 
hex!("f86b01843b9aca00830186a094d3e8763675e4c425df46cc3b5c0f6cbdac3960468702769bb01b2a00802ba0e24d8bd32ad906d6f8b8d7741e08d1959df021698b19ee232feba15361587d0aa05406ad177223213df262cb66ccbb2f46bfdccfdfbbb5ffdda9e2c02d977631da"); let transaction = Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 1u64, gas_price: 1000000000, gas_limit: 100000u64, to: TransactionKind::Call(Address::from_slice( - &hex::decode("d3e8763675e4c425df46cc3b5c0f6cbdac396046").unwrap()[..], + &hex!("d3e8763675e4c425df46cc3b5c0f6cbdac396046")[..], )), value: 693361000000000u64.into(), input: Default::default(), @@ -1277,16 +1274,16 @@ mod tests { s: U256::from_str("0x5406ad177223213df262cb66ccbb2f46bfdccfdfbbb5ffdda9e2c02d977631da") .unwrap(), }; - test_decode_and_encode(bytes, transaction, signature, None); + test_decode_and_encode(&bytes, transaction, signature, None); - let bytes = hex::decode("f86b0384773594008398968094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba0ce6834447c0a4193c40382e6c57ae33b241379c5418caac9cdc18d786fd12071a03ca3ae86580e94550d7c071e3a02eadb5a77830947c9225165cf9100901bee88").unwrap(); + let bytes = hex!("f86b0384773594008398968094d3e8763675e4c425df46cc3b5c0f6cbdac39604687038d7ea4c68000802ba0ce6834447c0a4193c40382e6c57ae33b241379c5418caac9cdc18d786fd12071a03ca3ae86580e94550d7c071e3a02eadb5a77830947c9225165cf9100901bee88"); let transaction = Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 3, gas_price: 2000000000, gas_limit: 10000000, to: TransactionKind::Call(Address::from_slice( - &hex::decode("d3e8763675e4c425df46cc3b5c0f6cbdac396046").unwrap()[..], + &hex!("d3e8763675e4c425df46cc3b5c0f6cbdac396046")[..], )), value: 1000000000000000u64.into(), input: Bytes::default(), @@ -1298,9 +1295,9 @@ mod tests { s: U256::from_str("0x3ca3ae86580e94550d7c071e3a02eadb5a77830947c9225165cf9100901bee88") .unwrap(), }; - test_decode_and_encode(bytes, transaction, signature, None); + test_decode_and_encode(&bytes, transaction, signature, None); - let bytes = hex::decode("b87502f872041a8459682f008459682f0d8252089461815774383099e24810ab832a5b2a5425c154d58829a2241af62c000080c001a059e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafda0016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469").unwrap(); + let bytes = hex!("b87502f872041a8459682f008459682f0d8252089461815774383099e24810ab832a5b2a5425c154d58829a2241af62c000080c001a059e6b67f48fb32e7e570dfb11e042b5ad2e55e3ce3ce9cd989c7e06e07feeafda0016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469"); let transaction = Transaction::Eip1559(TxEip1559 { chain_id: 4, nonce: 26, @@ -1308,7 +1305,7 @@ mod tests { max_fee_per_gas: 1500000013, gas_limit: 21000, to: TransactionKind::Call(Address::from_slice( - &hex::decode("61815774383099e24810ab832a5b2a5425c154d5").unwrap()[..], + &hex!("61815774383099e24810ab832a5b2a5425c154d5")[..], )), value: 3000000000000000000u64.into(), input: Default::default(), @@ -1321,16 +1318,16 @@ mod tests { s: U256::from_str("0x016b83f4f980694ed2eee4d10667242b1f40dc406901b34125b008d334d47469") .unwrap(), }; - test_decode_and_encode(bytes, transaction, signature, None); + test_decode_and_encode(&bytes, transaction, signature, None); - let bytes = hex::decode("f8650f84832156008287fb94cf7f9e66af820a19257a2108375b180b0ec491678204d2802ca035b7bfeb9ad9ece2cbafaaf8e202e706b4cfaeb233f46198f00b44d4a566a981a0612638fb29427ca33b9a3be2a0a561beecfe0269655be160d35e72d366a6a860").unwrap(); + let bytes = 
hex!("f8650f84832156008287fb94cf7f9e66af820a19257a2108375b180b0ec491678204d2802ca035b7bfeb9ad9ece2cbafaaf8e202e706b4cfaeb233f46198f00b44d4a566a981a0612638fb29427ca33b9a3be2a0a561beecfe0269655be160d35e72d366a6a860"); let transaction = Transaction::Legacy(TxLegacy { chain_id: Some(4), nonce: 15, gas_price: 2200000000, gas_limit: 34811, to: TransactionKind::Call(Address::from_slice( - &hex::decode("cf7f9e66af820a19257a2108375b180b0ec49167").unwrap()[..], + &hex!("cf7f9e66af820a19257a2108375b180b0ec49167")[..], )), value: 1234u64.into(), input: Bytes::default(), @@ -1342,14 +1339,14 @@ mod tests { s: U256::from_str("0x612638fb29427ca33b9a3be2a0a561beecfe0269655be160d35e72d366a6a860") .unwrap(), }; - test_decode_and_encode(bytes, transaction, signature, None); + test_decode_and_encode(&bytes, transaction, signature, None); } fn test_decode_and_encode( - bytes: Vec, + bytes: &[u8], transaction: Transaction, signature: Signature, - hash: Option, + hash: Option, ) { let expected = TransactionSigned::from_transaction_and_signature(transaction, signature); if let Some(hash) = hash { @@ -1370,7 +1367,7 @@ mod tests { use crate::hex_literal::hex; // transaction is from ropsten - let hash: H256 = + let hash: B256 = hex!("559fb34c4a7f115db26cbf8505389475caaab3df45f5c7a0faa4abfa3835306c").into(); let signer: Address = hex!("641c5d790f862a58ec7abcfd644c0442e9c201b3").into(); let raw =hex!("f88b8212b085028fa6ae00830f424094aad593da0c8116ef7d2d594dd6a63241bccfc26c80a48318b64b000000000000000000000000641c5d790f862a58ec7abcfd644c0442e9c201b32aa0a6ef9e170bca5ffb7ac05433b13b7043de667fbb0b4a5e45d3b54fb2d6efcc63a0037ec2c05c3d60c5f5f78244ce0a3859e3a18a36c61efb061b383507d3ce19d2"); @@ -1384,18 +1381,18 @@ mod tests { #[test] fn test_envelop_encode() { // random tx: - let input = hex::decode("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76").unwrap(); + let input = hex!("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76"); let decoded = TransactionSigned::decode(&mut &input[..]).unwrap(); let encoded = decoded.envelope_encoded(); - assert_eq!(encoded, input); + assert_eq!(encoded[..], input); } #[test] fn test_envelop_decode() { // random tx: - let input = &hex::decode("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76").unwrap()[..]; - let decoded = TransactionSigned::decode_enveloped(input.into()).unwrap(); + let input = bytes!("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76"); + let decoded = TransactionSigned::decode_enveloped(input.clone()).unwrap(); let encoded = decoded.envelope_encoded(); assert_eq!(encoded, input); @@ -1404,7 +1401,7 @@ mod tests { #[test] fn test_decode_signed_ec_recovered_transaction() { // random tx: - let input = 
hex::decode("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76").unwrap(); + let input = hex!("02f871018302a90f808504890aef60826b6c94ddf4c5025d1a5742cf12f74eec246d4432c295e487e09c3bbcc12b2b80c080a0f21a4eacd0bf8fea9c5105c543be5a1d8c796516875710fafafdf16d16d8ee23a001280915021bb446d1973501a67f93d2b38894a514b976e7b46dc2fe54598d76"); let tx = TransactionSigned::decode(&mut &input[..]).unwrap(); let recovered = tx.into_ecrecovered().unwrap(); @@ -1447,7 +1444,7 @@ mod tests { let key_pair = KeyPair::new(&secp, &mut rng); let signature = - sign_message(H256::from_slice(&key_pair.secret_bytes()[..]), tx.signature_hash()).unwrap(); + sign_message(B256::from_slice(&key_pair.secret_bytes()[..]), tx.signature_hash()).unwrap(); TransactionSigned::from_transaction_and_signature(tx, signature) }).collect(); diff --git a/crates/primitives/src/transaction/pooled.rs b/crates/primitives/src/transaction/pooled.rs index 8c5df26575..a2c5a2aa79 100644 --- a/crates/primitives/src/transaction/pooled.rs +++ b/crates/primitives/src/transaction/pooled.rs @@ -2,11 +2,11 @@ //! response to `GetPooledTransactions`. use crate::{ Address, BlobTransaction, Bytes, Signature, Transaction, TransactionSigned, - TransactionSignedEcRecovered, TxEip1559, TxEip2930, TxHash, TxLegacy, EIP4844_TX_TYPE_ID, H256, + TransactionSignedEcRecovered, TxEip1559, TxEip2930, TxHash, TxLegacy, B256, EIP4844_TX_TYPE_ID, }; +use alloy_rlp::{Decodable, Encodable, Error as RlpError, Header, EMPTY_LIST_CODE}; use bytes::Buf; use derive_more::{AsRef, Deref}; -use reth_rlp::{Decodable, DecodeError, Encodable, Header, EMPTY_LIST_CODE}; use serde::{Deserialize, Serialize}; /// A response to `GetPooledTransactions`. This can include either a blob transaction, or a @@ -59,7 +59,7 @@ impl PooledTransactionsElement { /// Heavy operation that return signature hash over rlp encoded transaction. /// It is only for signature signing or signer recovery. - pub fn signature_hash(&self) -> H256 { + pub fn signature_hash(&self) -> B256 { match self { Self::Legacy { transaction, .. } => transaction.signature_hash(), Self::Eip2930 { transaction, .. } => transaction.signature_hash(), @@ -115,11 +115,11 @@ impl PooledTransactionsElement { /// followed by the rlp of the transaction: `type` + `rlp(tx)` /// /// For encoded EIP-4844 transactions, the blob sidecar _must_ be included. 
- pub fn decode_enveloped(tx: Bytes) -> Result { + pub fn decode_enveloped(tx: Bytes) -> alloy_rlp::Result { let mut data = tx.as_ref(); if data.is_empty() { - return Err(DecodeError::InputTooShort) + return Err(RlpError::InputTooShort) } // Check if the tx is a list - tx types are less than EMPTY_LIST_CODE (0xc0) @@ -131,7 +131,7 @@ impl PooledTransactionsElement { Ok(Self::Legacy { transaction, signature, hash }) } else { // decode the type byte, only decode BlobTransaction if it is a 4844 transaction - let tx_type = *data.first().ok_or(DecodeError::InputTooShort)?; + let tx_type = *data.first().ok_or(RlpError::InputTooShort)?; if tx_type == EIP4844_TX_TYPE_ID { // Recall that the blob transaction response `TranactionPayload` is encoded like @@ -158,10 +158,10 @@ impl PooledTransactionsElement { // because we checked the tx type, we can be sure that the transaction is not a // blob transaction or legacy match typed_tx.transaction { - Transaction::Legacy(_) => Err(DecodeError::Custom( + Transaction::Legacy(_) => Err(RlpError::Custom( "legacy transactions should not be a result of EIP-2718 decoding", )), - Transaction::Eip4844(_) => Err(DecodeError::Custom( + Transaction::Eip4844(_) => Err(RlpError::Custom( "EIP-4844 transactions can only be decoded with transaction type 0x03", )), Transaction::Eip2930(tx) => Ok(PooledTransactionsElement::Eip2930 { @@ -256,7 +256,7 @@ impl Decodable for PooledTransactionsElement { /// Decodes an enveloped post EIP-4844 [PooledTransactionsElement]. /// /// CAUTION: this expects that `buf` is `[id, rlp(tx)]` - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { // From the EIP-4844 spec: // Blob transactions have two network representations. During transaction gossip responses // (`PooledTransactions`), the EIP-2718 `TransactionPayload` of the blob transaction is @@ -269,7 +269,7 @@ impl Decodable for PooledTransactionsElement { // // First, we check whether or not the transaction is a legacy transaction. 
if buf.is_empty() { - return Err(DecodeError::InputTooShort) + return Err(RlpError::InputTooShort) } // keep this around for buffer advancement post-legacy decoding @@ -292,7 +292,7 @@ impl Decodable for PooledTransactionsElement { Ok(Self::Legacy { transaction, signature, hash }) } else { // decode the type byte, only decode BlobTransaction if it is a 4844 transaction - let tx_type = *buf.first().ok_or(DecodeError::InputTooShort)?; + let tx_type = *buf.first().ok_or(RlpError::InputTooShort)?; if tx_type == EIP4844_TX_TYPE_ID { // Recall that the blob transaction response `TranactionPayload` is encoded like @@ -319,10 +319,10 @@ impl Decodable for PooledTransactionsElement { // because we checked the tx type, we can be sure that the transaction is not a // blob transaction or legacy match typed_tx.transaction { - Transaction::Legacy(_) => Err(DecodeError::Custom( + Transaction::Legacy(_) => Err(RlpError::Custom( "legacy transactions should not be a result of EIP-2718 decoding", )), - Transaction::Eip4844(_) => Err(DecodeError::Custom( + Transaction::Eip4844(_) => Err(RlpError::Custom( "EIP-4844 transactions can only be decoded with transaction type 0x03", )), Transaction::Eip2930(tx) => Ok(PooledTransactionsElement::Eip2930 { diff --git a/crates/primitives/src/transaction/signature.rs b/crates/primitives/src/transaction/signature.rs index cfe639a04e..bfc8f78769 100644 --- a/crates/primitives/src/transaction/signature.rs +++ b/crates/primitives/src/transaction/signature.rs @@ -1,7 +1,8 @@ -use crate::{transaction::util::secp256k1, Address, H256, U256}; +use crate::{transaction::util::secp256k1, Address, B256, U256}; +use alloy_primitives::Bytes; +use alloy_rlp::{Decodable, Encodable, Error as RlpError}; use bytes::Buf; use reth_codecs::{derive_arbitrary, Compact}; -use reth_rlp::{Decodable, DecodeError, Encodable}; use serde::{Deserialize, Serialize}; /// r, s: Values corresponding to the signature of the @@ -50,7 +51,7 @@ impl Signature { /// Encodes the `v` value using the legacy scheme with EIP-155 support depends on chain_id. pub(crate) fn encode_with_eip155_chain_id( &self, - out: &mut dyn reth_rlp::BufMut, + out: &mut dyn alloy_rlp::BufMut, chain_id: Option, ) { self.v(chain_id).encode(out); @@ -73,7 +74,7 @@ impl Signature { /// This will return a chain ID if the `v` value is [EIP-155](https://github.com/ethereum/EIPs/blob/master/EIPS/eip-155.md) compatible. pub(crate) fn decode_with_eip155_chain_id( buf: &mut &[u8], - ) -> Result<(Self, Option), DecodeError> { + ) -> alloy_rlp::Result<(Self, Option)> { let v = u64::decode(buf)?; let r = Decodable::decode(buf)?; let s = Decodable::decode(buf)?; @@ -85,7 +86,7 @@ impl Signature { } else { // non-EIP-155 legacy scheme, v = 27 for even y-parity, v = 28 for odd y-parity if v != 27 && v != 28 { - return Err(DecodeError::Custom("invalid Ethereum signature (V is not 27 or 28)")) + return Err(RlpError::Custom("invalid Ethereum signature (V is not 27 or 28)")) } let odd_y_parity = v == 28; Ok((Signature { r, s, odd_y_parity }, None)) @@ -98,14 +99,14 @@ impl Signature { } /// Encode the `odd_y_parity`, `r`, `s` values without a RLP header. - pub fn encode(&self, out: &mut dyn reth_rlp::BufMut) { + pub fn encode(&self, out: &mut dyn alloy_rlp::BufMut) { self.odd_y_parity.encode(out); self.r.encode(out); self.s.encode(out); } /// Decodes the `odd_y_parity`, `r`, `s` values without a RLP header. 
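
The `decode_with_eip155_chain_id` hunk above only swaps the error type; the `v` handling it relies on is the standard EIP-155 rule. A minimal sketch of that rule in plain `u64` arithmetic (`parity_and_chain_id` is an illustrative helper, not the reth function):

```rust
/// Splits a legacy `v` value into (odd_y_parity, chain_id) per EIP-155:
/// `v = {0, 1} + chain_id * 2 + 35`, with bare 27/28 for pre-EIP-155 signatures.
fn parity_and_chain_id(v: u64) -> Result<(bool, Option<u64>), &'static str> {
    if v >= 35 {
        Ok((((v - 35) % 2) != 0, Some((v - 35) >> 1)))
    } else if v == 27 || v == 28 {
        Ok((v == 28, None))
    } else {
        Err("invalid Ethereum signature (V is not 27 or 28)")
    }
}

fn main() {
    assert_eq!(parity_and_chain_id(37), Ok((false, Some(1)))); // mainnet, even y-parity
    assert_eq!(parity_and_chain_id(38), Ok((true, Some(1)))); // mainnet, odd y-parity
    assert_eq!(parity_and_chain_id(27), Ok((false, None))); // pre-EIP-155 legacy
}
```
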
- pub fn decode(buf: &mut &[u8]) -> Result { + pub fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { Ok(Signature { odd_y_parity: Decodable::decode(buf)?, r: Decodable::decode(buf)?, @@ -114,7 +115,7 @@ impl Signature { } /// Recover signer address from message hash. - pub fn recover_signer(&self, hash: H256) -> Option
<Address> { + pub fn recover_signer(&self, hash: B256) -> Option<Address>
{ let mut sig: [u8; 65] = [0; 65]; sig[0..32].copy_from_slice(&self.r.to_be_bytes::<32>()); @@ -123,7 +124,7 @@ impl Signature { // NOTE: we are removing error from underlying crypto library as it will restrain primitive // errors and we care only if recovery is passing or not. - secp256k1::recover_signer(&sig, hash.as_fixed_bytes()).ok() + secp256k1::recover_signer(&sig, &hash.0).ok() } /// Turn this signature into its byte @@ -137,6 +138,11 @@ impl Signature { sig } + /// Turn this signature into its hex-encoded representation. + pub fn to_hex_bytes(&self) -> Bytes { + crate::hex::encode(self.to_bytes()).into() + } + /// Calculates a heuristic for the in-memory size of the [Signature]. #[inline] pub fn size(&self) -> usize { @@ -146,7 +152,7 @@ impl Signature { #[cfg(test)] mod tests { - use crate::{Address, Signature, H256, U256}; + use crate::{Address, Signature, B256, U256}; use bytes::BytesMut; use std::str::FromStr; @@ -220,7 +226,7 @@ mod tests { odd_y_parity: false, }; let hash = - H256::from_str("daf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53") + B256::from_str("daf5a779ae972f972197303d7b574746c7ef83eadac0f2791ad23db92e4c8e53") .unwrap(); let signer = signature.recover_signer(hash).unwrap(); let expected = Address::from_str("0x9d8a62f656a8d1615c1294fd71e9cfb3e4855a4f").unwrap(); diff --git a/crates/primitives/src/transaction/util.rs b/crates/primitives/src/transaction/util.rs index da3fb63494..ce180e2dd3 100644 --- a/crates/primitives/src/transaction/util.rs +++ b/crates/primitives/src/transaction/util.rs @@ -1,8 +1,6 @@ -use crate::{keccak256, Address}; - pub(crate) mod secp256k1 { use super::*; - use crate::Signature; + use crate::{keccak256, Address, Signature}; pub(crate) use ::secp256k1::Error; use ::secp256k1::{ ecdsa::{RecoverableSignature, RecoveryId}, @@ -45,17 +43,17 @@ pub(crate) mod secp256k1 { Address::from_slice(&hash[12..]) } } + #[cfg(test)] mod tests { - - use super::secp256k1; - use crate::{hex_literal::hex, Address}; + use super::*; + use crate::{address, hex}; #[test] fn sanity_ecrecover_call() { let sig = hex!("650acf9d3f5f0a2c799776a1254355d5f4061762a237396a99a0e0e3fc2bcd6729514a0dacb2e623ac4abd157cb18163ff942280db4d5caad66ddf941ba12e0300"); let hash = hex!("47173285a8d7341e5e972fc677286384f802f8ef42a5ec5f03bbfa254cb01fad"); - let out: Address = hex!("c08b5542d177ac6686946920409741463a15dddb").into(); + let out = address!("c08b5542d177ac6686946920409741463a15dddb"); assert_eq!(secp256k1::recover_signer(&sig, &hash), Ok(out)); } diff --git a/crates/primitives/src/trie/hash_builder/mod.rs b/crates/primitives/src/trie/hash_builder/mod.rs index 171b844bce..35a41243da 100644 --- a/crates/primitives/src/trie/hash_builder/mod.rs +++ b/crates/primitives/src/trie/hash_builder/mod.rs @@ -2,7 +2,7 @@ use super::{ nodes::{rlp_hash, BranchNode, ExtensionNode, LeafNode}, BranchNodeCompact, Nibbles, TrieMask, }; -use crate::{keccak256, proofs::EMPTY_ROOT, H256}; +use crate::{keccak256, proofs::EMPTY_ROOT, B256}; use std::{collections::HashMap, fmt::Debug}; mod state; @@ -115,7 +115,7 @@ impl HashBuilder { pub fn print_stack(&self) { println!("============ STACK ==============="); for item in &self.stack { - println!("{}", hex::encode(item)); + println!("{}", crate::hex::encode(item)); } println!("============ END STACK ==============="); } @@ -130,7 +130,7 @@ impl HashBuilder { } /// Adds a new branch element & its hash to the trie hash builder. 
- pub fn add_branch(&mut self, key: Nibbles, value: H256, stored_in_database: bool) { + pub fn add_branch(&mut self, key: Nibbles, value: B256, stored_in_database: bool) { assert!(key > self.key || (self.key.is_empty() && key.is_empty())); if !self.key.is_empty() { self.update(&key); @@ -149,7 +149,7 @@ impl HashBuilder { } /// Returns the current root hash of the trie builder. - pub fn root(&mut self) -> H256 { + pub fn root(&mut self) -> B256 { // Clears the internal state if !self.key.is_empty() { self.update(&Nibbles::default()); @@ -159,10 +159,10 @@ impl HashBuilder { self.current_root() } - fn current_root(&self) -> H256 { + fn current_root(&self) -> B256 { if let Some(node_ref) = self.stack.last() { - if node_ref.len() == H256::len_bytes() + 1 { - H256::from_slice(&node_ref[1..]) + if node_ref.len() == B256::len_bytes() + 1 { + B256::from_slice(&node_ref[1..]) } else { keccak256(node_ref) } @@ -189,7 +189,7 @@ impl HashBuilder { tracing::Level::TRACE, "loop", i, - current = hex::encode(¤t.hex_data), + current = crate::hex::encode(¤t.hex_data), ?build_extensions ); let _enter = span.enter(); @@ -247,7 +247,7 @@ impl HashBuilder { tracing::debug!(target: "trie::hash_builder", ?leaf_node, "pushing leaf node"); tracing::trace!(target: "trie::hash_builder", rlp = { self.rlp_buf.clear(); - hex::encode(&leaf_node.rlp(&mut self.rlp_buf)) + crate::hex::encode(&leaf_node.rlp(&mut self.rlp_buf)) }, "leaf node rlp"); self.rlp_buf.clear(); @@ -277,7 +277,7 @@ impl HashBuilder { tracing::debug!(target: "trie::hash_builder", ?extension_node, "pushing extension node"); tracing::trace!(target: "trie::hash_builder", rlp = { self.rlp_buf.clear(); - hex::encode(&extension_node.rlp(&mut self.rlp_buf)) + crate::hex::encode(&extension_node.rlp(&mut self.rlp_buf)) }, "extension node rlp"); self.rlp_buf.clear(); self.stack.push(extension_node.rlp(&mut self.rlp_buf)); @@ -323,7 +323,7 @@ impl HashBuilder { /// Given the size of the longest common prefix, it proceeds to create a branch node /// from the state mask and existing stack state, and store its RLP to the top of the stack, /// after popping all the relevant elements from the stack. - fn push_branch_node(&mut self, len: usize) -> Vec { + fn push_branch_node(&mut self, len: usize) -> Vec { let state_mask = self.groups[len]; let hash_mask = self.hash_masks[len]; let branch_node = BranchNode::new(&self.stack); @@ -343,7 +343,7 @@ impl HashBuilder { self.stack.resize(first_child_idx, vec![]); tracing::debug!(target: "trie::hash_builder", "pushing branch node with {:?} mask from stack", state_mask); - tracing::trace!(target: "trie::hash_builder", rlp = hex::encode(&rlp), "branch node rlp"); + tracing::trace!(target: "trie::hash_builder", rlp = crate::hex::encode(&rlp), "branch node rlp"); self.stack.push(rlp); children } @@ -352,7 +352,7 @@ impl HashBuilder { /// to update the masks for the next level and store the branch node and the /// masks in the database. We will use that when consuming the intermediate nodes /// from the database to efficiently build the trie. 
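
The `current_root` hunk above only changes the hash type; the rule it implements is worth spelling out. A minimal sketch with the hash function passed in as a closure so the snippet stays self-contained (`root_from_node_ref` is an illustrative name, not the reth API):

```rust
/// A node reference on the builder stack is either the RLP of a hash
/// (one string-header byte plus a 32-byte digest) or a short inline node
/// whose RLP still needs to be hashed to obtain the root.
fn root_from_node_ref(node_ref: &[u8], keccak256: impl Fn(&[u8]) -> [u8; 32]) -> [u8; 32] {
    if node_ref.len() == 32 + 1 {
        // Already a hash reference: strip the single-byte RLP header (0xa0).
        let mut root = [0u8; 32];
        root.copy_from_slice(&node_ref[1..]);
        root
    } else {
        // Inline node: hash its RLP.
        keccak256(node_ref)
    }
}

fn main() {
    // A 33-byte reference is returned verbatim, minus the header byte.
    let mut node_ref = vec![0xa0u8];
    node_ref.extend_from_slice(&[0x11u8; 32]);
    assert_eq!(root_from_node_ref(&node_ref, |_| [0u8; 32]), [0x11u8; 32]);
}
```
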
- fn store_branch_node(&mut self, current: &Nibbles, len: usize, children: Vec) { + fn store_branch_node(&mut self, current: &Nibbles, len: usize, children: Vec) { if len > 0 { let parent_index = len - 1; self.hash_masks[parent_index] |= TrieMask::from_nibble(current[parent_index]); @@ -415,11 +415,11 @@ impl HashBuilder { #[cfg(test)] mod tests { use super::*; - use crate::{hex_literal::hex, proofs::triehash::KeccakHasher, H256, U256}; + use crate::{hex_literal::hex, proofs::triehash::KeccakHasher, B256, U256}; use proptest::prelude::*; use std::collections::{BTreeMap, HashMap}; - fn trie_root(iter: I) -> H256 + fn trie_root(iter: I) -> B256 where I: IntoIterator, K: AsRef<[u8]> + Ord, @@ -439,7 +439,7 @@ mod tests { K: AsRef<[u8]> + Ord, { let hashed = iter - .map(|(k, v)| (keccak256(k.as_ref()), reth_rlp::encode_fixed_size(v).to_vec())) + .map(|(k, v)| (keccak256(k.as_ref()), alloy_rlp::encode_fixed_size(v).to_vec())) // Collect into a btree map to sort the data .collect::>(); @@ -477,7 +477,7 @@ mod tests { #[test] fn arbitrary_hashed_root() { - proptest!(|(state: BTreeMap)| { + proptest!(|(state: BTreeMap)| { assert_hashed_trie_root(state.iter()); }); } @@ -557,15 +557,15 @@ mod tests { #[test] fn test_root_rlp_hashed_data() { let data = HashMap::from([ - (H256::from_low_u64_le(1), U256::from(2)), - (H256::from_low_u64_be(3), U256::from(4)), + (B256::with_last_byte(1), U256::from(2)), + (B256::with_last_byte(3), U256::from(4)), ]); assert_hashed_trie_root(data.iter()); } #[test] fn test_root_known_hash() { - let root_hash = H256::random(); + let root_hash = B256::random(); let mut hb = HashBuilder::default(); hb.add_branch(Nibbles::default(), root_hash, false); assert_eq!(hb.root(), root_hash); @@ -590,7 +590,7 @@ mod tests { // Skip the 0th element given in this example they have a common prefix and will // collapse to a Branch node. use crate::bytes::BytesMut; - use reth_rlp::Encodable; + use alloy_rlp::Encodable; let leaf1 = LeafNode::new(&Nibbles::unpack(&raw_input[0].0[1..]), input[0].1); let leaf2 = LeafNode::new(&Nibbles::unpack(&raw_input[1].0[1..]), input[1].1); let mut branch: [&dyn Encodable; 17] = [b""; 17]; @@ -599,7 +599,7 @@ mod tests { branch[4] = &leaf1; branch[7] = &leaf2; let mut branch_node_rlp = BytesMut::new(); - reth_rlp::encode_list::(&branch, &mut branch_node_rlp); + alloy_rlp::encode_list::<_, dyn Encodable>(&branch, &mut branch_node_rlp); let branch_node_hash = keccak256(branch_node_rlp); let mut hb2 = HashBuilder::default(); diff --git a/crates/primitives/src/trie/hash_builder/value.rs b/crates/primitives/src/trie/hash_builder/value.rs index e3b5559d01..45d4c0ce1c 100644 --- a/crates/primitives/src/trie/hash_builder/value.rs +++ b/crates/primitives/src/trie/hash_builder/value.rs @@ -1,4 +1,4 @@ -use crate::H256; +use crate::B256; use reth_codecs::{derive_arbitrary, Compact}; use serde::{Deserialize, Serialize}; @@ -7,7 +7,7 @@ use serde::{Deserialize, Serialize}; #[derive(Clone, PartialEq, Serialize, Deserialize)] pub enum HashBuilderValue { /// Value of the leaf node. - Hash(H256), + Hash(B256), /// Hash of adjacent nodes. 
Bytes(Vec), } @@ -35,7 +35,7 @@ impl Compact for HashBuilderValue { { match buf[0] { 0 => { - let (hash, buf) = H256::from_compact(&buf[1..], 32); + let (hash, buf) = B256::from_compact(&buf[1..], 32); (Self::Hash(hash), buf) } 1 => { @@ -50,7 +50,7 @@ impl Compact for HashBuilderValue { impl std::fmt::Debug for HashBuilderValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { - Self::Bytes(bytes) => write!(f, "Bytes({:?})", hex::encode(bytes)), + Self::Bytes(bytes) => write!(f, "Bytes({:?})", crate::hex::encode(bytes)), Self::Hash(hash) => write!(f, "Hash({:?})", hash), } } @@ -68,8 +68,8 @@ impl From<&[u8]> for HashBuilderValue { } } -impl From for HashBuilderValue { - fn from(value: H256) -> Self { +impl From for HashBuilderValue { + fn from(value: B256) -> Self { Self::Hash(value) } } diff --git a/crates/primitives/src/trie/nibbles.rs b/crates/primitives/src/trie/nibbles.rs index 3bbf74a8b7..f53c0ecc9a 100644 --- a/crates/primitives/src/trie/nibbles.rs +++ b/crates/primitives/src/trie/nibbles.rs @@ -1,7 +1,7 @@ use crate::Bytes; +use alloy_rlp::RlpEncodableWrapper; use derive_more::{Deref, DerefMut, From, Index}; use reth_codecs::{main_codec, Compact}; -use reth_rlp::RlpEncodableWrapper; use serde::{Deserialize, Serialize}; /// The nibbles are the keys for the AccountsTrie and the subkeys for the StorageTrie. @@ -95,14 +95,14 @@ impl From<&[u8; N]> for Nibbles { impl std::fmt::Debug for Nibbles { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { - f.debug_struct("Nibbles").field("hex_data", &hex::encode(&self.hex_data)).finish() + f.debug_struct("Nibbles").field("hex_data", &crate::hex::encode(&self.hex_data)).finish() } } impl Nibbles { /// Creates a new [Nibbles] instance from bytes. - pub fn from_hex(hex: Vec) -> Self { - Nibbles { hex_data: Bytes::from(hex) } + pub fn from_hex>(hex: T) -> Self { + Nibbles { hex_data: hex.into() } } /// Take a byte array (slice or vector) as input and convert it into a [Nibbles] struct @@ -292,16 +292,14 @@ impl Nibbles { #[cfg(test)] mod tests { use super::*; + use crate::hex; use proptest::prelude::*; #[test] fn hashed_regression() { - let nibbles = hex::decode("05010406040a040203030f010805020b050c04070003070e0909070f010b0a0805020301070c0a0902040b0f000f0006040a04050f020b090701000a0a040b").unwrap(); - let nibbles = Nibbles::from_hex(nibbles); + let nibbles = Nibbles::from_hex(hex!("05010406040a040203030f010805020b050c04070003070e0909070f010b0a0805020301070c0a0902040b0f000f0006040a04050f020b090701000a0a040b")); let path = nibbles.encode_path_leaf(true); - let expected = - hex::decode("351464a4233f1852b5c47037e997f1ba852317ca924bf0f064a45f2b9710aa4b") - .unwrap(); + let expected = hex!("351464a4233f1852b5c47037e997f1ba852317ca924bf0f064a45f2b9710aa4b"); assert_eq!(path, expected); } diff --git a/crates/primitives/src/trie/nodes/branch.rs b/crates/primitives/src/trie/nodes/branch.rs index 36ee35eb98..073c2e125f 100644 --- a/crates/primitives/src/trie/nodes/branch.rs +++ b/crates/primitives/src/trie/nodes/branch.rs @@ -1,8 +1,8 @@ use super::{super::TrieMask, rlp_node, CHILD_INDEX_RANGE}; -use crate::H256; +use crate::B256; +use alloy_rlp::{BufMut, EMPTY_STRING_CODE}; use bytes::Buf; use reth_codecs::Compact; -use reth_rlp::{BufMut, EMPTY_STRING_CODE}; use serde::{Deserialize, Serialize}; /// A Branch node is only a pointer to the stack of nodes and is used to @@ -26,7 +26,7 @@ impl<'a> BranchNode<'a> { &self, state_mask: TrieMask, hash_mask: TrieMask, - ) -> impl Iterator + '_ { + ) -> impl 
Iterator + '_ { let mut index = self.stack.len() - state_mask.count_ones() as usize; CHILD_INDEX_RANGE.filter_map(move |digit| { let mut child = None; @@ -36,7 +36,7 @@ impl<'a> BranchNode<'a> { } index += 1; } - child.map(|child| H256::from_slice(&child[1..])) + child.map(|child| B256::from_slice(&child[1..])) }) } @@ -47,7 +47,7 @@ impl<'a> BranchNode<'a> { // Create the RLP header from the mask elements present. let mut i = first_child_idx; let header = CHILD_INDEX_RANGE.fold( - reth_rlp::Header { list: true, payload_length: 1 }, + alloy_rlp::Header { list: true, payload_length: 1 }, |mut header, digit| { if state_mask.is_bit_set(digit) { header.payload_length += self.stack[i].len(); @@ -107,9 +107,9 @@ pub struct BranchNodeCompact { pub hash_mask: TrieMask, /// Collection of hashes associated with the children of the branch node. /// Each child hash is calculated by hashing two consecutive sub-branch roots. - pub hashes: Vec, + pub hashes: Vec, /// An optional root hash of the subtree rooted at this branch node. - pub root_hash: Option, + pub root_hash: Option, } impl BranchNodeCompact { @@ -118,8 +118,8 @@ impl BranchNodeCompact { state_mask: impl Into, tree_mask: impl Into, hash_mask: impl Into, - hashes: Vec, - root_hash: Option, + hashes: Vec, + root_hash: Option, ) -> Self { let (state_mask, tree_mask, hash_mask) = (state_mask.into(), tree_mask.into(), hash_mask.into()); @@ -130,7 +130,7 @@ impl BranchNodeCompact { } /// Returns the hash associated with the given nibble. - pub fn hash_for_nibble(&self, nibble: u8) -> H256 { + pub fn hash_for_nibble(&self, nibble: u8) -> B256 { let mask = *TrieMask::from_nibble(nibble) - 1; let index = (*self.hash_mask & mask).count_ones(); self.hashes[index as usize] @@ -151,13 +151,13 @@ impl Compact for BranchNodeCompact { buf_size += hash_mask.to_compact(buf); if let Some(root_hash) = root_hash { - buf_size += H256::len_bytes(); - buf.put_slice(root_hash.as_bytes()); + buf_size += B256::len_bytes(); + buf.put_slice(root_hash.as_slice()); } for hash in &hashes { - buf_size += H256::len_bytes(); - buf.put_slice(hash.as_bytes()); + buf_size += B256::len_bytes(); + buf.put_slice(hash.as_slice()); } buf_size @@ -167,7 +167,7 @@ impl Compact for BranchNodeCompact { where Self: Sized, { - let hash_len = H256::len_bytes(); + let hash_len = B256::len_bytes(); // Assert the buffer is long enough to contain the masks and the hashes. assert_eq!(buf.len() % hash_len, 6); @@ -183,15 +183,15 @@ impl Compact for BranchNodeCompact { // Check if the root hash is present if hash_mask.count_ones() as usize + 1 == num_hashes { - root_hash = Some(H256::from_slice(&buf[..hash_len])); + root_hash = Some(B256::from_slice(&buf[..hash_len])); buf.advance(hash_len); num_hashes -= 1; } // Consume all remaining hashes. 
- let mut hashes = Vec::::with_capacity(num_hashes); + let mut hashes = Vec::::with_capacity(num_hashes); for _ in 0..num_hashes { - hashes.push(H256::from_slice(&buf[..hash_len])); + hashes.push(B256::from_slice(&buf[..hash_len])); buf.advance(hash_len); } @@ -202,7 +202,7 @@ impl Compact for BranchNodeCompact { #[cfg(test)] mod tests { use super::*; - use hex_literal::hex; + use crate::hex_literal::hex; #[test] fn node_encoding() { diff --git a/crates/primitives/src/trie/nodes/extension.rs b/crates/primitives/src/trie/nodes/extension.rs index d124084bde..09fe529ecf 100644 --- a/crates/primitives/src/trie/nodes/extension.rs +++ b/crates/primitives/src/trie/nodes/extension.rs @@ -1,5 +1,5 @@ use super::{super::Nibbles, rlp_node}; -use reth_rlp::{BufMut, Encodable}; +use alloy_rlp::{BufMut, Encodable}; /// An intermediate node that exists solely to compress the trie's paths. It contains a path segment /// (a shared prefix of keys) and a single child pointer. Essentially, an extension node can be @@ -30,7 +30,7 @@ impl<'a> ExtensionNode<'a> { impl Encodable for ExtensionNode<'_> { fn encode(&self, out: &mut dyn BufMut) { - let h = reth_rlp::Header { + let h = alloy_rlp::Header { list: true, payload_length: self.prefix.as_slice().length() + self.node.len(), }; @@ -45,8 +45,8 @@ impl Encodable for ExtensionNode<'_> { impl std::fmt::Debug for ExtensionNode<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("ExtensionNode") - .field("prefix", &hex::encode(&self.prefix)) - .field("node", &hex::encode(self.node)) + .field("prefix", &crate::hex::encode(&self.prefix)) + .field("node", &crate::hex::encode(self.node)) .finish() } } diff --git a/crates/primitives/src/trie/nodes/leaf.rs b/crates/primitives/src/trie/nodes/leaf.rs index 5ccbcfb0c3..9fb016e150 100644 --- a/crates/primitives/src/trie/nodes/leaf.rs +++ b/crates/primitives/src/trie/nodes/leaf.rs @@ -1,5 +1,5 @@ use super::{super::Nibbles, rlp_node}; -use reth_rlp::{BufMut, Encodable}; +use alloy_rlp::{BufMut, Encodable}; /// A leaf node represents the endpoint or terminal node in the trie. In other words, a leaf node is /// where actual values are stored. 
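
The mask arithmetic behind `BranchNodeCompact::hash_for_nibble` above is compact but easy to misread. A minimal sketch of the indexing rule on a plain `u16` mask (`hash_index_for_nibble` is an illustrative helper, not the reth API):

```rust
/// The index of a child's hash in the packed `hashes` vector is the number of
/// set bits in `hash_mask` strictly below that child's nibble.
fn hash_index_for_nibble(hash_mask: u16, nibble: u8) -> usize {
    assert!(nibble < 16, "a nibble addresses one of 16 branch children");
    let below = (1u16 << nibble) - 1;
    (hash_mask & below).count_ones() as usize
}

fn main() {
    // Children 1, 4 and 7 have stored hashes, packed at indices 0, 1 and 2.
    let mask: u16 = (1 << 1) | (1 << 4) | (1 << 7);
    assert_eq!(hash_index_for_nibble(mask, 1), 0);
    assert_eq!(hash_index_for_nibble(mask, 4), 1);
    assert_eq!(hash_index_for_nibble(mask, 7), 2);
}
```
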
@@ -33,7 +33,7 @@ impl<'a> LeafNode<'a> { // Handroll because `key` must be encoded as a slice impl Encodable for LeafNode<'_> { fn encode(&self, out: &mut dyn BufMut) { - #[derive(reth_rlp::RlpEncodable)] + #[derive(alloy_rlp::RlpEncodable)] struct S<'a> { encoded_path: &'a [u8], value: &'a [u8], @@ -45,8 +45,8 @@ impl Encodable for LeafNode<'_> { impl std::fmt::Debug for LeafNode<'_> { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("LeafNode") - .field("key", &hex::encode(&self.key)) - .field("value", &hex::encode(self.value)) + .field("key", &crate::hex::encode(&self.key)) + .field("value", &crate::hex::encode(self.value)) .finish() } } diff --git a/crates/primitives/src/trie/nodes/mod.rs b/crates/primitives/src/trie/nodes/mod.rs index c4807444d8..78c413a925 100644 --- a/crates/primitives/src/trie/nodes/mod.rs +++ b/crates/primitives/src/trie/nodes/mod.rs @@ -1,5 +1,5 @@ -use crate::{keccak256, H256}; -use reth_rlp::EMPTY_STRING_CODE; +use crate::{keccak256, B256}; +use alloy_rlp::EMPTY_STRING_CODE; use std::ops::Range; mod branch; @@ -17,7 +17,7 @@ pub const CHILD_INDEX_RANGE: Range = 0..16; /// Given an RLP encoded node, returns either RLP(node) or RLP(keccak(RLP(node))) fn rlp_node(rlp: &[u8]) -> Vec { - if rlp.len() < H256::len_bytes() { + if rlp.len() < B256::len_bytes() { rlp.to_vec() } else { rlp_hash(keccak256(rlp)) @@ -25,6 +25,6 @@ fn rlp_node(rlp: &[u8]) -> Vec { } /// Optimization for quick encoding of a hash as RLP -pub fn rlp_hash(hash: H256) -> Vec { - [[EMPTY_STRING_CODE + H256::len_bytes() as u8].as_slice(), hash.0.as_slice()].concat() +pub fn rlp_hash(hash: B256) -> Vec { + [[EMPTY_STRING_CODE + B256::len_bytes() as u8].as_slice(), hash.0.as_slice()].concat() } diff --git a/crates/primitives/src/trie/subnode.rs b/crates/primitives/src/trie/subnode.rs index 2113894a1d..232a672792 100644 --- a/crates/primitives/src/trie/subnode.rs +++ b/crates/primitives/src/trie/subnode.rs @@ -73,7 +73,7 @@ impl Compact for StoredSubNode { #[cfg(test)] mod tests { use super::*; - use crate::{trie::TrieMask, H256}; + use crate::{trie::TrieMask, B256}; #[test] fn subnode_roundtrip() { @@ -84,7 +84,7 @@ mod tests { state_mask: TrieMask::new(1), tree_mask: TrieMask::new(0), hash_mask: TrieMask::new(1), - hashes: vec![H256::zero()], + hashes: vec![B256::ZERO], root_hash: None, }), }; diff --git a/crates/primitives/src/withdrawal.rs b/crates/primitives/src/withdrawal.rs index 6df6d73ecb..c99a51664c 100644 --- a/crates/primitives/src/withdrawal.rs +++ b/crates/primitives/src/withdrawal.rs @@ -1,6 +1,6 @@ use crate::{constants::GWEI_TO_WEI, serde_helper::u64_hex, Address}; +use alloy_rlp::{RlpDecodable, RlpEncodable}; use reth_codecs::{main_codec, Compact}; -use reth_rlp::{RlpDecodable, RlpEncodable}; use std::mem; /// Withdrawal represents a validator withdrawal from the consensus layer. 
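
Since most of the hunks above and below are the `reth_rlp` to `alloy_rlp` migration, here is a minimal round-trip sketch of the derive-based usage that `Withdrawal` and the trie types now rely on. It assumes the `alloy-rlp` crate with its derive feature enabled; `Withdrawalish` is a made-up stand-in, not the real struct:

```rust
use alloy_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable};

// Illustrative stand-in; the real `Withdrawal` has more fields and custom serde,
// but the RLP derives are used the same way.
#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable)]
struct Withdrawalish {
    index: u64,
    validator_index: u64,
    amount: u64,
}

fn main() {
    let withdrawal = Withdrawalish { index: 1, validator_index: 2, amount: 3 };

    // Encode into a buffer; `Vec<u8>` implements `alloy_rlp::BufMut` via `bytes`.
    let mut buf = Vec::new();
    withdrawal.encode(&mut buf);

    // Decoding consumes from a `&mut &[u8]`.
    let decoded = Withdrawalish::decode(&mut buf.as_slice()).unwrap();
    assert_eq!(decoded, withdrawal);
}
```
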
diff --git a/crates/prune/Cargo.toml b/crates/prune/Cargo.toml index 3e0f94e156..6259777eb6 100644 --- a/crates/prune/Cargo.toml +++ b/crates/prune/Cargo.toml @@ -16,6 +16,10 @@ reth-primitives.workspace = true reth-db.workspace = true reth-provider.workspace = true reth-interfaces.workspace = true +reth-snapshot = { path = "../snapshot" } + +# async +tokio = { workspace = true, features = ["sync"] } # metrics reth-metrics.workspace = true diff --git a/crates/prune/src/lib.rs b/crates/prune/src/lib.rs index 4087ad94fd..812381bd23 100644 --- a/crates/prune/src/lib.rs +++ b/crates/prune/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, unreachable_pub, rustdoc::all)] // TODO(danipopes): missing_docs #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/prune/src/pruner.rs b/crates/prune/src/pruner.rs index e23b400e8a..69fdeca7a6 100644 --- a/crates/prune/src/pruner.rs +++ b/crates/prune/src/pruner.rs @@ -20,6 +20,7 @@ use reth_provider::{ BlockReader, DatabaseProviderRW, ProviderFactory, PruneCheckpointReader, PruneCheckpointWriter, TransactionsProvider, }; +use reth_snapshot::HighestSnapshotsTracker; use std::{collections::BTreeMap, ops::RangeInclusive, sync::Arc, time::Instant}; use tokio_stream::wrappers::UnboundedReceiverStream; use tracing::{debug, error, instrument, trace}; @@ -48,6 +49,8 @@ pub struct Pruner { /// Maximum entries to prune per block, per prune part. batch_sizes: PruneBatchSizes, listeners: EventListeners, + #[allow(dead_code)] + highest_snapshots_tracker: HighestSnapshotsTracker, } impl Pruner { @@ -58,6 +61,7 @@ impl Pruner { min_block_interval: usize, modes: PruneModes, batch_sizes: PruneBatchSizes, + highest_snapshots_tracker: HighestSnapshotsTracker, ) -> Self { Self { metrics: Metrics::default(), @@ -67,6 +71,7 @@ impl Pruner { modes, batch_sizes, listeners: Default::default(), + highest_snapshots_tracker, } } @@ -90,9 +95,11 @@ impl Pruner { let provider = self.provider_factory.provider_rw()?; let mut done = true; - let mut parts_done = BTreeMap::new(); + // TODO(alexey): prune snapshot parts of data (headers, transactions) + // let highest_snapshots = *self.highest_snapshots_tracker.borrow(); + if let Some((to_block, prune_mode)) = self.modes.prune_target_block_receipts(tip_block_number)? { @@ -416,7 +423,8 @@ impl Pruner { tip_block_number: BlockNumber, ) -> PrunerResult { // Contract log filtering removes every receipt possible except the ones in the list. So, - // for the other receipts it's as if they had a `PruneMode::Distance()` of 128. + // for the other receipts it's as if they had a `PruneMode::Distance()` of + // `MINIMUM_PRUNING_DISTANCE`. 
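
The prune changes above thread a `tokio::sync::watch` receiver (the new `highest_snapshots_tracker` argument) into the `Pruner`. A minimal sketch of how such a tracker is created and read; `HighestSnapshotsTracker` is aliased to a made-up `Option<u64>` payload here purely for illustration:

```rust
use tokio::sync::watch;

// Illustrative stand-in for `reth_snapshot::HighestSnapshotsTracker`.
type HighestSnapshotsTracker = watch::Receiver<Option<u64>>;

fn main() {
    let (tx, rx): (watch::Sender<Option<u64>>, HighestSnapshotsTracker) = watch::channel(None);

    // The snapshotter side publishes the latest highest-snapshot value...
    tx.send(Some(42)).ok();

    // ...and the pruner can read it synchronously, which is what the
    // commented-out `*self.highest_snapshots_tracker.borrow()` would do.
    assert_eq!(*rx.borrow(), Some(42));
}
```
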
let to_block = PruneMode::Distance(MINIMUM_PRUNING_DISTANCE) .prune_target_block( tip_block_number, @@ -945,29 +953,38 @@ mod tests { }; use reth_primitives::{ BlockNumber, PruneBatchSizes, PruneCheckpoint, PruneMode, PruneModes, PrunePart, - ReceiptsLogPruneConfig, TxNumber, H256, MAINNET, + ReceiptsLogPruneConfig, TxNumber, B256, MAINNET, }; use reth_provider::{PruneCheckpointReader, TransactionsProvider}; use reth_stages::test_utils::TestTransaction; use std::{collections::BTreeMap, ops::AddAssign}; + use tokio::sync::watch; #[test] fn is_pruning_needed() { let db = create_test_rw_db(); - let pruner = - Pruner::new(db, MAINNET.clone(), 5, PruneModes::none(), PruneBatchSizes::default()); + let mut pruner = Pruner::new( + db, + MAINNET.clone(), + 5, + PruneModes::none(), + PruneBatchSizes::default(), + watch::channel(None).1, + ); // No last pruned block number was set before let first_block_number = 1; assert!(pruner.is_pruning_needed(first_block_number)); + pruner.last_pruned_block_number = Some(first_block_number); - // Delta is not less than min block interval + // Tip block number delta is >= than min block interval let second_block_number = first_block_number + pruner.min_block_interval as u64; assert!(pruner.is_pruning_needed(second_block_number)); + pruner.last_pruned_block_number = Some(second_block_number); - // Delta is less than min block interval + // Tip block number delta is < than min block interval let third_block_number = second_block_number; - assert!(pruner.is_pruning_needed(third_block_number)); + assert!(!pruner.is_pruning_needed(third_block_number)); } #[test] @@ -975,7 +992,7 @@ mod tests { let tx = TestTransaction::default(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, 0..=100, H256::zero(), 0..10); + let blocks = random_block_range(&mut rng, 0..=100, B256::ZERO, 0..10); tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); let mut receipts = Vec::new(); @@ -1005,6 +1022,7 @@ mod tests { PruneModes { receipts: Some(prune_mode), ..Default::default() }, // Less than total amount of blocks to prune to test the batching logic PruneBatchSizes::default().with_receipts(10), + watch::channel(None).1, ); let next_tx_number_to_prune = tx @@ -1070,7 +1088,7 @@ mod tests { let tx = TestTransaction::default(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, 0..=100, H256::zero(), 0..10); + let blocks = random_block_range(&mut rng, 0..=100, B256::ZERO, 0..10); tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); let mut tx_hash_numbers = Vec::new(); @@ -1099,6 +1117,7 @@ mod tests { PruneModes { transaction_lookup: Some(prune_mode), ..Default::default() }, // Less than total amount of blocks to prune to test the batching logic PruneBatchSizes::default().with_transaction_lookup(10), + watch::channel(None).1, ); let next_tx_number_to_prune = tx @@ -1164,7 +1183,7 @@ mod tests { let tx = TestTransaction::default(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, 0..=100, H256::zero(), 0..10); + let blocks = random_block_range(&mut rng, 0..=100, B256::ZERO, 0..10); tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); let mut transaction_senders = Vec::new(); @@ -1196,6 +1215,7 @@ mod tests { PruneModes { sender_recovery: Some(prune_mode), ..Default::default() }, // Less than total amount of blocks to prune to test the batching logic PruneBatchSizes::default().with_transaction_senders(10), + watch::channel(None).1, ); let next_tx_number_to_prune = tx @@ -1261,7 
+1281,7 @@ mod tests { let tx = TestTransaction::default(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, 0..=7000, H256::zero(), 0..1); + let blocks = random_block_range(&mut rng, 0..=7000, B256::ZERO, 0..1); tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); let accounts = @@ -1302,6 +1322,7 @@ mod tests { PruneModes { account_history: Some(prune_mode), ..Default::default() }, // Less than total amount of blocks to prune to test the batching logic PruneBatchSizes::default().with_account_history(2000), + watch::channel(None).1, ); let provider = tx.inner_rw(); @@ -1391,7 +1412,7 @@ mod tests { let tx = TestTransaction::default(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, 0..=7000, H256::zero(), 0..1); + let blocks = random_block_range(&mut rng, 0..=7000, B256::ZERO, 0..1); tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); let accounts = @@ -1432,6 +1453,7 @@ mod tests { PruneModes { storage_history: Some(prune_mode), ..Default::default() }, // Less than total amount of blocks to prune to test the batching logic PruneBatchSizes::default().with_storage_history(2000), + watch::channel(None).1, ); let provider = tx.inner_rw(); @@ -1523,15 +1545,19 @@ mod tests { let tx = TestTransaction::default(); let mut rng = generators::rng(); - let tip = 300; - let blocks = random_block_range(&mut rng, 0..=tip, H256::zero(), 1..5); + let tip = 20000; + let blocks = [ + random_block_range(&mut rng, 0..=100, B256::ZERO, 1..5), + random_block_range(&mut rng, (100 + 1)..=(tip - 100), B256::ZERO, 0..1), + random_block_range(&mut rng, (tip - 100 + 1)..=tip, B256::ZERO, 1..5), + ] + .concat(); tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); let mut receipts = Vec::new(); let (deposit_contract_addr, _) = random_eoa_account(&mut rng); for block in &blocks { - assert!(!block.body.is_empty()); for (txi, transaction) in block.body.iter().enumerate() { let mut receipt = random_receipt(&mut rng, transaction, Some(1)); receipt.logs.push(random_log( @@ -1570,6 +1596,7 @@ mod tests { }, // Less than total amount of blocks to prune to test the batching logic PruneBatchSizes::default().with_storage_history(10), + watch::channel(None).1, ); let result = pruner.prune_receipts_by_logs(&provider, tip); diff --git a/crates/revm/revm-inspectors/Cargo.toml b/crates/revm/revm-inspectors/Cargo.toml index f04b968dc5..1edcd0d34e 100644 --- a/crates/revm/revm-inspectors/Cargo.toml +++ b/crates/revm/revm-inspectors/Cargo.toml @@ -13,9 +13,10 @@ description = "revm inspector implementations used by reth" reth-primitives.workspace = true reth-rpc-types.workspace = true +# eth +alloy-sol-types.workspace = true + revm.workspace = true -# remove from reth and reexport from revm -hashbrown = "0.14" serde = { workspace = true, features = ["derive"] } thiserror = { workspace = true, optional = true } @@ -24,10 +25,6 @@ serde_json = { workspace = true, optional = true } # js-tracing-inspector boa_engine = { workspace = true, optional = true } boa_gc = { workspace = true, optional = true } -# pin this until https://github.com/boa-dev/boa/issues/3299 is mitigated -icu_collections = "=1.2.0" -icu_provider_macros = "=1.2.0" -tinystr = "=0.7.1" tokio = { version = "1", features = ["sync"], optional = true } diff --git a/crates/revm/revm-inspectors/src/access_list.rs b/crates/revm/revm-inspectors/src/access_list.rs index 60edfabb76..7388703174 100644 --- a/crates/revm/revm-inspectors/src/access_list.rs +++ 
b/crates/revm/revm-inspectors/src/access_list.rs @@ -1,10 +1,9 @@ -use hashbrown::{HashMap, HashSet}; -use reth_primitives::{AccessList, AccessListItem, Address, H256}; +use reth_primitives::{AccessList, AccessListItem, Address, B256}; use revm::{ interpreter::{opcode, InstructionResult, Interpreter}, Database, EVMData, Inspector, }; -use std::collections::BTreeSet; +use std::collections::{BTreeSet, HashMap, HashSet}; /// An [Inspector] that collects touched accounts and storage slots. /// @@ -14,7 +13,7 @@ pub struct AccessListInspector { /// All addresses that should be excluded from the final accesslist excluded: HashSet
, /// All addresses and touched slots - access_list: HashMap>, + access_list: HashMap>, } impl AccessListInspector { @@ -74,7 +73,7 @@ where self.access_list .entry(cur_contract) .or_default() - .insert(H256::from(slot.to_be_bytes())); + .insert(B256::from(slot.to_be_bytes())); } } opcode::EXTCODECOPY | @@ -83,7 +82,7 @@ where opcode::BALANCE | opcode::SELFDESTRUCT => { if let Ok(slot) = interpreter.stack().peek(0) { - let addr: Address = H256::from(slot.to_be_bytes()).into(); + let addr = Address::from_word(B256::from(slot.to_be_bytes())); if !self.excluded.contains(&addr) { self.access_list.entry(addr).or_default(); } @@ -91,7 +90,7 @@ where } opcode::DELEGATECALL | opcode::CALL | opcode::STATICCALL | opcode::CALLCODE => { if let Ok(slot) = interpreter.stack().peek(1) { - let addr: Address = H256::from(slot.to_be_bytes()).into(); + let addr = Address::from_word(B256::from(slot.to_be_bytes())); if !self.excluded.contains(&addr) { self.access_list.entry(addr).or_default(); } diff --git a/crates/revm/revm-inspectors/src/lib.rs b/crates/revm/revm-inspectors/src/lib.rs index a783ad865d..67854ba0c5 100644 --- a/crates/revm/revm-inspectors/src/lib.rs +++ b/crates/revm/revm-inspectors/src/lib.rs @@ -8,10 +8,10 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] -#![deny(unused_must_use, rust_2018_idioms)] +#![deny(unused_must_use, rust_2018_idioms, unused_crate_dependencies)] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] /// An inspector implementation for an EIP2930 Accesslist diff --git a/crates/revm/revm-inspectors/src/stack/maybe_owned.rs b/crates/revm/revm-inspectors/src/stack/maybe_owned.rs index 1449093e24..68848940d9 100644 --- a/crates/revm/revm-inspectors/src/stack/maybe_owned.rs +++ b/crates/revm/revm-inspectors/src/stack/maybe_owned.rs @@ -1,7 +1,7 @@ use reth_primitives::U256; use revm::{ interpreter::{CallInputs, CreateInputs, Gas, InstructionResult, Interpreter}, - primitives::{db::Database, Bytes, B160, B256}, + primitives::{db::Database, Address, Bytes, B256}, EVMData, Inspector, }; use std::{ @@ -96,7 +96,7 @@ where fn log( &mut self, evm_data: &mut EVMData<'_, DB>, - address: &B160, + address: &Address, topics: &[B256], data: &Bytes, ) { @@ -158,7 +158,7 @@ where &mut self, data: &mut EVMData<'_, DB>, inputs: &mut CreateInputs, - ) -> (InstructionResult, Option, Gas, Bytes) { + ) -> (InstructionResult, Option
<Address>, Gas, Bytes) { match self { MaybeOwnedInspector::Owned(insp) => return insp.borrow_mut().create(data, inputs), MaybeOwnedInspector::Stacked(_) => {} } @@ -172,10 +172,10 @@ where data: &mut EVMData<'_, DB>, inputs: &CreateInputs, ret: InstructionResult, - address: Option<B160>, + address: Option<Address>
, remaining_gas: Gas, out: Bytes, - ) -> (InstructionResult, Option<B160>, Gas, Bytes) { + ) -> (InstructionResult, Option<Address>
, Gas, Bytes) { match self { MaybeOwnedInspector::Owned(insp) => { return insp.borrow_mut().create_end(data, inputs, ret, address, remaining_gas, out) @@ -186,7 +186,7 @@ where (ret, address, remaining_gas, out) } - fn selfdestruct(&mut self, contract: B160, target: B160, value: U256) { + fn selfdestruct(&mut self, contract: Address, target: Address, value: U256) { match self { MaybeOwnedInspector::Owned(insp) => { return insp.borrow_mut().selfdestruct(contract, target, value) diff --git a/crates/revm/revm-inspectors/src/stack/mod.rs b/crates/revm/revm-inspectors/src/stack/mod.rs index c11fbc917f..5e3c9f6677 100644 --- a/crates/revm/revm-inspectors/src/stack/mod.rs +++ b/crates/revm/revm-inspectors/src/stack/mod.rs @@ -1,12 +1,11 @@ -use std::fmt::Debug; - -use reth_primitives::{bytes::Bytes, Address, TxHash, H256, U256}; +use reth_primitives::{Address, Bytes, TxHash, B256, U256}; use revm::{ inspectors::CustomPrintTracer, interpreter::{CallInputs, CreateInputs, Gas, InstructionResult, Interpreter}, primitives::Env, Database, EVMData, Inspector, }; +use std::fmt::Debug; /// A wrapped [Inspector] that can be reused in the stack mod maybe_owned; @@ -139,7 +138,7 @@ where &mut self, evm_data: &mut EVMData<'_, DB>, address: &Address, - topics: &[H256], + topics: &[B256], data: &Bytes, ) { call_inspectors!(inspector, [&mut self.custom_print_tracer], { diff --git a/crates/revm/revm-inspectors/src/tracing/builder/geth.rs b/crates/revm/revm-inspectors/src/tracing/builder/geth.rs index f2bbdeda75..bb730fa42c 100644 --- a/crates/revm/revm-inspectors/src/tracing/builder/geth.rs +++ b/crates/revm/revm-inspectors/src/tracing/builder/geth.rs @@ -4,7 +4,7 @@ use crate::tracing::{ types::{CallTraceNode, CallTraceStepStackItem}, TracingInspectorConfig, }; -use reth_primitives::{Address, Bytes, H256, U256}; +use reth_primitives::{Address, Bytes, B256, U256}; use reth_rpc_types::trace::geth::{ AccountState, CallConfig, CallFrame, DefaultFrame, DiffMode, GethDefaultTracingOptions, PreStateConfig, PreStateFrame, PreStateMode, StructLog, @@ -33,7 +33,7 @@ impl GethTraceBuilder { &self, main_trace_node: &CallTraceNode, opts: &GethDefaultTracingOptions, - storage: &mut HashMap>, + storage: &mut HashMap>, struct_logs: &mut Vec, ) { // A stack with all the steps of the trace and all its children's steps. 
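
The access-list inspector hunks above replace the old `H256::from(slot.to_be_bytes()).into()` pattern with `Address::from_word`. A minimal sketch of that conversion against `alloy_primitives` directly (`address_from_stack_word` is an illustrative helper):

```rust
use alloy_primitives::{address, Address, B256, U256};

/// Interprets a 256-bit stack word as an address: widen to a 32-byte word,
/// then keep the low 20 bytes (which is what `Address::from_word` does).
fn address_from_stack_word(slot: U256) -> Address {
    Address::from_word(B256::from(slot.to_be_bytes::<32>()))
}

fn main() {
    let expected = address!("c08b5542d177ac6686946920409741463a15dddb");
    let slot = U256::from_be_slice(expected.as_slice());
    assert_eq!(address_from_stack_word(slot), expected);
}
```
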
@@ -62,7 +62,7 @@ impl GethTraceBuilder { } if opts.is_return_data_enabled() { - log.return_data = Some(trace_node.trace.output.clone().into()); + log.return_data = Some(trace_node.trace.output.clone()); } // Add step to geth trace @@ -192,14 +192,14 @@ impl GethTraceBuilder { let mut prestate = PreStateMode::default(); for (addr, _) in account_diffs { let db_acc = db.basic(addr)?.unwrap_or_default(); + prestate.0.insert( addr, - AccountState { - balance: Some(db_acc.balance), - nonce: Some(U256::from(db_acc.nonce)), - code: db_acc.code.as_ref().map(|code| Bytes::from(code.original_bytes())), - storage: None, - }, + AccountState::from_account_info( + db_acc.nonce, + db_acc.balance, + db_acc.code.as_ref().map(|code| code.original_bytes()), + ), ); } self.update_storage_from_trace(&mut prestate.0, false); @@ -208,18 +208,16 @@ impl GethTraceBuilder { let mut state_diff = DiffMode::default(); for (addr, changed_acc) in account_diffs { let db_acc = db.basic(addr)?.unwrap_or_default(); - let pre_state = AccountState { - balance: Some(db_acc.balance), - nonce: Some(U256::from(db_acc.nonce)), - code: db_acc.code.as_ref().map(|code| Bytes::from(code.original_bytes())), - storage: None, - }; - let post_state = AccountState { - balance: Some(changed_acc.balance), - nonce: Some(U256::from(changed_acc.nonce)), - code: changed_acc.code.as_ref().map(|code| Bytes::from(code.original_bytes())), - storage: None, - }; + let pre_state = AccountState::from_account_info( + db_acc.nonce, + db_acc.balance, + db_acc.code.as_ref().map(|code| code.original_bytes()), + ); + let post_state = AccountState::from_account_info( + changed_acc.nonce, + changed_acc.balance, + changed_acc.code.as_ref().map(|code| code.original_bytes()), + ); state_diff.pre.insert(addr, pre_state); state_diff.post.insert(addr, post_state); } diff --git a/crates/revm/revm-inspectors/src/tracing/builder/parity.rs b/crates/revm/revm-inspectors/src/tracing/builder/parity.rs index fbf5e122ba..87a780191e 100644 --- a/crates/revm/revm-inspectors/src/tracing/builder/parity.rs +++ b/crates/revm/revm-inspectors/src/tracing/builder/parity.rs @@ -165,12 +165,8 @@ impl ParityTraceBuilder { let (trace, vm_trace, state_diff) = self.into_trace_type_traces(trace_types); - let mut trace = TraceResults { - output: output.into(), - trace: trace.unwrap_or_default(), - vm_trace, - state_diff, - }; + let mut trace = + TraceResults { output, trace: trace.unwrap_or_default(), vm_trace, state_diff }; // we're setting the gas used of the root trace explicitly to the gas used of the execution // result @@ -561,7 +557,7 @@ where let code_hash = if db_acc.code_hash != KECCAK_EMPTY { db_acc.code_hash } else { continue }; - curr_ref.code = db.code_by_hash(code_hash)?.original_bytes().into(); + curr_ref.code = db.code_by_hash(code_hash)?.original_bytes(); } Ok(()) diff --git a/crates/revm/revm-inspectors/src/tracing/fourbyte.rs b/crates/revm/revm-inspectors/src/tracing/fourbyte.rs index 977d2a5595..8d116b8d4f 100644 --- a/crates/revm/revm-inspectors/src/tracing/fourbyte.rs +++ b/crates/revm/revm-inspectors/src/tracing/fourbyte.rs @@ -21,7 +21,7 @@ //! //! 
See also -use reth_primitives::{bytes::Bytes, hex, Selector}; +use reth_primitives::{hex, Bytes, Selector}; use reth_rpc_types::trace::geth::FourByteFrame; use revm::{ interpreter::{CallInputs, Gas, InstructionResult}, diff --git a/crates/revm/revm-inspectors/src/tracing/js/bindings.rs b/crates/revm/revm-inspectors/src/tracing/js/bindings.rs index 90978adced..e077e55830 100644 --- a/crates/revm/revm-inspectors/src/tracing/js/bindings.rs +++ b/crates/revm/revm-inspectors/src/tracing/js/bindings.rs @@ -16,7 +16,7 @@ use boa_engine::{ Context, JsArgs, JsError, JsNativeError, JsObject, JsResult, JsValue, }; use boa_gc::{empty_trace, Finalize, Gc, Trace}; -use reth_primitives::{bytes::Bytes, Account, Address, H256, KECCAK_EMPTY, U256}; +use reth_primitives::{Account, Address, Bytes, B256, KECCAK_EMPTY, U256}; use revm::{ interpreter::{ opcode::{PUSH0, PUSH32}, @@ -327,7 +327,7 @@ impl Contract { let get_caller = FunctionObjectBuilder::new( context, NativeFunction::from_copy_closure(move |_this, _args, ctx| { - to_buf_value(caller.as_bytes().to_vec(), ctx) + to_buf_value(caller.as_slice().to_vec(), ctx) }), ) .length(0) @@ -336,7 +336,7 @@ impl Contract { let get_address = FunctionObjectBuilder::new( context, NativeFunction::from_copy_closure(move |_this, _args, ctx| { - to_buf_value(contract.as_bytes().to_vec(), ctx) + to_buf_value(contract.as_slice().to_vec(), ctx) }), ) .length(0) @@ -419,7 +419,7 @@ impl CallFrame { let get_from = FunctionObjectBuilder::new( ctx, NativeFunction::from_copy_closure(move |_this, _args, ctx| { - to_buf_value(caller.as_bytes().to_vec(), ctx) + to_buf_value(caller.as_slice().to_vec(), ctx) }), ) .length(0) @@ -428,7 +428,7 @@ impl CallFrame { let get_to = FunctionObjectBuilder::new( ctx, NativeFunction::from_copy_closure(move |_this, _args, ctx| { - to_buf_value(contract.as_bytes().to_vec(), ctx) + to_buf_value(contract.as_slice().to_vec(), ctx) }), ) .length(0) @@ -491,9 +491,9 @@ pub(crate) struct EvmContext { pub(crate) output: Bytes, /// Number, block number pub(crate) time: String, - pub(crate) block_hash: Option, + pub(crate) block_hash: Option, pub(crate) tx_index: Option, - pub(crate) tx_hash: Option, + pub(crate) tx_hash: Option, } impl EvmContext { @@ -537,13 +537,13 @@ impl EvmContext { obj.set("output", to_buf(output.to_vec(), ctx)?, false, ctx)?; obj.set("time", time, false, ctx)?; if let Some(block_hash) = block_hash { - obj.set("blockHash", to_buf(block_hash.as_bytes().to_vec(), ctx)?, false, ctx)?; + obj.set("blockHash", to_buf(block_hash.as_slice().to_vec(), ctx)?, false, ctx)?; } if let Some(tx_index) = tx_index { obj.set("txIndex", tx_index as u64, false, ctx)?; } if let Some(tx_hash) = tx_hash { - obj.set("txHash", to_buf(tx_hash.as_bytes().to_vec(), ctx)?, false, ctx)?; + obj.set("txHash", to_buf(tx_hash.as_slice().to_vec(), ctx)?, false, ctx)?; } Ok(obj) @@ -744,8 +744,8 @@ impl EvmDBInner { )))) } }; - let value: H256 = value.into(); - to_buf(value.as_bytes().to_vec(), ctx) + let value: B256 = value.into(); + to_buf(value.as_slice().to_vec(), ctx) } } @@ -765,22 +765,22 @@ mod tests { fn test_contract() { let mut ctx = Context::default(); let contract = Contract { - caller: Address::zero(), - contract: Address::zero(), + caller: Address::ZERO, + contract: Address::ZERO, value: U256::from(1337u64), input: vec![0x01, 0x02, 0x03].into(), }; - let big_int = ctx.eval(Source::from_bytes(BIG_INT_JS.as_bytes())).unwrap(); + let big_int = ctx.eval(Source::from_bytes(BIG_INT_JS)).unwrap(); ctx.register_global_property("bigint", big_int, 
Attribute::all()).unwrap(); let obj = contract.clone().into_js_object(&mut ctx).unwrap(); - let s = r#"({ + let s = "({ call: function(contract) { return contract.getCaller(); }, value: function(contract) { return contract.getValue(); }, input: function(contract) { return contract.getInput(); } - })"#; + })"; - let eval_obj = ctx.eval(Source::from_bytes(s.as_bytes())).unwrap(); + let eval_obj = ctx.eval(Source::from_bytes(s)).unwrap(); let call = eval_obj.as_object().unwrap().get("call", &mut ctx).unwrap(); let res = call diff --git a/crates/revm/revm-inspectors/src/tracing/js/builtins.rs b/crates/revm/revm-inspectors/src/tracing/js/builtins.rs index 88bfc2f9b1..b3988974e4 100644 --- a/crates/revm/revm-inspectors/src/tracing/js/builtins.rs +++ b/crates/revm/revm-inspectors/src/tracing/js/builtins.rs @@ -8,7 +8,7 @@ use boa_engine::{ use boa_gc::{empty_trace, Finalize, Trace}; use reth_primitives::{ contract::{create2_address_from_code, create_address}, - hex, keccak256, Address, H256, U256, + hex, keccak256, Address, B256, U256, }; use std::collections::HashSet; @@ -107,8 +107,8 @@ pub(crate) fn bytes_to_address(buf: Vec) -> Address { /// Converts a buffer type to a hash. /// /// If the buffer is larger than the hash size, it will be cropped from the left -pub(crate) fn bytes_to_hash(buf: Vec) -> H256 { - let mut hash = H256::default(); +pub(crate) fn bytes_to_hash(buf: Vec) -> B256 { + let mut hash = B256::default(); let mut buf = &buf[..]; let hash_len = hash.0.len(); if buf.len() > hash_len { diff --git a/crates/revm/revm-inspectors/src/tracing/js/mod.rs b/crates/revm/revm-inspectors/src/tracing/js/mod.rs index 3be256be7f..bc07b72756 100644 --- a/crates/revm/revm-inspectors/src/tracing/js/mod.rs +++ b/crates/revm/revm-inspectors/src/tracing/js/mod.rs @@ -11,13 +11,13 @@ use crate::tracing::{ utils::get_create_address, }; use boa_engine::{Context, JsError, JsObject, JsResult, JsValue, Source}; -use reth_primitives::{bytes::Bytes, Account, Address, H256, U256}; +use reth_primitives::{Account, Address, Bytes, B256, U256}; use revm::{ interpreter::{ return_revert, CallInputs, CallScheme, CreateInputs, Gas, InstructionResult, Interpreter, }, precompile::Precompiles, - primitives::{Env, ExecutionResult, Output, ResultAndState, TransactTo, B160, B256}, + primitives::{Env, ExecutionResult, Output, ResultAndState, TransactTo}, Database, EVMData, Inspector, }; use tokio::sync::mpsc; @@ -316,7 +316,7 @@ where fn log( &mut self, _evm_data: &mut EVMData<'_, DB>, - _address: &B160, + _address: &Address, _topics: &[B256], _data: &Bytes, ) { @@ -419,7 +419,7 @@ where &mut self, data: &mut EVMData<'_, DB>, inputs: &mut CreateInputs, - ) -> (InstructionResult, Option, Gas, Bytes) { + ) -> (InstructionResult, Option
<Address>, Gas, Bytes) { self.register_precompiles(&data.precompiles); let _ = data.journaled_state.load_account(inputs.caller, data.db); @@ -451,10 +451,10 @@ where _data: &mut EVMData<'_, DB>, _inputs: &CreateInputs, ret: InstructionResult, - address: Option<B160>, + address: Option<Address>
, remaining_gas: Gas, out: Bytes, - ) -> (InstructionResult, Option<B160>, Gas, Bytes) { + ) -> (InstructionResult, Option<Address>
, Gas, Bytes) { if self.exit_fn.is_some() { let frame_result = FrameResult { gas_used: remaining_gas.spend(), output: out.clone(), error: None }; @@ -468,7 +468,7 @@ where (ret, address, remaining_gas, out) } - fn selfdestruct(&mut self, _contract: B160, _target: B160, _value: U256) { + fn selfdestruct(&mut self, _contract: Address, _target: Address, _value: U256) { if self.enter_fn.is_some() { let call = self.active_call(); let frame = @@ -491,7 +491,7 @@ pub enum JsDbRequest { /// Bindings for [Database::code_by_hash] Code { /// The code hash of the code to be loaded - code_hash: H256, + code_hash: B256, /// The response channel resp: std::sync::mpsc::Sender>, }, diff --git a/crates/revm/revm-inspectors/src/tracing/mod.rs b/crates/revm/revm-inspectors/src/tracing/mod.rs index e3e338815a..f5b1794c93 100644 --- a/crates/revm/revm-inspectors/src/tracing/mod.rs +++ b/crates/revm/revm-inspectors/src/tracing/mod.rs @@ -3,7 +3,7 @@ use crate::tracing::{ utils::get_create_address, }; pub use arena::CallTraceArena; -use reth_primitives::{bytes::Bytes, Address, H256, U256}; +use reth_primitives::{Address, Bytes, B256, U256}; use revm::{ inspectors::GasInspector, interpreter::{ @@ -396,7 +396,7 @@ where &mut self, evm_data: &mut EVMData<'_, DB>, address: &Address, - topics: &[H256], + topics: &[B256], data: &Bytes, ) { self.gas_inspector.log(evm_data, address, topics, data); diff --git a/crates/revm/revm-inspectors/src/tracing/types.rs b/crates/revm/revm-inspectors/src/tracing/types.rs index ac8aea52d0..b5a607f434 100644 --- a/crates/revm/revm-inspectors/src/tracing/types.rs +++ b/crates/revm/revm-inspectors/src/tracing/types.rs @@ -1,7 +1,8 @@ //! Types for representing call trace items. use crate::tracing::{config::TraceStyle, utils::convert_memory}; -use reth_primitives::{abi::decode_revert_reason, bytes::Bytes, Address, H256, U256}; +use alloy_sol_types::decode_revert_reason; +use reth_primitives::{Address, Bytes, B256, U256, U64}; use reth_rpc_types::trace::{ geth::{AccountState, CallFrame, CallLogFrame, GethDefaultTracingOptions, StructLog}, parity::{ @@ -303,25 +304,25 @@ impl CallTraceNode { if self.kind().is_any_create() { let code = self.trace.output.clone(); if acc.code == Delta::Unchanged { - acc.code = Delta::Added(code.into()) + acc.code = Delta::Added(code) } } // iterate over all storage diffs for change in self.trace.steps.iter().filter_map(|s| s.storage_change) { let StorageChange { key, value, had_value } = change; - let h256_value = H256::from(value); + let b256_value = B256::from(value); match acc.storage.entry(key.into()) { Entry::Vacant(entry) => { if let Some(had_value) = had_value { if value != had_value { entry.insert(Delta::Changed(ChangedType { from: had_value.into(), - to: h256_value, + to: b256_value, })); } } else { - entry.insert(Delta::Added(h256_value)); + entry.insert(Delta::Added(b256_value)); } } Entry::Occupied(mut entry) => { @@ -331,29 +332,29 @@ impl CallTraceNode { if value != had_value { Delta::Changed(ChangedType { from: had_value.into(), - to: h256_value, + to: b256_value, }) } else { Delta::Unchanged } } else { - Delta::Added(h256_value) + Delta::Added(b256_value) } } Delta::Added(added) => { - if added == &h256_value { + if added == &b256_value { Delta::Added(*added) } else { - Delta::Changed(ChangedType { from: *added, to: h256_value }) + Delta::Changed(ChangedType { from: *added, to: b256_value }) } } - Delta::Removed(_) => Delta::Added(h256_value), + Delta::Removed(_) => Delta::Added(b256_value), Delta::Changed(c) => { - if c.from == h256_value { + 
if c.from == b256_value { // remains unchanged if the value is the same Delta::Unchanged } else { - Delta::Changed(ChangedType { from: c.from, to: h256_value }) + Delta::Changed(ChangedType { from: c.from, to: b256_value }) } } }; @@ -381,13 +382,13 @@ impl CallTraceNode { match self.kind() { CallKind::Call | CallKind::StaticCall | CallKind::CallCode | CallKind::DelegateCall => { TraceOutput::Call(CallOutput { - gas_used: self.trace.gas_used.into(), - output: self.trace.output.clone().into(), + gas_used: U64::from(self.trace.gas_used), + output: self.trace.output.clone(), }) } CallKind::Create | CallKind::Create2 => TraceOutput::Create(CreateOutput { - gas_used: self.trace.gas_used.into(), - code: self.trace.output.clone().into(), + gas_used: U64::from(self.trace.gas_used), + code: self.trace.output.clone(), address: self.trace.address, }), } @@ -447,16 +448,16 @@ impl CallTraceNode { from: self.trace.caller, to: self.trace.address, value: self.trace.value, - gas: self.trace.gas_limit.into(), - input: self.trace.data.clone().into(), + gas: U64::from(self.trace.gas_limit), + input: self.trace.data.clone(), call_type: self.kind().into(), }) } CallKind::Create | CallKind::Create2 => Action::Create(CreateAction { from: self.trace.caller, value: self.trace.value, - gas: self.trace.gas_limit.into(), - init: self.trace.data.clone().into(), + gas: U64::from(self.trace.gas_limit), + init: self.trace.data.clone(), }), } } @@ -472,8 +473,8 @@ impl CallTraceNode { value: Some(self.trace.value), gas: U256::from(self.trace.gas_limit), gas_used: U256::from(self.trace.gas_used), - input: self.trace.data.clone().into(), - output: (!self.trace.output.is_empty()).then(|| self.trace.output.clone().into()), + input: self.trace.data.clone(), + output: (!self.trace.output.is_empty()).then(|| self.trace.output.clone()), error: None, revert_reason: None, calls: Default::default(), @@ -482,7 +483,7 @@ impl CallTraceNode { // we need to populate error and revert reason if !self.trace.success { - call_frame.revert_reason = decode_revert_reason(self.trace.output.clone()); + call_frame.revert_reason = decode_revert_reason(self.trace.output.as_ref()); // Note: the call tracer mimics parity's trace transaction and geth maps errors to parity style error messages, call_frame.error = self.trace.as_error(TraceStyle::Parity); } @@ -494,7 +495,7 @@ impl CallTraceNode { .map(|log| CallLogFrame { address: Some(self.execution_address()), topics: Some(log.topics.clone()), - data: Some(log.data.clone().into()), + data: Some(log.data.clone()), }) .collect(); } @@ -517,16 +518,15 @@ impl CallTraceNode { let acc_state = account_states.entry(addr).or_default(); for change in self.trace.steps.iter().filter_map(|s| s.storage_change) { let StorageChange { key, value, had_value } = change; - let storage_map = acc_state.storage.get_or_insert_with(BTreeMap::new); let value_to_insert = if post_value { - H256::from(value) + B256::from(value) } else { match had_value { - Some(had_value) => H256::from(had_value), + Some(had_value) => B256::from(had_value), None => continue, } }; - storage_map.insert(key.into(), value_to_insert); + acc_state.storage.insert(key.into(), value_to_insert); } } } @@ -554,7 +554,7 @@ pub(crate) enum LogCallOrder { #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) struct RawLog { /// Indexed event params are represented as log topics. - pub(crate) topics: Vec, + pub(crate) topics: Vec, /// Others are just plain data. 
pub(crate) data: Bytes, } diff --git a/crates/revm/revm-primitives/Cargo.toml b/crates/revm/revm-primitives/Cargo.toml index a17ca3cfb4..41f706f238 100644 --- a/crates/revm/revm-primitives/Cargo.toml +++ b/crates/revm/revm-primitives/Cargo.toml @@ -9,7 +9,5 @@ repository.workspace = true description = "core reth specific revm utilities" [dependencies] -# reth +# reth reth-primitives.workspace = true - -revm.workspace = true diff --git a/crates/revm/revm-primitives/src/compat.rs b/crates/revm/revm-primitives/src/compat.rs index 507580c9b4..8b6f0b22d3 100644 --- a/crates/revm/revm-primitives/src/compat.rs +++ b/crates/revm/revm-primitives/src/compat.rs @@ -1,29 +1,23 @@ -use reth_primitives::{Account, Log as RethLog, H160, H256, KECCAK_EMPTY}; -use revm::primitives::{AccountInfo, Log}; +use reth_primitives::{ + revm_primitives::{AccountInfo, Log}, + Account, Log as RethLog, KECCAK_EMPTY, +}; -/// Check equality between [`reth_primitives::Log`] and [`revm::primitives::Log`] +/// Check equality between Revm and Reth `Log`s. pub fn is_log_equal(revm_log: &Log, reth_log: &reth_primitives::Log) -> bool { - revm_log.topics.len() == reth_log.topics.len() && - revm_log.address.0 == reth_log.address.0 && - revm_log.data == reth_log.data.0 && - !revm_log - .topics - .iter() - .zip(reth_log.topics.iter()) - .any(|(revm_topic, reth_topic)| revm_topic.0 != reth_topic.0) + revm_log.address == reth_log.address && + revm_log.data == reth_log.data && + revm_log.topics == reth_log.topics } -/// Into reth primitive [Log] from [revm::primitives::Log]. +/// Converts a Revm `Log` into a Reth `Log`. pub fn into_reth_log(log: Log) -> RethLog { - RethLog { - address: H160(log.address.0), - topics: log.topics.into_iter().map(|h| H256(h.0)).collect(), - data: log.data.into(), - } + RethLog { address: log.address, topics: log.topics, data: log.data } } -/// Create reth primitive [Account] from [revm::primitives::AccountInfo]. -/// Check if revm bytecode hash is [KECCAK_EMPTY] and put None to reth [Account] +/// Converts a Revm [`AccountInfo`] into a Reth [`Account`]. +/// +/// Sets `bytecode_hash` to `None` if `code_hash` is [`KECCAK_EMPTY`]. pub fn into_reth_acc(revm_acc: AccountInfo) -> Account { let code_hash = revm_acc.code_hash; Account { @@ -33,7 +27,9 @@ pub fn into_reth_acc(revm_acc: AccountInfo) -> Account { } } -/// Create revm primitive [AccountInfo] from [reth_primitives::Account]. +/// Converts a Reth [`Account`] into a Revm [`AccountInfo`]. +/// +/// Sets `code_hash` to [`KECCAK_EMPTY`] if `bytecode_hash` is `None`. pub fn into_revm_acc(reth_acc: Account) -> AccountInfo { AccountInfo { balance: reth_acc.balance, diff --git a/crates/revm/revm-primitives/src/config.rs b/crates/revm/revm-primitives/src/config.rs index 352be83215..209405c176 100644 --- a/crates/revm/revm-primitives/src/config.rs +++ b/crates/revm/revm-primitives/src/config.rs @@ -1,6 +1,6 @@ //! Reth block execution/validation configuration and constants -use reth_primitives::{ChainSpec, Hardfork, Head}; +use reth_primitives::{revm_primitives, ChainSpec, Hardfork, Head}; /// Returns the spec id at the given timestamp.
/// @@ -9,42 +9,42 @@ use reth_primitives::{ChainSpec, Hardfork, Head}; pub fn revm_spec_by_timestamp_after_merge( chain_spec: &ChainSpec, timestamp: u64, -) -> revm::primitives::SpecId { +) -> revm_primitives::SpecId { if chain_spec.is_cancun_active_at_timestamp(timestamp) { - revm::primitives::CANCUN + revm_primitives::CANCUN } else if chain_spec.is_shanghai_active_at_timestamp(timestamp) { - revm::primitives::SHANGHAI + revm_primitives::SHANGHAI } else { - revm::primitives::MERGE + revm_primitives::MERGE } } /// return revm_spec from spec configuration. -pub fn revm_spec(chain_spec: &ChainSpec, block: Head) -> revm::primitives::SpecId { +pub fn revm_spec(chain_spec: &ChainSpec, block: Head) -> revm_primitives::SpecId { if chain_spec.fork(Hardfork::Cancun).active_at_head(&block) { - revm::primitives::CANCUN + revm_primitives::CANCUN } else if chain_spec.fork(Hardfork::Shanghai).active_at_head(&block) { - revm::primitives::SHANGHAI + revm_primitives::SHANGHAI } else if chain_spec.fork(Hardfork::Paris).active_at_head(&block) { - revm::primitives::MERGE + revm_primitives::MERGE } else if chain_spec.fork(Hardfork::London).active_at_head(&block) { - revm::primitives::LONDON + revm_primitives::LONDON } else if chain_spec.fork(Hardfork::Berlin).active_at_head(&block) { - revm::primitives::BERLIN + revm_primitives::BERLIN } else if chain_spec.fork(Hardfork::Istanbul).active_at_head(&block) { - revm::primitives::ISTANBUL + revm_primitives::ISTANBUL } else if chain_spec.fork(Hardfork::Petersburg).active_at_head(&block) { - revm::primitives::PETERSBURG + revm_primitives::PETERSBURG } else if chain_spec.fork(Hardfork::Byzantium).active_at_head(&block) { - revm::primitives::BYZANTIUM + revm_primitives::BYZANTIUM } else if chain_spec.fork(Hardfork::SpuriousDragon).active_at_head(&block) { - revm::primitives::SPURIOUS_DRAGON + revm_primitives::SPURIOUS_DRAGON } else if chain_spec.fork(Hardfork::Tangerine).active_at_head(&block) { - revm::primitives::TANGERINE + revm_primitives::TANGERINE } else if chain_spec.fork(Hardfork::Homestead).active_at_head(&block) { - revm::primitives::HOMESTEAD + revm_primitives::HOMESTEAD } else if chain_spec.fork(Hardfork::Frontier).active_at_head(&block) { - revm::primitives::FRONTIER + revm_primitives::FRONTIER } else { panic!( "invalid hardfork chainspec: expected at least one hardfork, got {:?}", @@ -55,63 +55,64 @@ pub fn revm_spec(chain_spec: &ChainSpec, block: Head) -> revm::primitives::SpecI #[cfg(test)] mod tests { - use crate::config::revm_spec; + use super::*; use reth_primitives::{ChainSpecBuilder, Head, MAINNET, U256}; + #[test] fn test_to_revm_spec() { assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().cancun_activated().build(), Head::default()), - revm::primitives::CANCUN + revm_primitives::CANCUN ); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().shanghai_activated().build(), Head::default()), - revm::primitives::SHANGHAI + revm_primitives::SHANGHAI ); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().paris_activated().build(), Head::default()), - revm::primitives::MERGE + revm_primitives::MERGE ); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().london_activated().build(), Head::default()), - revm::primitives::LONDON + revm_primitives::LONDON ); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().berlin_activated().build(), Head::default()), - revm::primitives::BERLIN + revm_primitives::BERLIN ); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().istanbul_activated().build(), Head::default()), - revm::primitives::ISTANBUL + revm_primitives::ISTANBUL 
); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().petersburg_activated().build(), Head::default()), - revm::primitives::PETERSBURG + revm_primitives::PETERSBURG ); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().byzantium_activated().build(), Head::default()), - revm::primitives::BYZANTIUM + revm_primitives::BYZANTIUM ); assert_eq!( revm_spec( &ChainSpecBuilder::mainnet().spurious_dragon_activated().build(), Head::default() ), - revm::primitives::SPURIOUS_DRAGON + revm_primitives::SPURIOUS_DRAGON ); assert_eq!( revm_spec( &ChainSpecBuilder::mainnet().tangerine_whistle_activated().build(), Head::default() ), - revm::primitives::TANGERINE + revm_primitives::TANGERINE ); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().homestead_activated().build(), Head::default()), - revm::primitives::HOMESTEAD + revm_primitives::HOMESTEAD ); assert_eq!( revm_spec(&ChainSpecBuilder::mainnet().frontier_activated().build(), Head::default()), - revm::primitives::FRONTIER + revm_primitives::FRONTIER ); } @@ -126,7 +127,7 @@ mod tests { ..Default::default() } ), - revm::primitives::MERGE + revm_primitives::MERGE ); // TTD trumps the block number assert_eq!( @@ -139,43 +140,43 @@ mod tests { ..Default::default() } ), - revm::primitives::MERGE + revm_primitives::MERGE ); assert_eq!( revm_spec(&MAINNET, Head { number: 15537394 - 10, ..Default::default() }), - revm::primitives::LONDON + revm_primitives::LONDON ); assert_eq!( revm_spec(&MAINNET, Head { number: 12244000 + 10, ..Default::default() }), - revm::primitives::BERLIN + revm_primitives::BERLIN ); assert_eq!( revm_spec(&MAINNET, Head { number: 12244000 - 10, ..Default::default() }), - revm::primitives::ISTANBUL + revm_primitives::ISTANBUL ); assert_eq!( revm_spec(&MAINNET, Head { number: 7280000 + 10, ..Default::default() }), - revm::primitives::PETERSBURG + revm_primitives::PETERSBURG ); assert_eq!( revm_spec(&MAINNET, Head { number: 7280000 - 10, ..Default::default() }), - revm::primitives::BYZANTIUM + revm_primitives::BYZANTIUM ); assert_eq!( revm_spec(&MAINNET, Head { number: 2675000 + 10, ..Default::default() }), - revm::primitives::SPURIOUS_DRAGON + revm_primitives::SPURIOUS_DRAGON ); assert_eq!( revm_spec(&MAINNET, Head { number: 2675000 - 10, ..Default::default() }), - revm::primitives::TANGERINE + revm_primitives::TANGERINE ); assert_eq!( revm_spec(&MAINNET, Head { number: 1150000 + 10, ..Default::default() }), - revm::primitives::HOMESTEAD + revm_primitives::HOMESTEAD ); assert_eq!( revm_spec(&MAINNET, Head { number: 1150000 - 10, ..Default::default() }), - revm::primitives::FRONTIER + revm_primitives::FRONTIER ); } } diff --git a/crates/revm/revm-primitives/src/env.rs b/crates/revm/revm-primitives/src/env.rs index b2e5467909..0637cc376a 100644 --- a/crates/revm/revm-primitives/src/env.rs +++ b/crates/revm/revm-primitives/src/env.rs @@ -1,10 +1,11 @@ use crate::config::revm_spec; use reth_primitives::{ constants::{BEACON_ROOTS_ADDRESS, SYSTEM_ADDRESS}, - recover_signer, Address, Bytes, Chain, ChainSpec, Head, Header, Transaction, TransactionKind, - TransactionSignedEcRecovered, TxEip1559, TxEip2930, TxEip4844, TxLegacy, H256, U256, + recover_signer, + revm_primitives::{AnalysisKind, BlockEnv, CfgEnv, Env, SpecId, TransactTo, TxEnv}, + Address, Bytes, Chain, ChainSpec, Head, Header, Transaction, TransactionKind, + TransactionSignedEcRecovered, TxEip1559, TxEip2930, TxEip4844, TxLegacy, B256, U256, }; -use revm::primitives::{AnalysisKind, BlockEnv, CfgEnv, Env, SpecId, TransactTo, TxEnv}; /// Convenience function to call both [fill_cfg_env] 
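The `config.rs` hunks above only re-route the existing `SpecId` constants through the `revm_primitives` re-export in `reth_primitives`; the fork-selection logic itself is unchanged. A hedged usage sketch, assuming the crate is available as `reth_revm_primitives` and that mainnet's Shanghai activation timestamp is later than the queried one:

```rust
use reth_primitives::{revm_primitives::SpecId, MAINNET};
use reth_revm_primitives::config::revm_spec_by_timestamp_after_merge;

fn main() {
    // Post-merge spec selection is timestamp based: a timestamp before
    // mainnet's Shanghai activation should resolve to MERGE.
    let spec = revm_spec_by_timestamp_after_merge(&MAINNET, 0);
    assert_eq!(spec, SpecId::MERGE);
}
```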
and [fill_block_env] pub fn fill_cfg_and_block_env( @@ -99,10 +100,10 @@ pub fn recover_header_signer(header: &Header) -> Option
{ let signature: [u8; 65] = header.extra_data[signature_start_byte..].try_into().ok()?; let seal_hash = { let mut header_to_seal = header.clone(); - header_to_seal.extra_data = Bytes::from(&header.extra_data[..signature_start_byte]); + header_to_seal.extra_data = Bytes::from(header.extra_data[..signature_start_byte].to_vec()); header_to_seal.hash_slow() }; - recover_signer(&signature, seal_hash.as_fixed_bytes()).ok() + recover_signer(&signature, &seal_hash.0).ok() } /// Returns a new [TxEnv] filled with the transaction's data. @@ -127,7 +128,7 @@ pub fn tx_env_with_recovered(transaction: &TransactionSignedEcRecovered) -> TxEn /// * the call does not follow the EIP-1559 burn semantics - no value should be transferred as /// part of the call /// * if no code exists at `BEACON_ROOTS_ADDRESS`, the call must fail silently -pub fn fill_tx_env_with_beacon_root_contract_call(env: &mut Env, parent_beacon_block_root: H256) { +pub fn fill_tx_env_with_beacon_root_contract_call(env: &mut Env, parent_beacon_block_root: B256) { env.tx = TxEnv { caller: SYSTEM_ADDRESS, transact_to: TransactTo::Call(BEACON_ROOTS_ADDRESS), @@ -135,7 +136,7 @@ pub fn fill_tx_env_with_beacon_root_contract_call(env: &mut Env, parent_beacon_b nonce: None, gas_limit: 30_000_000, value: U256::ZERO, - data: parent_beacon_block_root.to_fixed_bytes().to_vec().into(), + data: parent_beacon_block_root.0.to_vec().into(), // Setting the gas price to zero enforces that no value is transferred as part of the call, // and that the call will not count against the block's gas limit gas_price: U256::ZERO, @@ -186,7 +187,7 @@ where TransactionKind::Create => TransactTo::create(), }; tx_env.value = U256::from(*value); - tx_env.data = input.0.clone(); + tx_env.data = input.clone(); tx_env.chain_id = *chain_id; tx_env.nonce = Some(*nonce); tx_env.access_list.clear(); @@ -209,20 +210,14 @@ where TransactionKind::Create => TransactTo::create(), }; tx_env.value = U256::from(*value); - tx_env.data = input.0.clone(); + tx_env.data = input.clone(); tx_env.chain_id = Some(*chain_id); tx_env.nonce = Some(*nonce); tx_env.access_list = access_list .0 .iter() .map(|l| { - ( - l.address, - l.storage_keys - .iter() - .map(|k| U256::from_be_bytes(k.to_fixed_bytes())) - .collect(), - ) + (l.address, l.storage_keys.iter().map(|k| U256::from_be_bytes(k.0)).collect()) }) .collect(); } @@ -245,20 +240,14 @@ where TransactionKind::Create => TransactTo::create(), }; tx_env.value = U256::from(*value); - tx_env.data = input.0.clone(); + tx_env.data = input.clone(); tx_env.chain_id = Some(*chain_id); tx_env.nonce = Some(*nonce); tx_env.access_list = access_list .0 .iter() .map(|l| { - ( - l.address, - l.storage_keys - .iter() - .map(|k| U256::from_be_bytes(k.to_fixed_bytes())) - .collect(), - ) + (l.address, l.storage_keys.iter().map(|k| U256::from_be_bytes(k.0)).collect()) }) .collect(); } @@ -283,20 +272,14 @@ where TransactionKind::Create => TransactTo::create(), }; tx_env.value = U256::from(*value); - tx_env.data = input.0.clone(); + tx_env.data = input.clone(); tx_env.chain_id = Some(*chain_id); tx_env.nonce = Some(*nonce); tx_env.access_list = access_list .0 .iter() .map(|l| { - ( - l.address, - l.storage_keys - .iter() - .map(|k| U256::from_be_bytes(k.to_fixed_bytes())) - .collect(), - ) + (l.address, l.storage_keys.iter().map(|k| U256::from_be_bytes(k.0)).collect()) }) .collect(); tx_env.blob_hashes = blob_versioned_hashes.clone(); diff --git a/crates/revm/revm-primitives/src/lib.rs b/crates/revm/revm-primitives/src/lib.rs index 692cd28258..cc1c7bf20e 100644 --- 
a/crates/revm/revm-primitives/src/lib.rs +++ b/crates/revm/revm-primitives/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -11,12 +11,9 @@ pub mod config; -/// Helpers for configuring revm [Env](revm::primitives::Env) +/// Helpers for configuring Revm [`Env`](reth_primitives::revm_primitives::Env). pub mod env; /// Helpers for type compatibility between reth and revm types mod compat; pub use compat::*; - -/// Re-exports revm types; -pub use revm::*; diff --git a/crates/revm/src/database.rs b/crates/revm/src/database.rs index ffe55bd57f..610ff79376 100644 --- a/crates/revm/src/database.rs +++ b/crates/revm/src/database.rs @@ -1,5 +1,5 @@ use reth_interfaces::RethError; -use reth_primitives::{H160, H256, KECCAK_EMPTY, U256}; +use reth_primitives::{Address, B256, KECCAK_EMPTY, U256}; use reth_provider::StateProvider; use revm::{ db::{CacheDB, DatabaseRef}, @@ -42,7 +42,7 @@ impl StateProviderDatabase { impl Database for StateProviderDatabase { type Error = RethError; - fn basic(&mut self, address: H160) -> Result, Self::Error> { + fn basic(&mut self, address: Address) -> Result, Self::Error> { Ok(self.0.basic_account(address)?.map(|account| AccountInfo { balance: account.balance, nonce: account.nonce, @@ -51,19 +51,19 @@ impl Database for StateProviderDatabase { })) } - fn code_by_hash(&mut self, code_hash: H256) -> Result { + fn code_by_hash(&mut self, code_hash: B256) -> Result { let bytecode = self.0.bytecode_by_hash(code_hash)?; Ok(bytecode.map(|b| b.0).unwrap_or_else(Bytecode::new)) } - fn storage(&mut self, address: H160, index: U256) -> Result { - let index = H256(index.to_be_bytes()); + fn storage(&mut self, address: Address, index: U256) -> Result { + let index = B256::new(index.to_be_bytes()); let ret = self.0.storage(address, index)?.unwrap_or_default(); Ok(ret) } - fn block_hash(&mut self, number: U256) -> Result { + fn block_hash(&mut self, number: U256) -> Result { // The `number` represents the block number, so it is safe to cast it to u64. 
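Several hunks above hinge on the same pair of conversions between 32-byte words and `U256`: the state provider keys storage by `B256`, so the database shim encodes Revm's `U256` slot index as big-endian bytes, while the access-list handling in `env.rs` decodes in the opposite direction. A small round-trip sketch, assuming `reth_primitives` re-exports the alloy `B256`/`U256` types as shown above:

```rust
use reth_primitives::{B256, U256};

fn main() {
    // Revm hands the storage slot to the database as a U256 ...
    let slot = U256::from(0xdead_beefu64);

    // ... the provider wants a B256 key, so encode it big-endian,
    // exactly as `StateProviderDatabase::storage` does above.
    let key = B256::new(slot.to_be_bytes());

    // The access-list conversion in `env.rs` goes back the other way.
    let roundtrip = U256::from_be_bytes(key.0);
    assert_eq!(roundtrip, slot);
}
```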
Ok(self.0.block_hash(number.try_into().unwrap())?.unwrap_or_default()) } @@ -72,7 +72,7 @@ impl Database for StateProviderDatabase { impl DatabaseRef for StateProviderDatabase { type Error = ::Error; - fn basic(&self, address: H160) -> Result, Self::Error> { + fn basic(&self, address: Address) -> Result, Self::Error> { Ok(self.0.basic_account(address)?.map(|account| AccountInfo { balance: account.balance, nonce: account.nonce, @@ -81,7 +81,7 @@ impl DatabaseRef for StateProviderDatabase { })) } - fn code_by_hash(&self, code_hash: H256) -> Result { + fn code_by_hash(&self, code_hash: B256) -> Result { let bytecode = self.0.bytecode_by_hash(code_hash)?; if let Some(bytecode) = bytecode { @@ -91,13 +91,13 @@ impl DatabaseRef for StateProviderDatabase { } } - fn storage(&self, address: H160, index: U256) -> Result { - let index = H256(index.to_be_bytes()); + fn storage(&self, address: Address, index: U256) -> Result { + let index = B256::new(index.to_be_bytes()); let ret = self.0.storage(address, index)?.unwrap_or_default(); Ok(ret) } - fn block_hash(&self, number: U256) -> Result { + fn block_hash(&self, number: U256) -> Result { // Note: this unwrap is potentially unsafe Ok(self.0.block_hash(number.try_into().unwrap())?.unwrap_or_default()) } diff --git a/crates/revm/src/eth_dao_fork.rs b/crates/revm/src/eth_dao_fork.rs index 67e26cbd0c..8da91c35af 100644 --- a/crates/revm/src/eth_dao_fork.rs +++ b/crates/revm/src/eth_dao_fork.rs @@ -1,126 +1,127 @@ //! DAO FOrk related constants from [EIP-779](https://eips.ethereum.org/EIPS/eip-779). //! It happened on Ethereum block 1_920_000 -use reth_primitives::{hex_literal::hex, H160}; + +use reth_primitives::{address, Address}; /// Dao hardfork beneficiary that received ether from accounts from DAO and DAO creator children. 
-pub static DAO_HARDFORK_BENEFICIARY: H160 = H160(hex!("bf4ed7b27f1d666546e30d74d50d173d20bca754")); +pub static DAO_HARDFORK_BENEFICIARY: Address = address!("bf4ed7b27f1d666546e30d74d50d173d20bca754"); /// DAO hardfork account that ether was taken and added to beneficiary -pub static DAO_HARDKFORK_ACCOUNTS: [H160; 116] = [ - H160(hex!("d4fe7bc31cedb7bfb8a345f31e668033056b2728")), - H160(hex!("b3fb0e5aba0e20e5c49d252dfd30e102b171a425")), - H160(hex!("2c19c7f9ae8b751e37aeb2d93a699722395ae18f")), - H160(hex!("ecd135fa4f61a655311e86238c92adcd779555d2")), - H160(hex!("1975bd06d486162d5dc297798dfc41edd5d160a7")), - H160(hex!("a3acf3a1e16b1d7c315e23510fdd7847b48234f6")), - H160(hex!("319f70bab6845585f412ec7724b744fec6095c85")), - H160(hex!("06706dd3f2c9abf0a21ddcc6941d9b86f0596936")), - H160(hex!("5c8536898fbb74fc7445814902fd08422eac56d0")), - H160(hex!("6966ab0d485353095148a2155858910e0965b6f9")), - H160(hex!("779543a0491a837ca36ce8c635d6154e3c4911a6")), - H160(hex!("2a5ed960395e2a49b1c758cef4aa15213cfd874c")), - H160(hex!("5c6e67ccd5849c0d29219c4f95f1a7a93b3f5dc5")), - H160(hex!("9c50426be05db97f5d64fc54bf89eff947f0a321")), - H160(hex!("200450f06520bdd6c527622a273333384d870efb")), - H160(hex!("be8539bfe837b67d1282b2b1d61c3f723966f049")), - H160(hex!("6b0c4d41ba9ab8d8cfb5d379c69a612f2ced8ecb")), - H160(hex!("f1385fb24aad0cd7432824085e42aff90886fef5")), - H160(hex!("d1ac8b1ef1b69ff51d1d401a476e7e612414f091")), - H160(hex!("8163e7fb499e90f8544ea62bbf80d21cd26d9efd")), - H160(hex!("51e0ddd9998364a2eb38588679f0d2c42653e4a6")), - H160(hex!("627a0a960c079c21c34f7612d5d230e01b4ad4c7")), - H160(hex!("f0b1aa0eb660754448a7937c022e30aa692fe0c5")), - H160(hex!("24c4d950dfd4dd1902bbed3508144a54542bba94")), - H160(hex!("9f27daea7aca0aa0446220b98d028715e3bc803d")), - H160(hex!("a5dc5acd6a7968a4554d89d65e59b7fd3bff0f90")), - H160(hex!("d9aef3a1e38a39c16b31d1ace71bca8ef58d315b")), - H160(hex!("63ed5a272de2f6d968408b4acb9024f4cc208ebf")), - H160(hex!("6f6704e5a10332af6672e50b3d9754dc460dfa4d")), - H160(hex!("77ca7b50b6cd7e2f3fa008e24ab793fd56cb15f6")), - H160(hex!("492ea3bb0f3315521c31f273e565b868fc090f17")), - H160(hex!("0ff30d6de14a8224aa97b78aea5388d1c51c1f00")), - H160(hex!("9ea779f907f0b315b364b0cfc39a0fde5b02a416")), - H160(hex!("ceaeb481747ca6c540a000c1f3641f8cef161fa7")), - H160(hex!("cc34673c6c40e791051898567a1222daf90be287")), - H160(hex!("579a80d909f346fbfb1189493f521d7f48d52238")), - H160(hex!("e308bd1ac5fda103967359b2712dd89deffb7973")), - H160(hex!("4cb31628079fb14e4bc3cd5e30c2f7489b00960c")), - H160(hex!("ac1ecab32727358dba8962a0f3b261731aad9723")), - H160(hex!("4fd6ace747f06ece9c49699c7cabc62d02211f75")), - H160(hex!("440c59b325d2997a134c2c7c60a8c61611212bad")), - H160(hex!("4486a3d68fac6967006d7a517b889fd3f98c102b")), - H160(hex!("9c15b54878ba618f494b38f0ae7443db6af648ba")), - H160(hex!("27b137a85656544b1ccb5a0f2e561a5703c6a68f")), - H160(hex!("21c7fdb9ed8d291d79ffd82eb2c4356ec0d81241")), - H160(hex!("23b75c2f6791eef49c69684db4c6c1f93bf49a50")), - H160(hex!("1ca6abd14d30affe533b24d7a21bff4c2d5e1f3b")), - H160(hex!("b9637156d330c0d605a791f1c31ba5890582fe1c")), - H160(hex!("6131c42fa982e56929107413a9d526fd99405560")), - H160(hex!("1591fc0f688c81fbeb17f5426a162a7024d430c2")), - H160(hex!("542a9515200d14b68e934e9830d91645a980dd7a")), - H160(hex!("c4bbd073882dd2add2424cf47d35213405b01324")), - H160(hex!("782495b7b3355efb2833d56ecb34dc22ad7dfcc4")), - H160(hex!("58b95c9a9d5d26825e70a82b6adb139d3fd829eb")), - H160(hex!("3ba4d81db016dc2890c81f3acec2454bff5aada5")), - 
H160(hex!("b52042c8ca3f8aa246fa79c3feaa3d959347c0ab")), - H160(hex!("e4ae1efdfc53b73893af49113d8694a057b9c0d1")), - H160(hex!("3c02a7bc0391e86d91b7d144e61c2c01a25a79c5")), - H160(hex!("0737a6b837f97f46ebade41b9bc3e1c509c85c53")), - H160(hex!("97f43a37f595ab5dd318fb46e7a155eae057317a")), - H160(hex!("52c5317c848ba20c7504cb2c8052abd1fde29d03")), - H160(hex!("4863226780fe7c0356454236d3b1c8792785748d")), - H160(hex!("5d2b2e6fcbe3b11d26b525e085ff818dae332479")), - H160(hex!("5f9f3392e9f62f63b8eac0beb55541fc8627f42c")), - H160(hex!("057b56736d32b86616a10f619859c6cd6f59092a")), - H160(hex!("9aa008f65de0b923a2a4f02012ad034a5e2e2192")), - H160(hex!("304a554a310c7e546dfe434669c62820b7d83490")), - H160(hex!("914d1b8b43e92723e64fd0a06f5bdb8dd9b10c79")), - H160(hex!("4deb0033bb26bc534b197e61d19e0733e5679784")), - H160(hex!("07f5c1e1bc2c93e0402f23341973a0e043f7bf8a")), - H160(hex!("35a051a0010aba705c9008d7a7eff6fb88f6ea7b")), - H160(hex!("4fa802324e929786dbda3b8820dc7834e9134a2a")), - H160(hex!("9da397b9e80755301a3b32173283a91c0ef6c87e")), - H160(hex!("8d9edb3054ce5c5774a420ac37ebae0ac02343c6")), - H160(hex!("0101f3be8ebb4bbd39a2e3b9a3639d4259832fd9")), - H160(hex!("5dc28b15dffed94048d73806ce4b7a4612a1d48f")), - H160(hex!("bcf899e6c7d9d5a215ab1e3444c86806fa854c76")), - H160(hex!("12e626b0eebfe86a56d633b9864e389b45dcb260")), - H160(hex!("a2f1ccba9395d7fcb155bba8bc92db9bafaeade7")), - H160(hex!("ec8e57756626fdc07c63ad2eafbd28d08e7b0ca5")), - H160(hex!("d164b088bd9108b60d0ca3751da4bceb207b0782")), - H160(hex!("6231b6d0d5e77fe001c2a460bd9584fee60d409b")), - H160(hex!("1cba23d343a983e9b5cfd19496b9a9701ada385f")), - H160(hex!("a82f360a8d3455c5c41366975bde739c37bfeb8a")), - H160(hex!("9fcd2deaff372a39cc679d5c5e4de7bafb0b1339")), - H160(hex!("005f5cee7a43331d5a3d3eec71305925a62f34b6")), - H160(hex!("0e0da70933f4c7849fc0d203f5d1d43b9ae4532d")), - H160(hex!("d131637d5275fd1a68a3200f4ad25c71a2a9522e")), - H160(hex!("bc07118b9ac290e4622f5e77a0853539789effbe")), - H160(hex!("47e7aa56d6bdf3f36be34619660de61275420af8")), - H160(hex!("acd87e28b0c9d1254e868b81cba4cc20d9a32225")), - H160(hex!("adf80daec7ba8dcf15392f1ac611fff65d94f880")), - H160(hex!("5524c55fb03cf21f549444ccbecb664d0acad706")), - H160(hex!("40b803a9abce16f50f36a77ba41180eb90023925")), - H160(hex!("fe24cdd8648121a43a7c86d289be4dd2951ed49f")), - H160(hex!("17802f43a0137c506ba92291391a8a8f207f487d")), - H160(hex!("253488078a4edf4d6f42f113d1e62836a942cf1a")), - H160(hex!("86af3e9626fce1957c82e88cbf04ddf3a2ed7915")), - H160(hex!("b136707642a4ea12fb4bae820f03d2562ebff487")), - H160(hex!("dbe9b615a3ae8709af8b93336ce9b477e4ac0940")), - H160(hex!("f14c14075d6c4ed84b86798af0956deef67365b5")), - H160(hex!("ca544e5c4687d109611d0f8f928b53a25af72448")), - H160(hex!("aeeb8ff27288bdabc0fa5ebb731b6f409507516c")), - H160(hex!("cbb9d3703e651b0d496cdefb8b92c25aeb2171f7")), - H160(hex!("6d87578288b6cb5549d5076a207456a1f6a63dc0")), - H160(hex!("b2c6f0dfbb716ac562e2d85d6cb2f8d5ee87603e")), - H160(hex!("accc230e8a6e5be9160b8cdf2864dd2a001c28b6")), - H160(hex!("2b3455ec7fedf16e646268bf88846bd7a2319bb2")), - H160(hex!("4613f3bca5c44ea06337a9e439fbc6d42e501d0a")), - H160(hex!("d343b217de44030afaa275f54d31a9317c7f441e")), - H160(hex!("84ef4b2357079cd7a7c69fd7a37cd0609a679106")), - H160(hex!("da2fef9e4a3230988ff17df2165440f37e8b1708")), - H160(hex!("f4c64518ea10f995918a454158c6b61407ea345c")), - H160(hex!("7602b46df5390e432ef1c307d4f2c9ff6d65cc97")), - H160(hex!("bb9bc244d798123fde783fcc1c72d3bb8c189413")), - H160(hex!("807640a13483f8ac783c557fcdf27be11ea4ac7a")), +pub static 
DAO_HARDKFORK_ACCOUNTS: [Address; 116] = [ + address!("d4fe7bc31cedb7bfb8a345f31e668033056b2728"), + address!("b3fb0e5aba0e20e5c49d252dfd30e102b171a425"), + address!("2c19c7f9ae8b751e37aeb2d93a699722395ae18f"), + address!("ecd135fa4f61a655311e86238c92adcd779555d2"), + address!("1975bd06d486162d5dc297798dfc41edd5d160a7"), + address!("a3acf3a1e16b1d7c315e23510fdd7847b48234f6"), + address!("319f70bab6845585f412ec7724b744fec6095c85"), + address!("06706dd3f2c9abf0a21ddcc6941d9b86f0596936"), + address!("5c8536898fbb74fc7445814902fd08422eac56d0"), + address!("6966ab0d485353095148a2155858910e0965b6f9"), + address!("779543a0491a837ca36ce8c635d6154e3c4911a6"), + address!("2a5ed960395e2a49b1c758cef4aa15213cfd874c"), + address!("5c6e67ccd5849c0d29219c4f95f1a7a93b3f5dc5"), + address!("9c50426be05db97f5d64fc54bf89eff947f0a321"), + address!("200450f06520bdd6c527622a273333384d870efb"), + address!("be8539bfe837b67d1282b2b1d61c3f723966f049"), + address!("6b0c4d41ba9ab8d8cfb5d379c69a612f2ced8ecb"), + address!("f1385fb24aad0cd7432824085e42aff90886fef5"), + address!("d1ac8b1ef1b69ff51d1d401a476e7e612414f091"), + address!("8163e7fb499e90f8544ea62bbf80d21cd26d9efd"), + address!("51e0ddd9998364a2eb38588679f0d2c42653e4a6"), + address!("627a0a960c079c21c34f7612d5d230e01b4ad4c7"), + address!("f0b1aa0eb660754448a7937c022e30aa692fe0c5"), + address!("24c4d950dfd4dd1902bbed3508144a54542bba94"), + address!("9f27daea7aca0aa0446220b98d028715e3bc803d"), + address!("a5dc5acd6a7968a4554d89d65e59b7fd3bff0f90"), + address!("d9aef3a1e38a39c16b31d1ace71bca8ef58d315b"), + address!("63ed5a272de2f6d968408b4acb9024f4cc208ebf"), + address!("6f6704e5a10332af6672e50b3d9754dc460dfa4d"), + address!("77ca7b50b6cd7e2f3fa008e24ab793fd56cb15f6"), + address!("492ea3bb0f3315521c31f273e565b868fc090f17"), + address!("0ff30d6de14a8224aa97b78aea5388d1c51c1f00"), + address!("9ea779f907f0b315b364b0cfc39a0fde5b02a416"), + address!("ceaeb481747ca6c540a000c1f3641f8cef161fa7"), + address!("cc34673c6c40e791051898567a1222daf90be287"), + address!("579a80d909f346fbfb1189493f521d7f48d52238"), + address!("e308bd1ac5fda103967359b2712dd89deffb7973"), + address!("4cb31628079fb14e4bc3cd5e30c2f7489b00960c"), + address!("ac1ecab32727358dba8962a0f3b261731aad9723"), + address!("4fd6ace747f06ece9c49699c7cabc62d02211f75"), + address!("440c59b325d2997a134c2c7c60a8c61611212bad"), + address!("4486a3d68fac6967006d7a517b889fd3f98c102b"), + address!("9c15b54878ba618f494b38f0ae7443db6af648ba"), + address!("27b137a85656544b1ccb5a0f2e561a5703c6a68f"), + address!("21c7fdb9ed8d291d79ffd82eb2c4356ec0d81241"), + address!("23b75c2f6791eef49c69684db4c6c1f93bf49a50"), + address!("1ca6abd14d30affe533b24d7a21bff4c2d5e1f3b"), + address!("b9637156d330c0d605a791f1c31ba5890582fe1c"), + address!("6131c42fa982e56929107413a9d526fd99405560"), + address!("1591fc0f688c81fbeb17f5426a162a7024d430c2"), + address!("542a9515200d14b68e934e9830d91645a980dd7a"), + address!("c4bbd073882dd2add2424cf47d35213405b01324"), + address!("782495b7b3355efb2833d56ecb34dc22ad7dfcc4"), + address!("58b95c9a9d5d26825e70a82b6adb139d3fd829eb"), + address!("3ba4d81db016dc2890c81f3acec2454bff5aada5"), + address!("b52042c8ca3f8aa246fa79c3feaa3d959347c0ab"), + address!("e4ae1efdfc53b73893af49113d8694a057b9c0d1"), + address!("3c02a7bc0391e86d91b7d144e61c2c01a25a79c5"), + address!("0737a6b837f97f46ebade41b9bc3e1c509c85c53"), + address!("97f43a37f595ab5dd318fb46e7a155eae057317a"), + address!("52c5317c848ba20c7504cb2c8052abd1fde29d03"), + address!("4863226780fe7c0356454236d3b1c8792785748d"), + 
address!("5d2b2e6fcbe3b11d26b525e085ff818dae332479"), + address!("5f9f3392e9f62f63b8eac0beb55541fc8627f42c"), + address!("057b56736d32b86616a10f619859c6cd6f59092a"), + address!("9aa008f65de0b923a2a4f02012ad034a5e2e2192"), + address!("304a554a310c7e546dfe434669c62820b7d83490"), + address!("914d1b8b43e92723e64fd0a06f5bdb8dd9b10c79"), + address!("4deb0033bb26bc534b197e61d19e0733e5679784"), + address!("07f5c1e1bc2c93e0402f23341973a0e043f7bf8a"), + address!("35a051a0010aba705c9008d7a7eff6fb88f6ea7b"), + address!("4fa802324e929786dbda3b8820dc7834e9134a2a"), + address!("9da397b9e80755301a3b32173283a91c0ef6c87e"), + address!("8d9edb3054ce5c5774a420ac37ebae0ac02343c6"), + address!("0101f3be8ebb4bbd39a2e3b9a3639d4259832fd9"), + address!("5dc28b15dffed94048d73806ce4b7a4612a1d48f"), + address!("bcf899e6c7d9d5a215ab1e3444c86806fa854c76"), + address!("12e626b0eebfe86a56d633b9864e389b45dcb260"), + address!("a2f1ccba9395d7fcb155bba8bc92db9bafaeade7"), + address!("ec8e57756626fdc07c63ad2eafbd28d08e7b0ca5"), + address!("d164b088bd9108b60d0ca3751da4bceb207b0782"), + address!("6231b6d0d5e77fe001c2a460bd9584fee60d409b"), + address!("1cba23d343a983e9b5cfd19496b9a9701ada385f"), + address!("a82f360a8d3455c5c41366975bde739c37bfeb8a"), + address!("9fcd2deaff372a39cc679d5c5e4de7bafb0b1339"), + address!("005f5cee7a43331d5a3d3eec71305925a62f34b6"), + address!("0e0da70933f4c7849fc0d203f5d1d43b9ae4532d"), + address!("d131637d5275fd1a68a3200f4ad25c71a2a9522e"), + address!("bc07118b9ac290e4622f5e77a0853539789effbe"), + address!("47e7aa56d6bdf3f36be34619660de61275420af8"), + address!("acd87e28b0c9d1254e868b81cba4cc20d9a32225"), + address!("adf80daec7ba8dcf15392f1ac611fff65d94f880"), + address!("5524c55fb03cf21f549444ccbecb664d0acad706"), + address!("40b803a9abce16f50f36a77ba41180eb90023925"), + address!("fe24cdd8648121a43a7c86d289be4dd2951ed49f"), + address!("17802f43a0137c506ba92291391a8a8f207f487d"), + address!("253488078a4edf4d6f42f113d1e62836a942cf1a"), + address!("86af3e9626fce1957c82e88cbf04ddf3a2ed7915"), + address!("b136707642a4ea12fb4bae820f03d2562ebff487"), + address!("dbe9b615a3ae8709af8b93336ce9b477e4ac0940"), + address!("f14c14075d6c4ed84b86798af0956deef67365b5"), + address!("ca544e5c4687d109611d0f8f928b53a25af72448"), + address!("aeeb8ff27288bdabc0fa5ebb731b6f409507516c"), + address!("cbb9d3703e651b0d496cdefb8b92c25aeb2171f7"), + address!("6d87578288b6cb5549d5076a207456a1f6a63dc0"), + address!("b2c6f0dfbb716ac562e2d85d6cb2f8d5ee87603e"), + address!("accc230e8a6e5be9160b8cdf2864dd2a001c28b6"), + address!("2b3455ec7fedf16e646268bf88846bd7a2319bb2"), + address!("4613f3bca5c44ea06337a9e439fbc6d42e501d0a"), + address!("d343b217de44030afaa275f54d31a9317c7f441e"), + address!("84ef4b2357079cd7a7c69fd7a37cd0609a679106"), + address!("da2fef9e4a3230988ff17df2165440f37e8b1708"), + address!("f4c64518ea10f995918a454158c6b61407ea345c"), + address!("7602b46df5390e432ef1c307d4f2c9ff6d65cc97"), + address!("bb9bc244d798123fde783fcc1c72d3bb8c189413"), + address!("807640a13483f8ac783c557fcdf27be11ea4ac7a"), ]; diff --git a/crates/revm/src/lib.rs b/crates/revm/src/lib.rs index 058661e972..ab2e409232 100644 --- a/crates/revm/src/lib.rs +++ b/crates/revm/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] 
#![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -30,7 +30,7 @@ pub use reth_revm_inspectors::*; pub use reth_revm_primitives::*; /// Re-export everything -pub use revm; +pub use revm::{self, *}; /// Ethereum DAO hardfork state change data. pub mod eth_dao_fork; diff --git a/crates/revm/src/processor.rs b/crates/revm/src/processor.rs index 3fa89ac212..35e32ce8ff 100644 --- a/crates/revm/src/processor.rs +++ b/crates/revm/src/processor.rs @@ -12,8 +12,8 @@ use reth_interfaces::{ }; use reth_primitives::{ Address, Block, BlockNumber, Bloom, ChainSpec, Hardfork, Header, PruneMode, PruneModes, - PrunePartError, Receipt, ReceiptWithBloom, TransactionSigned, H256, MINIMUM_PRUNING_DISTANCE, - U256, + PrunePartError, Receipt, ReceiptWithBloom, Receipts, TransactionSigned, B256, + MINIMUM_PRUNING_DISTANCE, U256, }; use reth_provider::{ BlockExecutor, BlockExecutorStats, BundleStateWithReceipts, PrunableBlockExecutor, @@ -57,7 +57,7 @@ pub struct EVMProcessor<'a> { /// The inner vector stores receipts ordered by transaction number. /// /// If receipt is None it means it is pruned. - receipts: Vec>>, + receipts: Receipts, /// First block will be initialized to `None` /// and be set to the block number of first block executed. first_block: Option, @@ -86,7 +86,7 @@ impl<'a> EVMProcessor<'a> { chain_spec, evm, stack: InspectorStack::new(InspectorStackConfig::default()), - receipts: Vec::new(), + receipts: Receipts::new(), first_block: None, tip: None, prune_modes: PruneModes::none(), @@ -119,7 +119,7 @@ impl<'a> EVMProcessor<'a> { chain_spec, evm, stack: InspectorStack::new(InspectorStackConfig::default()), - receipts: Vec::new(), + receipts: Receipts::new(), first_block: None, tip: None, prune_modes: PruneModes::none(), @@ -349,27 +349,11 @@ impl<'a> EVMProcessor<'a> { // Check if gas used matches the value set in header. if block.gas_used != cumulative_gas_used { + let receipts = Receipts::from_block_receipt(receipts); return Err(BlockValidationError::BlockGasUsed { got: cumulative_gas_used, expected: block.gas_used, - gas_spent_by_tx: self - .receipts - .last() - .map(|block_r| { - block_r - .iter() - .enumerate() - .map(|(id, tx_r)| { - ( - id as u64, - tx_r.as_ref() - .expect("receipts have not been pruned") - .cumulative_gas_used, - ) - }) - .collect() - }) - .unwrap_or_default(), + gas_spent_by_tx: receipts.gas_spent_by_tx()?, } .into()) } @@ -528,7 +512,7 @@ impl<'a> PrunableBlockExecutor for EVMProcessor<'a> { /// Verify receipts pub fn verify_receipt<'a>( - expected_receipts_root: H256, + expected_receipts_root: B256, expected_logs_bloom: Bloom, receipts: impl Iterator + Clone, ) -> Result<(), BlockExecutionError> { @@ -544,7 +528,7 @@ pub fn verify_receipt<'a>( } // Create header log bloom. 
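In `verify_receipt` above, the header logs bloom is rebuilt by folding the per-receipt blooms, now starting from the `Bloom::ZERO` constant. A hedged helper showing that fold in isolation:

```rust
use reth_primitives::{Bloom, ReceiptWithBloom};

/// Recomputes the header logs bloom from per-receipt blooms, mirroring the
/// fold in `verify_receipt` above.
fn header_logs_bloom<'a>(receipts: impl Iterator<Item = &'a ReceiptWithBloom>) -> Bloom {
    receipts.fold(Bloom::ZERO, |bloom, r| bloom | r.bloom)
}

fn main() {
    let receipts: Vec<ReceiptWithBloom> = Vec::new();
    // With no receipts the bloom stays empty.
    assert_eq!(header_logs_bloom(receipts.iter()), Bloom::ZERO);
}
```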
- let logs_bloom = receipts_with_bloom.iter().fold(Bloom::zero(), |bloom, r| bloom | r.bloom); + let logs_bloom = receipts_with_bloom.iter().fold(Bloom::ZERO, |bloom, r| bloom | r.bloom); if logs_bloom != expected_logs_bloom { return Err(BlockValidationError::BloomLogDiff { expected: Box::new(expected_logs_bloom), @@ -558,27 +542,24 @@ pub fn verify_receipt<'a>( #[cfg(test)] mod tests { + use super::*; use reth_interfaces::RethResult; use reth_primitives::{ + bytes, constants::{BEACON_ROOTS_ADDRESS, SYSTEM_ADDRESS}, - hex_literal::hex, keccak256, Account, Bytecode, Bytes, ChainSpecBuilder, ForkCondition, StorageKey, MAINNET, }; use reth_provider::{AccountReader, BlockHashReader, StateRootProvider}; - use reth_revm_primitives::TransitionState; - use revm::Database; + use revm::{Database, TransitionState}; use std::collections::HashMap; - use super::*; - - const BEACON_ROOT_CONTRACT_CODE: [u8; 97] = - hex!("3373fffffffffffffffffffffffffffffffffffffffe14604d57602036146024575f5ffd5b5f35801560495762001fff810690815414603c575f5ffd5b62001fff01545f5260205ff35b5f5ffd5b62001fff42064281555f359062001fff015500"); + static BEACON_ROOT_CONTRACT_CODE: Bytes = bytes!("3373fffffffffffffffffffffffffffffffffffffffe14604d57602036146024575f5ffd5b5f35801560495762001fff810690815414603c575f5ffd5b62001fff01545f5260205ff35b5f5ffd5b62001fff42064281555f359062001fff015500"); #[derive(Debug, Default, Clone, Eq, PartialEq)] struct StateProviderTest { accounts: HashMap, Account)>, - contracts: HashMap, - block_hash: HashMap, + contracts: HashMap, + block_hash: HashMap, } impl StateProviderTest { @@ -593,7 +574,7 @@ mod tests { if let Some(bytecode) = bytecode { let hash = keccak256(&bytecode); account.bytecode_hash = Some(hash); - self.contracts.insert(hash, Bytecode::new_raw(bytecode.into())); + self.contracts.insert(hash, Bytecode::new_raw(bytecode)); } self.accounts.insert(address, (storage, account)); } @@ -607,7 +588,7 @@ mod tests { } impl BlockHashReader for StateProviderTest { - fn block_hash(&self, number: u64) -> RethResult> { + fn block_hash(&self, number: u64) -> RethResult> { Ok(self.block_hash.get(&number).cloned()) } @@ -615,7 +596,7 @@ mod tests { &self, start: BlockNumber, end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { let range = start..end; Ok(self .block_hash @@ -626,7 +607,7 @@ mod tests { } impl StateRootProvider for StateProviderTest { - fn state_root(&self, _bundle_state: &BundleStateWithReceipts) -> RethResult { + fn state_root(&self, _bundle_state: &BundleStateWithReceipts) -> RethResult { todo!() } } @@ -643,15 +624,15 @@ mod tests { .and_then(|(storage, _)| storage.get(&storage_key).cloned())) } - fn bytecode_by_hash(&self, code_hash: H256) -> RethResult> { + fn bytecode_by_hash(&self, code_hash: B256) -> RethResult> { Ok(self.contracts.get(&code_hash).cloned()) } fn proof( &self, _address: Address, - _keys: &[H256], - ) -> RethResult<(Vec, H256, Vec>)> { + _keys: &[B256], + ) -> RethResult<(Vec, B256, Vec>)> { todo!() } } @@ -663,18 +644,16 @@ mod tests { let mut db = StateProviderTest::default(); - let beacon_root_contract_code = Bytes::from(BEACON_ROOT_CONTRACT_CODE); - let beacon_root_contract_account = Account { balance: U256::ZERO, - bytecode_hash: Some(keccak256(beacon_root_contract_code.clone())), + bytecode_hash: Some(keccak256(BEACON_ROOT_CONTRACT_CODE.clone())), nonce: 1, }; db.insert_account( BEACON_ROOTS_ADDRESS, beacon_root_contract_account, - Some(beacon_root_contract_code), + Some(BEACON_ROOT_CONTRACT_CODE.clone()), HashMap::new(), ); @@ -704,7 +683,7 @@ mod tests { 
); // fix header, set a gas limit - header.parent_beacon_block_root = Some(H256::from_low_u64_be(0x1337)); + header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); // Now execute a block with the fixed header, ensure that it does not fail executor @@ -735,7 +714,7 @@ mod tests { .db_mut() .storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)) .expect("storage value should exist"); - assert_eq!(parent_beacon_block_root_storage, U256::from(0x1337)); + assert_eq!(parent_beacon_block_root_storage, U256::from(0x69)); } #[test] @@ -745,7 +724,7 @@ mod tests { let header = Header { timestamp: 1, number: 1, - parent_beacon_block_root: Some(H256::from_low_u64_be(0x1337)), + parent_beacon_block_root: Some(B256::with_last_byte(0x69)), excess_blob_gas: Some(0), ..Header::default() }; @@ -787,18 +766,16 @@ mod tests { // during the pre-block call let mut db = StateProviderTest::default(); - let beacon_root_contract_code = Bytes::from(BEACON_ROOT_CONTRACT_CODE); - let beacon_root_contract_account = Account { balance: U256::ZERO, - bytecode_hash: Some(keccak256(beacon_root_contract_code.clone())), + bytecode_hash: Some(keccak256(BEACON_ROOT_CONTRACT_CODE.clone())), nonce: 1, }; db.insert_account( BEACON_ROOTS_ADDRESS, beacon_root_contract_account, - Some(beacon_root_contract_code), + Some(BEACON_ROOT_CONTRACT_CODE.clone()), HashMap::new(), ); @@ -818,7 +795,7 @@ mod tests { let header = Header { timestamp: 1, number: 1, - parent_beacon_block_root: Some(H256::from_low_u64_be(0x1337)), + parent_beacon_block_root: Some(B256::with_last_byte(0x69)), excess_blob_gas: Some(0), ..Header::default() }; @@ -845,18 +822,16 @@ mod tests { fn eip_4788_genesis_call() { let mut db = StateProviderTest::default(); - let beacon_root_contract_code = Bytes::from(BEACON_ROOT_CONTRACT_CODE); - let beacon_root_contract_account = Account { balance: U256::ZERO, - bytecode_hash: Some(keccak256(beacon_root_contract_code.clone())), + bytecode_hash: Some(keccak256(BEACON_ROOT_CONTRACT_CODE.clone())), nonce: 1, }; db.insert_account( BEACON_ROOTS_ADDRESS, beacon_root_contract_account, - Some(beacon_root_contract_code), + Some(BEACON_ROOT_CONTRACT_CODE.clone()), HashMap::new(), ); @@ -874,7 +849,7 @@ mod tests { executor.init_env(&header, U256::ZERO); // attempt to execute the genesis block with non-zero parent beacon block root, expect err - header.parent_beacon_block_root = Some(H256::from_low_u64_be(0x1337)); + header.parent_beacon_block_root = Some(B256::with_last_byte(0x69)); let _err = executor .execute_and_verify_receipt( &Block { header: header.clone(), body: vec![], ommers: vec![], withdrawals: None }, @@ -886,7 +861,7 @@ mod tests { ); // fix header - header.parent_beacon_block_root = Some(H256::zero()); + header.parent_beacon_block_root = Some(B256::ZERO); // now try to process the genesis block again, this time ensuring that a system contract // call does not occur @@ -917,7 +892,7 @@ mod tests { let header = Header { timestamp: 1, number: 1, - parent_beacon_block_root: Some(H256::from_low_u64_be(0x1337)), + parent_beacon_block_root: Some(B256::with_last_byte(0x69)), base_fee_per_gas: Some(u64::MAX), excess_blob_gas: Some(0), ..Header::default() @@ -925,18 +900,16 @@ mod tests { let mut db = StateProviderTest::default(); - let beacon_root_contract_code = Bytes::from(BEACON_ROOT_CONTRACT_CODE); - let beacon_root_contract_account = Account { balance: U256::ZERO, - bytecode_hash: Some(keccak256(beacon_root_contract_code.clone())), + bytecode_hash: 
Some(keccak256(BEACON_ROOT_CONTRACT_CODE.clone())), nonce: 1, }; db.insert_account( BEACON_ROOTS_ADDRESS, beacon_root_contract_account, - Some(beacon_root_contract_code), + Some(BEACON_ROOT_CONTRACT_CODE.clone()), HashMap::new(), ); @@ -983,6 +956,6 @@ mod tests { .db_mut() .storage(BEACON_ROOTS_ADDRESS, U256::from(parent_beacon_block_root_index)) .unwrap(); - assert_eq!(parent_beacon_block_root_storage, U256::from(0x1337)); + assert_eq!(parent_beacon_block_root_storage, U256::from(0x69)); } } diff --git a/crates/revm/src/state_change.rs b/crates/revm/src/state_change.rs index c0d2cf82d9..4ebe5b544f 100644 --- a/crates/revm/src/state_change.rs +++ b/crates/revm/src/state_change.rs @@ -1,10 +1,10 @@ use reth_consensus_common::calc; use reth_interfaces::executor::{BlockExecutionError, BlockValidationError}; use reth_primitives::{ - constants::SYSTEM_ADDRESS, Address, ChainSpec, Header, Withdrawal, H256, U256, + constants::SYSTEM_ADDRESS, Address, ChainSpec, Header, Withdrawal, B256, U256, }; -use reth_revm_primitives::{env::fill_tx_env_with_beacon_root_contract_call, Database}; -use revm::{primitives::ResultAndState, DatabaseCommit, EVM}; +use reth_revm_primitives::env::fill_tx_env_with_beacon_root_contract_call; +use revm::{primitives::ResultAndState, Database, DatabaseCommit, EVM}; use std::{collections::HashMap, fmt::Debug}; /// Collect all balance changes at the end of the block. @@ -61,7 +61,7 @@ pub fn apply_beacon_root_contract_call( chain_spec: &ChainSpec, block_timestamp: u64, block_number: u64, - block_parent_beacon_block_root: Option, + block_parent_beacon_block_root: Option, evm: &mut EVM, ) -> Result<(), BlockExecutionError> where @@ -71,7 +71,7 @@ where // if the block number is zero (genesis block) then the parent beacon block root must // be 0x0 and no system transaction may occur as per EIP-4788 if block_number == 0 { - if block_parent_beacon_block_root != Some(H256::zero()) { + if block_parent_beacon_block_root != Some(B256::ZERO) { return Err(BlockValidationError::CancunGenesisParentBeaconBlockRootNotZero.into()) } } else { @@ -111,6 +111,8 @@ where /// Returns a map of addresses to their balance increments if the Shanghai hardfork is active at the /// given timestamp. +/// +/// Zero-valued withdrawals are filtered out. #[inline] pub fn post_block_withdrawals_balance_increments( chain_spec: &ChainSpec, @@ -129,6 +131,8 @@ pub fn post_block_withdrawals_balance_increments( /// Applies all withdrawal balance increments if shanghai is active at the given timestamp to the /// given `balance_increments` map. +/// +/// Zero-valued withdrawals are filtered out. 
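The `state_change.rs` hunks here add a documented guarantee that zero-valued withdrawals are skipped when post-block balance increments are accumulated; the guard itself appears in the hunk just below. A minimal, self-contained sketch of that accumulation rule, using a simplified stand-in for `Withdrawal` rather than the real type:

```rust
use std::collections::HashMap;

/// Simplified stand-in for a withdrawal: (address, amount in gwei).
type Withdrawal = ([u8; 20], u64);

const GWEI_TO_WEI: u128 = 1_000_000_000;

/// Accumulates withdrawal amounts (in wei) per address, skipping zero-valued
/// withdrawals, mirroring `insert_post_block_withdrawals_balance_increments`
/// below.
fn withdrawal_increments(withdrawals: &[Withdrawal]) -> HashMap<[u8; 20], u128> {
    let mut increments = HashMap::new();
    for (address, amount) in withdrawals {
        if *amount > 0 {
            *increments.entry(*address).or_insert(0u128) += *amount as u128 * GWEI_TO_WEI;
        }
    }
    increments
}

fn main() {
    let a = [1u8; 20];
    let b = [2u8; 20];
    let increments = withdrawal_increments(&[(a, 2), (a, 3), (b, 0)]);
    assert_eq!(increments[&a], 5 * GWEI_TO_WEI);
    // The zero-valued withdrawal never creates an entry.
    assert!(!increments.contains_key(&b));
}
```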
#[inline] pub fn insert_post_block_withdrawals_balance_increments( chain_spec: &ChainSpec, @@ -140,8 +144,10 @@ pub fn insert_post_block_withdrawals_balance_increments( if chain_spec.is_shanghai_active_at_timestamp(block_timestamp) { if let Some(withdrawals) = withdrawals { for withdrawal in withdrawals { - *balance_increments.entry(withdrawal.address).or_default() += - withdrawal.amount_wei(); + if withdrawal.amount > 0 { + *balance_increments.entry(withdrawal.address).or_default() += + withdrawal.amount_wei(); + } } } } diff --git a/crates/rlp/Cargo.toml b/crates/rlp/Cargo.toml deleted file mode 100644 index 8a92f6388d..0000000000 --- a/crates/rlp/Cargo.toml +++ /dev/null @@ -1,45 +0,0 @@ -[package] -name = "reth-rlp" -version.workspace = true -edition.workspace = true -rust-version.workspace = true -license = "Apache-2.0" -description = "Fast RLP serialization library" -homepage.workspace = true -repository.workspace = true - -[dependencies] -arrayvec = { version = "0.7", default-features = false } -auto_impl = "1" -bytes.workspace = true -ethnum = { version = "1", default-features = false, optional = true } -smol_str = { version = "0.2", default-features = false, optional = true } -ethereum-types = { version = "0.14", features = ["codec"], optional = true } -revm-primitives = { workspace = true, features = ["serde"] } -reth-rlp-derive = { path = "./rlp-derive", optional = true } - -# for eip-4844 -c-kzg = { workspace = true, optional = true } - -[dev-dependencies] -reth-rlp = { workspace = true, features = [ - "derive", - "std", - "ethnum", - "ethereum-types", - "smol_str", -] } -hex-literal.workspace = true -criterion = "0.5.0" -pprof = { version = "0.12", features = ["flamegraph", "frame-pointer", "criterion"] } - -[features] -default = ["kzg"] -alloc = [] -derive = ["reth-rlp-derive"] -std = ["alloc"] -kzg = ["c-kzg"] - -[[bench]] -name = "bench" -harness = false diff --git a/crates/rlp/LICENCE b/crates/rlp/LICENCE deleted file mode 100644 index 9c8f3ea087..0000000000 --- a/crates/rlp/LICENCE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/crates/rlp/README.md b/crates/rlp/README.md deleted file mode 100644 index 0fda03ed21..0000000000 --- a/crates/rlp/README.md +++ /dev/null @@ -1,4 +0,0 @@ -## RLP encoder/decoder - -Forked from an earlier Apache licenced version of the `fastrlp` crate, before it changed licence to GPL. -NOTE: The Rust fastrlp implementation is itself a port of the [Golang Apache licensed fastrlp](https://github.com/umbracle/fastrlp) \ No newline at end of file diff --git a/crates/rlp/benches/bench.rs b/crates/rlp/benches/bench.rs deleted file mode 100644 index e696951831..0000000000 --- a/crates/rlp/benches/bench.rs +++ /dev/null @@ -1,74 +0,0 @@ -// Copyright 2020 Parity Technologies -// -// Licensed under the Apache License, Version 2.0 or the MIT license -// , at your -// option. This file may not be copied, modified, or distributed -// except according to those terms. - -//! 
benchmarking for rlp - -use bytes::BytesMut; -use criterion::{criterion_group, criterion_main, Criterion}; -use ethnum::*; -use hex_literal::hex; -use pprof::criterion::{Output, PProfProfiler}; -use reth_rlp::*; - -fn bench_encode(c: &mut Criterion) { - c.bench_function("encode_u64", |b| { - b.iter(|| { - let mut out = BytesMut::new(); - 0x1023_4567_89ab_cdefu64.encode(&mut out); - }) - }); - c.bench_function("encode_u256", |b| { - b.iter(|| { - let mut out = BytesMut::new(); - let uint = U256::from_be_bytes(hex!( - "8090a0b0c0d0e0f00910203040506077000000000000000100000000000012f0" - )); - uint.encode(&mut out); - }) - }); - c.bench_function("encode_1000_u64", |b| { - b.iter(|| { - let mut out = BytesMut::new(); - reth_rlp::encode_list((0..1000u64).collect::>().as_slice(), &mut out); - }) - }); -} - -fn bench_decode(c: &mut Criterion) { - c.bench_function("decode_u64", |b| { - b.iter(|| { - let data = [0x88, 0x10, 0x23, 0x45, 0x67, 0x89, 0xab, 0xcd, 0xef]; - let _ = u64::decode(&mut &data[..]).unwrap(); - }) - }); - c.bench_function("decode_u256", |b| { - b.iter(|| { - let data = [ - 0xa0, 0x80, 0x90, 0xa0, 0xb0, 0xc0, 0xd0, 0xe0, 0xf0, 0x09, 0x10, 0x20, 0x30, 0x40, - 0x50, 0x60, 0x77, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, - 0x00, 0x00, 0x00, 0x12, 0xf0, - ]; - let _ = U256::decode(&mut &data[..]).unwrap(); - }) - }); - c.bench_function("decode_1000_u64", |b| { - let input = (0..1000u64).collect::>(); - let mut data = BytesMut::new(); - reth_rlp::encode_list(input.as_slice(), &mut data); - b.iter(|| { - let _ = Vec::::decode(&mut &data[..]).unwrap(); - }); - }); -} - -criterion_group! { - name = benches; - config = Criterion::default().with_profiler(PProfProfiler::new(100, Output::Flamegraph(None))); - targets = bench_encode, bench_decode -} -criterion_main!(benches); diff --git a/crates/rlp/rlp-derive/Cargo.toml b/crates/rlp/rlp-derive/Cargo.toml deleted file mode 100644 index 72760af83d..0000000000 --- a/crates/rlp/rlp-derive/Cargo.toml +++ /dev/null @@ -1,17 +0,0 @@ -[package] -name = "reth-rlp-derive" -version.workspace = true -license = "Apache-2.0" -edition.workspace = true -rust-version.workspace = true -description = "Procedural macros for reth-rlp" -homepage.workspace = true -repository.workspace = true - -[lib] -proc-macro = true - -[dependencies] -syn = "2" -quote.workspace = true -proc-macro2.workspace = true diff --git a/crates/rlp/rlp-derive/LICENCE b/crates/rlp/rlp-derive/LICENCE deleted file mode 100644 index 9c8f3ea087..0000000000 --- a/crates/rlp/rlp-derive/LICENCE +++ /dev/null @@ -1,201 +0,0 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ - - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION - - 1. Definitions. - - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. - - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. 
- - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. - - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. 
If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. - - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. 
Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "{}" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. - - Copyright {yyyy} {name of copyright owner} - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. \ No newline at end of file diff --git a/crates/rlp/rlp-derive/README.md b/crates/rlp/rlp-derive/README.md deleted file mode 100644 index a105019ab6..0000000000 --- a/crates/rlp/rlp-derive/README.md +++ /dev/null @@ -1,4 +0,0 @@ -## RLP derive crate - -Forked from an earlier Apache licenced version of the `fastrlp-derive` crate, before it changed licence to GPL. 
-NOTE: The Rust fastrlp implementation is itself a port of the [Golang Apache licensed fastrlp](https://github.com/umbracle/fastrlp) diff --git a/crates/rlp/rlp-derive/src/de.rs b/crates/rlp/rlp-derive/src/de.rs deleted file mode 100644 index aab545fd86..0000000000 --- a/crates/rlp/rlp-derive/src/de.rs +++ /dev/null @@ -1,123 +0,0 @@ -use proc_macro2::TokenStream; -use quote::quote; -use syn::{Error, Result}; - -use crate::utils::{attributes_include, field_ident, is_optional, parse_struct, EMPTY_STRING_CODE}; - -pub(crate) fn impl_decodable(ast: &syn::DeriveInput) -> Result { - let body = parse_struct(ast, "RlpDecodable")?; - - let fields = body.fields.iter().enumerate(); - - let supports_trailing_opt = attributes_include(&ast.attrs, "trailing"); - - let mut encountered_opt_item = false; - let mut stmts = Vec::with_capacity(body.fields.len()); - for (i, field) in fields { - let is_opt = is_optional(field); - if is_opt { - if !supports_trailing_opt { - return Err(Error::new_spanned(field, "Optional fields are disabled. Add `#[rlp(trailing)]` attribute to the struct in order to enable")); - } - encountered_opt_item = true; - } else if encountered_opt_item && !attributes_include(&field.attrs, "default") { - return Err(Error::new_spanned( - field, - "All subsequent fields must be either optional or default.", - )) - } - - stmts.push(decodable_field(i, field, is_opt)); - } - - let name = &ast.ident; - let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); - - let impl_block = quote! { - impl #impl_generics reth_rlp::Decodable for #name #ty_generics #where_clause { - fn decode(mut buf: &mut &[u8]) -> Result { - let b = &mut &**buf; - let rlp_head = reth_rlp::Header::decode(b)?; - - if !rlp_head.list { - return Err(reth_rlp::DecodeError::UnexpectedString); - } - - let started_len = b.len(); - let this = Self { - #(#stmts)* - }; - - let consumed = started_len - b.len(); - if consumed != rlp_head.payload_length { - return Err(reth_rlp::DecodeError::ListLengthMismatch { - expected: rlp_head.payload_length, - got: consumed, - }); - } - - *buf = *b; - - Ok(this) - } - } - }; - - Ok(quote! { - const _: () = { - extern crate reth_rlp; - #impl_block - }; - }) -} - -pub(crate) fn impl_decodable_wrapper(ast: &syn::DeriveInput) -> Result { - let body = parse_struct(ast, "RlpDecodableWrapper")?; - - assert_eq!( - body.fields.iter().count(), - 1, - "#[derive(RlpDecodableWrapper)] is only defined for structs with one field." - ); - - let name = &ast.ident; - let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); - - let impl_block = quote! { - impl #impl_generics reth_rlp::Decodable for #name #ty_generics #where_clause { - fn decode(buf: &mut &[u8]) -> Result { - Ok(Self(reth_rlp::Decodable::decode(buf)?)) - } - } - }; - - Ok(quote! { - const _: () = { - extern crate reth_rlp; - #impl_block - }; - }) -} - -fn decodable_field(index: usize, field: &syn::Field, is_opt: bool) -> TokenStream { - let ident = field_ident(index, field); - - if attributes_include(&field.attrs, "default") { - quote! { #ident: Default::default(), } - } else if is_opt { - quote! { - #ident: if started_len - b.len() < rlp_head.payload_length { - if b.first().map(|b| *b == #EMPTY_STRING_CODE).unwrap_or_default() { - bytes::Buf::advance(b, 1); - None - } else { - Some(reth_rlp::Decodable::decode(b)?) - } - } else { - None - }, - } - } else { - quote! 
{ #ident: reth_rlp::Decodable::decode(b)?, } - } -} diff --git a/crates/rlp/rlp-derive/src/en.rs b/crates/rlp/rlp-derive/src/en.rs deleted file mode 100644 index dda14741f0..0000000000 --- a/crates/rlp/rlp-derive/src/en.rs +++ /dev/null @@ -1,209 +0,0 @@ -use std::iter::Peekable; - -use proc_macro2::TokenStream; -use quote::quote; -use syn::{Error, Result}; - -use crate::utils::{attributes_include, field_ident, is_optional, parse_struct, EMPTY_STRING_CODE}; - -pub(crate) fn impl_encodable(ast: &syn::DeriveInput) -> Result { - let body = parse_struct(ast, "RlpEncodable")?; - - let mut fields = body - .fields - .iter() - .enumerate() - .filter(|(_, field)| !attributes_include(&field.attrs, "skip")) - .peekable(); - - let supports_trailing_opt = attributes_include(&ast.attrs, "trailing"); - - let mut encountered_opt_item = false; - let mut length_stmts = Vec::with_capacity(body.fields.len()); - let mut stmts = Vec::with_capacity(body.fields.len()); - - while let Some((i, field)) = fields.next() { - let is_opt = is_optional(field); - if is_opt { - if !supports_trailing_opt { - return Err(Error::new_spanned(field, "Optional fields are disabled. Add `#[rlp(trailing)]` attribute to the struct in order to enable")); - } - encountered_opt_item = true; - } else if encountered_opt_item { - return Err(Error::new_spanned(field, "All subsequent fields must be optional.")) - } - - length_stmts.push(encodable_length(i, field, is_opt, fields.clone())); - stmts.push(encodable_field(i, field, is_opt, fields.clone())); - } - - let name = &ast.ident; - let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); - - let impl_block = quote! { - trait E { - fn rlp_header(&self) -> reth_rlp::Header; - } - - impl #impl_generics E for #name #ty_generics #where_clause { - fn rlp_header(&self) -> reth_rlp::Header { - let mut rlp_head = reth_rlp::Header { list: true, payload_length: 0 }; - #(#length_stmts)* - rlp_head - } - } - - impl #impl_generics reth_rlp::Encodable for #name #ty_generics #where_clause { - fn length(&self) -> usize { - let rlp_head = E::rlp_header(self); - return reth_rlp::length_of_length(rlp_head.payload_length) + rlp_head.payload_length; - } - fn encode(&self, out: &mut dyn reth_rlp::BufMut) { - E::rlp_header(self).encode(out); - #(#stmts)* - } - } - }; - - Ok(quote! { - const _: () = { - extern crate reth_rlp; - #impl_block - }; - }) -} - -pub(crate) fn impl_encodable_wrapper(ast: &syn::DeriveInput) -> Result { - let body = parse_struct(ast, "RlpEncodableWrapper")?; - - let ident = { - let fields: Vec<_> = body.fields.iter().collect(); - if fields.len() == 1 { - let field = fields.first().expect("fields.len() == 1; qed"); - field_ident(0, field) - } else { - panic!("#[derive(RlpEncodableWrapper)] is only defined for structs with one field.") - } - }; - - let name = &ast.ident; - let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); - - let impl_block = quote! { - impl #impl_generics reth_rlp::Encodable for #name #ty_generics #where_clause { - fn length(&self) -> usize { - self.#ident.length() - } - fn encode(&self, out: &mut dyn reth_rlp::BufMut) { - self.#ident.encode(out) - } - } - }; - - Ok(quote! 
{ - const _: () = { - extern crate reth_rlp; - #impl_block - }; - }) -} - -pub(crate) fn impl_max_encoded_len(ast: &syn::DeriveInput) -> Result { - let body = parse_struct(ast, "RlpMaxEncodedLen")?; - - let stmts: Vec<_> = body - .fields - .iter() - .enumerate() - .filter(|(_, field)| !attributes_include(&field.attrs, "skip")) - .map(|(index, field)| encodable_max_length(index, field)) - .collect(); - let name = &ast.ident; - - let impl_block = quote! { - unsafe impl reth_rlp::MaxEncodedLen<{ reth_rlp::const_add(reth_rlp::length_of_length(#(#stmts)*), #(#stmts)*) }> for #name {} - unsafe impl reth_rlp::MaxEncodedLenAssoc for #name { - const LEN: usize = { reth_rlp::const_add(reth_rlp::length_of_length(#(#stmts)*), { #(#stmts)* }) }; - } - }; - - Ok(quote! { - const _: () = { - extern crate reth_rlp; - #impl_block - }; - }) -} - -fn encodable_length<'a>( - index: usize, - field: &syn::Field, - is_opt: bool, - mut remaining: Peekable>, -) -> TokenStream { - let ident = field_ident(index, field); - - if is_opt { - let default = if remaining.peek().is_some() { - let condition = remaining_opt_fields_some_condition(remaining); - quote! { #condition as usize } - } else { - quote! { 0 } - }; - - quote! { rlp_head.payload_length += &self.#ident.as_ref().map(|val| reth_rlp::Encodable::length(val)).unwrap_or(#default); } - } else { - quote! { rlp_head.payload_length += reth_rlp::Encodable::length(&self.#ident); } - } -} - -fn encodable_max_length(index: usize, field: &syn::Field) -> TokenStream { - let fieldtype = &field.ty; - - if index == 0 { - quote! { <#fieldtype as reth_rlp::MaxEncodedLenAssoc>::LEN } - } else { - quote! { + <#fieldtype as reth_rlp::MaxEncodedLenAssoc>::LEN } - } -} - -fn encodable_field<'a>( - index: usize, - field: &syn::Field, - is_opt: bool, - mut remaining: Peekable>, -) -> TokenStream { - let ident = field_ident(index, field); - - if is_opt { - let if_some_encode = quote! { - if let Some(val) = self.#ident.as_ref() { - reth_rlp::Encodable::encode(val, out) - } - }; - - if remaining.peek().is_some() { - let condition = remaining_opt_fields_some_condition(remaining); - quote! { - #if_some_encode - else if #condition { - out.put_u8(#EMPTY_STRING_CODE); - } - } - } else { - quote! { #if_some_encode } - } - } else { - quote! { reth_rlp::Encodable::encode(&self.#ident, out); } - } -} - -fn remaining_opt_fields_some_condition<'a>( - remaining: impl Iterator, -) -> TokenStream { - let conditions = remaining.map(|(index, field)| { - let ident = field_ident(index, field); - quote! { self.#ident.is_some() } - }); - quote! { #(#conditions) ||* } -} diff --git a/crates/rlp/rlp-derive/src/lib.rs b/crates/rlp/rlp-derive/src/lib.rs deleted file mode 100644 index 31240dcac1..0000000000 --- a/crates/rlp/rlp-derive/src/lib.rs +++ /dev/null @@ -1,83 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_cfg))] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" -)] -#![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] -#![deny(unused_must_use, unused_crate_dependencies)] -#![doc(test( - no_crate_inject, - attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables)) -))] - -//! Derive macro for `#[derive(RlpEncodable, RlpDecodable)]`. -//! -//! For example of usage see `./tests/rlp.rs`. -//! -//! 
This library also supports up to 1 `#[rlp(default)]` in a struct, -//! which is similar to [`#[serde(default)]`](https://serde.rs/field-attrs.html#default) -//! with the caveat that we use the `Default` value if -//! the field deserialization fails, as we don't serialize field -//! names and there is no way to tell if it is present or not. - -extern crate proc_macro; - -mod de; -mod en; -mod utils; - -use de::*; -use en::*; -use proc_macro::TokenStream; - -/// Derives `Encodable` for the type which encodes the all fields as list: `` -#[proc_macro_derive(RlpEncodable, attributes(rlp))] -pub fn encodable(input: TokenStream) -> TokenStream { - syn::parse(input) - .and_then(|ast| impl_encodable(&ast)) - .unwrap_or_else(|err| err.to_compile_error()) - .into() -} - -/// Derives `Encodable` for the type which encodes the fields as-is, without a header: `` -#[proc_macro_derive(RlpEncodableWrapper, attributes(rlp))] -pub fn encodable_wrapper(input: TokenStream) -> TokenStream { - syn::parse(input) - .and_then(|ast| impl_encodable_wrapper(&ast)) - .unwrap_or_else(|err| err.to_compile_error()) - .into() -} - -/// Derives `MaxEncodedLen` for types of constant size. -#[proc_macro_derive(RlpMaxEncodedLen, attributes(rlp))] -pub fn max_encoded_len(input: TokenStream) -> TokenStream { - syn::parse(input) - .and_then(|ast| impl_max_encoded_len(&ast)) - .unwrap_or_else(|err| err.to_compile_error()) - .into() -} - -/// Derives `Decodable` for the type whose implementation expects an rlp-list input: `` -/// -/// This is the inverse of `RlpEncodable`. -#[proc_macro_derive(RlpDecodable, attributes(rlp))] -pub fn decodable(input: TokenStream) -> TokenStream { - syn::parse(input) - .and_then(|ast| impl_decodable(&ast)) - .unwrap_or_else(|err| err.to_compile_error()) - .into() -} - -/// Derives `Decodable` for the type whose implementation expects only the individual fields -/// encoded: `` -/// -/// This is the inverse of `RlpEncodableWrapper`. 
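The `#[rlp(trailing)]` and `#[rlp(default)]` behaviour described above is easiest to see in a small round trip. Below is a minimal sketch, assuming the removed `reth-rlp` crate with its `derive` feature; the `Ping` struct and its fields are invented for illustration and mirror the structs exercised in `crates/rlp/tests/rlp.rs` further down in this diff.

// A minimal sketch, assuming reth-rlp with the `derive` feature enabled.
// `Ping` is illustrative only; the trailing `Option` mirrors the
// `#[rlp(trailing)]` behaviour documented above.
use bytes::BytesMut;
use reth_rlp::{Decodable, Encodable, RlpDecodable, RlpEncodable};

#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable)]
#[rlp(trailing)]
struct Ping {
    id: u64,
    // Trailing optional field: omitted from the encoding when `None`.
    extra: Option<u64>,
}

fn main() {
    let ping = Ping { id: 7, extra: None };

    // Encoding wraps the fields in an RLP list header.
    let mut out = BytesMut::new();
    ping.encode(&mut out);
    assert_eq!(out.len(), ping.length());

    // Decoding checks that the list payload length matches what was consumed.
    let mut slice: &[u8] = &out;
    let decoded = Ping::decode(&mut slice).unwrap();
    assert_eq!(decoded, ping);
}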
-#[proc_macro_derive(RlpDecodableWrapper, attributes(rlp))] -pub fn decodable_wrapper(input: TokenStream) -> TokenStream { - syn::parse(input) - .and_then(|ast| impl_decodable_wrapper(&ast)) - .unwrap_or_else(|err| err.to_compile_error()) - .into() -} diff --git a/crates/rlp/rlp-derive/src/utils.rs b/crates/rlp/rlp-derive/src/utils.rs deleted file mode 100644 index f4d4c87679..0000000000 --- a/crates/rlp/rlp-derive/src/utils.rs +++ /dev/null @@ -1,57 +0,0 @@ -use proc_macro2::TokenStream; -use quote::quote; -use syn::{Attribute, DataStruct, Error, Field, Meta, Result, Type, TypePath}; - -pub(crate) const EMPTY_STRING_CODE: u8 = 0x80; - -pub(crate) fn parse_struct<'a>( - ast: &'a syn::DeriveInput, - derive_attr: &str, -) -> Result<&'a DataStruct> { - if let syn::Data::Struct(s) = &ast.data { - Ok(s) - } else { - Err(Error::new_spanned( - ast, - format!("#[derive({derive_attr})] is only defined for structs."), - )) - } -} - -pub(crate) fn attributes_include(attrs: &[Attribute], attr_name: &str) -> bool { - for attr in attrs.iter() { - if attr.path().is_ident("rlp") { - if let Meta::List(meta) = &attr.meta { - let mut is_attr = false; - let _ = meta.parse_nested_meta(|meta| { - is_attr = meta.path.is_ident(attr_name); - Ok(()) - }); - if is_attr { - return true - } - } - } - } - false -} - -pub(crate) fn is_optional(field: &Field) -> bool { - if let Type::Path(TypePath { qself, path }) = &field.ty { - qself.is_none() && - path.leading_colon.is_none() && - path.segments.len() == 1 && - path.segments.first().unwrap().ident == "Option" - } else { - false - } -} - -pub(crate) fn field_ident(index: usize, field: &syn::Field) -> TokenStream { - if let Some(ident) = &field.ident { - quote! { #ident } - } else { - let index = syn::Index::from(index); - quote! 
{ #index } - } -} diff --git a/crates/rlp/src/decode.rs b/crates/rlp/src/decode.rs deleted file mode 100644 index 94d466612b..0000000000 --- a/crates/rlp/src/decode.rs +++ /dev/null @@ -1,669 +0,0 @@ -use crate::types::Header; -use bytes::{Buf, Bytes, BytesMut}; - -pub trait Decodable: Sized { - fn decode(buf: &mut &[u8]) -> Result; -} - -#[cfg(feature = "alloc")] -mod alloc_impl { - use super::*; - - impl Decodable for ::alloc::boxed::Box - where - T: Decodable + Sized, - { - fn decode(buf: &mut &[u8]) -> Result { - T::decode(buf).map(::alloc::boxed::Box::new) - } - } - - impl Decodable for ::alloc::sync::Arc - where - T: Decodable + Sized, - { - fn decode(buf: &mut &[u8]) -> Result { - T::decode(buf).map(::alloc::sync::Arc::new) - } - } - - impl Decodable for ::alloc::string::String { - fn decode(from: &mut &[u8]) -> Result { - let h = Header::decode(from)?; - if h.list { - return Err(DecodeError::UnexpectedList) - } - let mut to = ::alloc::vec::Vec::with_capacity(h.payload_length); - to.extend_from_slice(&from[..h.payload_length]); - from.advance(h.payload_length); - - Self::from_utf8(to).map_err(|_| DecodeError::Custom("invalid string")) - } - } -} - -#[derive(Clone, Copy, Debug, Eq, PartialEq)] -pub enum DecodeError { - Overflow, - LeadingZero, - InputTooShort, - NonCanonicalSingleByte, - NonCanonicalSize, - UnexpectedLength, - UnexpectedString, - UnexpectedList, - ListLengthMismatch { expected: usize, got: usize }, - Custom(&'static str), -} - -#[cfg(feature = "std")] -impl std::error::Error for DecodeError {} - -impl core::fmt::Display for DecodeError { - fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result { - match self { - DecodeError::Overflow => write!(f, "overflow"), - DecodeError::LeadingZero => write!(f, "leading zero"), - DecodeError::InputTooShort => write!(f, "input too short"), - DecodeError::NonCanonicalSingleByte => write!(f, "non-canonical single byte"), - DecodeError::NonCanonicalSize => write!(f, "non-canonical size"), - DecodeError::UnexpectedLength => write!(f, "unexpected length"), - DecodeError::UnexpectedString => write!(f, "unexpected string"), - DecodeError::UnexpectedList => write!(f, "unexpected list"), - DecodeError::ListLengthMismatch { expected, got } => { - write!(f, "list length mismatch: expected {expected}, got {got}") - } - DecodeError::Custom(err) => write!(f, "{err}"), - } - } -} - -impl Header { - /// Returns the decoded header. - /// - /// Returns an error if the given `buf`'s len is less than the expected payload. 
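The header rules implemented below are easiest to follow with concrete bytes. A minimal sketch, assuming the `Header` API from this file; the list fixture reuses the `c883ffccb583ffc0b5` bytes from this crate's own tests, and the string case is the standard RLP short-string form for "dog".

// Illustrative only: exercises the `Header` API defined in this file.
use reth_rlp::{DecodeError, Header};

fn main() -> Result<(), DecodeError> {
    // 0xC8 announces a list whose payload is 8 bytes long.
    let list = [0xC8, 0x83, 0xFF, 0xCC, 0xB5, 0x83, 0xFF, 0xC0, 0xB5];
    let mut buf = &list[..];
    let h = Header::decode(&mut buf)?;
    assert!(h.list);
    assert_eq!(h.payload_length, 8);
    // Only the header byte is consumed; the payload stays in `buf`.
    assert_eq!(buf.len(), 8);

    // 0x83 announces a 3-byte string payload ("dog").
    let dog = [0x83, b'd', b'o', b'g'];
    let h = Header::decode(&mut &dog[..])?;
    assert!(!h.list);
    assert_eq!(h.payload_length, 3);

    Ok(())
}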
- pub fn decode(buf: &mut &[u8]) -> Result { - if !buf.has_remaining() { - return Err(DecodeError::InputTooShort) - } - - let b = buf[0]; - let h: Self = { - if b < 0x80 { - Self { list: false, payload_length: 1 } - } else if b < 0xB8 { - buf.advance(1); - let h = Self { list: false, payload_length: b as usize - 0x80 }; - - if h.payload_length == 1 { - if !buf.has_remaining() { - return Err(DecodeError::InputTooShort) - } - if buf[0] < 0x80 { - return Err(DecodeError::NonCanonicalSingleByte) - } - } - - h - } else if b < 0xC0 { - buf.advance(1); - let len_of_len = b as usize - 0xB7; - if buf.len() < len_of_len { - return Err(DecodeError::InputTooShort) - } - let payload_length = usize::try_from(u64::from_be_bytes( - static_left_pad(&buf[..len_of_len]).ok_or(DecodeError::LeadingZero)?, - )) - .map_err(|_| DecodeError::Custom("Input too big"))?; - buf.advance(len_of_len); - if payload_length < 56 { - return Err(DecodeError::NonCanonicalSize) - } - - Self { list: false, payload_length } - } else if b < 0xF8 { - buf.advance(1); - Self { list: true, payload_length: b as usize - 0xC0 } - } else { - buf.advance(1); - let list = true; - let len_of_len = b as usize - 0xF7; - if buf.len() < len_of_len { - return Err(DecodeError::InputTooShort) - } - let payload_length = usize::try_from(u64::from_be_bytes( - static_left_pad(&buf[..len_of_len]).ok_or(DecodeError::LeadingZero)?, - )) - .map_err(|_| DecodeError::Custom("Input too big"))?; - buf.advance(len_of_len); - if payload_length < 56 { - return Err(DecodeError::NonCanonicalSize) - } - - Self { list, payload_length } - } - }; - - if buf.remaining() < h.payload_length { - return Err(DecodeError::InputTooShort) - } - - Ok(h) - } -} - -fn static_left_pad(data: &[u8]) -> Option<[u8; LEN]> { - if data.len() > LEN { - return None - } - - let mut v = [0; LEN]; - - if data.is_empty() { - return Some(v) - } - - if data[0] == 0 { - return None - } - - v[LEN - data.len()..].copy_from_slice(data); - Some(v) -} - -macro_rules! decode_integer { - ($t:ty) => { - impl Decodable for $t { - fn decode(buf: &mut &[u8]) -> Result { - let h = Header::decode(buf)?; - if h.list { - return Err(DecodeError::UnexpectedList) - } - if h.payload_length > (<$t>::BITS as usize / 8) { - return Err(DecodeError::Overflow) - } - if buf.remaining() < h.payload_length { - return Err(DecodeError::InputTooShort) - } - // In the case of 0x80, the Header will be decoded, leaving h.payload_length to be - // zero. - // 0x80 is the canonical encoding of 0, so we return 0 here. - if h.payload_length == 0 { - return Ok(<$t>::from(0u8)) - } - let v = <$t>::from_be_bytes( - static_left_pad(&buf[..h.payload_length]).ok_or(DecodeError::LeadingZero)?, - ); - buf.advance(h.payload_length); - Ok(v) - } - } - }; -} - -decode_integer!(usize); -decode_integer!(u8); -decode_integer!(u16); -decode_integer!(u32); -decode_integer!(u64); -decode_integer!(u128); - -impl Decodable for bool { - fn decode(buf: &mut &[u8]) -> Result { - Ok(match u8::decode(buf)? 
{ - 0 => false, - 1 => true, - _ => return Err(DecodeError::Custom("invalid bool value, must be 0 or 1")), - }) - } -} - -#[cfg(feature = "std")] -impl Decodable for std::net::IpAddr { - fn decode(buf: &mut &[u8]) -> Result { - use std::net::{IpAddr, Ipv4Addr, Ipv6Addr}; - - let h = Header::decode(buf)?; - if h.list { - return Err(DecodeError::UnexpectedList) - } - let o = match h.payload_length { - 4 => { - let mut to = [0_u8; 4]; - to.copy_from_slice(&buf[..4]); - IpAddr::V4(Ipv4Addr::from(to)) - } - 16 => { - let mut to = [0u8; 16]; - to.copy_from_slice(&buf[..16]); - IpAddr::V6(Ipv6Addr::from(to)) - } - _ => return Err(DecodeError::UnexpectedLength), - }; - buf.advance(h.payload_length); - Ok(o) - } -} - -#[cfg(feature = "ethnum")] -decode_integer!(ethnum::U256); - -#[cfg(feature = "ethereum-types")] -mod ethereum_types_support { - use super::*; - use ethereum_types::*; - use revm_primitives::{ruint::aliases::U128 as RU128, B160, B256, U256 as RU256}; - - macro_rules! fixed_hash_impl { - ($t:ty) => { - impl Decodable for $t { - fn decode(buf: &mut &[u8]) -> Result { - Decodable::decode(buf).map(Self) - } - } - }; - } - - fixed_hash_impl!(B160); - fixed_hash_impl!(B256); - - fixed_hash_impl!(H64); - fixed_hash_impl!(H128); - fixed_hash_impl!(H160); - fixed_hash_impl!(H256); - fixed_hash_impl!(H512); - fixed_hash_impl!(H520); - - macro_rules! fixed_uint_impl { - ($t:ty, $n_bytes:tt) => { - impl Decodable for $t { - fn decode(buf: &mut &[u8]) -> Result { - let h = Header::decode(buf)?; - if h.list { - return Err(DecodeError::UnexpectedList) - } - if h.payload_length > $n_bytes { - return Err(DecodeError::Overflow) - } - if buf.remaining() < h.payload_length { - return Err(DecodeError::InputTooShort) - } - // In the case of 0x80, the Header will be decoded, leaving h.payload_length to - // be zero. - // 0x80 is the canonical encoding of 0, so we return 0 here. - if h.payload_length == 0 { - return Ok(<$t>::from(0u8)) - } - let n = <$t>::from_big_endian( - &static_left_pad::<$n_bytes>(&buf[..h.payload_length]) - .ok_or(DecodeError::LeadingZero)?, - ); - buf.advance(h.payload_length); - Ok(n) - } - } - }; - } - - macro_rules! fixed_revm_uint_impl { - ($t:ty, $n_bytes:tt) => { - impl Decodable for $t { - fn decode(buf: &mut &[u8]) -> Result { - let h = Header::decode(buf)?; - if h.list { - return Err(DecodeError::UnexpectedList) - } - if h.payload_length > $n_bytes { - return Err(DecodeError::Overflow) - } - if buf.remaining() < h.payload_length { - return Err(DecodeError::InputTooShort) - } - // In the case of 0x80, the Header will be decoded, leaving h.payload_length to - // be zero. - // 0x80 is the canonical encoding of 0, so we return 0 here. 
- if h.payload_length == 0 { - return Ok(<$t>::from(0u8)) - } - let n = <$t>::from_be_bytes( - static_left_pad::<$n_bytes>(&buf[..h.payload_length]) - .ok_or(DecodeError::LeadingZero)?, - ); - buf.advance(h.payload_length); - Ok(n) - } - } - }; - } - - fixed_revm_uint_impl!(RU256, 32); - fixed_revm_uint_impl!(RU128, 16); - - fixed_uint_impl!(U64, 8); - fixed_uint_impl!(U128, 16); - fixed_uint_impl!(U256, 32); - fixed_uint_impl!(U512, 64); -} - -impl Decodable for [u8; N] { - fn decode(from: &mut &[u8]) -> Result { - let h = Header::decode(from)?; - if h.list { - return Err(DecodeError::UnexpectedList) - } - if h.payload_length != N { - return Err(DecodeError::UnexpectedLength) - } - - let mut to = [0_u8; N]; - to.copy_from_slice(&from[..N]); - from.advance(N); - - Ok(to) - } -} - -impl Decodable for BytesMut { - fn decode(from: &mut &[u8]) -> Result { - let h = Header::decode(from)?; - if h.list { - return Err(DecodeError::UnexpectedList) - } - let mut to = BytesMut::with_capacity(h.payload_length); - to.extend_from_slice(&from[..h.payload_length]); - from.advance(h.payload_length); - - Ok(to) - } -} - -impl Decodable for Bytes { - fn decode(buf: &mut &[u8]) -> Result { - BytesMut::decode(buf).map(BytesMut::freeze) - } -} - -pub struct Rlp<'a> { - payload_view: &'a [u8], -} - -impl<'a> Rlp<'a> { - pub fn new(mut payload: &'a [u8]) -> Result { - let h = Header::decode(&mut payload)?; - if !h.list { - return Err(DecodeError::UnexpectedString) - } - - let payload_view = &payload[..h.payload_length]; - Ok(Self { payload_view }) - } - - pub fn get_next(&mut self) -> Result, DecodeError> { - if self.payload_view.is_empty() { - return Ok(None) - } - - Ok(Some(T::decode(&mut self.payload_view)?)) - } -} - -#[cfg(feature = "alloc")] -impl Decodable for alloc::vec::Vec -where - E: Decodable, -{ - fn decode(buf: &mut &[u8]) -> Result { - let h = Header::decode(buf)?; - if !h.list { - return Err(DecodeError::UnexpectedString) - } - - let payload_view = &mut &buf[..h.payload_length]; - - let mut to = alloc::vec::Vec::new(); - while !payload_view.is_empty() { - to.push(E::decode(payload_view)?); - } - - buf.advance(h.payload_length); - - Ok(to) - } -} - -#[cfg(feature = "smol_str")] -impl Decodable for smol_str::SmolStr { - fn decode(from: &mut &[u8]) -> Result { - let h = Header::decode(from)?; - if h.list { - return Err(DecodeError::UnexpectedList) - } - let data = &from[..h.payload_length]; - let s = match core::str::from_utf8(data) { - Ok(s) => Ok(smol_str::SmolStr::from(s)), - Err(_) => Err(DecodeError::Custom("invalid string")), - }; - from.advance(h.payload_length); - s - } -} - -#[cfg(test)] -mod tests { - extern crate alloc; - - use super::*; - use crate::Encodable; - use alloc::vec; - use core::fmt::Debug; - use ethereum_types::{U128, U256, U512, U64}; - use ethnum::AsU256; - use hex_literal::hex; - - fn check_decode<'a, T, IT>(fixtures: IT) - where - T: Decodable + PartialEq + Debug, - IT: IntoIterator, &'a [u8])>, - { - for (expected, mut input) in fixtures { - assert_eq!(T::decode(&mut input), expected); - if expected.is_ok() { - assert_eq!(input, &[]); - } - } - } - - fn check_decode_list(fixtures: IT) - where - T: Decodable + PartialEq + Debug, - IT: IntoIterator, DecodeError>, &'static [u8])>, - { - for (expected, mut input) in fixtures { - assert_eq!(vec::Vec::::decode(&mut input), expected); - if expected.is_ok() { - assert_eq!(input, &[]); - } - } - } - - #[test] - fn rlp_strings() { - check_decode::(vec![ - (Ok(hex!("00")[..].to_vec().into()), &hex!("00")[..]), - ( - 
Ok(hex!("6f62636465666768696a6b6c6d")[..].to_vec().into()), - &hex!("8D6F62636465666768696A6B6C6D")[..], - ), - (Err(DecodeError::UnexpectedList), &hex!("C0")[..]), - ]) - } - - #[test] - fn rlp_fixed_length() { - check_decode(vec![ - (Ok(hex!("6f62636465666768696a6b6c6d")), &hex!("8D6F62636465666768696A6B6C6D")[..]), - (Err(DecodeError::UnexpectedLength), &hex!("8C6F62636465666768696A6B6C")[..]), - (Err(DecodeError::UnexpectedLength), &hex!("8E6F62636465666768696A6B6C6D6E")[..]), - ]) - } - - #[test] - fn rlp_u64() { - check_decode(vec![ - (Ok(9_u64), &hex!("09")[..]), - (Ok(0_u64), &hex!("80")[..]), - (Ok(0x0505_u64), &hex!("820505")[..]), - (Ok(0xCE05050505_u64), &hex!("85CE05050505")[..]), - (Err(DecodeError::Overflow), &hex!("8AFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::InputTooShort), &hex!("8BFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::UnexpectedList), &hex!("C0")[..]), - (Err(DecodeError::LeadingZero), &hex!("00")[..]), - (Err(DecodeError::NonCanonicalSingleByte), &hex!("8105")[..]), - (Err(DecodeError::LeadingZero), &hex!("8200F4")[..]), - (Err(DecodeError::NonCanonicalSize), &hex!("B8020004")[..]), - ( - Err(DecodeError::Overflow), - &hex!("A101000000000000000000000000000000000000008B000000000000000000000000")[..], - ), - ]) - } - - #[test] - fn rlp_u256() { - check_decode(vec![ - (Ok(9_u8.as_u256()), &hex!("09")[..]), - (Ok(0_u8.as_u256()), &hex!("80")[..]), - (Ok(0x0505_u16.as_u256()), &hex!("820505")[..]), - (Ok(0xCE05050505_u64.as_u256()), &hex!("85CE05050505")[..]), - (Ok(0xFFFFFFFFFFFFFFFFFF7C_u128.as_u256()), &hex!("8AFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::InputTooShort), &hex!("8BFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::UnexpectedList), &hex!("C0")[..]), - (Err(DecodeError::LeadingZero), &hex!("00")[..]), - (Err(DecodeError::NonCanonicalSingleByte), &hex!("8105")[..]), - (Err(DecodeError::LeadingZero), &hex!("8200F4")[..]), - (Err(DecodeError::NonCanonicalSize), &hex!("B8020004")[..]), - ( - Err(DecodeError::Overflow), - &hex!("A101000000000000000000000000000000000000008B000000000000000000000000")[..], - ), - ]) - } - - #[cfg(feature = "ethereum-types")] - #[test] - fn rlp_ethereum_types_u64() { - check_decode(vec![ - (Ok(U64::from(9_u8)), &hex!("09")[..]), - (Ok(U64::from(0_u8)), &hex!("80")[..]), - (Ok(U64::from(0x0505_u16)), &hex!("820505")[..]), - (Ok(U64::from(0xCE05050505_u64)), &hex!("85CE05050505")[..]), - (Err(DecodeError::Overflow), &hex!("8AFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::InputTooShort), &hex!("8BFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::UnexpectedList), &hex!("C0")[..]), - (Err(DecodeError::LeadingZero), &hex!("00")[..]), - (Err(DecodeError::NonCanonicalSingleByte), &hex!("8105")[..]), - (Err(DecodeError::LeadingZero), &hex!("8200F4")[..]), - (Err(DecodeError::NonCanonicalSize), &hex!("B8020004")[..]), - ( - Err(DecodeError::Overflow), - &hex!("A101000000000000000000000000000000000000008B000000000000000000000000")[..], - ), - ]) - } - - #[cfg(feature = "ethereum-types")] - #[test] - fn rlp_ethereum_types_u128() { - check_decode(vec![ - (Ok(U128::from(9_u8)), &hex!("09")[..]), - (Ok(U128::from(0_u8)), &hex!("80")[..]), - (Ok(U128::from(0x0505_u16)), &hex!("820505")[..]), - (Ok(U128::from(0xCE05050505_u64)), &hex!("85CE05050505")[..]), - (Ok(U128::from(0xFFFFFFFFFFFFFFFFFF7C_u128)), &hex!("8AFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::InputTooShort), &hex!("8BFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::UnexpectedList), &hex!("C0")[..]), - (Err(DecodeError::LeadingZero), &hex!("00")[..]), - 
(Err(DecodeError::NonCanonicalSingleByte), &hex!("8105")[..]), - (Err(DecodeError::LeadingZero), &hex!("8200F4")[..]), - (Err(DecodeError::NonCanonicalSize), &hex!("B8020004")[..]), - ( - Err(DecodeError::Overflow), - &hex!("A101000000000000000000000000000000000000008B000000000000000000000000")[..], - ), - ]) - } - - #[cfg(feature = "ethereum-types")] - #[test] - fn rlp_ethereum_types_u256() { - check_decode(vec![ - (Ok(U256::from(9_u8)), &hex!("09")[..]), - (Ok(U256::from(0_u8)), &hex!("80")[..]), - (Ok(U256::from(0x0505_u16)), &hex!("820505")[..]), - (Ok(U256::from(0xCE05050505_u64)), &hex!("85CE05050505")[..]), - (Ok(U256::from(0xFFFFFFFFFFFFFFFFFF7C_u128)), &hex!("8AFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::InputTooShort), &hex!("8BFFFFFFFFFFFFFFFFFF7C")[..]), - (Err(DecodeError::UnexpectedList), &hex!("C0")[..]), - (Err(DecodeError::LeadingZero), &hex!("00")[..]), - (Err(DecodeError::NonCanonicalSingleByte), &hex!("8105")[..]), - (Err(DecodeError::LeadingZero), &hex!("8200F4")[..]), - (Err(DecodeError::NonCanonicalSize), &hex!("B8020004")[..]), - ( - Err(DecodeError::Overflow), - &hex!("A101000000000000000000000000000000000000008B000000000000000000000000")[..], - ), - ]) - } - - #[cfg(feature = "ethereum-types")] - #[test] - fn rlp_ethereum_types_u512() { - check_decode(vec![ - (Ok(U512::from(9_u8)), &hex!("09")[..]), - (Ok(U512::from(0_u8)), &hex!("80")[..]), - (Ok(U512::from(0x0505_u16)), &hex!("820505")[..]), - (Ok(U512::from(0xCE05050505_u64)), &hex!("85CE05050505")[..]), - ( - Ok(U512::from(0xFFFFFFFFFFFFFFFFFF7C_u128)), - &hex!("8AFFFFFFFFFFFFFFFFFF7C")[..], - ), - ( - Err(DecodeError::InputTooShort), - &hex!("8BFFFFFFFFFFFFFFFFFF7C")[..], - ), - (Err(DecodeError::UnexpectedList), &hex!("C0")[..]), - (Err(DecodeError::LeadingZero), &hex!("00")[..]), - (Err(DecodeError::NonCanonicalSingleByte), &hex!("8105")[..]), - (Err(DecodeError::LeadingZero), &hex!("8200F4")[..]), - (Err(DecodeError::NonCanonicalSize), &hex!("B8020004")[..]), - ( - Ok(U512::from_dec_str("115792089237316195423570985008687907853269984676653278628940326933415738736640").unwrap()), - &hex!("A101000000000000000000000000000000000000008B000000000000000000000000")[..], - ), - ( - Err(DecodeError::Overflow), - &hex!("B84101000000000000000000000000000000000000008B000000000000000000000000000000000000000000000000000000000000008B000000000000000000000000")[..], - ), - ]) - } - - #[test] - fn rlp_vectors() { - check_decode_list(vec![ - (Ok(vec![]), &hex!("C0")[..]), - (Ok(vec![0xBBCCB5_u64, 0xFFC0B5_u64]), &hex!("C883BBCCB583FFC0B5")[..]), - ]) - } - - #[cfg(feature = "smol_str")] - #[test] - fn rlp_smol_str() { - use smol_str::SmolStr; - let mut b = BytesMut::new(); - "test smol str".to_string().encode(&mut b); - check_decode::(vec![ - (Ok(SmolStr::new("test smol str")), b.as_ref()), - (Err(DecodeError::UnexpectedList), &hex!("C0")[..]), - ]) - } -} diff --git a/crates/rlp/src/encode.rs b/crates/rlp/src/encode.rs deleted file mode 100644 index cbad6be186..0000000000 --- a/crates/rlp/src/encode.rs +++ /dev/null @@ -1,746 +0,0 @@ -use crate::types::*; -use arrayvec::ArrayVec; -use auto_impl::auto_impl; -use bytes::{BufMut, Bytes, BytesMut}; -use core::borrow::Borrow; - -macro_rules! to_be_bytes_trimmed { - ($be:ident, $x:expr) => {{ - $be = $x.to_be_bytes(); - &$be[($x.leading_zeros() / 8) as usize..] - }}; -} - -impl Header { - /// Encodes the header into the `out` buffer. 
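A minimal sketch of the two header forms produced by `encode` below, assuming the `Header` type from this file: a single byte for payloads shorter than 56 bytes, and a length-of-length prefix otherwise.

// Illustrative sketch of the short and long header forms.
use bytes::BytesMut;
use reth_rlp::Header;

fn main() {
    // Short form: a 3-byte string payload encodes as the single byte 0x83.
    let mut out = BytesMut::new();
    Header { list: false, payload_length: 3 }.encode(&mut out);
    assert_eq!(out.as_ref(), &[0x83u8][..]);

    // Long form: a 56-byte list payload needs a length-of-length prefix,
    // so the header is 0xF8 (0xF7 + 1 length byte) followed by 0x38 (56).
    let mut out = BytesMut::new();
    Header { list: true, payload_length: 56 }.encode(&mut out);
    assert_eq!(out.as_ref(), &[0xF8u8, 0x38][..]);
}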
- pub fn encode(&self, out: &mut dyn BufMut) { - if self.payload_length < 56 { - let code = if self.list { EMPTY_LIST_CODE } else { EMPTY_STRING_CODE }; - out.put_u8(code + self.payload_length as u8); - } else { - let len_be; - let len_be = to_be_bytes_trimmed!(len_be, self.payload_length); - let code = if self.list { 0xF7 } else { 0xB7 }; - out.put_u8(code + len_be.len() as u8); - out.put_slice(len_be); - } - } - - /// Returns the length of the encoded header - pub fn length(&self) -> usize { - let mut out = BytesMut::new(); - self.encode(&mut out); - out.len() - } -} - -pub const fn length_of_length(payload_length: usize) -> usize { - if payload_length < 56 { - 1 - } else { - 1 + 8 - payload_length.leading_zeros() as usize / 8 - } -} - -#[doc(hidden)] -pub const fn const_add(a: usize, b: usize) -> usize { - a + b -} - -/// A trait for types that have a maximum encoded length. -/// -/// # Safety -/// Invalid value can cause the encoder to crash. -#[doc(hidden)] -pub unsafe trait MaxEncodedLen: Encodable {} - -/// A trait for types that have a maximum encoded length. -/// -/// # Safety -/// Invalid value can cause the encoder to crash. -#[doc(hidden)] -pub unsafe trait MaxEncodedLenAssoc: Encodable { - const LEN: usize; -} - -/// Use this to define length of an encoded entity -/// -/// # Safety -/// Invalid value can cause the encoder to crash. -#[macro_export] -macro_rules! impl_max_encoded_len { - ($t:ty, $len:block) => { - unsafe impl MaxEncodedLen<{ $len }> for $t {} - unsafe impl MaxEncodedLenAssoc for $t { - const LEN: usize = $len; - } - }; -} - -#[auto_impl(&)] -#[cfg_attr(feature = "alloc", auto_impl(Box, Arc))] -pub trait Encodable { - /// Appends the rlp encoded object to the specified output buffer. - fn encode(&self, out: &mut dyn BufMut); - - /// Returns the length of the encoded object. - /// - /// NOTE: This includes the length of the rlp [Header]. - fn length(&self) -> usize { - let mut out = BytesMut::new(); - self.encode(&mut out); - out.len() - } -} - -impl<'a> Encodable for &'a [u8] { - fn encode(&self, out: &mut dyn BufMut) { - if self.len() != 1 || self[0] >= EMPTY_STRING_CODE { - Header { list: false, payload_length: self.len() }.encode(out); - } - out.put_slice(self); - } - - fn length(&self) -> usize { - let mut len = self.len(); - if self.len() != 1 || self[0] >= EMPTY_STRING_CODE { - len += length_of_length(self.len()); - } - len - } -} - -impl Encodable for [u8; LEN] { - fn encode(&self, out: &mut dyn BufMut) { - (self as &[u8]).encode(out) - } - - fn length(&self) -> usize { - (self as &[u8]).length() - } -} - -unsafe impl MaxEncodedLenAssoc for [u8; LEN] { - const LEN: usize = LEN + length_of_length(LEN); -} - -macro_rules! encodable_uint { - ($t:ty) => { - #[allow(clippy::cmp_owned)] - impl Encodable for $t { - fn length(&self) -> usize { - if *self < <$t>::from(EMPTY_STRING_CODE) { - 1 - } else { - 1 + (<$t>::BITS as usize / 8) - (self.leading_zeros() as usize / 8) - } - } - - fn encode(&self, out: &mut dyn BufMut) { - if *self == 0 { - out.put_u8(EMPTY_STRING_CODE); - } else if *self < <$t>::from(EMPTY_STRING_CODE) { - out.put_u8(u8::try_from(*self).unwrap()); - } else { - let be; - let be = to_be_bytes_trimmed!(be, *self); - out.put_u8(EMPTY_STRING_CODE + be.len() as u8); - out.put_slice(be); - } - } - } - }; -} - -macro_rules! 
max_encoded_len_uint { - ($t:ty) => { - impl_max_encoded_len!($t, { - length_of_length(<$t>::MAX.to_be_bytes().len()) + <$t>::MAX.to_be_bytes().len() - }); - }; -} - -encodable_uint!(usize); -max_encoded_len_uint!(usize); - -encodable_uint!(u8); -max_encoded_len_uint!(u8); - -encodable_uint!(u16); -max_encoded_len_uint!(u16); - -encodable_uint!(u32); -max_encoded_len_uint!(u32); - -encodable_uint!(u64); -max_encoded_len_uint!(u64); - -encodable_uint!(u128); -max_encoded_len_uint!(u128); - -impl Encodable for bool { - fn encode(&self, out: &mut dyn BufMut) { - (*self as u8).encode(out) - } - - fn length(&self) -> usize { - (*self as u8).length() - } -} - -impl_max_encoded_len!(bool, { ::LEN }); - -#[cfg(feature = "smol_str")] -impl Encodable for smol_str::SmolStr { - fn encode(&self, out: &mut dyn BufMut) { - self.as_bytes().encode(out); - } - fn length(&self) -> usize { - self.as_bytes().length() - } -} - -#[cfg(feature = "std")] -impl Encodable for std::net::IpAddr { - fn encode(&self, out: &mut dyn BufMut) { - match self { - std::net::IpAddr::V4(ref o) => (&o.octets()[..]).encode(out), - std::net::IpAddr::V6(ref o) => (&o.octets()[..]).encode(out), - } - } -} - -#[cfg(feature = "ethnum")] -mod ethnum_support { - use super::*; - - encodable_uint!(ethnum::U256); - impl_max_encoded_len!(ethnum::U256, { length_of_length(32) + 32 }); -} - -#[cfg(feature = "ethereum-types")] -mod ethereum_types_support { - use super::*; - use ethereum_types::*; - - use revm_primitives::{ruint::aliases::U128 as RU128, B160, B256, U256 as RU256}; - - macro_rules! fixed_hash_impl { - ($t:ty) => { - impl Encodable for $t { - fn length(&self) -> usize { - self.0.length() - } - - fn encode(&self, out: &mut dyn bytes::BufMut) { - self.0.encode(out) - } - } - impl_max_encoded_len!($t, { length_of_length(<$t>::len_bytes()) + <$t>::len_bytes() }); - }; - } - - fixed_hash_impl!(B160); - fixed_hash_impl!(B256); - fixed_hash_impl!(H64); - fixed_hash_impl!(H128); - fixed_hash_impl!(H160); - fixed_hash_impl!(H256); - fixed_hash_impl!(H512); - fixed_hash_impl!(H520); - - macro_rules! fixed_uint_impl { - ($t:ty, $n_bytes:tt) => { - impl Encodable for $t { - fn length(&self) -> usize { - if *self < <$t>::from(EMPTY_STRING_CODE) { - 1 - } else { - 1 + $n_bytes - (self.leading_zeros() as usize / 8) - } - } - - fn encode(&self, out: &mut dyn bytes::BufMut) { - let mut temp_arr = [0u8; $n_bytes]; - self.to_big_endian(&mut temp_arr[..]); - // cut the leading zeros after converting to big endian - let sliced = &temp_arr[(self.leading_zeros() / 8) as usize..]; - sliced.encode(out); - } - } - }; - } - - fixed_uint_impl!(U64, 8); - fixed_uint_impl!(U128, 16); - fixed_uint_impl!(U256, 32); - fixed_uint_impl!(U512, 64); - - macro_rules! fixed_revm_uint_impl { - ($t:ty, $n_bytes:tt) => { - impl Encodable for $t { - fn length(&self) -> usize { - if *self < <$t>::from(EMPTY_STRING_CODE) { - 1 - } else { - 1 + self.byte_len() - } - } - - fn encode(&self, out: &mut dyn bytes::BufMut) { - let be = self.to_be_bytes::<$n_bytes>(); - (&be[self.leading_zeros() / 8..]).encode(out); - } - } - }; - } - - fixed_revm_uint_impl!(RU128, 16); - fixed_revm_uint_impl!(RU256, 32); - impl_max_encoded_len!(RU256, { length_of_length(32) + 32 }); -} - -macro_rules! 
slice_impl { - ($t:ty) => { - impl $crate::Encodable for $t { - fn length(&self) -> usize { - (&self[..]).length() - } - - fn encode(&self, out: &mut dyn bytes::BufMut) { - (&self[..]).encode(out) - } - } - }; -} - -#[cfg(feature = "alloc")] -mod alloc_support { - use super::*; - - extern crate alloc; - - impl Encodable for ::alloc::vec::Vec - where - T: Encodable, - { - fn encode(&self, out: &mut dyn BufMut) { - encode_list(self, out) - } - - fn length(&self) -> usize { - list_length(self) - } - } - - impl Encodable for ::alloc::string::String { - fn encode(&self, out: &mut dyn BufMut) { - self.as_bytes().encode(out); - } - fn length(&self) -> usize { - self.as_bytes().length() - } - } -} - -impl Encodable for &str { - fn encode(&self, out: &mut dyn BufMut) { - self.as_bytes().encode(out); - } - fn length(&self) -> usize { - self.as_bytes().length() - } -} - -slice_impl!(Bytes); -slice_impl!(BytesMut); - -fn rlp_list_header(v: &[K]) -> Header -where - E: Encodable + ?Sized, - K: Borrow, -{ - let mut h = Header { list: true, payload_length: 0 }; - for x in v { - h.payload_length += x.borrow().length(); - } - h -} - -pub fn list_length(v: &[K]) -> usize -where - E: Encodable, - K: Borrow, -{ - let payload_length = rlp_list_header(v).payload_length; - length_of_length(payload_length) + payload_length -} - -/// RLP encode the list of items. -pub fn encode_list(v: &[K], out: &mut dyn BufMut) -where - E: Encodable + ?Sized, - K: Borrow, -{ - let h = rlp_list_header(v); - h.encode(out); - for x in v { - x.borrow().encode(out); - } -} - -/// RLP encode an iterator over items. -/// -/// NOTE: This function clones the iterator. If the items are expensive to clone, consider -/// using [encode_list] instead. -pub fn encode_iter(i: impl Iterator + Clone, out: &mut dyn BufMut) -where - K: Encodable, -{ - let mut h = Header { list: true, payload_length: 0 }; - for x in i.clone() { - h.payload_length += x.length(); - } - - h.encode(out); - for x in i { - x.encode(out); - } -} - -pub fn encode_fixed_size, const LEN: usize>(v: &E) -> ArrayVec { - let mut out = ArrayVec::from([0_u8; LEN]); - - let mut s = out.as_mut_slice(); - - v.encode(&mut s); - - let final_len = LEN - s.len(); - out.truncate(final_len); - - out -} - -#[cfg(feature = "kzg")] -mod kzg_support { - extern crate c_kzg; - - use super::BufMut; - use crate::{Decodable, DecodeError, Encodable}; - use c_kzg::{Blob, Bytes48, KzgCommitment, KzgProof, BYTES_PER_BLOB, BYTES_PER_COMMITMENT}; - use core::ops::Deref; - - impl Encodable for Blob { - fn encode(&self, out: &mut dyn BufMut) { - // Deref is implemented to get the underlying bytes - self.deref().encode(out); - } - - fn length(&self) -> usize { - self.deref().length() - } - } - - impl Decodable for Blob { - fn decode(buf: &mut &[u8]) -> Result { - let bytes: [u8; BYTES_PER_BLOB] = Decodable::decode(buf)?; - Ok(Blob::from(bytes)) - } - } - - impl Encodable for Bytes48 { - fn encode(&self, out: &mut dyn BufMut) { - self.deref().encode(out); - } - - fn length(&self) -> usize { - self.deref().length() - } - } - - impl Decodable for Bytes48 { - fn decode(buf: &mut &[u8]) -> Result { - let bytes: [u8; BYTES_PER_COMMITMENT] = Decodable::decode(buf)?; - Ok(Bytes48::from(bytes)) - } - } - - /// Only [Encodable] is implemented for [KzgCommitment] because this is a validated type - it - /// should be decoded using [Decodable] into a [Bytes48] type, validated, _then_ converted - /// into a [KzgCommitment]. 
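The free-standing list helpers defined earlier in this file (`encode_list`, `encode_iter`, `list_length`) pair naturally with a short usage sketch. The one below assumes `hex-literal` as a dev-dependency, as in the tests further down, and reuses their `c883ffccb583ffc0b5` fixture.

// Sketch of the list helpers defined in this file.
use bytes::BytesMut;
use hex_literal::hex;
use reth_rlp::{encode_iter, encode_list, list_length};

fn main() {
    let items = [0xFFCCB5_u64, 0xFFC0B5_u64];

    // `encode_list` prefixes the concatenated item encodings with a list header.
    let mut out = BytesMut::new();
    encode_list(&items, &mut out);
    assert_eq!(out.as_ref(), &hex!("c883ffccb583ffc0b5")[..]);
    assert_eq!(out.len(), list_length(&items));

    // `encode_iter` produces the same bytes, but walks the iterator twice:
    // once to compute the payload length and once to write the payload.
    let mut out2 = BytesMut::new();
    encode_iter(items.iter(), &mut out2);
    assert_eq!(out, out2);
}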
- impl Encodable for KzgCommitment { - fn encode(&self, out: &mut dyn BufMut) { - self.deref().encode(out); - } - - fn length(&self) -> usize { - self.deref().length() - } - } - - /// Only [Encodable] is implemented for [KzgProof] because this is a validated type - it should - /// be decoded using [Decodable] into a [Bytes48] type, validated, _then_ converted into a - /// [KzgProof]. - impl Encodable for KzgProof { - fn encode(&self, out: &mut dyn BufMut) { - self.deref().encode(out); - } - - fn length(&self) -> usize { - self.deref().length() - } - } -} - -#[cfg(test)] -mod tests { - extern crate alloc; - - use super::*; - use alloc::vec; - use bytes::BytesMut; - use hex_literal::hex; - - fn encoded(t: T) -> BytesMut { - let mut out = BytesMut::new(); - t.encode(&mut out); - out - } - - fn encoded_list(t: &[T]) -> BytesMut { - let mut out1 = BytesMut::new(); - encode_list(t, &mut out1); - - let v = t.to_vec(); - assert_eq!(out1.len(), v.length()); - - let mut out2 = BytesMut::new(); - v.encode(&mut out2); - assert_eq!(out1, out2); - - out1 - } - - fn encoded_iter<'a, T: Encodable + 'a>(iter: impl Iterator + Clone) -> BytesMut { - let mut out = BytesMut::new(); - encode_iter(iter, &mut out); - out - } - - #[test] - fn rlp_str() { - assert_eq!(encoded("")[..], hex!("80")[..]); - assert_eq!(encoded("{")[..], hex!("7b")[..]); - assert_eq!(encoded("test str")[..], hex!("887465737420737472")[..]); - } - - #[test] - fn rlp_strings() { - assert_eq!(encoded(hex!(""))[..], hex!("80")[..]); - assert_eq!(encoded(hex!("7B"))[..], hex!("7b")[..]); - assert_eq!(encoded(hex!("80"))[..], hex!("8180")[..]); - assert_eq!(encoded(hex!("ABBA"))[..], hex!("82abba")[..]); - } - - fn u8_fixtures() -> impl IntoIterator { - vec![ - (0, &hex!("80")[..]), - (1, &hex!("01")[..]), - (0x7F, &hex!("7F")[..]), - (0x80, &hex!("8180")[..]), - ] - } - - fn c>( - it: impl IntoIterator, - ) -> impl Iterator { - it.into_iter().map(|(k, v)| (k.into(), v)) - } - - fn u16_fixtures() -> impl IntoIterator { - c(u8_fixtures()).chain(vec![(0x400, &hex!("820400")[..])]) - } - - fn u32_fixtures() -> impl IntoIterator { - c(u16_fixtures()) - .chain(vec![(0xFFCCB5, &hex!("83ffccb5")[..]), (0xFFCCB5DD, &hex!("84ffccb5dd")[..])]) - } - - fn u64_fixtures() -> impl IntoIterator { - c(u32_fixtures()).chain(vec![ - (0xFFCCB5DDFF, &hex!("85ffccb5ddff")[..]), - (0xFFCCB5DDFFEE, &hex!("86ffccb5ddffee")[..]), - (0xFFCCB5DDFFEE14, &hex!("87ffccb5ddffee14")[..]), - (0xFFCCB5DDFFEE1483, &hex!("88ffccb5ddffee1483")[..]), - ]) - } - - fn u128_fixtures() -> impl IntoIterator { - c(u64_fixtures()).chain(vec![( - 0x10203E405060708090A0B0C0D0E0F2, - &hex!("8f10203e405060708090a0b0c0d0e0f2")[..], - )]) - } - - #[cfg(feature = "ethnum")] - fn u256_fixtures() -> impl IntoIterator { - c(u128_fixtures()).chain(vec![( - ethnum::U256::from_str_radix( - "0100020003000400050006000700080009000A0B4B000C000D000E01", - 16, - ) - .unwrap(), - &hex!("9c0100020003000400050006000700080009000a0b4b000c000d000e01")[..], - )]) - } - - #[cfg(feature = "ethereum-types")] - fn eth_u64_fixtures() -> impl IntoIterator { - c(u64_fixtures()).chain(vec![ - ( - ethereum_types::U64::from_str_radix("FFCCB5DDFF", 16).unwrap(), - &hex!("85ffccb5ddff")[..], - ), - ( - ethereum_types::U64::from_str_radix("FFCCB5DDFFEE", 16).unwrap(), - &hex!("86ffccb5ddffee")[..], - ), - ( - ethereum_types::U64::from_str_radix("FFCCB5DDFFEE14", 16).unwrap(), - &hex!("87ffccb5ddffee14")[..], - ), - ( - ethereum_types::U64::from_str_radix("FFCCB5DDFFEE1483", 16).unwrap(), - &hex!("88ffccb5ddffee1483")[..], - 
), - ]) - } - - #[cfg(feature = "ethereum-types")] - fn eth_u128_fixtures() -> impl IntoIterator { - c(u128_fixtures()).chain(vec![( - ethereum_types::U128::from_str_radix("10203E405060708090A0B0C0D0E0F2", 16).unwrap(), - &hex!("8f10203e405060708090a0b0c0d0e0f2")[..], - )]) - } - - #[cfg(feature = "ethereum-types")] - fn eth_u256_fixtures() -> impl IntoIterator { - c(u128_fixtures()).chain(vec![( - ethereum_types::U256::from_str_radix( - "0100020003000400050006000700080009000A0B4B000C000D000E01", - 16, - ) - .unwrap(), - &hex!("9c0100020003000400050006000700080009000a0b4b000c000d000e01")[..], - )]) - } - - #[cfg(feature = "ethereum-types")] - fn eth_u512_fixtures() -> impl IntoIterator { - c(eth_u256_fixtures()).chain(vec![( - ethereum_types::U512::from_str_radix( - "0100020003000400050006000700080009000A0B4B000C000D000E010100020003000400050006000700080009000A0B4B000C000D000E01", - 16, - ) - .unwrap(), - &hex!("b8380100020003000400050006000700080009000A0B4B000C000D000E010100020003000400050006000700080009000A0B4B000C000D000E01")[..], - )]) - } - - macro_rules! uint_rlp_test { - ($fixtures:expr) => { - for (input, output) in $fixtures { - assert_eq!(encoded(input), output); - } - }; - } - - #[test] - fn rlp_uints() { - uint_rlp_test!(u8_fixtures()); - uint_rlp_test!(u16_fixtures()); - uint_rlp_test!(u32_fixtures()); - uint_rlp_test!(u64_fixtures()); - uint_rlp_test!(u128_fixtures()); - #[cfg(feature = "ethnum")] - uint_rlp_test!(u256_fixtures()); - } - - #[cfg(feature = "ethereum-types")] - #[test] - fn rlp_eth_uints() { - uint_rlp_test!(eth_u64_fixtures()); - uint_rlp_test!(eth_u128_fixtures()); - uint_rlp_test!(eth_u256_fixtures()); - uint_rlp_test!(eth_u512_fixtures()); - } - - #[test] - fn rlp_list() { - assert_eq!(encoded_list::(&[]), &hex!("c0")[..]); - assert_eq!(encoded_list::(&[0x00u8]), &hex!("c180")[..]); - assert_eq!(encoded_list(&[0xFFCCB5_u64, 0xFFC0B5_u64]), &hex!("c883ffccb583ffc0b5")[..]); - } - - #[test] - fn rlp_iter() { - assert_eq!(encoded_iter::([].iter()), &hex!("c0")[..]); - assert_eq!( - encoded_iter([0xFFCCB5_u64, 0xFFC0B5_u64].iter()), - &hex!("c883ffccb583ffc0b5")[..] - ); - } - - #[cfg(feature = "smol_str")] - #[test] - fn rlp_smol_str() { - use smol_str::SmolStr; - assert_eq!(encoded(SmolStr::new(""))[..], hex!("80")[..]); - let mut b = BytesMut::new(); - "test smol str".to_string().encode(&mut b); - assert_eq!(&encoded(SmolStr::new("test smol str"))[..], b.as_ref()); - let mut b = BytesMut::new(); - "abcdefgh".to_string().encode(&mut b); - assert_eq!(&encoded(SmolStr::new("abcdefgh"))[..], b.as_ref()); - } - - #[test] - fn to_be_bytes_trimmed() { - macro_rules! test_to_be_bytes_trimmed { - ($($x:expr => $expected:expr),+ $(,)?) => {$( - let be; - assert_eq!(to_be_bytes_trimmed!(be, $x), $expected); - )+}; - } - - test_to_be_bytes_trimmed! 
{ - 0u8 => [], - 0u16 => [], - 0u32 => [], - 0u64 => [], - 0usize => [], - 0u128 => [], - - 1u8 => [1], - 1u16 => [1], - 1u32 => [1], - 1u64 => [1], - 1usize => [1], - 1u128 => [1], - - u8::MAX => [0xff], - u16::MAX => [0xff, 0xff], - u32::MAX => [0xff, 0xff, 0xff, 0xff], - u64::MAX => [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff], - u128::MAX => [0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff], - - 1u8 => [1], - 255u8 => [255], - 256u16 => [1, 0], - 65535u16 => [255, 255], - 65536u32 => [1, 0, 0], - 65536u64 => [1, 0, 0], - } - } -} diff --git a/crates/rlp/src/lib.rs b/crates/rlp/src/lib.rs deleted file mode 100644 index 897b3d1787..0000000000 --- a/crates/rlp/src/lib.rs +++ /dev/null @@ -1,43 +0,0 @@ -#![cfg_attr(docsrs, feature(doc_cfg))] -#![doc( - html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", - html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" -)] -#![warn(unreachable_pub, rustdoc::all)] -#![deny(unused_must_use)] -#![doc(test( - no_crate_inject, - attr(deny(warnings, rust_2018_idioms), allow(dead_code, unused_variables)) -))] -#![cfg_attr(not(feature = "std"), no_std)] - -//! A fast RLP implementation. -//! -//! ## Feature Flags -//! -//! This crate works on `#[no_std]` targets if `std` is not enabled. -//! -//! - `derive`: Enables derive macros. -//! - `std`: Uses the Rust standard library. - -#[cfg(feature = "alloc")] -extern crate alloc; - -mod decode; -mod encode; -mod types; - -pub use bytes::BufMut; - -pub use decode::{Decodable, DecodeError, Rlp}; -pub use encode::{ - const_add, encode_fixed_size, encode_iter, encode_list, length_of_length, list_length, - Encodable, MaxEncodedLen, MaxEncodedLenAssoc, -}; -pub use types::*; - -#[cfg(feature = "derive")] -pub use reth_rlp_derive::{ - RlpDecodable, RlpDecodableWrapper, RlpEncodable, RlpEncodableWrapper, RlpMaxEncodedLen, -}; diff --git a/crates/rlp/src/types.rs b/crates/rlp/src/types.rs deleted file mode 100644 index 4cbc6f10f6..0000000000 --- a/crates/rlp/src/types.rs +++ /dev/null @@ -1,8 +0,0 @@ -#[derive(Debug, Clone, Default, PartialEq, Eq)] -pub struct Header { - pub list: bool, - pub payload_length: usize, -} - -pub const EMPTY_STRING_CODE: u8 = 0x80; -pub const EMPTY_LIST_CODE: u8 = 0xC0; diff --git a/crates/rlp/tests/rlp.rs b/crates/rlp/tests/rlp.rs deleted file mode 100644 index c7c3e52e41..0000000000 --- a/crates/rlp/tests/rlp.rs +++ /dev/null @@ -1,135 +0,0 @@ -use bytes::{Bytes, BytesMut}; -use ethnum::U256; -use hex_literal::hex; -use reth_rlp::{DecodeError, *}; - -#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable)] -struct Item { - a: Bytes, -} - -#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable, RlpMaxEncodedLen)] -struct Test4Numbers { - a: u8, - b: u64, - c: U256, - #[rlp(skip)] - #[rlp(default)] - s: U256, - d: U256, -} - -#[derive(Debug, PartialEq, RlpEncodableWrapper, RlpDecodableWrapper)] -pub struct W(Test4Numbers); - -#[derive(Debug, PartialEq, RlpEncodable)] -struct Test4NumbersGenerics<'a, D: Encodable> { - a: u8, - b: u64, - c: &'a U256, - d: &'a D, -} - -#[derive(Debug, PartialEq, RlpEncodable, RlpDecodable)] -#[rlp(trailing)] -struct TestOpt { - a: u8, - b: u64, - c: Option, - d: Option, -} - -fn encoded(t: &T) -> BytesMut { - let mut out = BytesMut::new(); - t.encode(&mut out); - out -} - -#[test] -fn test_encode_item() { - let item = Item { a: b"dog".to_vec().into() }; - - let expected = 
vec![0xc4, 0x83, b'd', b'o', b'g']; - let out = encoded(&item); - assert_eq!(&*out, expected); - - let decoded = Decodable::decode(&mut &*expected).expect("decode failure"); - assert_eq!(item, decoded); - - let item = Test4Numbers { - a: 0x05, - b: 0xdeadbeefbaadcafe, - c: U256::from_be_bytes(hex!( - "56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421" - )), - s: U256::from_be_bytes(hex!( - "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" - )), - d: U256::from_be_bytes(hex!( - "c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470" - )), - }; - - let expected = hex!("f84c0588deadbeefbaadcafea056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470").to_vec(); - let out = encoded(&item); - assert_eq!(&*out, expected); - - let out = reth_rlp::encode_fixed_size(&item); - assert_eq!(&*out, expected); - - let decoded: Test4Numbers = Decodable::decode(&mut &*expected).unwrap(); - assert_eq!(decoded.a, item.a); - assert_eq!(decoded.b, item.b); - assert_eq!(decoded.c, item.c); - assert_eq!(decoded.d, item.d); - assert_eq!(decoded.s, U256::ZERO); - - let mut rlp_view = Rlp::new(&expected).unwrap(); - assert_eq!(rlp_view.get_next().unwrap(), Some(item.a)); - assert_eq!(rlp_view.get_next().unwrap(), Some(item.b)); - assert_eq!(rlp_view.get_next().unwrap(), Some(item.c)); - assert_eq!(rlp_view.get_next().unwrap(), Some(item.d)); - assert_eq!(rlp_view.get_next::().unwrap(), None); - - assert_eq!( - encoded(&Test4NumbersGenerics { a: item.a, b: item.b, c: &item.c, d: &item.d }), - expected - ); - - assert_eq!(encoded(&W(item)), expected); - assert_eq!(W::decode(&mut &*expected).unwrap().0, decoded); - assert_eq!(Test4Numbers::LEN, 79); -} - -#[test] -fn invalid_decode_sideeffect() { - let fixture = hex!("f84d0588deadbeefbaadcafea056e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421a0c5d2460186f7233c927e7db2dcc703c0e500b653ca82273b7bfad8045d85a470"); - let mut sl: &[u8] = &fixture; - - assert_eq!(Test4Numbers::decode(&mut sl), Err(DecodeError::InputTooShort)); - - assert_eq!(sl.len(), fixture.len()); -} - -#[test] -fn test_opt_fields_roundtrip() { - let expected = hex!("c20102"); - let item = TestOpt { a: 1, b: 2, c: None, d: None }; - assert_eq!(&*encoded(&item), expected); - assert_eq!(TestOpt::decode(&mut &expected[..]).unwrap(), item); - - let expected = hex!("c3010203"); - let item = TestOpt { a: 1, b: 2, c: Some(3), d: None }; - assert_eq!(&*encoded(&item), expected); - assert_eq!(TestOpt::decode(&mut &expected[..]).unwrap(), item); - - let expected = hex!("c401020304"); - let item = TestOpt { a: 1, b: 2, c: Some(3), d: Some(4) }; - assert_eq!(&*encoded(&item), expected); - assert_eq!(TestOpt::decode(&mut &expected[..]).unwrap(), item); - - let expected = hex!("c401028004"); - let item = TestOpt { a: 1, b: 2, c: None, d: Some(4) }; - assert_eq!(&*encoded(&item), expected); - assert_eq!(TestOpt::decode(&mut &expected[..]).unwrap(), item); -} diff --git a/crates/rpc/ipc/src/lib.rs b/crates/rpc/ipc/src/lib.rs index 84fba7177a..4cfc537fcd 100644 --- a/crates/rpc/ipc/src/lib.rs +++ b/crates/rpc/ipc/src/lib.rs @@ -7,7 +7,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] 
#![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/rpc/rpc-api/src/debug.rs b/crates/rpc/rpc-api/src/debug.rs index 8422a17535..1eb498aad1 100644 --- a/crates/rpc/rpc-api/src/debug.rs +++ b/crates/rpc/rpc-api/src/debug.rs @@ -1,5 +1,5 @@ use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::{BlockId, BlockNumberOrTag, Bytes, H160, H256}; +use reth_primitives::{Address, BlockId, BlockNumberOrTag, Bytes, B256}; use reth_rpc_types::{ trace::geth::{ BlockTraceResult, GethDebugTracingCallOptions, GethDebugTracingOptions, GethTrace, @@ -22,7 +22,7 @@ pub trait DebugApi { /// Returns a EIP-2718 binary-encoded transaction. #[method(name = "getRawTransaction")] - async fn raw_transaction(&self, hash: H256) -> RpcResult; + async fn raw_transaction(&self, hash: B256) -> RpcResult; /// Returns an array of EIP-2718 binary-encoded receipts. #[method(name = "getRawReceipts")] @@ -61,7 +61,7 @@ pub trait DebugApi { #[method(name = "traceBlockByHash")] async fn debug_trace_block_by_hash( &self, - block: H256, + block: B256, opts: Option, ) -> RpcResult>; @@ -82,7 +82,7 @@ pub trait DebugApi { #[method(name = "traceTransaction")] async fn debug_trace_transaction( &self, - tx_hash: H256, + tx_hash: B256, opts: Option, ) -> RpcResult; @@ -103,24 +103,28 @@ pub trait DebugApi { opts: Option, ) -> RpcResult; - /// The `debug_traceCallMany` method lets you run an `eth_callmany` within the context of the + /// The `debug_traceCallMany` method lets you run an `eth_callMany` within the context of the /// given block execution using the final state of parent block as the base followed by n - /// transactions + /// transactions. /// /// The first argument is a list of bundles. Each bundle can overwrite the block headers. This /// will affect all transaction in that bundle. - /// BlockNumber and transaction_index are optinal. Transaction_index - /// specifys the number of tx in the block to replay and -1 means all transactions should be + /// BlockNumber and transaction_index are optional. Transaction_index + /// specifies the number of tx in the block to replay and -1 means all transactions should be /// replayed. /// The trace can be configured similar to `debug_traceTransaction`. /// State override apply to all bundles. + /// + /// This methods is similar to many `eth_callMany`, hence this returns nested lists of traces. + /// Where the length of the outer list is the number of bundles and the length of the inner list + /// (`Vec`) is the number of transactions in the bundle. #[method(name = "traceCallMany")] async fn debug_trace_call_many( &self, bundles: Vec, state_context: Option, opts: Option, - ) -> RpcResult>; + ) -> RpcResult>>; /// Sets the logging backtrace location. When a backtrace location is set and a log message is /// emitted at that location, the stack of the goroutine executing the log statement will @@ -212,8 +216,8 @@ pub trait DebugApi { #[method(name = "getModifiedAccountsByHash")] async fn debug_get_modified_accounts_by_hash( &self, - start_hash: H256, - end_hash: H256, + start_hash: B256, + end_hash: B256, ) -> RpcResult<()>; /// Returns all accounts that have changed between the two blocks specified. 
A change is defined @@ -234,7 +238,7 @@ pub trait DebugApi { #[method(name = "intermediateRoots")] async fn debug_intermediate_roots( &self, - block_hash: H256, + block_hash: B256, opts: Option, ) -> RpcResult<()>; @@ -250,7 +254,7 @@ pub trait DebugApi { /// Returns the preimage for a sha3 hash, if known. #[method(name = "preimage")] - async fn debug_preimage(&self, hash: H256) -> RpcResult<()>; + async fn debug_preimage(&self, hash: B256) -> RpcResult<()>; /// Retrieves a block and returns its pretty printed form. #[method(name = "printBlock")] @@ -258,7 +262,7 @@ pub trait DebugApi { /// Fetches and retrieves the seed hash of the block by number. #[method(name = "seedHash")] - async fn debug_seed_hash(&self, number: u64) -> RpcResult; + async fn debug_seed_hash(&self, number: u64) -> RpcResult; /// Sets the rate (in samples/sec) of goroutine block profile data collection. A non-zero rate /// enables block profiling, setting it to zero stops the profile. Collected profile data can be @@ -328,10 +332,10 @@ pub trait DebugApi { #[method(name = "storageRangeAt")] async fn debug_storage_range_at( &self, - block_hash: H256, + block_hash: B256, tx_idx: usize, - contract_address: H160, - key_start: H256, + contract_address: Address, + key_start: B256, max_result: u64, ) -> RpcResult<()>; @@ -341,7 +345,7 @@ pub trait DebugApi { #[method(name = "traceBadBlock")] async fn debug_trace_bad_block( &self, - block_hash: H256, + block_hash: B256, opts: Option, ) -> RpcResult<()>; diff --git a/crates/rpc/rpc-api/src/engine.rs b/crates/rpc/rpc-api/src/engine.rs index 2c8075ebab..ee04b2ee8e 100644 --- a/crates/rpc/rpc-api/src/engine.rs +++ b/crates/rpc/rpc-api/src/engine.rs @@ -1,5 +1,5 @@ use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::{Address, BlockHash, BlockId, BlockNumberOrTag, Bytes, H256, U256, U64}; +use reth_primitives::{Address, BlockHash, BlockId, BlockNumberOrTag, Bytes, B256, U256, U64}; use reth_rpc_types::{ engine::{ ExecutionPayloadBodiesV1, ExecutionPayloadEnvelopeV2, ExecutionPayloadEnvelopeV3, @@ -29,8 +29,8 @@ pub trait EngineApi { async fn new_payload_v3( &self, payload: ExecutionPayloadV3, - versioned_hashes: Vec, - parent_beacon_block_root: H256, + versioned_hashes: Vec, + parent_beacon_block_root: B256, ) -> RpcResult; /// See also @@ -170,7 +170,7 @@ pub trait EngineEthApi { /// Returns information about a block by hash. #[method(name = "getBlockByHash")] - async fn block_by_hash(&self, hash: H256, full: bool) -> RpcResult>; + async fn block_by_hash(&self, hash: B256, full: bool) -> RpcResult>; /// Returns information about a block by number. #[method(name = "getBlockByNumber")] @@ -182,7 +182,7 @@ pub trait EngineEthApi { /// Sends signed transaction, returning its hash. #[method(name = "sendRawTransaction")] - async fn send_raw_transaction(&self, bytes: Bytes) -> RpcResult; + async fn send_raw_transaction(&self, bytes: Bytes) -> RpcResult; /// Returns logs matching given filter object. 
#[method(name = "getLogs")] diff --git a/crates/rpc/rpc-api/src/eth.rs b/crates/rpc/rpc-api/src/eth.rs index 385442f5f3..29ef6b5a3a 100644 --- a/crates/rpc/rpc-api/src/eth.rs +++ b/crates/rpc/rpc-api/src/eth.rs @@ -1,7 +1,7 @@ use jsonrpsee::{core::RpcResult, proc_macros::rpc}; use reth_primitives::{ serde_helper::{num::U64HexOrNumber, JsonStorageKey}, - AccessListWithGasUsed, Address, BlockId, BlockNumberOrTag, Bytes, H256, H64, U256, U64, + AccessListWithGasUsed, Address, BlockId, BlockNumberOrTag, Bytes, B256, B64, U256, U64, }; use reth_rpc_types::{ state::StateOverride, BlockOverrides, Bundle, CallRequest, EIP1186AccountProofResponse, @@ -40,7 +40,7 @@ pub trait EthApi { /// Returns information about a block by hash. #[method(name = "getBlockByHash")] - async fn block_by_hash(&self, hash: H256, full: bool) -> RpcResult>; + async fn block_by_hash(&self, hash: B256, full: bool) -> RpcResult>; /// Returns information about a block by number. #[method(name = "getBlockByNumber")] @@ -52,7 +52,7 @@ pub trait EthApi { /// Returns the number of transactions in a block from a block matching the given block hash. #[method(name = "getBlockTransactionCountByHash")] - async fn block_transaction_count_by_hash(&self, hash: H256) -> RpcResult>; + async fn block_transaction_count_by_hash(&self, hash: B256) -> RpcResult>; /// Returns the number of transactions in a block matching the given block number. #[method(name = "getBlockTransactionCountByNumber")] @@ -63,7 +63,7 @@ pub trait EthApi { /// Returns the number of uncles in a block from a block matching the given block hash. #[method(name = "getUncleCountByBlockHash")] - async fn block_uncles_count_by_hash(&self, hash: H256) -> RpcResult>; + async fn block_uncles_count_by_hash(&self, hash: B256) -> RpcResult>; /// Returns the number of uncles in a block with given block number. #[method(name = "getUncleCountByBlockNumber")] @@ -83,7 +83,7 @@ pub trait EthApi { #[method(name = "getUncleByBlockHashAndIndex")] async fn uncle_by_block_hash_and_index( &self, - hash: H256, + hash: B256, index: Index, ) -> RpcResult>; @@ -97,13 +97,13 @@ pub trait EthApi { /// Returns the information about a transaction requested by transaction hash. #[method(name = "getTransactionByHash")] - async fn transaction_by_hash(&self, hash: H256) -> RpcResult>; + async fn transaction_by_hash(&self, hash: B256) -> RpcResult>; /// Returns information about a transaction by block hash and transaction index position. #[method(name = "getTransactionByBlockHashAndIndex")] async fn transaction_by_block_hash_and_index( &self, - hash: H256, + hash: B256, index: Index, ) -> RpcResult>; @@ -117,7 +117,7 @@ pub trait EthApi { /// Returns the receipt of a transaction by transaction hash. #[method(name = "getTransactionReceipt")] - async fn transaction_receipt(&self, hash: H256) -> RpcResult>; + async fn transaction_receipt(&self, hash: B256) -> RpcResult>; /// Returns the balance of the account of given address. #[method(name = "getBalance")] @@ -130,7 +130,7 @@ pub trait EthApi { address: Address, index: JsonStorageKey, block_number: Option, - ) -> RpcResult; + ) -> RpcResult; /// Returns the number of transactions sent from an address at given block number. #[method(name = "getTransactionCount")] @@ -236,20 +236,20 @@ pub trait EthApi { /// It accepts the miner hash rate and an identifier which must be unique between nodes. /// Returns `true` if the block was successfully submitted, `false` otherwise. 
#[method(name = "submitHashrate")] - async fn submit_hashrate(&self, hashrate: U256, id: H256) -> RpcResult; + async fn submit_hashrate(&self, hashrate: U256, id: B256) -> RpcResult; /// Used for submitting a proof-of-work solution. #[method(name = "submitWork")] - async fn submit_work(&self, nonce: H64, pow_hash: H256, mix_digest: H256) -> RpcResult; + async fn submit_work(&self, nonce: B64, pow_hash: B256, mix_digest: B256) -> RpcResult; /// Sends transaction; will block waiting for signer to return the /// transaction hash. #[method(name = "sendTransaction")] - async fn send_transaction(&self, request: TransactionRequest) -> RpcResult; + async fn send_transaction(&self, request: TransactionRequest) -> RpcResult; /// Sends signed transaction, returning its hash. #[method(name = "sendRawTransaction")] - async fn send_raw_transaction(&self, bytes: Bytes) -> RpcResult; + async fn send_raw_transaction(&self, bytes: Bytes) -> RpcResult; /// Returns an Ethereum specific signature with: sign(keccak256("\x19Ethereum Signed Message:\n" /// + len(message) + message))). diff --git a/crates/rpc/rpc-api/src/lib.rs b/crates/rpc/rpc-api/src/lib.rs index ea595a1ab5..6f212d2df8 100644 --- a/crates/rpc/rpc-api/src/lib.rs +++ b/crates/rpc/rpc-api/src/lib.rs @@ -9,7 +9,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/rpc/rpc-api/src/otterscan.rs b/crates/rpc/rpc-api/src/otterscan.rs index 9ac585991f..110d922f31 100644 --- a/crates/rpc/rpc-api/src/otterscan.rs +++ b/crates/rpc/rpc-api/src/otterscan.rs @@ -1,5 +1,5 @@ use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::{Address, BlockId, BlockNumberOrTag, TxHash, H256}; +use reth_primitives::{Address, BlockId, BlockNumberOrTag, TxHash, B256}; use reth_rpc_types::{ BlockDetails, ContractCreator, InternalOperation, OtsBlockTransactions, TraceEntry, Transaction, TransactionsWithReceipts, @@ -42,7 +42,7 @@ pub trait Otterscan { /// Tailor-made and expanded version of eth_getBlockByHash for block details page in Otterscan. #[method(name = "getBlockDetailsByHash")] - async fn get_block_details_by_hash(&self, block_hash: H256) -> RpcResult>; + async fn get_block_details_by_hash(&self, block_hash: B256) -> RpcResult>; /// Get paginated transactions for a certain block. Also remove some verbose fields like logs. 
#[method(name = "getBlockTransactions")] diff --git a/crates/rpc/rpc-api/src/trace.rs b/crates/rpc/rpc-api/src/trace.rs index d38af056d7..41b41d1fe2 100644 --- a/crates/rpc/rpc-api/src/trace.rs +++ b/crates/rpc/rpc-api/src/trace.rs @@ -1,5 +1,5 @@ use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::{BlockId, Bytes, H256}; +use reth_primitives::{BlockId, Bytes, B256}; use reth_rpc_types::{ state::StateOverride, trace::{filter::TraceFilter, parity::*}, @@ -55,7 +55,7 @@ pub trait TraceApi { #[method(name = "replayTransaction")] async fn replay_transaction( &self, - transaction: H256, + transaction: B256, trace_types: HashSet, ) -> RpcResult; @@ -79,7 +79,7 @@ pub trait TraceApi { #[method(name = "get")] async fn trace_get( &self, - hash: H256, + hash: B256, indices: Vec, ) -> RpcResult>; @@ -87,6 +87,6 @@ pub trait TraceApi { #[method(name = "transaction")] async fn trace_transaction( &self, - hash: H256, + hash: B256, ) -> RpcResult>>; } diff --git a/crates/rpc/rpc-api/src/web3.rs b/crates/rpc/rpc-api/src/web3.rs index e9c8fdbc53..36d2211563 100644 --- a/crates/rpc/rpc-api/src/web3.rs +++ b/crates/rpc/rpc-api/src/web3.rs @@ -1,5 +1,5 @@ use jsonrpsee::{core::RpcResult, proc_macros::rpc}; -use reth_primitives::{Bytes, H256}; +use reth_primitives::{Bytes, B256}; /// Web3 rpc interface. #[cfg_attr(not(feature = "client"), rpc(server, namespace = "web3"))] @@ -12,5 +12,5 @@ pub trait Web3Api { /// Returns sha3 of the given data. #[method(name = "sha3")] - fn sha3(&self, input: Bytes) -> RpcResult; + fn sha3(&self, input: Bytes) -> RpcResult; } diff --git a/crates/rpc/rpc-builder/src/lib.rs b/crates/rpc/rpc-builder/src/lib.rs index 10b404fa79..7c671b0c69 100644 --- a/crates/rpc/rpc-builder/src/lib.rs +++ b/crates/rpc/rpc-builder/src/lib.rs @@ -93,7 +93,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/rpc/rpc-builder/tests/it/auth.rs b/crates/rpc/rpc-builder/tests/it/auth.rs index fefa25024c..d1141483bb 100644 --- a/crates/rpc/rpc-builder/tests/it/auth.rs +++ b/crates/rpc/rpc-builder/tests/it/auth.rs @@ -2,7 +2,7 @@ use crate::utils::launch_auth; use jsonrpsee::core::client::{ClientT, SubscriptionClientT}; -use reth_primitives::Block; +use reth_primitives::{Block, U64}; use reth_rpc::JwtSecret; use reth_rpc_api::clients::EngineApiClient; use reth_rpc_types::engine::{ForkchoiceState, PayloadId, TransitionConfiguration}; @@ -21,7 +21,7 @@ where EngineApiClient::get_payload_v1(client, PayloadId::new([0, 0, 0, 0, 0, 0, 0, 0])).await; EngineApiClient::get_payload_v2(client, PayloadId::new([0, 0, 0, 0, 0, 0, 0, 0])).await; EngineApiClient::get_payload_bodies_by_hash_v1(client, vec![]).await; - EngineApiClient::get_payload_bodies_by_range_v1(client, 0u64.into(), 1u64.into()).await; + EngineApiClient::get_payload_bodies_by_range_v1(client, U64::ZERO, U64::from(1u64)).await; EngineApiClient::exchange_transition_configuration(client, TransitionConfiguration::default()) .await; EngineApiClient::exchange_capabilities(client, vec![]).await; diff --git a/crates/rpc/rpc-builder/tests/it/http.rs b/crates/rpc/rpc-builder/tests/it/http.rs index 
c1414d556c..9d588dbc75 100644 --- a/crates/rpc/rpc-builder/tests/it/http.rs +++ b/crates/rpc/rpc-builder/tests/it/http.rs @@ -9,7 +9,7 @@ use jsonrpsee::{ types::error::ErrorCode, }; use reth_primitives::{ - hex_literal::hex, Address, BlockId, BlockNumberOrTag, Bytes, NodeRecord, TxHash, H256, H64, + hex_literal::hex, Address, BlockId, BlockNumberOrTag, Bytes, NodeRecord, TxHash, B256, B64, U256, }; use reth_rpc_api::{ @@ -65,7 +65,7 @@ where { let address = Address::default(); let index = Index::default(); - let hash = H256::default(); + let hash = B256::default(); let tx_hash = TxHash::default(); let block_number = BlockNumberOrTag::default(); let call_request = CallRequest::default(); @@ -111,7 +111,7 @@ where EthApiClient::syncing(client).await.unwrap(); EthApiClient::send_transaction(client, transaction_request).await.unwrap_err(); EthApiClient::hashrate(client).await.unwrap(); - EthApiClient::submit_hashrate(client, U256::default(), H256::default()).await.unwrap(); + EthApiClient::submit_hashrate(client, U256::default(), B256::default()).await.unwrap(); EthApiClient::gas_price(client).await.unwrap_err(); EthApiClient::max_priority_fee_per_gas(client).await.unwrap_err(); @@ -123,7 +123,7 @@ where assert!(is_unimplemented(EthApiClient::is_mining(client).await.err().unwrap())); assert!(is_unimplemented(EthApiClient::get_work(client).await.err().unwrap())); assert!(is_unimplemented( - EthApiClient::submit_work(client, H64::default(), H256::default(), H256::default()) + EthApiClient::submit_work(client, B64::default(), B256::default(), B256::default()) .await .err() .unwrap() @@ -141,7 +141,7 @@ where DebugApiClient::raw_header(client, block_id).await.unwrap(); DebugApiClient::raw_block(client, block_id).await.unwrap(); - DebugApiClient::raw_transaction(client, H256::default()).await.unwrap(); + DebugApiClient::raw_transaction(client, B256::default()).await.unwrap(); DebugApiClient::raw_receipts(client, block_id).await.unwrap(); assert!(is_unimplemented(DebugApiClient::bad_blocks(client).await.err().unwrap())); } @@ -175,7 +175,7 @@ where TraceApiClient::trace_call_many(client, vec![], Some(BlockNumberOrTag::Latest.into())) .await .unwrap(); - TraceApiClient::replay_transaction(client, H256::default(), HashSet::default()) + TraceApiClient::replay_transaction(client, B256::default(), HashSet::default()) .await .err() .unwrap(); @@ -206,7 +206,7 @@ where let page_number = 1; let page_size = 10; let nonce = 1; - let block_hash = H256::default(); + let block_hash = B256::default(); OtterscanClient::has_code(client, address, None).await.unwrap(); diff --git a/crates/rpc/rpc-engine-api/Cargo.toml b/crates/rpc/rpc-engine-api/Cargo.toml index 7d8161c5db..ebd0bfeb78 100644 --- a/crates/rpc/rpc-engine-api/Cargo.toml +++ b/crates/rpc/rpc-engine-api/Cargo.toml @@ -30,7 +30,7 @@ jsonrpsee-core.workspace = true tracing.workspace = true [dev-dependencies] -reth-rlp.workspace = true +alloy-rlp.workspace = true reth-interfaces = { workspace = true, features = ["test-utils"] } reth-provider = { workspace = true, features = ["test-utils"] } reth-payload-builder = { workspace = true, features = ["test-utils"] } diff --git a/crates/rpc/rpc-engine-api/src/engine_api.rs b/crates/rpc/rpc-engine-api/src/engine_api.rs index 14b907f348..1e7cea0979 100644 --- a/crates/rpc/rpc-engine-api/src/engine_api.rs +++ b/crates/rpc/rpc-engine-api/src/engine_api.rs @@ -6,7 +6,7 @@ use jsonrpsee_core::RpcResult; use reth_beacon_consensus::BeaconConsensusEngineHandle; use reth_interfaces::consensus::ForkchoiceState; use 
reth_payload_builder::PayloadStore; -use reth_primitives::{BlockHash, BlockHashOrNumber, BlockNumber, ChainSpec, Hardfork, H256, U64}; +use reth_primitives::{BlockHash, BlockHashOrNumber, BlockNumber, ChainSpec, Hardfork, B256, U64}; use reth_provider::{BlockReader, EvmEnvProvider, HeaderProvider, StateProviderFactory}; use reth_rpc_api::EngineApiServer; use reth_rpc_types::engine::{ @@ -97,8 +97,8 @@ where pub async fn new_payload_v3( &self, payload: ExecutionPayloadV3, - versioned_hashes: Vec, - parent_beacon_block_root: H256, + versioned_hashes: Vec, + parent_beacon_block_root: B256, ) -> EngineApiResult { let payload = ExecutionPayload::from(payload); let payload_or_attrs = @@ -361,7 +361,7 @@ where let local_hash = self .inner .provider - .block_hash(terminal_block_number.as_u64()) + .block_hash(terminal_block_number.to()) .map_err(|err| EngineApiError::Internal(Box::new(err)))?; // Transition configuration exchange is successful if block hashes match @@ -606,8 +606,8 @@ where async fn new_payload_v3( &self, payload: ExecutionPayloadV3, - versioned_hashes: Vec, - parent_beacon_block_root: H256, + versioned_hashes: Vec, + parent_beacon_block_root: B256, ) -> RpcResult { trace!(target: "rpc::engine", "Serving engine_newPayloadV3"); Ok(EngineApi::new_payload_v3(self, payload, versioned_hashes, parent_beacon_block_root) @@ -726,7 +726,7 @@ where count: U64, ) -> RpcResult { trace!(target: "rpc::engine", "Serving engine_getPayloadBodiesByRangeV1"); - Ok(EngineApi::get_payload_bodies_by_range(self, start.as_u64(), count.as_u64()).await?) + Ok(EngineApi::get_payload_bodies_by_range(self, start.to(), count.to()).await?) } /// Handler for `engine_exchangeTransitionConfigurationV1` @@ -759,7 +759,7 @@ mod tests { use reth_beacon_consensus::BeaconEngineMessage; use reth_interfaces::test_utils::generators::random_block; use reth_payload_builder::test_utils::spawn_test_payload_service; - use reth_primitives::{SealedBlock, H256, MAINNET}; + use reth_primitives::{SealedBlock, B256, MAINNET}; use reth_provider::test_utils::MockEthProvider; use reth_tasks::TokioTaskExecutor; use std::sync::Arc; @@ -837,7 +837,7 @@ mod tests { let (start, count) = (1, 10); let blocks = - random_block_range(&mut rng, start..=start + count - 1, H256::default(), 0..2); + random_block_range(&mut rng, start..=start + count - 1, B256::default(), 0..2); handle.provider.extend_blocks(blocks.iter().cloned().map(|b| (b.hash(), b.unseal()))); let expected = blocks @@ -857,7 +857,7 @@ mod tests { let (start, count) = (1, 100); let blocks = - random_block_range(&mut rng, start..=start + count - 1, H256::default(), 0..2); + random_block_range(&mut rng, start..=start + count - 1, B256::default(), 0..2); // Insert only blocks in ranges 1-25 and 50-75 let first_missing_range = 26..=50; @@ -952,7 +952,7 @@ mod tests { let transition_config = TransitionConfiguration { terminal_total_difficulty: handle.chain_spec.fork(Hardfork::Paris).ttd().unwrap(), terminal_block_hash: consensus_terminal_block.hash(), - terminal_block_number: terminal_block_number.into(), + terminal_block_number: U64::from(terminal_block_number), }; // Unknown block number @@ -990,7 +990,7 @@ mod tests { let transition_config = TransitionConfiguration { terminal_total_difficulty: handle.chain_spec.fork(Hardfork::Paris).ttd().unwrap(), terminal_block_hash: terminal_block.hash(), - terminal_block_number: terminal_block_number.into(), + terminal_block_number: U64::from(terminal_block_number), }; handle.provider.add_block(terminal_block.hash(), terminal_block.unseal()); 
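Editor's note (not part of the patch): the hunks above perform a mechanical migration from `H256`/`H64`/`.as_u64()`/`.into()` to the alloy/ruint-backed `B256`/`B64`/`U64` conversions. The short sketch below is only an illustration of those idioms, assuming the `B256` and `U64` types re-exported by `reth_primitives` as used in the surrounding hunks; the constructors shown (`B256::default`, `B256::with_last_byte`, `U64::from`, `.to()`) are exactly the calls that appear in the diff, and everything else is hypothetical example code.

use reth_primitives::{B256, U64};

fn conversion_idioms() {
    // Fixed-size hashes: `B256` replaces `H256`; `with_last_byte` replaces
    // the old `from_low_u64_be`-style constructors seen in the removed code.
    let zero_hash = B256::default();
    let tagged_hash = B256::with_last_byte(1);
    assert_ne!(zero_hash, tagged_hash);

    // RPC quantities: `U64` is a ruint integer, so the old `.as_u64()` becomes
    // `.to()` (target type inferred) and `u64 -> U64` uses an explicit `U64::from`.
    let block_number = U64::from(12_345u64);
    let as_native: u64 = block_number.to();
    assert_eq!(as_native, 12_345);
}

The explicit `U64::from(..)` / `.to()` pairing mirrors the replacements made throughout `engine_api.rs` above and in the payload conversion code further down in this patch.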
diff --git a/crates/rpc/rpc-engine-api/src/error.rs b/crates/rpc/rpc-engine-api/src/error.rs index d42add66d2..8ec5466088 100644 --- a/crates/rpc/rpc-engine-api/src/error.rs +++ b/crates/rpc/rpc-engine-api/src/error.rs @@ -1,7 +1,7 @@ use jsonrpsee_types::error::{INTERNAL_ERROR_CODE, INVALID_PARAMS_CODE}; use reth_beacon_consensus::{BeaconForkChoiceUpdateError, BeaconOnNewPayloadError}; use reth_payload_builder::error::PayloadBuilderError; -use reth_primitives::{H256, U256}; +use reth_primitives::{B256, U256}; use thiserror::Error; /// The Engine API result type @@ -73,9 +73,9 @@ pub enum EngineApiError { )] TerminalBlockHash { /// Execution terminal block hash. `None` if block number is not found in the database. - execution: Option, + execution: Option, /// Consensus terminal block hash. - consensus: H256, + consensus: B256, }, /// An error occurred while processing the fork choice update in the beacon consensus engine. #[error(transparent)] diff --git a/crates/rpc/rpc-engine-api/src/lib.rs b/crates/rpc/rpc-engine-api/src/lib.rs index f8aa0b47bd..c75a34021a 100644 --- a/crates/rpc/rpc-engine-api/src/lib.rs +++ b/crates/rpc/rpc-engine-api/src/lib.rs @@ -4,7 +4,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -33,5 +33,5 @@ pub use reth_rpc_api::EngineApiServer; #[allow(unused_imports)] mod tests { // silence unused import warning - use reth_rlp as _; + use alloy_rlp as _; } diff --git a/crates/rpc/rpc-engine-api/src/payload.rs b/crates/rpc/rpc-engine-api/src/payload.rs index 0c9b1429a2..28500d09aa 100644 --- a/crates/rpc/rpc-engine-api/src/payload.rs +++ b/crates/rpc/rpc-engine-api/src/payload.rs @@ -1,4 +1,4 @@ -use reth_primitives::H256; +use reth_primitives::B256; use reth_rpc_types::engine::{ExecutionPayload, PayloadAttributes}; /// Either an [ExecutionPayload] or a [PayloadAttributes]. @@ -8,7 +8,7 @@ pub(crate) enum PayloadOrAttributes<'a> { /// The inner execution payload payload: &'a ExecutionPayload, /// The parent beacon block root - parent_beacon_block_root: Option, + parent_beacon_block_root: Option, }, /// A [PayloadAttributes]. PayloadAttributes(&'a PayloadAttributes), @@ -19,7 +19,7 @@ impl<'a> PayloadOrAttributes<'a> { /// block root. pub(crate) fn from_execution_payload( payload: &'a ExecutionPayload, - parent_beacon_block_root: Option, + parent_beacon_block_root: Option, ) -> Self { Self::ExecutionPayload { payload, parent_beacon_block_root } } @@ -36,12 +36,12 @@ impl<'a> PayloadOrAttributes<'a> { pub(crate) fn timestamp(&self) -> u64 { match self { Self::ExecutionPayload { payload, .. } => payload.timestamp(), - Self::PayloadAttributes(attributes) => attributes.timestamp.as_u64(), + Self::PayloadAttributes(attributes) => attributes.timestamp.to(), } } /// Return the parent beacon block root for the payload or attributes. - pub(crate) fn parent_beacon_block_root(&self) -> Option { + pub(crate) fn parent_beacon_block_root(&self) -> Option { match self { Self::ExecutionPayload { parent_beacon_block_root, .. 
} => *parent_beacon_block_root, Self::PayloadAttributes(attributes) => attributes.parent_beacon_block_root, diff --git a/crates/rpc/rpc-engine-api/tests/it/payload.rs b/crates/rpc/rpc-engine-api/tests/it/payload.rs index fdc19854b5..f8aa231af7 100644 --- a/crates/rpc/rpc-engine-api/tests/it/payload.rs +++ b/crates/rpc/rpc-engine-api/tests/it/payload.rs @@ -1,15 +1,14 @@ //! Some payload tests +use alloy_rlp::{Decodable, Error as RlpError}; use assert_matches::assert_matches; use reth_interfaces::test_utils::generators::{ - self, random_block, random_block_range, random_header, + self, random_block, random_block_range, random_header, Rng, }; use reth_primitives::{ bytes::{Bytes, BytesMut}, - proofs::{self}, - Block, SealedBlock, TransactionSigned, H256, U256, + proofs, Block, SealedBlock, TransactionSigned, B256, U256, }; -use reth_rlp::{Decodable, DecodeError}; use reth_rpc_types::engine::{ ExecutionPayload, ExecutionPayloadBodyV1, ExecutionPayloadV1, PayloadError, }; @@ -35,7 +34,7 @@ fn transform_block Block>(src: SealedBlock, f: F) -> Executi #[test] fn payload_body_roundtrip() { let mut rng = generators::rng(); - for block in random_block_range(&mut rng, 0..=99, H256::default(), 0..2) { + for block in random_block_range(&mut rng, 0..=99, B256::default(), 0..2) { let unsealed = block.clone().unseal(); let payload_body: ExecutionPayloadBodyV1 = convert_to_payload_body_v1(unsealed); @@ -60,7 +59,8 @@ fn payload_body_roundtrip() { #[test] fn payload_validation() { let mut rng = generators::rng(); - let block = random_block(&mut rng, 100, Some(H256::random()), Some(3), Some(0)); + let parent = rng.gen(); + let block = random_block(&mut rng, 100, Some(parent), Some(3), Some(0)); // Valid extra data let block_with_valid_extra_data = transform_block(block.clone(), |mut b| { @@ -100,10 +100,7 @@ fn payload_validation() { *tx = Bytes::new().into(); }); let payload_with_invalid_txs = try_payload_v1_to_block(payload_with_invalid_txs); - assert_matches!( - payload_with_invalid_txs, - Err(PayloadError::Decode(DecodeError::InputTooShort)) - ); + assert_matches!(payload_with_invalid_txs, Err(PayloadError::Decode(RlpError::InputTooShort))); // Non empty ommers let block_with_ommers = transform_block(block.clone(), |mut b| { diff --git a/crates/rpc/rpc-testing-util/src/debug.rs b/crates/rpc/rpc-testing-util/src/debug.rs index aedb83ac16..9e2711cf52 100644 --- a/crates/rpc/rpc-testing-util/src/debug.rs +++ b/crates/rpc/rpc-testing-util/src/debug.rs @@ -1,6 +1,6 @@ //! Helpers for testing debug trace calls. -use reth_primitives::H256; +use reth_primitives::B256; use reth_rpc_api::clients::DebugApiClient; use reth_rpc_types::trace::geth::{GethDebugTracerType, GethDebugTracingOptions}; @@ -16,7 +16,7 @@ pub trait DebugApiExt { /// Same as [DebugApiClient::debug_trace_transaction] but returns the result as json. 
async fn debug_trace_transaction_json( &self, - hash: H256, + hash: B256, opts: GethDebugTracingOptions, ) -> Result; } @@ -27,7 +27,7 @@ impl DebugApiExt for T { async fn debug_trace_transaction_json( &self, - hash: H256, + hash: B256, opts: GethDebugTracingOptions, ) -> Result { let mut params = jsonrpsee::core::params::ArrayParams::new(); diff --git a/crates/rpc/rpc-testing-util/src/lib.rs b/crates/rpc/rpc-testing-util/src/lib.rs index 85b479d5ef..f4a5740cb9 100644 --- a/crates/rpc/rpc-testing-util/src/lib.rs +++ b/crates/rpc/rpc-testing-util/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/rpc/rpc-types-compat/Cargo.toml b/crates/rpc/rpc-types-compat/Cargo.toml index b6f203ef4d..84e71d6dd6 100644 --- a/crates/rpc/rpc-types-compat/Cargo.toml +++ b/crates/rpc/rpc-types-compat/Cargo.toml @@ -13,5 +13,5 @@ Compatibility layer for reth-primitives and ethereum RPC types [dependencies] reth-primitives.workspace = true reth-rpc-types.workspace = true -reth-rlp.workspace = true +alloy-rlp.workspace = true diff --git a/crates/rpc/rpc-types-compat/src/block.rs b/crates/rpc/rpc-types-compat/src/block.rs index 6551124d9f..b074b553db 100644 --- a/crates/rpc/rpc-types-compat/src/block.rs +++ b/crates/rpc/rpc-types-compat/src/block.rs @@ -1,8 +1,8 @@ //! Compatibility functions for rpc `Block` type. use crate::transaction::from_recovered_with_block_context; -use reth_primitives::{Block as PrimitiveBlock, Header as PrimitiveHeader, H256, U256}; -use reth_rlp::Encodable; +use alloy_rlp::Encodable; +use reth_primitives::{Block as PrimitiveBlock, Header as PrimitiveHeader, B256, U256}; use reth_rpc_types::{Block, BlockError, BlockTransactions, BlockTransactionsKind, Header}; /// Converts the given primitive block into a [Block] response with the given @@ -13,7 +13,7 @@ pub fn from_block( block: PrimitiveBlock, total_difficulty: U256, kind: BlockTransactionsKind, - block_hash: Option, + block_hash: Option, ) -> Result { match kind { BlockTransactionsKind::Hashes => { @@ -31,7 +31,7 @@ pub fn from_block( pub fn from_block_with_tx_hashes( block: PrimitiveBlock, total_difficulty: U256, - block_hash: Option, + block_hash: Option, ) -> Block { let block_hash = block_hash.unwrap_or_else(|| block.header.hash_slow()); let transactions = block.body.iter().map(|tx| tx.hash()).collect(); @@ -52,7 +52,7 @@ pub fn from_block_with_tx_hashes( pub fn from_block_full( block: PrimitiveBlock, total_difficulty: U256, - block_hash: Option, + block_hash: Option, ) -> Result { let block_hash = block_hash.unwrap_or_else(|| block.header.hash_slow()); let block_number = block.number; @@ -77,7 +77,7 @@ pub fn from_block_full( } fn from_block_with_transactions( - block_hash: H256, + block_hash: B256, block: PrimitiveBlock, total_difficulty: U256, transactions: BlockTransactions, diff --git a/crates/rpc/rpc-types-compat/src/engine/payload.rs b/crates/rpc/rpc-types-compat/src/engine/payload.rs index e47f6ea290..d69b5af85d 100644 --- a/crates/rpc/rpc-types-compat/src/engine/payload.rs +++ b/crates/rpc/rpc-types-compat/src/engine/payload.rs @@ -1,11 +1,11 @@ //! 
Standalone Conversion Functions for Handling Different Versions of Execution Payloads in //! Ethereum's Engine +use alloy_rlp::Decodable; use reth_primitives::{ constants::{MAXIMUM_EXTRA_DATA_SIZE, MIN_PROTOCOL_BASE_FEE_U256}, proofs::{self, EMPTY_LIST_HASH}, - Block, Header, SealedBlock, TransactionSigned, UintTryTo, Withdrawal, H256, U256, + Block, Header, SealedBlock, TransactionSigned, UintTryTo, Withdrawal, B256, U256, U64, }; -use reth_rlp::Decodable; use reth_rpc_types::engine::{ payload::{ExecutionPayloadBodyV1, ExecutionPayloadFieldV2, ExecutionPayloadInputV2}, ExecutionPayload, ExecutionPayloadV1, ExecutionPayloadV2, ExecutionPayloadV3, PayloadError, @@ -36,10 +36,10 @@ pub fn try_payload_v1_to_block(payload: ExecutionPayloadV1) -> Result Result ExecutionPayloadV1 { receipts_root: value.receipts_root, logs_bloom: value.logs_bloom, prev_randao: value.mix_hash, - block_number: value.number.into(), - gas_limit: value.gas_limit.into(), - gas_used: value.gas_used.into(), - timestamp: value.timestamp.into(), + block_number: U64::from(value.number), + gas_limit: U64::from(value.gas_limit), + gas_used: U64::from(value.gas_used), + timestamp: U64::from(value.timestamp), extra_data: value.extra_data.clone(), base_fee_per_gas: U256::from(value.base_fee_per_gas.unwrap_or_default()), block_hash: value.hash(), @@ -158,10 +158,10 @@ pub fn try_block_to_payload_v2(value: SealedBlock) -> ExecutionPayloadV2 { receipts_root: value.receipts_root, logs_bloom: value.logs_bloom, prev_randao: value.mix_hash, - block_number: value.number.into(), - gas_limit: value.gas_limit.into(), - gas_used: value.gas_used.into(), - timestamp: value.timestamp.into(), + block_number: U64::from(value.number), + gas_limit: U64::from(value.gas_limit), + gas_used: U64::from(value.gas_used), + timestamp: U64::from(value.timestamp), extra_data: value.extra_data.clone(), base_fee_per_gas: U256::from(value.base_fee_per_gas.unwrap_or_default()), block_hash: value.hash(), @@ -200,10 +200,10 @@ pub fn try_block_to_payload_v3(value: SealedBlock) -> ExecutionPayloadV3 { receipts_root: value.receipts_root, logs_bloom: value.logs_bloom, prev_randao: value.mix_hash, - block_number: value.number.into(), - gas_limit: value.gas_limit.into(), - gas_used: value.gas_used.into(), - timestamp: value.timestamp.into(), + block_number: U64::from(value.number), + gas_limit: U64::from(value.gas_limit), + gas_used: U64::from(value.gas_used), + timestamp: U64::from(value.timestamp), extra_data: value.extra_data.clone(), base_fee_per_gas: U256::from(value.base_fee_per_gas.unwrap_or_default()), block_hash: value.hash(), @@ -212,8 +212,8 @@ pub fn try_block_to_payload_v3(value: SealedBlock) -> ExecutionPayloadV3 { withdrawals, }, - blob_gas_used: value.blob_gas_used.unwrap_or_default().into(), - excess_blob_gas: value.excess_blob_gas.unwrap_or_default().into(), + blob_gas_used: U64::from(value.blob_gas_used.unwrap_or_default()), + excess_blob_gas: U64::from(value.excess_blob_gas.unwrap_or_default()), } } @@ -266,7 +266,7 @@ pub fn convert_block_to_payload_input_v2(value: SealedBlock) -> ExecutionPayload /// See pub fn try_into_block( value: ExecutionPayload, - parent_beacon_block_root: Option, + parent_beacon_block_root: Option, ) -> Result { let mut base_payload = match value { ExecutionPayload::V1(payload) => try_payload_v1_to_block(payload)?, @@ -291,7 +291,7 @@ pub fn try_into_block( /// [SealedBlock]. 
pub fn try_into_sealed_block( payload: ExecutionPayload, - parent_beacon_block_root: Option, + parent_beacon_block_root: Option, ) -> Result { let block_hash = payload.block_hash(); let base_payload = try_into_block(payload, parent_beacon_block_root)?; @@ -306,7 +306,7 @@ pub fn try_into_sealed_block( /// If the provided block hash does not match the block hash computed from the provided block, this /// returns [PayloadError::BlockHash]. pub fn validate_block_hash( - expected_block_hash: H256, + expected_block_hash: B256, block: Block, ) -> Result { let sealed_block = block.seal_slow(); diff --git a/crates/rpc/rpc-types-compat/src/lib.rs b/crates/rpc/rpc-types-compat/src/lib.rs index cedcd9809b..e71ad4cc11 100644 --- a/crates/rpc/rpc-types-compat/src/lib.rs +++ b/crates/rpc/rpc-types-compat/src/lib.rs @@ -5,7 +5,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/rpc/rpc-types-compat/src/transaction/mod.rs b/crates/rpc/rpc-types-compat/src/transaction/mod.rs index 1d5e532057..07a6f5ff23 100644 --- a/crates/rpc/rpc-types-compat/src/transaction/mod.rs +++ b/crates/rpc/rpc-types-compat/src/transaction/mod.rs @@ -2,7 +2,7 @@ mod signature; use reth_primitives::{ AccessListItem, BlockNumber, Transaction as PrimitiveTransaction, - TransactionKind as PrimitiveTransactionKind, TransactionSignedEcRecovered, TxType, H256, U128, + TransactionKind as PrimitiveTransactionKind, TransactionSignedEcRecovered, TxType, B256, U128, U256, U64, }; use reth_rpc_types::Transaction; @@ -14,7 +14,7 @@ use signature::from_primitive_signature; /// transaction was mined. pub fn from_recovered_with_block_context( tx: TransactionSignedEcRecovered, - block_hash: H256, + block_hash: B256, block_number: BlockNumber, base_fee: Option, tx_index: U256, @@ -32,7 +32,7 @@ pub fn from_recovered(tx: TransactionSignedEcRecovered) -> Transaction { /// environment related fields to `None`. 
fn fill( tx: TransactionSignedEcRecovered, - block_hash: Option, + block_hash: Option, block_number: Option, base_fee: Option, transaction_index: Option, diff --git a/crates/rpc/rpc-types/Cargo.toml b/crates/rpc/rpc-types/Cargo.toml index 3dbe236b15..bb387fb6a6 100644 --- a/crates/rpc/rpc-types/Cargo.toml +++ b/crates/rpc/rpc-types/Cargo.toml @@ -13,17 +13,15 @@ Reth RPC types [dependencies] # reth reth-primitives.workspace = true -reth-rlp.workspace = true -# errors -thiserror.workspace = true # misc +alloy-rlp.workspace = true +thiserror.workspace = true itertools.workspace = true serde = { workspace = true, features = ["derive"] } serde_json.workspace = true jsonrpsee-types = { workspace = true, optional = true } - [features] default = ["jsonrpsee-types"] @@ -31,5 +29,3 @@ default = ["jsonrpsee-types"] # misc rand.workspace = true similar-asserts = "1.4" - - diff --git a/crates/rpc/rpc-types/src/admin.rs b/crates/rpc/rpc-types/src/admin.rs index d066fd6f7d..7a35463491 100644 --- a/crates/rpc/rpc-types/src/admin.rs +++ b/crates/rpc/rpc-types/src/admin.rs @@ -1,4 +1,4 @@ -use reth_primitives::{NodeRecord, PeerId, H256, U256}; +use reth_primitives::{NodeRecord, PeerId, B256, U256}; use serde::{Deserialize, Serialize}; use std::{ collections::BTreeMap, @@ -81,11 +81,11 @@ pub struct EthProtocolInfo { #[serde(deserialize_with = "reth_primitives::serde_helper::deserialize_json_u256")] pub difficulty: U256, /// The block hash of the head of the chain. - pub head: H256, + pub head: B256, /// Network ID in base 10. pub network: u64, /// Genesis block of the current chain. - pub genesis: H256, + pub genesis: B256, } #[cfg(test)] diff --git a/crates/rpc/rpc-types/src/eth/account.rs b/crates/rpc/rpc-types/src/eth/account.rs index 205e3c41f5..3e77166a8a 100644 --- a/crates/rpc/rpc-types/src/eth/account.rs +++ b/crates/rpc/rpc-types/src/eth/account.rs @@ -1,5 +1,5 @@ #![allow(missing_docs)] -use reth_primitives::{serde_helper::JsonStorageKey, Address, Bytes, H256, H512, U256, U64}; +use reth_primitives::{serde_helper::JsonStorageKey, Address, Bytes, B256, B512, U256, U64}; use serde::{Deserialize, Serialize}; /// Account information. @@ -27,9 +27,9 @@ pub struct StorageProof { pub struct EIP1186AccountProofResponse { pub address: Address, pub balance: U256, - pub code_hash: H256, + pub code_hash: B256, pub nonce: U64, - pub storage_hash: H256, + pub storage_hash: B256, pub account_proof: Vec, pub storage_proof: Vec, } @@ -55,7 +55,7 @@ pub struct RecoveredAccount { /// address of the recovered account pub address: Address, /// public key of the recovered account - pub public_key: H512, + pub public_key: B512, /// If the signature contains chain replay protection, /// And the chain_id encoded within the signature /// matches the current chain this would be true, otherwise false. diff --git a/crates/rpc/rpc-types/src/eth/block.rs b/crates/rpc/rpc-types/src/eth/block.rs index 6b478ae3a2..e238e2774e 100644 --- a/crates/rpc/rpc-types/src/eth/block.rs +++ b/crates/rpc/rpc-types/src/eth/block.rs @@ -1,7 +1,7 @@ //! 
Contains types that represent ethereum types in [reth_primitives] when used in RPC use crate::Transaction; use reth_primitives::{ - Address, Bloom, Bytes, Header as PrimitiveHeader, SealedHeader, Withdrawal, H256, H64, U256, + Address, Bloom, Bytes, Header as PrimitiveHeader, SealedHeader, Withdrawal, B256, B64, U256, U64, }; use serde::{ser::Error, Deserialize, Serialize, Serializer}; @@ -12,7 +12,7 @@ use std::{collections::BTreeMap, ops::Deref}; #[serde(untagged)] pub enum BlockTransactions { /// Only hashes - Hashes(Vec), + Hashes(Vec), /// Full transactions Full(Vec), /// Special case for uncle response. @@ -56,7 +56,7 @@ pub enum BlockError { InvalidSignature, /// A raw block failed to decode #[error("failed to decode raw block {0}")] - RlpDecodeRawBlock(reth_rlp::DecodeError), + RlpDecodeRawBlock(alloy_rlp::Error), } /// Block representation @@ -71,7 +71,7 @@ pub struct Block { #[serde(skip_serializing_if = "Option::is_none")] pub total_difficulty: Option, /// Uncles' hashes - pub uncles: Vec, + pub uncles: Vec, /// Transactions #[serde(skip_serializing_if = "BlockTransactions::is_uncle")] pub transactions: BlockTransactions, @@ -87,20 +87,20 @@ pub struct Block { #[serde(rename_all = "camelCase")] pub struct Header { /// Hash of the block - pub hash: Option, + pub hash: Option, /// Hash of the parent - pub parent_hash: H256, + pub parent_hash: B256, /// Hash of the uncles #[serde(rename = "sha3Uncles")] - pub uncles_hash: H256, + pub uncles_hash: B256, /// Alias of `author` pub miner: Address, /// State root hash - pub state_root: H256, + pub state_root: B256, /// Transactions root hash - pub transactions_root: H256, + pub transactions_root: B256, /// Transactions receipts root hash - pub receipts_root: H256, + pub receipts_root: B256, /// Logs bloom pub logs_bloom: Bloom, /// Difficulty @@ -116,15 +116,15 @@ pub struct Header { /// Extra data pub extra_data: Bytes, /// Mix Hash - pub mix_hash: H256, + pub mix_hash: B256, /// Nonce - pub nonce: Option, + pub nonce: Option, /// Base fee per unit of gas (if past London) #[serde(rename = "baseFeePerGas", skip_serializing_if = "Option::is_none")] pub base_fee_per_gas: Option, /// Withdrawals root hash added by EIP-4895 and is ignored in legacy headers. #[serde(skip_serializing_if = "Option::is_none")] - pub withdrawals_root: Option, + pub withdrawals_root: Option, /// Blob gas used #[serde(rename = "blobGasUsed", skip_serializing_if = "Option::is_none")] pub blob_gas_used: Option, @@ -133,7 +133,7 @@ pub struct Header { pub excess_blob_gas: Option, /// Parent beacon block root #[serde(rename = "parentBeaconBlockRoot", skip_serializing_if = "Option::is_none")] - pub parent_beacon_block_root: Option, + pub parent_beacon_block_root: Option, } // === impl Header === @@ -280,14 +280,14 @@ pub struct BlockOverrides { pub coinbase: Option
, /// Overrides the prevrandao of the block. #[serde(default, skip_serializing_if = "Option::is_none")] - pub random: Option, + pub random: Option, /// Overrides the basefee of the block. #[serde(default, skip_serializing_if = "Option::is_none")] pub base_fee: Option, /// A dictionary that maps blockNumber to a user-defined hash. It could be queried from the /// solidity opcode BLOCKHASH. #[serde(default, skip_serializing_if = "Option::is_none")] - pub block_hash: Option>, + pub block_hash: Option>, } #[cfg(test)] @@ -318,14 +318,14 @@ mod tests { fn serde_block() { let block = Block { header: Header { - hash: Some(H256::from_low_u64_be(1)), - parent_hash: H256::from_low_u64_be(2), - uncles_hash: H256::from_low_u64_be(3), - miner: Address::from_low_u64_be(4), - state_root: H256::from_low_u64_be(5), - transactions_root: H256::from_low_u64_be(6), - receipts_root: H256::from_low_u64_be(7), - withdrawals_root: Some(H256::from_low_u64_be(8)), + hash: Some(B256::with_last_byte(1)), + parent_hash: B256::with_last_byte(2), + uncles_hash: B256::with_last_byte(3), + miner: Address::with_last_byte(4), + state_root: B256::with_last_byte(5), + transactions_root: B256::with_last_byte(6), + receipts_root: B256::with_last_byte(7), + withdrawals_root: Some(B256::with_last_byte(8)), number: Some(U256::from(9)), gas_used: U256::from(10), gas_limit: U256::from(11), @@ -333,16 +333,16 @@ mod tests { logs_bloom: Bloom::default(), timestamp: U256::from(12), difficulty: U256::from(13), - mix_hash: H256::from_low_u64_be(14), - nonce: Some(H64::from_low_u64_be(15)), + mix_hash: B256::with_last_byte(14), + nonce: Some(B64::with_last_byte(15)), base_fee_per_gas: Some(U256::from(20)), blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, }, total_difficulty: Some(U256::from(100000)), - uncles: vec![H256::from_low_u64_be(17)], - transactions: BlockTransactions::Hashes(vec![H256::from_low_u64_be(18)]), + uncles: vec![B256::with_last_byte(17)], + transactions: BlockTransactions::Hashes(vec![B256::with_last_byte(18)]), size: Some(U256::from(19)), withdrawals: Some(vec![]), }; @@ -359,13 +359,13 @@ mod tests { fn serde_block_with_withdrawals_set_as_none() { let block = Block { header: Header { - hash: Some(H256::from_low_u64_be(1)), - parent_hash: H256::from_low_u64_be(2), - uncles_hash: H256::from_low_u64_be(3), - miner: Address::from_low_u64_be(4), - state_root: H256::from_low_u64_be(5), - transactions_root: H256::from_low_u64_be(6), - receipts_root: H256::from_low_u64_be(7), + hash: Some(B256::with_last_byte(1)), + parent_hash: B256::with_last_byte(2), + uncles_hash: B256::with_last_byte(3), + miner: Address::with_last_byte(4), + state_root: B256::with_last_byte(5), + transactions_root: B256::with_last_byte(6), + receipts_root: B256::with_last_byte(7), withdrawals_root: None, number: Some(U256::from(9)), gas_used: U256::from(10), @@ -374,16 +374,16 @@ mod tests { logs_bloom: Bloom::default(), timestamp: U256::from(12), difficulty: U256::from(13), - mix_hash: H256::from_low_u64_be(14), - nonce: Some(H64::from_low_u64_be(15)), + mix_hash: B256::with_last_byte(14), + nonce: Some(B64::with_last_byte(15)), base_fee_per_gas: Some(U256::from(20)), blob_gas_used: None, excess_blob_gas: None, parent_beacon_block_root: None, }, total_difficulty: Some(U256::from(100000)), - uncles: vec![H256::from_low_u64_be(17)], - transactions: BlockTransactions::Hashes(vec![H256::from_low_u64_be(18)]), + uncles: vec![B256::with_last_byte(17)], + transactions: BlockTransactions::Hashes(vec![B256::with_last_byte(18)]), size: 
Some(U256::from(19)), withdrawals: None, }; diff --git a/crates/rpc/rpc-types/src/eth/call.rs b/crates/rpc/rpc-types/src/eth/call.rs index ca4e664626..8c65b85b1d 100644 --- a/crates/rpc/rpc-types/src/eth/call.rs +++ b/crates/rpc/rpc-types/src/eth/call.rs @@ -1,7 +1,6 @@ -use reth_primitives::{AccessList, Address, BlockId, Bytes, H256, U256, U64, U8}; -use serde::{Deserialize, Deserializer, Serialize, Serializer}; - use crate::BlockOverrides; +use reth_primitives::{AccessList, Address, BlockId, Bytes, B256, U256, U64, U8}; +use serde::{Deserialize, Deserializer, Serialize, Serializer}; /// Bundle of transactions #[derive(Debug, Clone, Default, Eq, PartialEq, Serialize, Deserialize)] @@ -119,7 +118,7 @@ pub struct CallRequest { pub max_fee_per_blob_gas: Option, /// Blob Versioned Hashes for EIP-4844 transactions #[serde(skip_serializing_if = "Option::is_none")] - pub blob_versioned_hashes: Option>, + pub blob_versioned_hashes: Option>, /// EIP-2718 type #[serde(rename = "type")] pub transaction_type: Option, diff --git a/crates/rpc/rpc-types/src/eth/engine/cancun.rs b/crates/rpc/rpc-types/src/eth/engine/cancun.rs index 92dea60742..b3e888021a 100644 --- a/crates/rpc/rpc-types/src/eth/engine/cancun.rs +++ b/crates/rpc/rpc-types/src/eth/engine/cancun.rs @@ -1,6 +1,6 @@ //! Contains types related to the Cancun hardfork that will be used by RPC to communicate with the //! beacon consensus engine. -use reth_primitives::H256; +use reth_primitives::B256; /// Fields introduced in `engine_newPayloadV3` that are not present in the `ExecutionPayload` RPC /// object. @@ -10,8 +10,8 @@ use reth_primitives::H256; #[derive(Debug, Clone, PartialEq, Eq, Hash, Default)] pub struct CancunPayloadFields { /// The parent beacon block root. - pub parent_beacon_block_root: H256, + pub parent_beacon_block_root: B256, /// The expected blob versioned hashes. - pub versioned_hashes: Vec, + pub versioned_hashes: Vec, } diff --git a/crates/rpc/rpc-types/src/eth/engine/forkchoice.rs b/crates/rpc/rpc-types/src/eth/engine/forkchoice.rs index f1c2ca4a79..ef1e9b72c9 100644 --- a/crates/rpc/rpc-types/src/eth/engine/forkchoice.rs +++ b/crates/rpc/rpc-types/src/eth/engine/forkchoice.rs @@ -1,6 +1,6 @@ use super::{PayloadStatus, PayloadStatusEnum}; use crate::engine::PayloadId; -use reth_primitives::H256; +use reth_primitives::B256; use serde::{Deserialize, Serialize}; /// invalid forkchoice state error code. @@ -23,11 +23,11 @@ pub type ForkChoiceUpdateResult = Result Self { + pub fn with_latest_valid_hash(mut self, hash: B256) -> Self { self.payload_status.latest_valid_hash = Some(hash); self } diff --git a/crates/rpc/rpc-types/src/eth/engine/payload.rs b/crates/rpc/rpc-types/src/eth/engine/payload.rs index 266494aed4..316f760fbe 100644 --- a/crates/rpc/rpc-types/src/eth/engine/payload.rs +++ b/crates/rpc/rpc-types/src/eth/engine/payload.rs @@ -1,7 +1,7 @@ pub use crate::Withdrawal; use reth_primitives::{ kzg::{Blob, Bytes48}, - Address, BlobTransactionSidecar, Bloom, Bytes, SealedBlock, H256, H64, U256, U64, + Address, BlobTransactionSidecar, Bloom, Bytes, SealedBlock, B256, B64, U256, U64, }; use serde::{ser::SerializeMap, Deserialize, Serialize, Serializer}; @@ -10,14 +10,14 @@ pub type ExecutionPayloadBodiesV1 = Vec>; /// And 8-byte identifier for an execution payload. #[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize, Hash)] -pub struct PayloadId(H64); +pub struct PayloadId(B64); // === impl PayloadId === impl PayloadId { /// Creates a new payload id from the given identifier. 
pub fn new(id: [u8; 8]) -> Self { - Self(H64::from(id)) + Self(B64::from(id)) } } @@ -121,19 +121,19 @@ pub struct ExecutionPayloadEnvelopeV3 { #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] #[serde(rename_all = "camelCase")] pub struct ExecutionPayloadV1 { - pub parent_hash: H256, + pub parent_hash: B256, pub fee_recipient: Address, - pub state_root: H256, - pub receipts_root: H256, + pub state_root: B256, + pub receipts_root: B256, pub logs_bloom: Bloom, - pub prev_randao: H256, + pub prev_randao: B256, pub block_number: U64, pub gas_limit: U64, pub gas_used: U64, pub timestamp: U64, pub extra_data: Bytes, pub base_fee_per_gas: U256, - pub block_hash: H256, + pub block_hash: B256, pub transactions: Vec, } @@ -155,10 +155,10 @@ impl From for ExecutionPayloadV1 { receipts_root: value.receipts_root, logs_bloom: value.logs_bloom, prev_randao: value.mix_hash, - block_number: value.number.into(), - gas_limit: value.gas_limit.into(), - gas_used: value.gas_used.into(), - timestamp: value.timestamp.into(), + block_number: U64::from(value.number), + gas_limit: U64::from(value.gas_limit), + gas_used: U64::from(value.gas_used), + timestamp: U64::from(value.timestamp), extra_data: value.extra_data.clone(), base_fee_per_gas: U256::from(value.base_fee_per_gas.unwrap_or_default()), block_hash: value.hash(), @@ -185,7 +185,7 @@ pub struct ExecutionPayloadV2 { impl ExecutionPayloadV2 { /// Returns the timestamp for the execution payload. pub fn timestamp(&self) -> u64 { - self.payload_inner.timestamp.as_u64() + self.payload_inner.timestamp.to() } } @@ -215,7 +215,7 @@ impl ExecutionPayloadV3 { /// Returns the timestamp for the payload. pub fn timestamp(&self) -> u64 { - self.payload_inner.payload_inner.timestamp.as_u64() + self.payload_inner.payload_inner.timestamp.to() } } @@ -267,14 +267,14 @@ impl ExecutionPayload { /// Returns the timestamp for the payload. pub fn timestamp(&self) -> u64 { match self { - ExecutionPayload::V1(payload) => payload.timestamp.as_u64(), + ExecutionPayload::V1(payload) => payload.timestamp.to(), ExecutionPayload::V2(payload) => payload.timestamp(), ExecutionPayload::V3(payload) => payload.timestamp(), } } /// Returns the parent hash for the payload. - pub fn parent_hash(&self) -> H256 { + pub fn parent_hash(&self) -> B256 { match self { ExecutionPayload::V1(payload) => payload.parent_hash, ExecutionPayload::V2(payload) => payload.payload_inner.parent_hash, @@ -283,7 +283,7 @@ impl ExecutionPayload { } /// Returns the block hash for the payload. - pub fn block_hash(&self) -> H256 { + pub fn block_hash(&self) -> B256 { match self { ExecutionPayload::V1(payload) => payload.block_hash, ExecutionPayload::V2(payload) => payload.payload_inner.block_hash, @@ -294,11 +294,9 @@ impl ExecutionPayload { /// Returns the block number for this payload. pub fn block_number(&self) -> u64 { match self { - ExecutionPayload::V1(payload) => payload.block_number.as_u64(), - ExecutionPayload::V2(payload) => payload.payload_inner.block_number.as_u64(), - ExecutionPayload::V3(payload) => { - payload.payload_inner.payload_inner.block_number.as_u64() - } + ExecutionPayload::V1(payload) => payload.block_number.to(), + ExecutionPayload::V2(payload) => payload.payload_inner.block_number.to(), + ExecutionPayload::V3(payload) => payload.payload_inner.payload_inner.block_number.to(), } } } @@ -336,20 +334,23 @@ pub enum PayloadError { /// Invalid payload excess blob gas. #[error("Invalid payload excess blob gas: {0}")] ExcessBlobGas(U256), + /// Pre-cancun Payload has blob transactions. 
+ #[error("Invalid payload, pre-Cancun payload has blob transactions")] + PreCancunBlockWithBlobTransactions, /// Invalid payload block hash. #[error("blockhash mismatch, want {consensus}, got {execution}")] BlockHash { /// The block hash computed from the payload. - execution: H256, + execution: B256, /// The block hash provided with the payload. - consensus: H256, + consensus: B256, }, /// Expected blob versioned hashes do not match the given transactions. #[error("Expected blob versioned hashes do not match the given transactions")] InvalidVersionedHashes, /// Encountered decoding error. #[error(transparent)] - Decode(#[from] reth_rlp::DecodeError), + Decode(#[from] alloy_rlp::Error), } impl PayloadError { @@ -380,7 +381,7 @@ pub struct PayloadAttributes { /// Value for the `timestamp` field of the new payload pub timestamp: U64, /// Value for the `prevRandao` field of the new payload - pub prev_randao: H256, + pub prev_randao: B256, /// Suggested value for the `feeRecipient` field of the new payload pub suggested_fee_recipient: Address, /// Array of [`Withdrawal`] enabled with V2 @@ -391,7 +392,7 @@ pub struct PayloadAttributes { /// /// See also #[serde(default, skip_serializing_if = "Option::is_none")] - pub parent_beacon_block_root: Option, + pub parent_beacon_block_root: Option, } /// This structure contains the result of processing a payload or fork choice update. @@ -401,11 +402,11 @@ pub struct PayloadStatus { #[serde(flatten)] pub status: PayloadStatusEnum, /// Hash of the most recent valid block in the branch defined by payload and its ancestors - pub latest_valid_hash: Option, + pub latest_valid_hash: Option, } impl PayloadStatus { - pub fn new(status: PayloadStatusEnum, latest_valid_hash: Option) -> Self { + pub fn new(status: PayloadStatusEnum, latest_valid_hash: Option) -> Self { Self { status, latest_valid_hash } } @@ -413,12 +414,12 @@ impl PayloadStatus { Self { status, latest_valid_hash: None } } - pub fn with_latest_valid_hash(mut self, latest_valid_hash: H256) -> Self { + pub fn with_latest_valid_hash(mut self, latest_valid_hash: B256) -> Self { self.latest_valid_hash = Some(latest_valid_hash); self } - pub fn maybe_latest_valid_hash(mut self, latest_valid_hash: Option) -> Self { + pub fn maybe_latest_valid_hash(mut self, latest_valid_hash: Option) -> Self { self.latest_valid_hash = latest_valid_hash; self } @@ -557,9 +558,9 @@ pub enum PayloadValidationError { #[error("invalid merkle root: (remote: {remote:?} local: {local:?})")] InvalidStateRoot { /// The state root of the payload we received from remote (CL) - remote: H256, + remote: B256, /// The state root of the payload that we computed locally. - local: H256, + local: B256, }, } diff --git a/crates/rpc/rpc-types/src/eth/engine/transition.rs b/crates/rpc/rpc-types/src/eth/engine/transition.rs index b4addac95b..8c08343595 100644 --- a/crates/rpc/rpc-types/src/eth/engine/transition.rs +++ b/crates/rpc/rpc-types/src/eth/engine/transition.rs @@ -1,4 +1,4 @@ -use reth_primitives::{H256, U256, U64}; +use reth_primitives::{B256, U256, U64}; use serde::{Deserialize, Serialize}; /// This structure contains configurable settings of the transition process. 
@@ -8,7 +8,7 @@ pub struct TransitionConfiguration { /// Maps on the TERMINAL_TOTAL_DIFFICULTY parameter of EIP-3675 pub terminal_total_difficulty: U256, /// Maps on TERMINAL_BLOCK_HASH parameter of EIP-3675 - pub terminal_block_hash: H256, + pub terminal_block_hash: B256, /// Maps on TERMINAL_BLOCK_NUMBER parameter of EIP-3675 pub terminal_block_number: U64, } diff --git a/crates/rpc/rpc-types/src/eth/filter.rs b/crates/rpc/rpc-types/src/eth/filter.rs index bfe92fbe07..370395f739 100644 --- a/crates/rpc/rpc-types/src/eth/filter.rs +++ b/crates/rpc/rpc-types/src/eth/filter.rs @@ -1,8 +1,7 @@ use crate::Log as RpcLog; use itertools::{EitherOrBoth::*, Itertools}; use reth_primitives::{ - bloom::{Bloom, Input}, - keccak256, Address, BlockNumberOrTag, Log, H160, H256, U256, U64, + keccak256, Address, BlockNumberOrTag, Bloom, BloomInput, Log, B256, U256, U64, }; use serde::{ de::{DeserializeOwned, MapAccess, Visitor}, @@ -30,7 +29,7 @@ impl BloomFilter { /// If the filter is empty (the list is empty), then any bloom matches /// Otherwise, there must be at least one matche for the BloomFilter to match. pub fn matches(&self, bloom: Bloom) -> bool { - self.0.is_empty() || self.0.iter().any(|a| bloom.contains_bloom(a)) + self.0.is_empty() || self.0.iter().any(|a| bloom.contains(a)) } } @@ -95,7 +94,7 @@ impl FilterSet { impl + Eq + Hash> FilterSet { /// Returns a list of Bloom (BloomFilter) corresponding to the filter's values pub fn to_bloom_filter(&self) -> BloomFilter { - self.0.iter().map(|a| Input::Raw(a.as_ref()).into()).collect::>().into() + self.0.iter().map(|a| BloomInput::Raw(a.as_ref()).into()).collect::>().into() } } @@ -116,11 +115,11 @@ impl FilterSet { } /// A single topic -pub type Topic = FilterSet; +pub type Topic = FilterSet; impl From for Topic { fn from(src: U256) -> Self { - Into::::into(src).into() + Into::::into(src).into() } } @@ -137,7 +136,7 @@ pub enum FilterBlockOption { to_block: Option, }, /// The hash of the block if the filter only targets a single block - AtBlockHash(H256), + AtBlockHash(B256), } impl FilterBlockOption { @@ -209,8 +208,8 @@ impl> From> for FilterBlockOption { } } -impl From for FilterBlockOption { - fn from(hash: H256) -> Self { +impl From for FilterBlockOption { + fn from(hash: B256) -> Self { FilterBlockOption::AtBlockHash(hash) } } @@ -242,7 +241,7 @@ impl FilterBlockOption { /// Pins the block hash this filter should target. #[must_use] - pub fn set_hash(&self, hash: H256) -> Self { + pub fn set_hash(&self, hash: B256) -> Self { FilterBlockOption::AtBlockHash(hash) } } @@ -295,10 +294,10 @@ impl Filter { /// Match a block by its hash /// /// ```rust - /// # use reth_primitives::H256; + /// # use reth_primitives::B256; /// # use reth_rpc_types::Filter; /// # fn main() { - /// let filter = Filter::new().select(H256::zero()); + /// let filter = Filter::new().select(B256::ZERO); /// # } /// ``` /// This is the same as `at_block_hash` @@ -353,7 +352,7 @@ impl Filter { /// Pins the block hash for the filter #[must_use] - pub fn at_block_hash>(mut self, hash: T) -> Self { + pub fn at_block_hash>(mut self, hash: T) -> Self { self.block_option = self.block_option.set_hash(hash.into()); self } @@ -448,7 +447,7 @@ impl Filter { } /// Returns the numeric value of the `fromBlock` field - pub fn get_block_hash(&self) -> Option { + pub fn get_block_hash(&self) -> Option { match self.block_option { FilterBlockOption::AtBlockHash(hash) => Some(hash), FilterBlockOption::Range { .. 
} => None, @@ -501,7 +500,7 @@ impl Serialize for Filter { } type RawAddressFilter = ValueOrArray>; -type RawTopicsFilter = Vec>>>; +type RawTopicsFilter = Vec>>>; impl<'de> Deserialize<'de> for Filter { fn deserialize(deserializer: D) -> Result @@ -523,7 +522,7 @@ impl<'de> Deserialize<'de> for Filter { { let mut from_block: Option> = None; let mut to_block: Option> = None; - let mut block_hash: Option> = None; + let mut block_hash: Option> = None; let mut address: Option> = None; let mut topics: Option> = None; @@ -627,20 +626,20 @@ pub enum ValueOrArray { Array(Vec), } -impl From for ValueOrArray { - fn from(src: H160) -> Self { +impl From
<Address> for ValueOrArray<Address> { + fn from(src: Address) -> Self { ValueOrArray::Value(src) } } -impl From<Vec<H160>> for ValueOrArray<H160> { - fn from(src: Vec<H160>) -> Self { +impl From<Vec<Address>> for ValueOrArray<Address> { + fn from(src: Vec<Address>
) -> Self { ValueOrArray::Array(src) } } -impl From> for ValueOrArray { - fn from(src: Vec) -> Self { +impl From> for ValueOrArray { + fn from(src: Vec) -> Self { ValueOrArray::Array(src) } } @@ -714,7 +713,7 @@ impl FilteredParams { } /// Returns the [BloomFilter] for the given topics - pub fn topics_filter(topics: &[FilterSet]) -> Vec { + pub fn topics_filter(topics: &[FilterSet]) -> Vec { topics.iter().map(|t| t.to_bloom_filter()).collect() } @@ -772,7 +771,7 @@ impl FilteredParams { } /// Returns `true` if the filter matches the given block hash. - pub fn filter_block_hash(&self, block_hash: H256) -> bool { + pub fn filter_block_hash(&self, block_hash: B256) -> bool { if let Some(h) = self.filter.as_ref().and_then(|f| f.get_block_hash()) { if h != block_hash { return false @@ -822,7 +821,7 @@ pub enum FilterChanges { /// New logs. Logs(Vec), /// New hashes (block or transactions) - Hashes(Vec), + Hashes(Vec), /// Empty result, Empty, } @@ -849,7 +848,7 @@ impl<'de> Deserialize<'de> for FilterChanges { #[serde(untagged)] enum Changes { Logs(Vec), - Hashes(Vec), + Hashes(Vec), } let changes = Changes::deserialize(deserializer)?; @@ -922,12 +921,12 @@ mod tests { filter.topics, [ "0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925" - .parse::() + .parse::() .unwrap() .into(), Default::default(), "0x0000000000000000000000000c17e776cd218252adfca8d4e761d3fe757e9778" - .parse::() + .parse::() .unwrap() .into(), Default::default(), @@ -943,7 +942,7 @@ mod tests { filter.topics, [ "0x8c5be1e5ebec7d5bd14f71427d1e84f3dd0314c0f7b2291e5b200ac8c7c3b925" - .parse::() + .parse::() .unwrap() .into(), Default::default(), @@ -974,13 +973,13 @@ mod tests { #[test] fn filter_serialization_test() { let t1 = "0000000000000000000000009729a6fbefefc8f6005933898b13dc45c3a2c8b7" - .parse::() + .parse::() .unwrap(); - let t2 = H256::from([0; 32]); + let t2 = B256::from([0; 32]); let t3 = U256::from(123); let t1_padded = t1; - let t3_padded = H256::from({ + let t3_padded = B256::from({ let mut x = [0; 32]; x[31] = 123; x @@ -1034,15 +1033,15 @@ mod tests { assert_eq!(ser, json!({ "address" : addr, "topics": [t0, t1_padded, t2, t3_padded]})); } - fn build_bloom(address: Address, topic1: H256, topic2: H256) -> Bloom { + fn build_bloom(address: Address, topic1: B256, topic2: B256) -> Bloom { let mut block_bloom = Bloom::default(); - block_bloom.accrue(Input::Raw(&address[..])); - block_bloom.accrue(Input::Raw(&topic1[..])); - block_bloom.accrue(Input::Raw(&topic2[..])); + block_bloom.accrue(BloomInput::Raw(&address[..])); + block_bloom.accrue(BloomInput::Raw(&topic1[..])); + block_bloom.accrue(BloomInput::Raw(&topic2[..])); block_bloom } - fn topic_filter(topic1: H256, topic2: H256, topic3: H256) -> Filter { + fn topic_filter(topic1: B256, topic2: B256, topic3: B256) -> Filter { Filter { block_option: Default::default(), address: Default::default(), @@ -1057,23 +1056,23 @@ mod tests { #[test] fn can_detect_different_topics() { - let topic1 = H256::random(); - let topic2 = H256::random(); - let topic3 = H256::random(); + let topic1 = B256::random(); + let topic2 = B256::random(); + let topic3 = B256::random(); let topics = topic_filter(topic1, topic2, topic3).topics; let topics_bloom = FilteredParams::topics_filter(&topics); assert!(!FilteredParams::matches_topics( - build_bloom(Address::random(), H256::random(), H256::random()), + build_bloom(Address::random(), B256::random(), B256::random()), &topics_bloom )); } #[test] fn can_match_topic() { - let topic1 = H256::random(); - let topic2 = 
H256::random(); - let topic3 = H256::random(); + let topic1 = B256::random(); + let topic2 = B256::random(); + let topic3 = B256::random(); let topics = topic_filter(topic1, topic2, topic3).topics; let _topics_bloom = FilteredParams::topics_filter(&topics); @@ -1096,7 +1095,7 @@ mod tests { let topics_bloom = FilteredParams::topics_filter(&topics); assert!(FilteredParams::matches_topics( - build_bloom(Address::random(), H256::random(), H256::random()), + build_bloom(Address::random(), B256::random(), B256::random()), &topics_bloom )); } @@ -1104,9 +1103,9 @@ mod tests { #[test] fn can_match_address_and_topics() { let rng_address = Address::random(); - let topic1 = H256::random(); - let topic2 = H256::random(); - let topic3 = H256::random(); + let topic1 = B256::random(); + let topic2 = B256::random(); + let topic3 = B256::random(); let filter = Filter { block_option: Default::default(), @@ -1135,9 +1134,9 @@ mod tests { #[test] fn can_match_topics_wildcard() { - let topic1 = H256::random(); - let topic2 = H256::random(); - let topic3 = H256::random(); + let topic1 = B256::random(); + let topic2 = B256::random(); + let topic3 = B256::random(); let filter = Filter { block_option: Default::default(), @@ -1165,7 +1164,7 @@ mod tests { address: Default::default(), topics: [ Default::default(), - vec![H256::random(), H256::random()].into(), + vec![B256::random(), B256::random()].into(), Default::default(), Default::default(), ], @@ -1174,7 +1173,7 @@ mod tests { let topics_bloom = FilteredParams::topics_filter(&topics_input); assert!(!FilteredParams::matches_topics( - build_bloom(Address::random(), H256::random(), H256::random()), + build_bloom(Address::random(), B256::random(), B256::random()), &topics_bloom )); } @@ -1189,7 +1188,7 @@ mod tests { }; let address_bloom = FilteredParams::address_filter(&filter.address); assert!(FilteredParams::matches_address( - build_bloom(rng_address, H256::random(), H256::random(),), + build_bloom(rng_address, B256::random(), B256::random(),), &address_bloom )); } @@ -1205,7 +1204,7 @@ mod tests { }; let address_bloom = FilteredParams::address_filter(&filter.address); assert!(!FilteredParams::matches_address( - build_bloom(bloom_address, H256::random(), H256::random(),), + build_bloom(bloom_address, B256::random(), B256::random(),), &address_bloom )); } @@ -1239,15 +1238,15 @@ mod tests { .into(), topics: [ "0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef" - .parse::() + .parse::() .unwrap() .into(), "0x00000000000000000000000000b46c2526e227482e2ebb8f4c69e4674d262e75" - .parse::() + .parse::() .unwrap() .into(), "0x00000000000000000000000054a2d42a40f51259dedd1978f6c118a0f0eff078" - .parse::() + .parse::() .unwrap() .into(), Default::default(), diff --git a/crates/rpc/rpc-types/src/eth/log.rs b/crates/rpc/rpc-types/src/eth/log.rs index 2c474abc17..35f9ce5d1c 100644 --- a/crates/rpc/rpc-types/src/eth/log.rs +++ b/crates/rpc/rpc-types/src/eth/log.rs @@ -1,4 +1,4 @@ -use reth_primitives::{Address, Bytes, H256, U256}; +use reth_primitives::{Address, Bytes, B256, U256}; use serde::{Deserialize, Serialize}; /// Ethereum Log emitted by a transaction @@ -8,15 +8,15 @@ pub struct Log { /// Address pub address: Address, /// All topics of the log - pub topics: Vec, + pub topics: Vec, /// Additional data fields of the log pub data: Bytes, /// Hash of the block the transaction that emitted this log was mined in - pub block_hash: Option, + pub block_hash: Option, /// Number of the block the transaction that emitted this log was mined in pub block_number: 
Option, /// Transaction Hash - pub transaction_hash: Option, + pub transaction_hash: Option, /// Index of the Transaction in the block pub transaction_index: Option, /// Log Index in Block @@ -50,20 +50,20 @@ mod tests { #[test] fn serde_log() { let log = Log { - address: Address::from_low_u64_be(0x1234), - topics: vec![H256::from_low_u64_be(0x1234)], - data: Bytes::from(vec![0x12, 0x34]), - block_hash: Some(H256::from_low_u64_be(0x1234)), - block_number: Some(U256::from(0x1234)), - transaction_hash: Some(H256::from_low_u64_be(0x1234)), - transaction_index: Some(U256::from(0x1234)), - log_index: Some(U256::from(0x1234)), + address: Address::with_last_byte(0x69), + topics: vec![B256::with_last_byte(0x69)], + data: Bytes::from_static(&[0x69]), + block_hash: Some(B256::with_last_byte(0x69)), + block_number: Some(U256::from(0x69)), + transaction_hash: Some(B256::with_last_byte(0x69)), + transaction_index: Some(U256::from(0x69)), + log_index: Some(U256::from(0x69)), removed: false, }; let serialized = serde_json::to_string(&log).unwrap(); assert_eq!( serialized, - r#"{"address":"0x0000000000000000000000000000000000001234","topics":["0x0000000000000000000000000000000000000000000000000000000000001234"],"data":"0x1234","blockHash":"0x0000000000000000000000000000000000000000000000000000000000001234","blockNumber":"0x1234","transactionHash":"0x0000000000000000000000000000000000000000000000000000000000001234","transactionIndex":"0x1234","logIndex":"0x1234","removed":false}"# + r#"{"address":"0x0000000000000000000000000000000000000069","topics":["0x0000000000000000000000000000000000000000000000000000000000000069"],"data":"0x69","blockHash":"0x0000000000000000000000000000000000000000000000000000000000000069","blockNumber":"0x69","transactionHash":"0x0000000000000000000000000000000000000000000000000000000000000069","transactionIndex":"0x69","logIndex":"0x69","removed":false}"# ); let deserialized: Log = serde_json::from_str(&serialized).unwrap(); diff --git a/crates/rpc/rpc-types/src/eth/pubsub.rs b/crates/rpc/rpc-types/src/eth/pubsub.rs index bf2b3b55d4..a0c2e166c2 100644 --- a/crates/rpc/rpc-types/src/eth/pubsub.rs +++ b/crates/rpc/rpc-types/src/eth/pubsub.rs @@ -5,7 +5,7 @@ use crate::{ Log, RichHeader, }; -use reth_primitives::H256; +use reth_primitives::B256; use serde::{de::Error, Deserialize, Deserializer, Serialize, Serializer}; /// Subscription result. @@ -17,7 +17,7 @@ pub enum SubscriptionResult { /// Log Log(Box), /// Transaction hash - TransactionHash(H256), + TransactionHash(B256), /// Full Transaction FullTransaction(Box), /// SyncStatus diff --git a/crates/rpc/rpc-types/src/eth/state.rs b/crates/rpc/rpc-types/src/eth/state.rs index b9e1221850..7b60c1747a 100644 --- a/crates/rpc/rpc-types/src/eth/state.rs +++ b/crates/rpc/rpc-types/src/eth/state.rs @@ -1,6 +1,6 @@ //! bindings for state overrides in eth_call -use reth_primitives::{Address, Bytes, H256, U256, U64}; +use reth_primitives::{Address, Bytes, B256, U256, U64}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; @@ -24,16 +24,17 @@ pub struct AccountOverride { /// Fake key-value mapping to override all slots in the account storage before executing the /// call. #[serde(default, skip_serializing_if = "Option::is_none")] - pub state: Option>, + pub state: Option>, /// Fake key-value mapping to override individual slots in the account storage before executing /// the call. 
#[serde(default, skip_serializing_if = "Option::is_none")] - pub state_diff: Option>, + pub state_diff: Option>, } #[cfg(test)] mod tests { use super::*; + use reth_primitives::address; #[test] fn test_state_override() { @@ -43,9 +44,8 @@ mod tests { } }"#; let state_override: StateOverride = serde_json::from_str(s).unwrap(); - let acc = state_override - .get(&"0x0000000000000000000000000000000000000124".parse().unwrap()) - .unwrap(); + let acc = + state_override.get(&address!("0000000000000000000000000000000000000124")).unwrap(); assert!(acc.code.is_some()); } #[test] @@ -62,9 +62,8 @@ mod tests { } }"#; let state_override: StateOverride = serde_json::from_str(s).unwrap(); - let acc = state_override - .get(&"0x1b5212AF6b76113afD94cD2B5a78a73B7d7A8222".parse().unwrap()) - .unwrap(); + let acc = + state_override.get(&address!("1b5212AF6b76113afD94cD2B5a78a73B7d7A8222")).unwrap(); assert!(acc.state_diff.is_some()); } } diff --git a/crates/rpc/rpc-types/src/eth/syncing.rs b/crates/rpc/rpc-types/src/eth/syncing.rs index c6f2c6e7a9..d45b606f6b 100644 --- a/crates/rpc/rpc-types/src/eth/syncing.rs +++ b/crates/rpc/rpc-types/src/eth/syncing.rs @@ -1,4 +1,4 @@ -use reth_primitives::{H512, U256, U64}; +use reth_primitives::{B512, U256, U64}; use serde::{Deserialize, Deserializer, Serialize, Serializer}; use std::collections::BTreeMap; @@ -149,7 +149,7 @@ pub struct TransactionStats { /// Block no this transaction was first seen. pub first_seen: u64, /// Peers this transaction was propagated to with count. - pub propagated_to: BTreeMap, + pub propagated_to: BTreeMap, } /// Chain status. diff --git a/crates/rpc/rpc-types/src/eth/trace/geth/call.rs b/crates/rpc/rpc-types/src/eth/trace/geth/call.rs index c0391b0c9a..636bc19c58 100644 --- a/crates/rpc/rpc-types/src/eth/trace/geth/call.rs +++ b/crates/rpc/rpc-types/src/eth/trace/geth/call.rs @@ -1,4 +1,4 @@ -use reth_primitives::{serde_helper::num::from_int_or_hex, Address, Bytes, H256, U256}; +use reth_primitives::{serde_helper::num::from_int_or_hex, Address, Bytes, B256, U256}; use serde::{Deserialize, Serialize}; /// The response object for `debug_traceTransaction` with `"tracer": "callTracer"` @@ -35,7 +35,7 @@ pub struct CallLogFrame { #[serde(default, skip_serializing_if = "Option::is_none")] pub address: Option
, #[serde(default, skip_serializing_if = "Option::is_none")] - pub topics: Option>, + pub topics: Option>, #[serde(default, skip_serializing_if = "Option::is_none")] pub data: Option, } diff --git a/crates/rpc/rpc-types/src/eth/trace/geth/mod.rs b/crates/rpc/rpc-types/src/eth/trace/geth/mod.rs index ebd1a4b2ca..752a0c5be6 100644 --- a/crates/rpc/rpc-types/src/eth/trace/geth/mod.rs +++ b/crates/rpc/rpc-types/src/eth/trace/geth/mod.rs @@ -2,7 +2,7 @@ #![allow(missing_docs)] use crate::{state::StateOverride, BlockOverrides}; -use reth_primitives::{Bytes, H256, U256}; +use reth_primitives::{Bytes, B256, U256}; use serde::{de::DeserializeOwned, ser::SerializeMap, Deserialize, Serialize, Serializer}; use std::collections::BTreeMap; @@ -29,7 +29,7 @@ pub struct BlockTraceResult { /// Block number corresponding to the trace task pub block: U256, /// Block hash corresponding to the trace task - pub hash: H256, + pub hash: B256, /// Trace results produced by the trace task pub traces: Vec, } @@ -80,7 +80,7 @@ pub struct StructLog { skip_serializing_if = "Option::is_none", serialize_with = "serialize_string_storage_map_opt" )] - pub storage: Option>, + pub storage: Option>, /// Current call depth pub depth: u64, /// Refund counter @@ -354,7 +354,7 @@ pub struct GethDebugTracingCallOptions { /// Serializes a storage map as a list of key-value pairs _without_ 0x-prefix fn serialize_string_storage_map_opt( - storage: &Option>, + storage: &Option>, s: S, ) -> Result { match storage { diff --git a/crates/rpc/rpc-types/src/eth/trace/geth/noop.rs b/crates/rpc/rpc-types/src/eth/trace/geth/noop.rs index a3b3f726d8..a9db367350 100644 --- a/crates/rpc/rpc-types/src/eth/trace/geth/noop.rs +++ b/crates/rpc/rpc-types/src/eth/trace/geth/noop.rs @@ -14,7 +14,7 @@ mod tests { use super::*; use crate::trace::geth::*; - const DEFAULT: &str = r#"{}"#; + const DEFAULT: &str = r"{}"; #[test] fn test_serialize_noop_trace() { diff --git a/crates/rpc/rpc-types/src/eth/trace/geth/pre_state.rs b/crates/rpc/rpc-types/src/eth/trace/geth/pre_state.rs index 2f8be5ab36..c0cb3f37d1 100644 --- a/crates/rpc/rpc-types/src/eth/trace/geth/pre_state.rs +++ b/crates/rpc/rpc-types/src/eth/trace/geth/pre_state.rs @@ -1,4 +1,4 @@ -use reth_primitives::{serde_helper::num::from_int_or_hex_opt, Address, Bytes, H256, U256}; +use reth_primitives::{serde_helper::num::from_int_or_hex_opt, Address, Bytes, B256, U256}; use serde::{Deserialize, Serialize}; use std::collections::BTreeMap; @@ -10,16 +10,50 @@ pub enum PreStateFrame { Diff(DiffMode), } +impl PreStateFrame { + /// Returns true if this trace was requested without diffmode. + pub fn is_default(&self) -> bool { + matches!(self, PreStateFrame::Default(_)) + } + + /// Returns true if this trace was requested with diffmode. + pub fn is_diff(&self) -> bool { + matches!(self, PreStateFrame::Diff(_)) + } + + /// Returns the account states after the transaction is executed if this trace was requested + /// without diffmode. + pub fn as_default(&self) -> Option<&PreStateMode> { + match self { + PreStateFrame::Default(mode) => Some(mode), + _ => None, + } + } + + /// Returns the account states before and after the transaction is executed if this trace was + /// requested with diffmode. 
+ pub fn as_diff(&self) -> Option<&DiffMode> { + match self { + PreStateFrame::Diff(mode) => Some(mode), + _ => None, + } + } +} + #[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] pub struct PreStateMode(pub BTreeMap); +/// Represents the account states before and after the transaction is executed. #[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] #[serde(deny_unknown_fields)] pub struct DiffMode { - pub pre: BTreeMap, + /// The account states after the transaction is executed. pub post: BTreeMap, + /// The account states before the transaction is executed. + pub pre: BTreeMap, } +/// Represents the state of an account #[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] pub struct AccountState { #[serde( @@ -30,14 +64,26 @@ pub struct AccountState { pub balance: Option, #[serde(default, skip_serializing_if = "Option::is_none")] pub code: Option, - #[serde( - default, - deserialize_with = "from_int_or_hex_opt", - skip_serializing_if = "Option::is_none" - )] - pub nonce: Option, #[serde(default, skip_serializing_if = "Option::is_none")] - pub storage: Option>, + pub nonce: Option, + #[serde(default, skip_serializing_if = "BTreeMap::is_empty")] + pub storage: BTreeMap, +} + +impl AccountState { + /// Creates a new `AccountState` with the given account info. + /// + /// If balance is zero, it will be omitted. + /// If nonce is zero, it will be omitted. + /// If code is empty, it will be omitted. + pub fn from_account_info(nonce: u64, balance: U256, code: Option) -> Self { + Self { + balance: (balance != U256::ZERO).then_some(balance), + code: code.filter(|code| !code.is_empty()), + nonce: (nonce != 0).then_some(nonce), + storage: Default::default(), + } + } } #[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)] @@ -99,4 +145,53 @@ mod tests { assert!(!PreStateConfig { diff_mode: Some(false) }.is_diff_mode()); assert!(!PreStateConfig { diff_mode: None }.is_diff_mode()); } + + #[test] + fn parse_prestate_default_resp() { + let s = r#"{ + "0x0000000000000000000000000000000000000002": { + "balance": "0x0" + }, + "0x008b3b2f992c0e14edaa6e2c662bec549caa8df1": { + "balance": "0x2638035a26d133809" + }, + "0x35a9f94af726f07b5162df7e828cc9dc8439e7d0": { + "balance": "0x7a48734599f7284", + "nonce": 1133 + }, + "0xc8ba32cab1757528daf49033e3673fae77dcf05d": { + "balance": "0x0", + "code": "0x", + "nonce": 1, + "storage": { + "0x0000000000000000000000000000000000000000000000000000000000000000": "0x000000000000000000000000000000000000000000000000000000000024aea6", + "0x59fb7853eb21f604d010b94c123acbeae621f09ce15ee5d7616485b1e78a72e9": "0x00000000000000c42b56a52aedf18667c8ae258a0280a8912641c80c48cd9548", + "0x8d8ebb65ec00cb973d4fe086a607728fd1b9de14aa48208381eed9592f0dee9a": "0x00000000000000784ae4881e40b1f5ebb4437905fbb8a5914454123b0293b35f", + "0xff896b09014882056009dedb136458f017fcef9a4729467d0d00b4fd413fb1f1": "0x000000000000000e78ac39cb1c20e9edc753623b153705d0ccc487e31f9d6749" + } + } +} +"#; + let pre_state: PreStateFrame = serde_json::from_str(s).unwrap(); + assert!(pre_state.is_default()); + } + #[test] + fn parse_prestate_diff_resp() { + let s = r#"{ + "post": { + "0x35a9f94af726f07b5162df7e828cc9dc8439e7d0": { + "nonce": 1135 + } + }, + "pre": { + "0x35a9f94af726f07b5162df7e828cc9dc8439e7d0": { + "balance": "0x7a48429e177130a", + "nonce": 1134 + } + } +} +"#; + let pre_state: PreStateFrame = serde_json::from_str(s).unwrap(); + assert!(pre_state.is_diff()); + } } diff --git 
a/crates/rpc/rpc-types/src/eth/trace/parity.rs b/crates/rpc/rpc-types/src/eth/trace/parity.rs index 200460b4a9..175e79dc2d 100644 --- a/crates/rpc/rpc-types/src/eth/trace/parity.rs +++ b/crates/rpc/rpc-types/src/eth/trace/parity.rs @@ -3,7 +3,7 @@ //! //! See -use reth_primitives::{Address, Bytes, H256, U256, U64}; +use reth_primitives::{Address, Bytes, B256, U256, U64}; use serde::{Deserialize, Serialize}; use std::{ collections::BTreeMap, @@ -60,7 +60,7 @@ impl TraceResults { pub struct TraceResultsWithTransactionHash { #[serde(flatten)] pub full_trace: TraceResults, - pub transaction_hash: H256, + pub transaction_hash: B256, } #[derive(Clone, Debug, Eq, PartialEq, Serialize, Deserialize)] @@ -88,7 +88,7 @@ pub struct AccountDiff { pub balance: Delta, pub code: Delta, pub nonce: Delta, - pub storage: BTreeMap>, + pub storage: BTreeMap>, } /// New-type for list of account diffs @@ -307,7 +307,7 @@ pub struct LocalizedTransactionTrace { /// /// Note: this deviates from which always returns a block number #[serde(skip_serializing_if = "Option::is_none")] - pub block_hash: Option, + pub block_hash: Option, /// Block number the transaction is included in, None if pending. /// /// Note: this deviates from which always returns a block number @@ -315,7 +315,7 @@ pub struct LocalizedTransactionTrace { pub block_number: Option, /// Hash of the transaction #[serde(skip_serializing_if = "Option::is_none")] - pub transaction_hash: Option, + pub transaction_hash: Option, /// Transaction index within the block, None if pending. #[serde(skip_serializing_if = "Option::is_none")] pub transaction_position: Option, diff --git a/crates/rpc/rpc-types/src/eth/transaction/common.rs b/crates/rpc/rpc-types/src/eth/transaction/common.rs index 839630593a..8f06c5bdde 100644 --- a/crates/rpc/rpc-types/src/eth/transaction/common.rs +++ b/crates/rpc/rpc-types/src/eth/transaction/common.rs @@ -1,7 +1,7 @@ //! Commonly used additional types that are not part of the JSON RPC spec but are often required //! when working with RPC types, such as [Transaction](crate::Transaction) -use reth_primitives::{TxHash, H256}; +use reth_primitives::{TxHash, B256}; /// Additional fields in the context of a block that contains this transaction. #[derive(Debug, Clone, Copy, Default, Eq, PartialEq)] @@ -11,7 +11,7 @@ pub struct TransactionInfo { /// Index of the transaction in the block pub index: Option, /// Hash of the block. - pub block_hash: Option, + pub block_hash: Option, /// Number of the block. pub block_number: Option, /// Base fee of the block. 
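// Illustrative sketch (not taken from the diff itself): populating the updated
// TransactionInfo, whose block_hash field is now Option<B256>. Relies on the
// Default derive shown above; assumes the type is re-exported from reth_rpc_types.
use reth_primitives::B256;
use reth_rpc_types::TransactionInfo;

fn info_for_block() -> TransactionInfo {
    TransactionInfo { block_hash: Some(B256::repeat_byte(0x11)), ..Default::default() }
}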
diff --git a/crates/rpc/rpc-types/src/eth/transaction/mod.rs b/crates/rpc/rpc-types/src/eth/transaction/mod.rs index 64a05a65f4..483524e2da 100644 --- a/crates/rpc/rpc-types/src/eth/transaction/mod.rs +++ b/crates/rpc/rpc-types/src/eth/transaction/mod.rs @@ -1,7 +1,7 @@ pub use common::TransactionInfo; pub use receipt::TransactionReceipt; pub use request::TransactionRequest; -use reth_primitives::{AccessListItem, Address, Bytes, H256, U128, U256, U64}; +use reth_primitives::{AccessListItem, Address, Bytes, B256, U128, U256, U64}; use serde::{Deserialize, Serialize}; pub use signature::{Parity, Signature}; pub use typed::*; @@ -17,11 +17,11 @@ mod typed; #[serde(rename_all = "camelCase")] pub struct Transaction { /// Hash - pub hash: H256, + pub hash: B256, /// Nonce pub nonce: U64, /// Block hash - pub block_hash: Option, + pub block_hash: Option, /// Block number pub block_number: Option, /// Transaction Index @@ -57,7 +57,7 @@ pub struct Transaction { pub chain_id: Option, /// Contains the blob hashes for eip-4844 transactions. #[serde(default, skip_serializing_if = "Vec::is_empty")] - pub blob_versioned_hashes: Vec, + pub blob_versioned_hashes: Vec, /// EIP2930 /// /// Pre-pay to warm storage access. @@ -79,13 +79,13 @@ mod tests { #[test] fn serde_transaction() { let transaction = Transaction { - hash: H256::from_low_u64_be(1), + hash: B256::with_last_byte(1), nonce: U64::from(2), - block_hash: Some(H256::from_low_u64_be(3)), + block_hash: Some(B256::with_last_byte(3)), block_number: Some(U256::from(4)), transaction_index: Some(U256::from(5)), - from: Address::from_low_u64_be(6), - to: Some(Address::from_low_u64_be(7)), + from: Address::with_last_byte(6), + to: Some(Address::with_last_byte(7)), value: U256::from(8), gas_price: Some(U128::from(9)), gas: U256::from(10), @@ -116,13 +116,13 @@ mod tests { #[test] fn serde_transaction_with_parity_bit() { let transaction = Transaction { - hash: H256::from_low_u64_be(1), + hash: B256::with_last_byte(1), nonce: U64::from(2), - block_hash: Some(H256::from_low_u64_be(3)), + block_hash: Some(B256::with_last_byte(3)), block_number: Some(U256::from(4)), transaction_index: Some(U256::from(5)), - from: Address::from_low_u64_be(6), - to: Some(Address::from_low_u64_be(7)), + from: Address::with_last_byte(6), + to: Some(Address::with_last_byte(7)), value: U256::from(8), gas_price: Some(U128::from(9)), gas: U256::from(10), diff --git a/crates/rpc/rpc-types/src/eth/transaction/receipt.rs b/crates/rpc/rpc-types/src/eth/transaction/receipt.rs index f4900a6194..18197c9920 100644 --- a/crates/rpc/rpc-types/src/eth/transaction/receipt.rs +++ b/crates/rpc/rpc-types/src/eth/transaction/receipt.rs @@ -1,5 +1,5 @@ use crate::Log; -use reth_primitives::{Address, Bloom, H256, U128, U256, U64, U8}; +use reth_primitives::{Address, Bloom, B256, U128, U256, U64, U8}; use serde::{Deserialize, Serialize}; /// Transaction receipt @@ -7,11 +7,11 @@ use serde::{Deserialize, Serialize}; #[serde(rename_all = "camelCase")] pub struct TransactionReceipt { /// Transaction Hash. - pub transaction_hash: Option, + pub transaction_hash: Option, /// Index within the block. pub transaction_index: U64, /// Hash of the block this transaction was included within. - pub block_hash: Option, + pub block_hash: Option, /// Number of the block this transaction was included within. pub block_number: Option, /// Cumulative gas used within the block after this was executed. 
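// Illustrative sketch (not taken from the diff itself): the two mechanical
// replacements that recur throughout these test updates. `B256::with_last_byte(n)`
// reproduces the byte layout previously written as `H256::from_low_u64_be(n)` for
// single-byte values, and the ruint-backed integers convert to native types via
// the generic `to()` instead of `as_u64()`.
use reth_primitives::{B256, U64};

fn migration_equivalents() {
    let hash = B256::with_last_byte(1);
    // Only the last byte is set; the remaining 31 bytes are zero.
    assert_eq!(hash.as_slice()[31], 1);
    assert!(hash.as_slice()[..31].iter().all(|byte| *byte == 0));

    let block_count = U64::from(16u64);
    let native: u64 = block_count.to();
    assert_eq!(native, 16);
}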
@@ -44,7 +44,7 @@ pub struct TransactionReceipt { /// /// EIP98 makes this optional field, if it's missing then skip serializing it #[serde(skip_serializing_if = "Option::is_none", rename = "root")] - pub state_root: Option, + pub state_root: Option, /// Status: either 1 (success) or 0 (failure). Only present after activation of EIP-658 #[serde(skip_serializing_if = "Option::is_none", rename = "status")] pub status_code: Option, diff --git a/crates/rpc/rpc-types/src/eth/transaction/typed.rs b/crates/rpc/rpc-types/src/eth/transaction/typed.rs index b92a889f35..8b36894932 100644 --- a/crates/rpc/rpc-types/src/eth/transaction/typed.rs +++ b/crates/rpc/rpc-types/src/eth/transaction/typed.rs @@ -3,10 +3,10 @@ //! transaction deserialized from the json input of an RPC call. Depending on what fields are set, //! it can be converted into the container type [`TypedTransactionRequest`]. +use alloy_rlp::{BufMut, Decodable, Encodable, Error as RlpError, RlpDecodable, RlpEncodable}; use reth_primitives::{ AccessList, Address, Bytes, Transaction, TxEip1559, TxEip2930, TxLegacy, U128, U256, U64, }; -use reth_rlp::{BufMut, Decodable, DecodeError, Encodable, RlpDecodable, RlpEncodable}; use serde::{Deserialize, Serialize}; /// Container type for various Ethereum transaction requests @@ -33,7 +33,7 @@ impl TypedTransactionRequest { Some(match self { TypedTransactionRequest::Legacy(tx) => Transaction::Legacy(TxLegacy { chain_id: tx.chain_id, - nonce: tx.nonce.as_u64(), + nonce: tx.nonce.to(), gas_price: tx.gas_price.to(), gas_limit: tx.gas_limit.try_into().ok()?, to: tx.kind.into(), @@ -42,7 +42,7 @@ impl TypedTransactionRequest { }), TypedTransactionRequest::EIP2930(tx) => Transaction::Eip2930(TxEip2930 { chain_id: tx.chain_id, - nonce: tx.nonce.as_u64(), + nonce: tx.nonce.to(), gas_price: tx.gas_price.to(), gas_limit: tx.gas_limit.try_into().ok()?, to: tx.kind.into(), @@ -52,7 +52,7 @@ impl TypedTransactionRequest { }), TypedTransactionRequest::EIP1559(tx) => Transaction::Eip1559(TxEip1559 { chain_id: tx.chain_id, - nonce: tx.nonce.as_u64(), + nonce: tx.nonce.to(), max_fee_per_gas: tx.max_fee_per_gas.to(), gas_limit: tx.gas_limit.try_into().ok()?, to: tx.kind.into(), @@ -143,7 +143,7 @@ impl Encodable for TransactionKind { } impl Decodable for TransactionKind { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { if let Some(&first) = buf.first() { if first == 0x80 { *buf = &buf[1..]; @@ -153,7 +153,7 @@ impl Decodable for TransactionKind { Ok(TransactionKind::Call(addr)) } } else { - Err(DecodeError::InputTooShort) + Err(RlpError::InputTooShort) } } } diff --git a/crates/rpc/rpc-types/src/eth/withdrawal.rs b/crates/rpc/rpc-types/src/eth/withdrawal.rs index 41314ebb5a..54da7c9a86 100644 --- a/crates/rpc/rpc-types/src/eth/withdrawal.rs +++ b/crates/rpc/rpc-types/src/eth/withdrawal.rs @@ -1,5 +1,5 @@ +use alloy_rlp::RlpEncodable; use reth_primitives::{constants::GWEI_TO_WEI, serde_helper::u64_hex, Address, U256}; -use reth_rlp::RlpEncodable; use serde::{Deserialize, Serialize}; /// Withdrawal represents a validator withdrawal from the consensus layer. 
#[derive(Debug, Clone, PartialEq, Eq, Default, Hash, RlpEncodable, Serialize, Deserialize)] diff --git a/crates/rpc/rpc-types/src/eth/work.rs b/crates/rpc/rpc-types/src/eth/work.rs index b730655ebe..e6508a66aa 100644 --- a/crates/rpc/rpc-types/src/eth/work.rs +++ b/crates/rpc/rpc-types/src/eth/work.rs @@ -1,4 +1,4 @@ -use reth_primitives::{H256, U256}; +use reth_primitives::{B256, U256}; use serde::{ de::{Error, SeqAccess, Visitor}, Deserialize, Deserializer, Serialize, Serializer, @@ -9,11 +9,11 @@ use std::fmt; #[derive(Clone, Debug, PartialEq, Eq, Default)] pub struct Work { /// The proof-of-work hash. - pub pow_hash: H256, + pub pow_hash: B256, /// The seed hash. - pub seed_hash: H256, + pub seed_hash: B256, /// The target. - pub target: H256, + pub target: B256, /// The block number: this isn't always stored. pub number: Option, } @@ -51,13 +51,13 @@ impl<'a> Deserialize<'a> for Work { A: SeqAccess<'a>, { let pow_hash = seq - .next_element::()? + .next_element::()? .ok_or_else(|| A::Error::custom("missing pow hash"))?; let seed_hash = seq - .next_element::()? + .next_element::()? .ok_or_else(|| A::Error::custom("missing seed hash"))?; let target = seq - .next_element::()? + .next_element::()? .ok_or_else(|| A::Error::custom("missing target"))?; let number = seq.next_element::()?; Ok(Work { pow_hash, seed_hash, target, number }) diff --git a/crates/rpc/rpc-types/src/lib.rs b/crates/rpc/rpc-types/src/lib.rs index 3c52b87738..988a38a5fc 100644 --- a/crates/rpc/rpc-types/src/lib.rs +++ b/crates/rpc/rpc-types/src/lib.rs @@ -5,7 +5,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/rpc/rpc/Cargo.toml b/crates/rpc/rpc/Cargo.toml index 33c2fdde67..d22337a203 100644 --- a/crates/rpc/rpc/Cargo.toml +++ b/crates/rpc/rpc/Cargo.toml @@ -14,7 +14,6 @@ Reth RPC implementation reth-interfaces.workspace = true reth-primitives.workspace = true reth-rpc-api = { path = "../rpc-api" } -reth-rlp.workspace = true reth-rpc-types.workspace = true reth-provider = { workspace = true, features = ["test-utils"] } reth-transaction-pool = { workspace = true, features = ["test-utils"] } @@ -27,12 +26,15 @@ reth-rpc-types-compat.workspace = true lazy_static = "*" # eth +alloy-rlp.workspace = true +alloy-dyn-abi = { workspace = true, features = ["eip712"] } +alloy-primitives.workspace = true +alloy-sol-types.workspace = true revm = { workspace = true, features = [ "optional_block_gas_limit", "optional_eip3607", "optional_no_base_fee", ] } -ethers-core = { workspace = true, features = ["eip712"] } revm-primitives = { workspace = true, features = ["serde"] } # rpc @@ -61,7 +63,6 @@ secp256k1 = { workspace = true, features = ["global-context", "rand-std", "recov serde = { workspace = true, features = ["derive"] } serde_json.workspace = true thiserror.workspace = true -hex = "0.4" rand.workspace = true tracing.workspace = true tracing-futures = "0.2" diff --git a/crates/rpc/rpc/src/debug.rs b/crates/rpc/rpc/src/debug.rs index 264c567365..edc7851371 100644 --- a/crates/rpc/rpc/src/debug.rs +++ b/crates/rpc/rpc/src/debug.rs @@ -10,10 +10,11 @@ use crate::{ result::{internal_rpc_err, ToRpcResult}, 
EthApiSpec, TracingCallGuard, }; +use alloy_rlp::{Decodable, Encodable}; use async_trait::async_trait; use jsonrpsee::core::RpcResult; use reth_primitives::{ - Account, Block, BlockId, BlockNumberOrTag, Bytes, TransactionSigned, H160, H256, + Account, Address, Block, BlockId, BlockNumberOrTag, Bytes, TransactionSigned, B256, }; use reth_provider::{BlockReaderIdExt, HeaderProvider, StateProviderBox}; use reth_revm::{ @@ -24,7 +25,6 @@ use reth_revm::{ FourByteInspector, TracingInspector, TracingInspectorConfig, }, }; -use reth_rlp::{Decodable, Encodable}; use reth_rpc_api::DebugApiServer; use reth_rpc_types::{ trace::geth::{ @@ -169,7 +169,7 @@ where /// Ref: pub async fn debug_trace_transaction( &self, - tx_hash: H256, + tx_hash: B256, opts: GethDebugTracingOptions, ) -> EthResult { let (transaction, block) = match self.inner.eth_api.transaction_and_block(tx_hash).await? { @@ -330,7 +330,7 @@ where }) .await?; let gas_used = res.result.gas_used(); - let return_value = result_output(&res.result).unwrap_or_default().into(); + let return_value = result_output(&res.result).unwrap_or_default(); let frame = inspector.into_geth_builder().geth_traces(gas_used, return_value, config); Ok(frame.into()) @@ -343,7 +343,7 @@ where bundles: Vec, state_context: Option, opts: Option, - ) -> EthResult> { + ) -> EthResult>> { if bundles.is_empty() { return Err(EthApiError::InvalidParams(String::from("bundles are empty."))) } @@ -378,7 +378,8 @@ where self.inner .eth_api .spawn_with_state_at_block(at.into(), move |state| { - let mut results = Vec::with_capacity(bundles.len()); + // the outer vec for the bundles + let mut all_bundles = Vec::with_capacity(bundles.len()); let mut db = SubState::new(StateProviderDatabase::new(state)); if replay_block_txs { @@ -399,7 +400,7 @@ where // Trace all bundles let mut bundles = bundles.into_iter().peekable(); while let Some(bundle) = bundles.next() { - //let mut result = Vec::with_capacity(bundle.len()); + let mut results = Vec::with_capacity(bundle.transactions.len()); let Bundle { transactions, block_override } = bundle; let block_overrides = block_override.map(Box::new); @@ -433,8 +434,10 @@ where } results.push(trace); } + + all_bundles.push(results); } - Ok(results) + Ok(all_bundles) }) .await } @@ -530,7 +533,7 @@ where let (res, _) = inspect(db, env, &mut inspector)?; let gas_used = res.result.gas_used(); - let return_value = result_output(&res.result).unwrap_or_default().into(); + let return_value = result_output(&res.result).unwrap_or_default(); let frame = inspector.into_geth_builder().geth_traces(gas_used, return_value, config); Ok((frame.into(), res.state)) @@ -727,8 +730,8 @@ where async fn debug_get_modified_accounts_by_hash( &self, - _start_hash: H256, - _end_hash: H256, + _start_hash: B256, + _end_hash: B256, ) -> RpcResult<()> { Ok(()) } @@ -747,7 +750,7 @@ where async fn debug_intermediate_roots( &self, - _block_hash: H256, + _block_hash: B256, _opts: Option, ) -> RpcResult<()> { Ok(()) @@ -761,7 +764,7 @@ where Ok(()) } - async fn debug_preimage(&self, _hash: H256) -> RpcResult<()> { + async fn debug_preimage(&self, _hash: B256) -> RpcResult<()> { Ok(()) } @@ -769,7 +772,7 @@ where Ok(()) } - async fn debug_seed_hash(&self, _number: u64) -> RpcResult { + async fn debug_seed_hash(&self, _number: u64) -> RpcResult { Ok(Default::default()) } @@ -831,10 +834,10 @@ where async fn debug_storage_range_at( &self, - _block_hash: H256, + _block_hash: B256, _tx_idx: usize, - _contract_address: H160, - _key_start: H256, + _contract_address: Address, + _key_start: 
B256, _max_result: u64, ) -> RpcResult<()> { Ok(()) @@ -842,7 +845,7 @@ where async fn debug_trace_bad_block( &self, - _block_hash: H256, + _block_hash: B256, _opts: Option, ) -> RpcResult<()> { Ok(()) @@ -886,7 +889,7 @@ where /// Handler for `debug_getRawTransaction` /// Returns the bytes of the transaction for the given hash. - async fn raw_transaction(&self, hash: H256) -> RpcResult { + async fn raw_transaction(&self, hash: B256) -> RpcResult { let tx = self.inner.eth_api.transaction_by_hash(hash).await?; let mut res = Vec::new(); @@ -940,7 +943,7 @@ where /// Handler for `debug_traceBlockByHash` async fn debug_trace_block_by_hash( &self, - block: H256, + block: B256, opts: Option, ) -> RpcResult> { let _permit = self.acquire_trace_permit().await; @@ -960,7 +963,7 @@ where /// Handler for `debug_traceTransaction` async fn debug_trace_transaction( &self, - tx_hash: H256, + tx_hash: B256, opts: Option, ) -> RpcResult { let _permit = self.acquire_trace_permit().await; @@ -984,7 +987,7 @@ where bundles: Vec, state_context: Option, opts: Option, - ) -> RpcResult> { + ) -> RpcResult>> { let _permit = self.acquire_trace_permit().await; Ok(DebugApi::debug_trace_call_many(self, bundles, state_context, opts).await?) } diff --git a/crates/rpc/rpc/src/engine.rs b/crates/rpc/rpc/src/engine.rs index 741aa8b47a..d7c53221c4 100644 --- a/crates/rpc/rpc/src/engine.rs +++ b/crates/rpc/rpc/src/engine.rs @@ -1,5 +1,5 @@ use jsonrpsee::core::RpcResult as Result; -use reth_primitives::{Address, BlockId, BlockNumberOrTag, Bytes, H256, U256, U64}; +use reth_primitives::{Address, BlockId, BlockNumberOrTag, Bytes, B256, U256, U64}; use reth_rpc_api::{EngineEthApiServer, EthApiServer, EthFilterApiServer}; /// Re-export for convenience pub use reth_rpc_engine_api::EngineApi; @@ -76,7 +76,7 @@ where } /// Handler for: `eth_getBlockByHash` - async fn block_by_hash(&self, hash: H256, full: bool) -> Result> { + async fn block_by_hash(&self, hash: B256, full: bool) -> Result> { self.eth.block_by_hash(hash, full).instrument(engine_span!()).await } @@ -90,7 +90,7 @@ where } /// Handler for: `eth_sendRawTransaction` - async fn send_raw_transaction(&self, bytes: Bytes) -> Result { + async fn send_raw_transaction(&self, bytes: Bytes) -> Result { self.eth.send_raw_transaction(bytes).instrument(engine_span!()).await } diff --git a/crates/rpc/rpc/src/eth/api/call.rs b/crates/rpc/rpc/src/eth/api/call.rs index 176fcc1daa..c5c60ac2da 100644 --- a/crates/rpc/rpc/src/eth/api/call.rs +++ b/crates/rpc/rpc/src/eth/api/call.rs @@ -11,7 +11,6 @@ use crate::{ }, EthApi, }; -use ethers_core::utils::get_contract_address; use reth_network_api::NetworkInfo; use reth_primitives::{AccessList, BlockId, BlockNumberOrTag, Bytes, U256}; use reth_provider::{ @@ -370,7 +369,7 @@ where to } else { let nonce = db.basic(from)?.unwrap_or_default().nonce; - get_contract_address(from, nonce).into() + from.create(nonce) }; // can consume the list since we're not using the request anymore diff --git a/crates/rpc/rpc/src/eth/api/mod.rs b/crates/rpc/rpc/src/eth/api/mod.rs index 262107232b..5a653c7245 100644 --- a/crates/rpc/rpc/src/eth/api/mod.rs +++ b/crates/rpc/rpc/src/eth/api/mod.rs @@ -14,7 +14,7 @@ use async_trait::async_trait; use reth_interfaces::RethResult; use reth_network_api::NetworkInfo; use reth_primitives::{ - Address, BlockId, BlockNumberOrTag, ChainInfo, SealedBlock, H256, U256, U64, + Address, BlockId, BlockNumberOrTag, ChainInfo, SealedBlock, B256, U256, U64, }; use reth_provider::{ BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, 
StateProviderBox, StateProviderFactory, @@ -221,7 +221,7 @@ where } /// Returns the state at the given block number - pub fn state_at_hash(&self, block_hash: H256) -> RethResult> { + pub fn state_at_hash(&self, block_hash: B256) -> RethResult> { self.provider().history_by_block_hash(block_hash) } diff --git a/crates/rpc/rpc/src/eth/api/pending_block.rs b/crates/rpc/rpc/src/eth/api/pending_block.rs index 1e3b1f807d..9455d29903 100644 --- a/crates/rpc/rpc/src/eth/api/pending_block.rs +++ b/crates/rpc/rpc/src/eth/api/pending_block.rs @@ -4,8 +4,8 @@ use crate::eth::error::{EthApiError, EthResult}; use core::fmt::Debug; use reth_primitives::{ constants::{eip4844::MAX_DATA_GAS_PER_BLOCK, BEACON_NONCE}, - proofs, Block, ChainSpec, Header, IntoRecoveredTransaction, Receipt, SealedBlock, SealedHeader, - EMPTY_OMMER_ROOT, H256, U256, + proofs, Block, ChainSpec, Header, IntoRecoveredTransaction, Receipt, Receipts, SealedBlock, + SealedHeader, B256, EMPTY_OMMER_ROOT, U256, }; use reth_provider::{BundleStateWithReceipts, ChainSpecProvider, StateProviderFactory}; use reth_revm::{ @@ -187,7 +187,11 @@ impl PendingBlockEnv { // merge all transitions into bundle state. db.merge_transitions(BundleRetention::PlainState); - let bundle = BundleStateWithReceipts::new(db.take_bundle(), vec![receipts], block_number); + let bundle = BundleStateWithReceipts::new( + db.take_bundle(), + Receipts::from_vec(vec![receipts]), + block_number, + ); let receipts_root = bundle.receipts_root_slow(block_number).expect("Block is present"); let logs_bloom = bundle.block_logs_bloom(block_number).expect("Block is present"); @@ -246,7 +250,7 @@ fn pre_block_beacon_root_contract_call( block_number: u64, initialized_cfg: &CfgEnv, initialized_block_env: &BlockEnv, - parent_beacon_block_root: Option, + parent_beacon_block_root: Option, ) -> EthResult<()> where DB: Database + DatabaseCommit, @@ -298,7 +302,7 @@ impl PendingBlockEnvOrigin { } /// Returns the hash of the pending block should be built on - fn build_target_hash(&self) -> H256 { + fn build_target_hash(&self) -> B256 { match self { PendingBlockEnvOrigin::ActualPending(block) => block.parent_hash, PendingBlockEnvOrigin::DerivedFromLatest(header) => header.hash, diff --git a/crates/rpc/rpc/src/eth/api/server.rs b/crates/rpc/rpc/src/eth/api/server.rs index a294cc2d15..4eb45828a7 100644 --- a/crates/rpc/rpc/src/eth/api/server.rs +++ b/crates/rpc/rpc/src/eth/api/server.rs @@ -13,7 +13,7 @@ use jsonrpsee::core::RpcResult as Result; use reth_network_api::NetworkInfo; use reth_primitives::{ serde_helper::{num::U64HexOrNumber, JsonStorageKey}, - AccessListWithGasUsed, Address, BlockId, BlockNumberOrTag, Bytes, H256, H64, U256, U64, + AccessListWithGasUsed, Address, BlockId, BlockNumberOrTag, Bytes, B256, B64, U256, U64, }; use reth_provider::{ BlockIdReader, BlockReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, @@ -82,7 +82,7 @@ where } /// Handler for: `eth_getBlockByHash` - async fn block_by_hash(&self, hash: H256, full: bool) -> Result> { + async fn block_by_hash(&self, hash: B256, full: bool) -> Result> { trace!(target: "rpc::eth", ?hash, ?full, "Serving eth_getBlockByHash"); Ok(EthApi::rpc_block(self, hash, full).await?) 
} @@ -98,7 +98,7 @@ where } /// Handler for: `eth_getBlockTransactionCountByHash` - async fn block_transaction_count_by_hash(&self, hash: H256) -> Result> { + async fn block_transaction_count_by_hash(&self, hash: B256) -> Result> { trace!(target: "rpc::eth", ?hash, "Serving eth_getBlockTransactionCountByHash"); Ok(EthApi::block_transaction_count(self, hash).await?.map(U256::from)) } @@ -113,7 +113,7 @@ where } /// Handler for: `eth_getUncleCountByBlockHash` - async fn block_uncles_count_by_hash(&self, hash: H256) -> Result> { + async fn block_uncles_count_by_hash(&self, hash: B256) -> Result> { trace!(target: "rpc::eth", ?hash, "Serving eth_getUncleCountByBlockHash"); Ok(EthApi::ommers(self, hash)?.map(|ommers| U256::from(ommers.len()))) } @@ -136,7 +136,7 @@ where /// Handler for: `eth_getUncleByBlockHashAndIndex` async fn uncle_by_block_hash_and_index( &self, - hash: H256, + hash: B256, index: Index, ) -> Result> { trace!(target: "rpc::eth", ?hash, ?index, "Serving eth_getUncleByBlockHashAndIndex"); @@ -154,7 +154,7 @@ where } /// Handler for: `eth_getTransactionByHash` - async fn transaction_by_hash(&self, hash: H256) -> Result> { + async fn transaction_by_hash(&self, hash: B256) -> Result> { trace!(target: "rpc::eth", ?hash, "Serving eth_getTransactionByHash"); Ok(EthTransactions::transaction_by_hash(self, hash).await?.map(Into::into)) } @@ -162,7 +162,7 @@ where /// Handler for: `eth_getTransactionByBlockHashAndIndex` async fn transaction_by_block_hash_and_index( &self, - hash: H256, + hash: B256, index: Index, ) -> Result> { trace!(target: "rpc::eth", ?hash, ?index, "Serving eth_getTransactionByBlockHashAndIndex"); @@ -180,7 +180,7 @@ where } /// Handler for: `eth_getTransactionReceipt` - async fn transaction_receipt(&self, hash: H256) -> Result> { + async fn transaction_receipt(&self, hash: B256) -> Result> { trace!(target: "rpc::eth", ?hash, "Serving eth_getTransactionReceipt"); Ok(EthTransactions::transaction_receipt(self, hash).await?) } @@ -197,7 +197,7 @@ where address: Address, index: JsonStorageKey, block_number: Option, - ) -> Result { + ) -> Result { trace!(target: "rpc::eth", ?address, ?block_number, "Serving eth_getStorageAt"); Ok(self .on_blocking_task(|this| async move { this.storage_at(address, index, block_number) }) @@ -308,8 +308,9 @@ where reward_percentiles: Option>, ) -> Result { trace!(target: "rpc::eth", ?block_count, ?newest_block, ?reward_percentiles, "Serving eth_feeHistory"); - return Ok(EthApi::fee_history(self, block_count.as_u64(), newest_block, reward_percentiles) - .await?) + return Ok( + EthApi::fee_history(self, block_count.to(), newest_block, reward_percentiles).await? + ) } /// Handler for: `eth_mining` @@ -328,23 +329,23 @@ where } /// Handler for: `eth_submitHashrate` - async fn submit_hashrate(&self, _hashrate: U256, _id: H256) -> Result { + async fn submit_hashrate(&self, _hashrate: U256, _id: B256) -> Result { Ok(false) } /// Handler for: `eth_submitWork` - async fn submit_work(&self, _nonce: H64, _pow_hash: H256, _mix_digest: H256) -> Result { + async fn submit_work(&self, _nonce: B64, _pow_hash: B256, _mix_digest: B256) -> Result { Err(internal_rpc_err("unimplemented")) } /// Handler for: `eth_sendTransaction` - async fn send_transaction(&self, request: TransactionRequest) -> Result { + async fn send_transaction(&self, request: TransactionRequest) -> Result { trace!(target: "rpc::eth", ?request, "Serving eth_sendTransaction"); Ok(EthTransactions::send_transaction(self, request).await?) 
} /// Handler for: `eth_sendRawTransaction` - async fn send_raw_transaction(&self, tx: Bytes) -> Result { + async fn send_raw_transaction(&self, tx: Bytes) -> Result { trace!(target: "rpc::eth", ?tx, "Serving eth_sendRawTransaction"); Ok(EthTransactions::send_raw_transaction(self, tx).await?) } @@ -398,7 +399,7 @@ mod tests { use reth_network_api::noop::NoopNetwork; use reth_primitives::{ basefee::calculate_next_block_base_fee, constants::ETHEREUM_BLOCK_GAS_LIMIT, BaseFeeParams, - Block, BlockNumberOrTag, Header, TransactionSigned, H256, U256, + Block, BlockNumberOrTag, Header, TransactionSigned, U256, }; use reth_provider::{ test_utils::{MockEthProvider, NoopProvider}, @@ -448,7 +449,7 @@ mod tests { let mut last_header = None; for i in (0..block_count).rev() { - let hash = H256::random(); + let hash = rng.gen(); let gas_limit: u64 = rng.gen(); let gas_used: u64 = rng.gen(); // Note: Generates a u32 to avoid overflows later diff --git a/crates/rpc/rpc/src/eth/api/sign.rs b/crates/rpc/rpc/src/eth/api/sign.rs index c12d7b3011..3d8570efd3 100644 --- a/crates/rpc/rpc/src/eth/api/sign.rs +++ b/crates/rpc/rpc/src/eth/api/sign.rs @@ -6,7 +6,7 @@ use crate::{ }, EthApi, }; -use ethers_core::types::transaction::eip712::TypedData; +use alloy_dyn_abi::TypedData; use reth_primitives::{Address, Bytes}; use serde_json::Value; use std::ops::Deref; @@ -15,8 +15,7 @@ impl EthApi { pub(crate) async fn sign(&self, account: Address, message: Bytes) -> EthResult { let signer = self.find_signer(&account)?; let signature = signer.sign(account, &message).await?; - let bytes = hex::encode(signature.to_bytes()).as_bytes().into(); - Ok(bytes) + Ok(signature.to_hex_bytes()) } pub(crate) async fn sign_typed_data(&self, data: Value, account: Address) -> EthResult { @@ -24,8 +23,7 @@ impl EthApi { let data = serde_json::from_value::(data).map_err(|_| SignError::InvalidTypedData)?; let signature = signer.sign_typed_data(account, &data)?; - let bytes = hex::encode(signature.to_bytes()).as_bytes().into(); - Ok(bytes) + Ok(signature.to_hex_bytes()) } pub(crate) fn find_signer( diff --git a/crates/rpc/rpc/src/eth/api/state.rs b/crates/rpc/rpc/src/eth/api/state.rs index 4d4b29060e..2205c64b3a 100644 --- a/crates/rpc/rpc/src/eth/api/state.rs +++ b/crates/rpc/rpc/src/eth/api/state.rs @@ -5,8 +5,8 @@ use crate::{ EthApi, }; use reth_primitives::{ - serde_helper::JsonStorageKey, Address, BlockId, BlockNumberOrTag, Bytes, H256, KECCAK_EMPTY, - U256, + serde_helper::JsonStorageKey, Address, BlockId, BlockNumberOrTag, Bytes, B256, KECCAK_EMPTY, + U256, U64, }; use reth_provider::{ AccountReader, BlockReaderIdExt, ChainSpecProvider, EvmEnvProvider, StateProvider, @@ -25,7 +25,7 @@ where pub(crate) fn get_code(&self, address: Address, block_id: Option) -> EthResult { let state = self.state_at_block_id_or_latest(block_id)?; let code = state.account_code(address)?.unwrap_or_default(); - Ok(code.original_bytes().into()) + Ok(code.original_bytes()) } pub(crate) fn balance(&self, address: Address, block_id: Option) -> EthResult { @@ -78,10 +78,10 @@ where address: Address, index: JsonStorageKey, block_id: Option, - ) -> EthResult { + ) -> EthResult { let state = self.state_at_block_id_or_latest(block_id)?; let value = state.storage(address, index.0)?.unwrap_or_default(); - Ok(H256(value.to_be_bytes())) + Ok(B256::new(value.to_be_bytes())) } #[allow(unused)] @@ -137,7 +137,7 @@ where if let Some(account) = state.basic_account(proof.address)? 
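The `storage_at` change above is worth spelling out: the state provider hands the slot back as a `U256`, while `eth_getStorageAt` returns the raw 32-byte word, so the value is re-encoded big-endian into a `B256`. A sketch of just that conversion (same call as in the diff, nothing new assumed):

```rust
use reth_primitives::{B256, U256};

/// Sketch: serialize a storage slot the way `eth_getStorageAt` returns it,
/// i.e. as the big-endian 32-byte word of the underlying U256 value.
fn storage_word(value: U256) -> B256 {
    B256::new(value.to_be_bytes())
}
```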
{ proof.balance = account.balance; - proof.nonce = account.nonce.into(); + proof.nonce = U64::from(account.nonce); proof.code_hash = account.get_bytecode_hash(); } @@ -174,7 +174,7 @@ mod tests { ); let address = Address::random(); let storage = eth_api.storage_at(address, U256::ZERO.into(), None).unwrap(); - assert_eq!(storage, U256::ZERO.into()); + assert_eq!(storage, U256::ZERO.to_be_bytes()); // === Mock === let mock_provider = MockEthProvider::default(); @@ -197,6 +197,6 @@ mod tests { let storage_key: U256 = storage_key.into(); let storage = eth_api.storage_at(address, storage_key.into(), None).unwrap(); - assert_eq!(storage, storage_value.into()); + assert_eq!(storage, storage_value.to_be_bytes()); } } diff --git a/crates/rpc/rpc/src/eth/api/transactions.rs b/crates/rpc/rpc/src/eth/api/transactions.rs index 8f21f5ac92..338871d428 100644 --- a/crates/rpc/rpc/src/eth/api/transactions.rs +++ b/crates/rpc/rpc/src/eth/api/transactions.rs @@ -18,7 +18,7 @@ use reth_primitives::{ Address, BlockId, BlockNumberOrTag, Bytes, FromRecoveredPooledTransaction, Header, IntoRecoveredTransaction, Receipt, SealedBlock, TransactionKind::{Call, Create}, - TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, H256, U128, U256, U64, + TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, B256, U128, U256, U64, }; use reth_rpc_types_compat::from_recovered_with_block_context; @@ -82,7 +82,7 @@ pub trait EthTransactions: Send + Sync { /// Get all transactions in the block with the given hash. /// /// Returns `None` if block does not exist. - async fn transactions_by_block(&self, block: H256) + async fn transactions_by_block(&self, block: B256) -> EthResult>>; /// Get the entire block for the given id. @@ -103,36 +103,36 @@ pub trait EthTransactions: Send + Sync { /// Checks the pool and state. /// /// Returns `Ok(None)` if no matching transaction was found. - async fn transaction_by_hash(&self, hash: H256) -> EthResult>; + async fn transaction_by_hash(&self, hash: B256) -> EthResult>; /// Returns the transaction by including its corresponding [BlockId] /// /// Note: this supports pending transactions async fn transaction_by_hash_at( &self, - hash: H256, + hash: B256, ) -> EthResult>; /// Returns the _historical_ transaction and the block it was mined in async fn historical_transaction_by_hash_at( &self, - hash: H256, - ) -> EthResult>; + hash: B256, + ) -> EthResult>; /// Returns the transaction receipt for the given hash. /// /// Returns None if the transaction does not exist or is pending /// Note: The tx receipt is not available for pending transactions. - async fn transaction_receipt(&self, hash: H256) -> EthResult>; + async fn transaction_receipt(&self, hash: B256) -> EthResult>; /// Decodes and recovers the transaction and submits it to the pool. /// /// Returns the hash of the transaction. - async fn send_raw_transaction(&self, tx: Bytes) -> EthResult; + async fn send_raw_transaction(&self, tx: Bytes) -> EthResult; /// Signs transaction with a matching signer, if any and submits the transaction to the pool. /// Returns the hash of the signed transaction. - async fn send_transaction(&self, request: TransactionRequest) -> EthResult; + async fn send_transaction(&self, request: TransactionRequest) -> EthResult; /// Prepares the state and env for the given [CallRequest] at the given [BlockId] and executes /// the closure on a new task returning the result of the closure. 
@@ -207,7 +207,7 @@ pub trait EthTransactions: Send + Sync { /// Fetches the transaction and the transaction's block async fn transaction_and_block( &self, - hash: H256, + hash: B256, ) -> EthResult>; /// Retrieves the transaction if it exists and returns its trace. @@ -221,7 +221,7 @@ pub trait EthTransactions: Send + Sync { /// [TracingCallPool](crate::tracing_call::TracingCallPool). async fn spawn_trace_transaction_in_block( &self, - hash: H256, + hash: B256, config: TracingInspectorConfig, f: F, ) -> EthResult> @@ -304,7 +304,7 @@ where async fn transactions_by_block( &self, - block: H256, + block: B256, ) -> EthResult>> { Ok(self.cache().get_block_transactions(block).await?) } @@ -320,7 +320,7 @@ where self.block_by_id(block).await.map(|block| block.map(|block| block.body)) } - async fn transaction_by_hash(&self, hash: H256) -> EthResult> { + async fn transaction_by_hash(&self, hash: B256) -> EthResult> { // Try to find the transaction on disk let mut resp = self .on_blocking_task(|this| async move { @@ -358,7 +358,7 @@ where async fn transaction_by_hash_at( &self, - transaction_hash: H256, + transaction_hash: B256, ) -> EthResult> { match self.transaction_by_hash(transaction_hash).await? { None => return Ok(None), @@ -392,15 +392,15 @@ where async fn historical_transaction_by_hash_at( &self, - hash: H256, - ) -> EthResult> { + hash: B256, + ) -> EthResult> { match self.transaction_by_hash_at(hash).await? { None => Ok(None), Some((tx, at)) => Ok(at.as_block_hash().map(|hash| (tx, hash))), } } - async fn transaction_receipt(&self, hash: H256) -> EthResult> { + async fn transaction_receipt(&self, hash: B256) -> EthResult> { let result = self .on_blocking_task(|this| async move { let (tx, meta) = match this.provider().transaction_by_hash_with_meta(hash)? { @@ -425,7 +425,7 @@ where self.build_transaction_receipt(tx, meta, receipt).await.map(Some) } - async fn send_raw_transaction(&self, tx: Bytes) -> EthResult { + async fn send_raw_transaction(&self, tx: Bytes) -> EthResult { let recovered = recover_raw_transaction(tx)?; let pool_transaction = ::from_recovered_transaction(recovered); @@ -436,7 +436,7 @@ where Ok(hash) } - async fn send_transaction(&self, mut request: TransactionRequest) -> EthResult { + async fn send_transaction(&self, mut request: TransactionRequest) -> EthResult { let from = match request.from { Some(from) => from, None => return Err(SignError::NoAccount.into()), @@ -480,21 +480,21 @@ where let transaction = match request.into_typed_request() { Some(TypedTransactionRequest::Legacy(mut m)) => { - m.chain_id = Some(chain_id.as_u64()); + m.chain_id = Some(chain_id.to()); m.gas_limit = gas_limit; m.gas_price = gas_price; TypedTransactionRequest::Legacy(m) } Some(TypedTransactionRequest::EIP2930(mut m)) => { - m.chain_id = chain_id.as_u64(); + m.chain_id = chain_id.to(); m.gas_limit = gas_limit; m.gas_price = gas_price; TypedTransactionRequest::EIP2930(m) } Some(TypedTransactionRequest::EIP1559(mut m)) => { - m.chain_id = chain_id.as_u64(); + m.chain_id = chain_id.to(); m.gas_limit = gas_limit; m.max_fee_per_gas = max_fee_per_gas; @@ -618,7 +618,7 @@ where async fn transaction_and_block( &self, - hash: H256, + hash: B256, ) -> EthResult> { let (transaction, at) = match self.transaction_by_hash_at(hash).await? 
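The `chain_id.as_u64()` to `chain_id.to()` swaps above come from the move to ruint-backed integers, where narrowing goes through one generic conversion that panics if the value does not fit. A minimal sketch (only assumes `U64` is the ruint alias used throughout this diff):

```rust
use reth_primitives::U64;

/// Sketch: `Uint::to` replaces the old `as_u64()`-style helpers; for U64 -> u64
/// the conversion can never fail.
fn chain_id_as_u64(chain_id: U64) -> u64 {
    chain_id.to()
}
```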
{ None => return Ok(None), @@ -636,7 +636,7 @@ where async fn spawn_trace_transaction_in_block( &self, - hash: H256, + hash: B256, config: TracingInspectorConfig, f: F, ) -> EthResult> @@ -773,7 +773,7 @@ pub enum TransactionSource { /// Index of the transaction in the block index: u64, /// Hash of the block. - block_hash: H256, + block_hash: B256, /// Number of the block. block_number: u64, /// base fee of the block. @@ -871,7 +871,7 @@ pub(crate) fn build_transaction_receipt_with_block_receipts( let mut res_receipt = TransactionReceipt { transaction_hash: Some(meta.tx_hash), - transaction_index: meta.index.into(), + transaction_index: U64::from(meta.index), block_hash: Some(meta.block_hash), block_number: Some(U256::from(meta.block_number)), from: transaction.signer(), diff --git a/crates/rpc/rpc/src/eth/cache/mod.rs b/crates/rpc/rpc/src/eth/cache/mod.rs index e5fa0d73bb..f5c061b714 100644 --- a/crates/rpc/rpc/src/eth/cache/mod.rs +++ b/crates/rpc/rpc/src/eth/cache/mod.rs @@ -2,7 +2,7 @@ use futures::{future::Either, Stream, StreamExt}; use reth_interfaces::{provider::ProviderError, RethResult}; -use reth_primitives::{Block, Receipt, SealedBlock, TransactionSigned, H256}; +use reth_primitives::{Block, Receipt, SealedBlock, TransactionSigned, B256}; use reth_provider::{ BlockReader, BlockSource, CanonStateNotification, EvmEnvProvider, StateProviderFactory, }; @@ -42,15 +42,15 @@ type ReceiptsResponseSender = oneshot::Sender>>>; type EnvResponseSender = oneshot::Sender>; type BlockLruCache = MultiConsumerLruCache< - H256, + B256, Block, L, Either, >; -type ReceiptsLruCache = MultiConsumerLruCache, L, ReceiptsResponseSender>; +type ReceiptsLruCache = MultiConsumerLruCache, L, ReceiptsResponseSender>; -type EnvLruCache = MultiConsumerLruCache; +type EnvLruCache = MultiConsumerLruCache; /// Provides async access to cached eth data /// @@ -127,7 +127,7 @@ impl EthStateCache { /// Requests the [Block] for the block hash /// /// Returns `None` if the block does not exist. - pub(crate) async fn get_block(&self, block_hash: H256) -> RethResult> { + pub(crate) async fn get_block(&self, block_hash: B256) -> RethResult> { let (response_tx, rx) = oneshot::channel(); let _ = self.to_service.send(CacheAction::GetBlock { block_hash, response_tx }); rx.await.map_err(|_| ProviderError::CacheServiceUnavailable)? @@ -138,7 +138,7 @@ impl EthStateCache { /// Returns `None` if the block does not exist. pub(crate) async fn get_sealed_block( &self, - block_hash: H256, + block_hash: B256, ) -> RethResult> { Ok(self.get_block(block_hash).await?.map(|block| block.seal(block_hash))) } @@ -148,7 +148,7 @@ impl EthStateCache { /// Returns `None` if the block does not exist. pub(crate) async fn get_block_transactions( &self, - block_hash: H256, + block_hash: B256, ) -> RethResult>> { let (response_tx, rx) = oneshot::channel(); let _ = self.to_service.send(CacheAction::GetBlockTransactions { block_hash, response_tx }); @@ -158,7 +158,7 @@ impl EthStateCache { /// Fetches both transactions and receipts for the given block hash. pub(crate) async fn get_transactions_and_receipts( &self, - block_hash: H256, + block_hash: B256, ) -> RethResult, Vec)>> { let transactions = self.get_block_transactions(block_hash); let receipts = self.get_receipts(block_hash); @@ -171,7 +171,7 @@ impl EthStateCache { /// Requests the [Receipt] for the block hash /// /// Returns `None` if the block was not found. 
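All of the cache accessors above share the same actor-style request/response shape: create a oneshot channel, send the action plus the sender half to the service task, and await the receiver. A stripped-down sketch of the pattern (the `Action` and `get` names are illustrative, not the real `CacheAction` API):

```rust
use tokio::sync::{mpsc, oneshot};

/// Illustrative message type: every request carries a oneshot sender for its reply.
enum Action {
    Get { key: u64, response_tx: oneshot::Sender<Option<String>> },
}

/// Sketch of the accessor shape used by `EthStateCache::get_block` and friends:
/// fire the request at the service task, then await the oneshot reply.
async fn get(to_service: &mpsc::UnboundedSender<Action>, key: u64) -> Option<String> {
    let (response_tx, rx) = oneshot::channel();
    let _ = to_service.send(Action::Get { key, response_tx });
    rx.await.ok().flatten()
}
```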
- pub(crate) async fn get_receipts(&self, block_hash: H256) -> RethResult>> { + pub(crate) async fn get_receipts(&self, block_hash: B256) -> RethResult>> { let (response_tx, rx) = oneshot::channel(); let _ = self.to_service.send(CacheAction::GetReceipts { block_hash, response_tx }); rx.await.map_err(|_| ProviderError::CacheServiceUnavailable)? @@ -180,7 +180,7 @@ impl EthStateCache { /// Fetches both receipts and block for the given block hash. pub(crate) async fn get_block_and_receipts( &self, - block_hash: H256, + block_hash: B256, ) -> RethResult)>> { let block = self.get_sealed_block(block_hash); let receipts = self.get_receipts(block_hash); @@ -194,7 +194,7 @@ impl EthStateCache { /// /// Returns an error if the corresponding header (required for populating the envs) was not /// found. - pub(crate) async fn get_evm_env(&self, block_hash: H256) -> RethResult<(CfgEnv, BlockEnv)> { + pub(crate) async fn get_evm_env(&self, block_hash: B256) -> RethResult<(CfgEnv, BlockEnv)> { let (response_tx, rx) = oneshot::channel(); let _ = self.to_service.send(CacheAction::GetEnv { block_hash, response_tx }); rx.await.map_err(|_| ProviderError::CacheServiceUnavailable)? @@ -224,9 +224,9 @@ pub(crate) struct EthStateCacheService< LimitReceipts = ByLength, LimitEnvs = ByLength, > where - LimitBlocks: Limiter, - LimitReceipts: Limiter>, - LimitEnvs: Limiter, + LimitBlocks: Limiter, + LimitReceipts: Limiter>, + LimitEnvs: Limiter, { /// The type used to lookup data from disk provider: Provider, @@ -251,7 +251,7 @@ where Provider: StateProviderFactory + BlockReader + EvmEnvProvider + Clone + Unpin + 'static, Tasks: TaskSpawner + Clone + 'static, { - fn on_new_block(&mut self, block_hash: H256, res: RethResult>) { + fn on_new_block(&mut self, block_hash: B256, res: RethResult>) { if let Some(queued) = self.full_block_cache.remove(&block_hash) { // send the response to queued senders for tx in queued { @@ -274,7 +274,7 @@ where } } - fn on_new_receipts(&mut self, block_hash: H256, res: RethResult>>) { + fn on_new_receipts(&mut self, block_hash: B256, res: RethResult>>) { if let Some(queued) = self.receipts_cache.remove(&block_hash) { // send the response to queued senders for tx in queued { @@ -453,18 +453,18 @@ where /// All message variants sent through the channel enum CacheAction { - GetBlock { block_hash: H256, response_tx: BlockResponseSender }, - GetBlockTransactions { block_hash: H256, response_tx: BlockTransactionsResponseSender }, - GetEnv { block_hash: H256, response_tx: EnvResponseSender }, - GetReceipts { block_hash: H256, response_tx: ReceiptsResponseSender }, - BlockResult { block_hash: H256, res: RethResult> }, - ReceiptsResult { block_hash: H256, res: RethResult>> }, - EnvResult { block_hash: H256, res: Box> }, + GetBlock { block_hash: B256, response_tx: BlockResponseSender }, + GetBlockTransactions { block_hash: B256, response_tx: BlockTransactionsResponseSender }, + GetEnv { block_hash: B256, response_tx: EnvResponseSender }, + GetReceipts { block_hash: B256, response_tx: ReceiptsResponseSender }, + BlockResult { block_hash: B256, res: RethResult> }, + ReceiptsResult { block_hash: B256, res: RethResult>> }, + EnvResult { block_hash: B256, res: Box> }, CacheNewCanonicalChain { blocks: Vec, receipts: Vec }, } struct BlockReceipts { - block_hash: H256, + block_hash: B256, receipts: Vec>, } diff --git a/crates/rpc/rpc/src/eth/error.rs b/crates/rpc/rpc/src/eth/error.rs index 92d0b95256..e5ae757482 100644 --- a/crates/rpc/rpc/src/eth/error.rs +++ b/crates/rpc/rpc/src/eth/error.rs @@ -1,12 +1,13 
@@ //! Implementation specific Errors for the `eth_` namespace. use crate::result::{internal_rpc_err, invalid_params_rpc_err, rpc_err, rpc_error_with_code}; +use alloy_sol_types::decode_revert_reason; use jsonrpsee::{ core::Error as RpcError, types::{error::CALL_EXECUTION_FAILED_CODE, ErrorObject}, }; use reth_interfaces::RethError; -use reth_primitives::{abi::decode_revert_reason, Address, Bytes, U256}; +use reth_primitives::{Address, Bytes, U256}; use reth_revm::tracing::js::JsInspectorError; use reth_rpc_types::{error::EthRpcErrorCode, BlockError, CallInputError}; use reth_transaction_pool::error::{ @@ -467,7 +468,7 @@ pub struct RevertError { /// The transaction output data /// /// Note: this is `None` if output was empty - output: Option, + output: Option, } // === impl RevertError == @@ -476,7 +477,7 @@ impl RevertError { /// Wraps the output bytes /// /// Note: this is intended to wrap an revm output - pub fn new(output: bytes::Bytes) -> Self { + pub fn new(output: Bytes) -> Self { if output.is_empty() { Self { output: None } } else { @@ -492,7 +493,7 @@ impl RevertError { impl std::fmt::Display for RevertError { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.write_str("execution reverted")?; - if let Some(reason) = self.output.as_ref().and_then(decode_revert_reason) { + if let Some(reason) = self.output.as_ref().and_then(|bytes| decode_revert_reason(bytes)) { write!(f, ": {reason}")?; } Ok(()) @@ -616,7 +617,7 @@ pub enum SignError { /// [ExecutionResult::Success]. pub(crate) fn ensure_success(result: ExecutionResult) -> EthResult { match result { - ExecutionResult::Success { output, .. } => Ok(output.into_data().into()), + ExecutionResult::Success { output, .. } => Ok(output.into_data()), ExecutionResult::Revert { output, .. } => { Err(RpcInvalidTransactionError::Revert(RevertError::new(output)).into()) } diff --git a/crates/rpc/rpc/src/eth/gas_oracle.rs b/crates/rpc/rpc/src/eth/gas_oracle.rs index 2448ad9a2e..ba504b6a1e 100644 --- a/crates/rpc/rpc/src/eth/gas_oracle.rs +++ b/crates/rpc/rpc/src/eth/gas_oracle.rs @@ -4,7 +4,7 @@ use crate::eth::{ cache::EthStateCache, error::{EthApiError, EthResult, RpcInvalidTransactionError}, }; -use reth_primitives::{constants::GWEI_TO_WEI, BlockNumberOrTag, H256, U256}; +use reth_primitives::{constants::GWEI_TO_WEI, BlockNumberOrTag, B256, U256}; use reth_provider::BlockReaderIdExt; use serde::{Deserialize, Serialize}; use tokio::sync::Mutex; @@ -197,9 +197,9 @@ where /// This method also returns the parent hash for the given block. async fn get_block_values( &self, - block_hash: H256, + block_hash: B256, limit: usize, - ) -> EthResult)>> { + ) -> EthResult)>> { // check the cache (this will hit the disk if the block is not cached) let block = match self.cache.get_block(block_hash).await? 
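For context on the `decode_revert_reason` call above: a standard Solidity revert returns the 4-byte `Error(string)` selector followed by the ABI-encoded reason string, and that is what gets turned back into the `execution reverted: ...` message. A hand-rolled payload as a worked example (the literal bytes are mine, not from the diff):

```rust
use alloy_sol_types::decode_revert_reason;

fn main() {
    // Build `Error(string)` with the reason "nop": selector 0x08c379a0, then the
    // ABI-encoded string (32-byte offset, 32-byte length, padded data).
    let mut payload = vec![0x08, 0xc3, 0x79, 0xa0];
    let mut offset = [0u8; 32];
    offset[31] = 0x20;
    let mut len = [0u8; 32];
    len[31] = 3;
    let mut data = [0u8; 32];
    data[..3].copy_from_slice(b"nop");
    payload.extend_from_slice(&offset);
    payload.extend_from_slice(&len);
    payload.extend_from_slice(&data);

    // The decoded reason is what feeds the Display impl above.
    if let Some(reason) = decode_revert_reason(&payload) {
        println!("execution reverted: {reason}");
    }
}
```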
{ Some(block) => block, @@ -253,14 +253,14 @@ where #[derive(Debug, Clone)] pub struct GasPriceOracleResult { /// The block hash that the oracle used to calculate the price - pub block_hash: H256, + pub block_hash: B256, /// The price that the oracle calculated pub price: U256, } impl Default for GasPriceOracleResult { fn default() -> Self { - Self { block_hash: H256::zero(), price: U256::from(GWEI_TO_WEI) } + Self { block_hash: B256::ZERO, price: U256::from(GWEI_TO_WEI) } } } diff --git a/crates/rpc/rpc/src/eth/revm_utils.rs b/crates/rpc/rpc/src/eth/revm_utils.rs index 3420a0bcf5..62aedc227c 100644 --- a/crates/rpc/rpc/src/eth/revm_utils.rs +++ b/crates/rpc/rpc/src/eth/revm_utils.rs @@ -2,7 +2,7 @@ use crate::eth::error::{EthApiError, EthResult, RpcInvalidTransactionError}; use reth_primitives::{ - AccessList, Address, TransactionSigned, TransactionSignedEcRecovered, TxHash, H256, U256, + AccessList, Address, Bytes, TransactionSigned, TransactionSignedEcRecovered, TxHash, B256, U256, }; use reth_revm::env::{fill_tx_env, fill_tx_env_with_recovered}; use reth_rpc_types::{ @@ -155,7 +155,7 @@ pub(crate) fn replay_transactions_until( cfg: CfgEnv, block_env: BlockEnv, transactions: I, - target_tx_hash: H256, + target_tx_hash: B256, ) -> EthResult<()> where DB: DatabaseRef, @@ -307,8 +307,8 @@ pub(crate) fn create_txn_env(block_env: &BlockEnv, request: CallRequest) -> EthR gas_priority_fee: max_priority_fee_per_gas, transact_to: to.map(TransactTo::Call).unwrap_or_else(TransactTo::create), value: value.unwrap_or_default(), - data: input.try_into_unique_input()?.map(|data| data.0).unwrap_or_default(), - chain_id: chain_id.map(|c| c.as_u64()), + data: input.try_into_unique_input()?.unwrap_or_default(), + chain_id: chain_id.map(|c| c.to()), access_list: access_list.map(AccessList::flattened).unwrap_or_default(), // EIP-4844 fields blob_hashes: blob_versioned_hashes.unwrap_or_default(), @@ -394,7 +394,7 @@ impl CallFees { call_max_fee: Option, call_priority_fee: Option, block_base_fee: U256, - blob_versioned_hashes: Option<&[H256]>, + blob_versioned_hashes: Option<&[B256]>, max_fee_per_blob_gas: Option, block_blob_fee: Option, ) -> EthResult { @@ -483,10 +483,10 @@ fn apply_block_overrides(overrides: BlockOverrides, env: &mut BlockEnv) { env.difficulty = difficulty; } if let Some(time) = time { - env.timestamp = U256::from(time.as_u64()); + env.timestamp = U256::from(time); } if let Some(gas_limit) = gas_limit { - env.gas_limit = U256::from(gas_limit.as_u64()); + env.gas_limit = U256::from(gas_limit); } if let Some(coinbase) = coinbase { env.coinbase = coinbase; @@ -526,10 +526,10 @@ where let mut account_info = DatabaseRef::basic(db, account)?.unwrap_or_default(); if let Some(nonce) = account_override.nonce { - account_info.nonce = nonce.as_u64(); + account_info.nonce = nonce.to(); } if let Some(code) = account_override.code { - account_info.code = Some(Bytecode::new_raw(code.0)); + account_info.code = Some(Bytecode::new_raw(code)); } if let Some(balance) = account_override.balance { account_info.balance = balance; @@ -584,7 +584,7 @@ where /// /// TODO: Can be phased out when is released #[inline] -pub(crate) fn result_output(res: &ExecutionResult) -> Option { +pub(crate) fn result_output(res: &ExecutionResult) -> Option { match res { ExecutionResult::Success { output, .. } => Some(output.clone().into_data()), ExecutionResult::Revert { output, .. 
} => Some(output.clone()), @@ -617,7 +617,7 @@ mod tests { None, None, U256::from(99), - Some(&[H256::from(U256::ZERO)]), + Some(&[B256::from(U256::ZERO)]), None, Some(U256::from(99)), ) diff --git a/crates/rpc/rpc/src/eth/signer.rs b/crates/rpc/rpc/src/eth/signer.rs index bf7fe6b335..a271c9ca52 100644 --- a/crates/rpc/rpc/src/eth/signer.rs +++ b/crates/rpc/rpc/src/eth/signer.rs @@ -1,11 +1,10 @@ //! An abstraction over ethereum signers. use crate::eth::error::SignError; -use ethers_core::{ - types::transaction::eip712::{Eip712, TypedData}, - utils::hash_message, +use alloy_dyn_abi::TypedData; +use reth_primitives::{ + eip191_hash_message, sign_message, Address, Signature, TransactionSigned, B256, }; -use reth_primitives::{sign_message, Address, Signature, TransactionSigned, H256}; use reth_rpc_types::TypedTransactionRequest; use secp256k1::SecretKey; @@ -49,9 +48,9 @@ impl DevSigner { self.accounts.get(&account).ok_or(SignError::NoAccount) } - fn sign_hash(&self, hash: H256, account: Address) -> Result { + fn sign_hash(&self, hash: B256, account: Address) -> Result { let secret = self.get_key(account)?; - let signature = sign_message(H256::from_slice(secret.as_ref()), hash); + let signature = sign_message(B256::from_slice(secret.as_ref()), hash); signature.map_err(|_| SignError::CouldNotSign) } } @@ -69,7 +68,7 @@ impl EthSigner for DevSigner { async fn sign(&self, address: Address, message: &[u8]) -> Result { // Hash message according to EIP 191: // https://ethereum.org/es/developers/docs/apis/json-rpc/#eth_sign - let hash = hash_message(message).into(); + let hash = eip191_hash_message(message); self.sign_hash(hash, address) } @@ -87,11 +86,12 @@ impl EthSigner for DevSigner { } fn sign_typed_data(&self, address: Address, payload: &TypedData) -> Result { - let encoded: H256 = - payload.encode_eip712().map_err(|_| SignError::InvalidTypedData)?.into(); + let encoded = payload.eip712_signing_hash().map_err(|_| SignError::InvalidTypedData)?; + // let b256 = encoded; self.sign_hash(encoded, address) } } + #[cfg(test)] mod test { use super::*; diff --git a/crates/rpc/rpc/src/layers/jwt_secret.rs b/crates/rpc/rpc/src/layers/jwt_secret.rs index 9be36c34cb..f0c0df0bae 100644 --- a/crates/rpc/rpc/src/layers/jwt_secret.rs +++ b/crates/rpc/rpc/src/layers/jwt_secret.rs @@ -1,7 +1,10 @@ -use hex::encode as hex_encode; use jsonwebtoken::{decode, errors::ErrorKind, Algorithm, DecodingKey, Validation}; use rand::Rng; -use reth_primitives::{fs, fs::FsPathError}; +use reth_primitives::{ + fs, + fs::FsPathError, + hex::{self, encode as hex_encode}, +}; use serde::{Deserialize, Serialize}; use std::{ path::Path, @@ -195,10 +198,9 @@ impl Claims { #[cfg(test)] mod tests { - use super::{Claims, JwtError, JwtSecret}; + use super::*; use crate::layers::jwt_secret::JWT_MAX_IAT_DIFF; use assert_matches::assert_matches; - use hex::encode as hex_encode; use jsonwebtoken::{encode, Algorithm, EncodingKey, Header}; use reth_primitives::fs::FsPathError; use std::{ diff --git a/crates/rpc/rpc/src/lib.rs b/crates/rpc/rpc/src/lib.rs index d330d31545..1c75b0f305 100644 --- a/crates/rpc/rpc/src/lib.rs +++ b/crates/rpc/rpc/src/lib.rs @@ -20,7 +20,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, 
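The `hash_message` to `eip191_hash_message` swap above keeps the same EIP-191 personal-message scheme, just without going through ethers. As a reminder of what that hash is, a sketch under the assumption that it follows the standard prefix (the helper name here is mine):

```rust
use reth_primitives::{keccak256, B256};

/// Sketch of the EIP-191 "personal message" hash: keccak256 over the ASCII prefix,
/// the decimal message length, and the message bytes themselves.
fn eip191_hash(message: &[u8]) -> B256 {
    let mut input = format!("\x19Ethereum Signed Message:\n{}", message.len()).into_bytes();
    input.extend_from_slice(message);
    keccak256(input)
}
```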
missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/rpc/rpc/src/net.rs b/crates/rpc/rpc/src/net.rs index 6f6ec7d38b..9c3582a584 100644 --- a/crates/rpc/rpc/src/net.rs +++ b/crates/rpc/rpc/src/net.rs @@ -1,6 +1,7 @@ use crate::eth::EthApiSpec; use jsonrpsee::core::RpcResult as Result; use reth_network_api::PeersInfo; +use reth_primitives::U64; use reth_rpc_api::NetApiServer; use reth_rpc_types::PeerCount; @@ -36,7 +37,7 @@ where /// Handler for `net_peerCount` fn peer_count(&self) -> Result { - Ok(PeerCount::Hex(self.network.num_connected_peers().into())) + Ok(PeerCount::Hex(U64::from(self.network.num_connected_peers()))) } /// Handler for `net_listening` diff --git a/crates/rpc/rpc/src/otterscan.rs b/crates/rpc/rpc/src/otterscan.rs index 7afcf2d545..91650985af 100644 --- a/crates/rpc/rpc/src/otterscan.rs +++ b/crates/rpc/rpc/src/otterscan.rs @@ -2,7 +2,7 @@ use crate::result::internal_rpc_err; use async_trait::async_trait; use jsonrpsee::core::RpcResult; -use reth_primitives::{Address, BlockId, BlockNumberOrTag, TxHash, H256}; +use reth_primitives::{Address, BlockId, BlockNumberOrTag, TxHash, B256}; use reth_rpc_api::{EthApiServer, OtterscanServer}; use reth_rpc_types::{ BlockDetails, ContractCreator, InternalOperation, OtsBlockTransactions, TraceEntry, @@ -64,7 +64,7 @@ where } /// Handler for `getBlockDetailsByHash` - async fn get_block_details_by_hash(&self, block_hash: H256) -> RpcResult> { + async fn get_block_details_by_hash(&self, block_hash: B256) -> RpcResult> { let block = self.eth.block_by_hash(block_hash, true).await?; Ok(block.map(Into::into)) } diff --git a/crates/rpc/rpc/src/result.rs b/crates/rpc/rpc/src/result.rs index 27d9c805f5..d187590115 100644 --- a/crates/rpc/rpc/src/result.rs +++ b/crates/rpc/rpc/src/result.rs @@ -167,8 +167,8 @@ pub(crate) fn rpc_err( code, msg.into(), data.map(|data| { - jsonrpsee::core::to_json_raw_value(&format!("0x{}", hex::encode(data))) - .expect("serializing String does fail") + jsonrpsee::core::to_json_raw_value(&reth_primitives::hex::encode_prefixed(data)) + .expect("serializing String can't fail") }), ) } diff --git a/crates/rpc/rpc/src/trace.rs b/crates/rpc/rpc/src/trace.rs index 260bb61965..53865b2645 100644 --- a/crates/rpc/rpc/src/trace.rs +++ b/crates/rpc/rpc/src/trace.rs @@ -11,7 +11,7 @@ use crate::{ use async_trait::async_trait; use jsonrpsee::core::RpcResult as Result; use reth_consensus_common::calc::{base_block_reward, block_reward}; -use reth_primitives::{BlockId, BlockNumberOrTag, Bytes, SealedHeader, H256, U256}; +use reth_primitives::{BlockId, BlockNumberOrTag, Bytes, SealedHeader, B256, U256}; use reth_provider::{ BlockReader, ChainSpecProvider, EvmEnvProvider, StateProviderBox, StateProviderFactory, }; @@ -195,7 +195,7 @@ where /// Replays a transaction, returning the traces. pub async fn replay_transaction( &self, - hash: H256, + hash: B256, trace_types: HashSet, ) -> EthResult { let config = tracing_config(&trace_types); @@ -222,7 +222,7 @@ where /// This returns `None` if `indices` is empty pub async fn trace_get( &self, - hash: H256, + hash: B256, indices: Vec, ) -> EthResult> { if indices.len() != 1 { @@ -237,7 +237,7 @@ where /// Returns `None` if the trace object at that index does not exist pub async fn trace_get_index( &self, - hash: H256, + hash: B256, index: usize, ) -> EthResult> { match self.trace_transaction(hash).await? 
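The `rpc_err` change above drops the manual `format!("0x{}", ...)` in favour of `encode_prefixed`, which yields the same 0x-prefixed lowercase hex. A one-line sanity check with example bytes of my own choosing:

```rust
use reth_primitives::hex;

fn main() {
    // Same output as the old `format!("0x{}", hex::encode(data))` path.
    assert_eq!(hex::encode_prefixed([0xde, 0xad, 0xbe, 0xef]), "0xdeadbeef");
}
```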
{ @@ -252,7 +252,7 @@ where /// Returns all traces for the given transaction hash pub async fn trace_transaction( &self, - hash: H256, + hash: B256, ) -> EthResult>> { self.inner .eth_api @@ -515,7 +515,7 @@ where /// Handler for `trace_replayTransaction` async fn replay_transaction( &self, - transaction: H256, + transaction: B256, trace_types: HashSet, ) -> Result { let _permit = self.acquire_trace_permit().await; @@ -540,7 +540,7 @@ where /// Handler for `trace_get` async fn trace_get( &self, - hash: H256, + hash: B256, indices: Vec, ) -> Result> { let _permit = self.acquire_trace_permit().await; @@ -550,7 +550,7 @@ where /// Handler for `trace_transaction` async fn trace_transaction( &self, - hash: H256, + hash: B256, ) -> Result>> { let _permit = self.acquire_trace_permit().await; Ok(TraceApi::trace_transaction(self, hash).await?) diff --git a/crates/rpc/rpc/src/web3.rs b/crates/rpc/rpc/src/web3.rs index e3b600f945..604987e167 100644 --- a/crates/rpc/rpc/src/web3.rs +++ b/crates/rpc/rpc/src/web3.rs @@ -2,7 +2,7 @@ use crate::result::ToRpcResult; use async_trait::async_trait; use jsonrpsee::core::RpcResult; use reth_network_api::NetworkInfo; -use reth_primitives::{keccak256, Bytes, H256}; +use reth_primitives::{keccak256, Bytes, B256}; use reth_rpc_api::Web3ApiServer; /// `web3` API implementation. @@ -32,7 +32,7 @@ where } /// Handler for `web3_sha3` - fn sha3(&self, input: Bytes) -> RpcResult { + fn sha3(&self, input: Bytes) -> RpcResult { Ok(keccak256(input)) } } diff --git a/crates/snapshot/Cargo.toml b/crates/snapshot/Cargo.toml new file mode 100644 index 0000000000..65976236da --- /dev/null +++ b/crates/snapshot/Cargo.toml @@ -0,0 +1,34 @@ +[package] +name = "reth-snapshot" +version.workspace = true +edition.workspace = true +rust-version.workspace = true +license.workspace = true +homepage.workspace = true +repository.workspace = true +description = """ +Snapshotting implementation +""" + +[dependencies] +# reth +reth-primitives.workspace = true +reth-db.workspace = true +reth-provider.workspace = true +reth-interfaces.workspace = true + +# async +tokio = { workspace = true, features = ["sync"] } + +# misc +thiserror.workspace = true +tracing.workspace = true + +[dev-dependencies] +# reth +reth-db = { workspace = true, features = ["test-utils"] } +reth-stages = { path = "../stages", features = ["test-utils"] } + +# misc + +assert_matches.workspace = true diff --git a/crates/snapshot/src/error.rs b/crates/snapshot/src/error.rs new file mode 100644 index 0000000000..4bdea3e8fc --- /dev/null +++ b/crates/snapshot/src/error.rs @@ -0,0 +1,21 @@ +use reth_db::DatabaseError; +use reth_interfaces::RethError; +use reth_provider::ProviderError; +use thiserror::Error; + +/// Error returned by [crate::Snapshotter::run] +#[derive(Error, Debug)] +#[allow(missing_docs)] +pub enum SnapshotterError { + #[error("Inconsistent data: {0}")] + InconsistentData(&'static str), + + #[error("An interface error occurred.")] + Interface(#[from] RethError), + + #[error(transparent)] + Database(#[from] DatabaseError), + + #[error(transparent)] + Provider(#[from] ProviderError), +} diff --git a/crates/snapshot/src/lib.rs b/crates/snapshot/src/lib.rs new file mode 100644 index 0000000000..60a62cae53 --- /dev/null +++ b/crates/snapshot/src/lib.rs @@ -0,0 +1,19 @@ +//! Snapshotting implementation. 
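Incidentally, the `web3_sha3` handler above is nothing more than keccak256 over the raw request bytes. A tiny check against the `KECCAK_EMPTY` constant this PR keeps importing elsewhere (assumption: that constant is the hash of empty input, as its name suggests):

```rust
use reth_primitives::{keccak256, KECCAK_EMPTY};

fn main() {
    // `web3_sha3` of an empty payload is the well-known empty-keccak value.
    assert_eq!(keccak256(b""), KECCAK_EMPTY);
}
```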
+ +#![doc( + html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", + html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" +)] +#![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] +#![deny(unused_must_use, rust_2018_idioms)] +#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] + +mod error; +mod snapshotter; + +pub use error::SnapshotterError; +pub use snapshotter::{ + HighestSnapshots, HighestSnapshotsTracker, SnapshotTargets, Snapshotter, SnapshotterResult, + SnapshotterWithResult, +}; diff --git a/crates/snapshot/src/snapshotter.rs b/crates/snapshot/src/snapshotter.rs new file mode 100644 index 0000000000..2680b69815 --- /dev/null +++ b/crates/snapshot/src/snapshotter.rs @@ -0,0 +1,316 @@ +//! Support for snapshotting. + +use crate::SnapshotterError; +use reth_db::database::Database; +use reth_interfaces::{RethError, RethResult}; +use reth_primitives::{BlockNumber, ChainSpec, TxNumber}; +use reth_provider::{BlockReader, DatabaseProviderRO, ProviderFactory}; +use std::{collections::HashMap, ops::RangeInclusive, sync::Arc}; +use tokio::sync::watch; +use tracing::warn; + +/// Result of [Snapshotter::run] execution. +pub type SnapshotterResult = Result; + +/// The snapshotter type itself with the result of [Snapshotter::run] +pub type SnapshotterWithResult = (Snapshotter, SnapshotterResult); + +/// Snapshotting routine. Main snapshotting logic happens in [Snapshotter::run]. +#[derive(Debug)] +pub struct Snapshotter { + provider_factory: ProviderFactory, + highest_snapshots: HighestSnapshots, + highest_snapshots_tracker: watch::Sender>, + /// Block interval after which the snapshot is taken. + block_interval: u64, +} + +/// Tracker for the latest [`HighestSnapshots`] value. +pub type HighestSnapshotsTracker = watch::Receiver>; + +/// Highest snapshotted block numbers, per data part. +#[derive(Debug, Clone, Copy, Default, Eq, PartialEq)] +pub struct HighestSnapshots { + /// Highest snapshotted block of headers, inclusive. + /// If [`None`], no snapshot is available. + pub headers: Option, + /// Highest snapshotted block of receipts, inclusive. + /// If [`None`], no snapshot is available. + pub receipts: Option, + /// Highest snapshotted block of transactions, inclusive. + /// If [`None`], no snapshot is available. + pub transactions: Option, +} + +/// Snapshot targets, per data part, measured in [`BlockNumber`] and [`TxNumber`], if applicable. +#[derive(Debug, Clone, Eq, PartialEq)] +pub struct SnapshotTargets { + headers: Option>, + receipts: Option<(RangeInclusive, RangeInclusive)>, + transactions: Option<(RangeInclusive, RangeInclusive)>, +} + +impl SnapshotTargets { + /// Returns `true` if any of the data parts has targets, i.e. is [`Some`]. + pub fn any(&self) -> bool { + self.headers.is_some() || self.receipts.is_some() || self.transactions.is_some() + } + + /// Returns `true` if all targets are either [`None`] or multiple of `block_interval`. + fn is_multiple_of_block_interval(&self, block_interval: u64) -> bool { + [ + self.headers.as_ref(), + self.receipts.as_ref().map(|(blocks, _)| blocks), + self.transactions.as_ref().map(|(blocks, _)| blocks), + ] + .iter() + .all(|blocks| blocks.map_or(true, |blocks| (blocks.end() + 1) % block_interval == 0)) + } + + // Returns `true` if all targets are either [`None`] or has beginning of the range equal to the + // highest snapshot. 
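The `is_multiple_of_block_interval` check above only accepts ranges that end exactly on an interval boundary; block numbers are 0-indexed, hence the `+ 1`. A worked sketch of just that predicate:

```rust
/// Sketch of the alignment check: a target range qualifies only if its (0-indexed)
/// end block is the last block of some interval.
fn ends_on_interval(range_end: u64, block_interval: u64) -> bool {
    (range_end + 1) % block_interval == 0
}

fn main() {
    // With block_interval = 2: ranges 0..=1 and 2..=3 are aligned, 0..=2 is not.
    assert!(ends_on_interval(1, 2));
    assert!(ends_on_interval(3, 2));
    assert!(!ends_on_interval(2, 2));
}
```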
+ fn is_contiguous_to_highest_snapshots(&self, snapshots: HighestSnapshots) -> bool { + [ + (self.headers.as_ref(), snapshots.headers), + (self.receipts.as_ref().map(|(blocks, _)| blocks), snapshots.receipts), + (self.transactions.as_ref().map(|(blocks, _)| blocks), snapshots.transactions), + ] + .iter() + .all(|(target, highest)| { + target.map_or(true, |block_number| { + highest.map_or(*block_number.start() == 0, |previous_block_number| { + *block_number.start() == previous_block_number + 1 + }) + }) + }) + } +} + +impl Snapshotter { + /// Creates a new [Snapshotter]. + pub fn new( + db: DB, + chain_spec: Arc, + block_interval: u64, + highest_snapshots_tracker: watch::Sender>, + ) -> Self { + let snapshotter = Self { + provider_factory: ProviderFactory::new(db, chain_spec), + // TODO(alexey): fill from on-disk snapshot data + highest_snapshots: HighestSnapshots::default(), + highest_snapshots_tracker, + block_interval, + }; + + snapshotter.update_highest_snapshots_tracker(); + + snapshotter + } + + #[cfg(test)] + fn set_highest_snapshots_from_targets(&mut self, targets: &SnapshotTargets) { + if let Some(block_number) = &targets.headers { + self.highest_snapshots.headers = Some(*block_number.end()); + } + if let Some((block_number, _)) = &targets.receipts { + self.highest_snapshots.receipts = Some(*block_number.end()); + } + if let Some((block_number, _)) = &targets.transactions { + self.highest_snapshots.transactions = Some(*block_number.end()); + } + } + + fn update_highest_snapshots_tracker(&self) { + let _ = self.highest_snapshots_tracker.send(Some(self.highest_snapshots)).map_err(|_| { + warn!(target: "snapshot", "Highest snapshots channel closed"); + }); + } + + /// Run the snapshotter + pub fn run(&mut self, targets: SnapshotTargets) -> SnapshotterResult { + debug_assert!(targets.is_multiple_of_block_interval(self.block_interval)); + debug_assert!(targets.is_contiguous_to_highest_snapshots(self.highest_snapshots)); + + // TODO(alexey): snapshot logic + + self.update_highest_snapshots_tracker(); + + Ok(targets) + } + + /// Returns a snapshot targets at the provided finalized block number, respecting the block + /// interval. The target is determined by the check against last snapshots. 
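Similarly, the contiguity check above insists that every new target range starts exactly one block after the previous highest snapshot, or at genesis when nothing has been snapshotted yet. Reduced to a single range, the rule looks like this:

```rust
/// Sketch: a target starting at `target_start` must butt up against the previous
/// highest snapshotted block, or start at block 0 if there is no previous snapshot.
fn is_contiguous(target_start: u64, highest_snapshot: Option<u64>) -> bool {
    match highest_snapshot {
        Some(prev) => target_start == prev + 1,
        None => target_start == 0,
    }
}

fn main() {
    assert!(is_contiguous(0, None));
    assert!(is_contiguous(2, Some(1)));
    assert!(!is_contiguous(3, Some(1)));
}
```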
+ pub fn get_snapshot_targets( + &self, + finalized_block_number: BlockNumber, + ) -> RethResult { + let provider = self.provider_factory.provider()?; + + // Round down `finalized_block_number` to a multiple of `block_interval` + let to_block_number = finalized_block_number.saturating_sub( + // Adjust for 0-indexed block numbers + (finalized_block_number + 1) % self.block_interval, + ); + + // Calculate block ranges to snapshot + let headers_block_range = + self.get_snapshot_target_block_range(to_block_number, self.highest_snapshots.headers); + let receipts_block_range = + self.get_snapshot_target_block_range(to_block_number, self.highest_snapshots.receipts); + let transactions_block_range = self + .get_snapshot_target_block_range(to_block_number, self.highest_snapshots.transactions); + + // Calculate transaction ranges to snapshot + let mut block_to_tx_number_cache = HashMap::default(); + let receipts_tx_range = self.get_snapshot_target_tx_range( + &provider, + &mut block_to_tx_number_cache, + self.highest_snapshots.receipts, + &receipts_block_range, + )?; + let transactions_tx_range = self.get_snapshot_target_tx_range( + &provider, + &mut block_to_tx_number_cache, + self.highest_snapshots.transactions, + &transactions_block_range, + )?; + + Ok(SnapshotTargets { + headers: headers_block_range + .size_hint() + .1 + .expect("finalized block should be >= last headers snapshot") + .ge(&(self.block_interval as usize)) + .then_some(headers_block_range), + receipts: receipts_block_range + .size_hint() + .1 + .expect("finalized block should be >= last receipts snapshot") + .ge(&(self.block_interval as usize)) + .then_some((receipts_block_range, receipts_tx_range)), + transactions: transactions_block_range + .size_hint() + .1 + .expect("finalized block should be >= last transactions snapshot") + .ge(&(self.block_interval as usize)) + .then_some((transactions_block_range, transactions_tx_range)), + }) + } + + fn get_snapshot_target_block_range( + &self, + to_block_number: BlockNumber, + highest_snapshot: Option, + ) -> RangeInclusive { + let highest_snapshot = highest_snapshot.map_or(0, |block_number| block_number + 1); + highest_snapshot..=(highest_snapshot + self.block_interval - 1).min(to_block_number) + } + + fn get_snapshot_target_tx_range( + &self, + provider: &DatabaseProviderRO<'_, DB>, + block_to_tx_number_cache: &mut HashMap, + highest_snapshot: Option, + block_range: &RangeInclusive, + ) -> RethResult> { + let from_tx_number = if let Some(block_number) = highest_snapshot { + *block_to_tx_number_cache.entry(block_number).or_insert( + provider + .block_body_indices(block_number)? + .ok_or(RethError::Custom( + "Block body indices for highest snapshot not found".to_string(), + ))? + .next_tx_num(), + ) + } else { + 0 + }; + + let to_tx_number = *block_to_tx_number_cache.entry(*block_range.end()).or_insert( + provider + .block_body_indices(*block_range.end())? + .ok_or(RethError::Custom( + "Block body indices for block range end not found".to_string(), + ))? 
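The first step of `get_snapshot_targets` above clamps the finalized block down so that only whole intervals are ever snapshotted. A worked sketch of that clamp, using the same block interval of 2 as the unit tests further down:

```rust
/// Sketch of the clamp: subtract however far `finalized` reaches into an
/// incomplete interval (block numbers are 0-indexed).
fn to_block_number(finalized: u64, block_interval: u64) -> u64 {
    finalized.saturating_sub((finalized + 1) % block_interval)
}

fn main() {
    // With block_interval = 2: block 4 only half-fills 4..=5 and clamps to 3,
    // while blocks 1 and 5 already close an interval and stay put.
    assert_eq!(to_block_number(4, 2), 3);
    assert_eq!(to_block_number(1, 2), 1);
    assert_eq!(to_block_number(5, 2), 5);
}
```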
+ .last_tx_num(), + ); + Ok(from_tx_number..=to_tx_number) + } +} + +#[cfg(test)] +mod tests { + use crate::{snapshotter::SnapshotTargets, HighestSnapshots, Snapshotter}; + use assert_matches::assert_matches; + use reth_interfaces::{ + test_utils::{generators, generators::random_block_range}, + RethError, + }; + use reth_primitives::{B256, MAINNET}; + use reth_stages::test_utils::TestTransaction; + use tokio::sync::watch; + + #[test] + fn new() { + let tx = TestTransaction::default(); + + let (highest_snapshots_tx, highest_snapshots_rx) = watch::channel(None); + assert_eq!(*highest_snapshots_rx.borrow(), None); + + Snapshotter::new(tx.inner_raw(), MAINNET.clone(), 2, highest_snapshots_tx); + assert_eq!(*highest_snapshots_rx.borrow(), Some(HighestSnapshots::default())); + } + + #[test] + fn get_snapshot_targets() { + let tx = TestTransaction::default(); + let mut rng = generators::rng(); + + let blocks = random_block_range(&mut rng, 0..=3, B256::ZERO, 2..3); + tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); + + let mut snapshotter = + Snapshotter::new(tx.inner_raw(), MAINNET.clone(), 2, watch::channel(None).0); + + // Snapshot targets has data per part up to the passed finalized block number, + // respecting the block interval + let targets = snapshotter.get_snapshot_targets(1).expect("get snapshot targets"); + assert_eq!( + targets, + SnapshotTargets { + headers: Some(0..=1), + receipts: Some((0..=1, 0..=3)), + transactions: Some((0..=1, 0..=3)) + } + ); + assert!(targets.is_multiple_of_block_interval(snapshotter.block_interval)); + assert!(targets.is_contiguous_to_highest_snapshots(snapshotter.highest_snapshots)); + // Imitate snapshotter run according to the targets which updates the last snapshots state + snapshotter.set_highest_snapshots_from_targets(&targets); + + // Nothing to snapshot, last snapshots state of snapshotter doesn't pass the thresholds + assert_eq!( + snapshotter.get_snapshot_targets(2), + Ok(SnapshotTargets { headers: None, receipts: None, transactions: None }) + ); + + // Snapshot targets has data per part up to the passed finalized block number, + // respecting the block interval + let targets = snapshotter.get_snapshot_targets(5).expect("get snapshot targets"); + assert_eq!( + targets, + SnapshotTargets { + headers: Some(2..=3), + receipts: Some((2..=3, 4..=7)), + transactions: Some((2..=3, 4..=7)) + } + ); + assert!(targets.is_multiple_of_block_interval(snapshotter.block_interval)); + assert!(targets.is_contiguous_to_highest_snapshots(snapshotter.highest_snapshots)); + // Imitate snapshotter run according to the targets which updates the last snapshots state + snapshotter.set_highest_snapshots_from_targets(&targets); + + // Block body indices not found + assert_matches!(snapshotter.get_snapshot_targets(5), Err(RethError::Custom(_))); + } +} diff --git a/crates/stages/Cargo.toml b/crates/stages/Cargo.toml index 16d36c5b0c..79a2e989bd 100644 --- a/crates/stages/Cargo.toml +++ b/crates/stages/Cargo.toml @@ -58,10 +58,10 @@ reth-interfaces = { workspace = true, features = ["test-utils"] } reth-downloaders = { path = "../net/downloaders" } reth-eth-wire = { path = "../net/eth-wire" } # TODO(onbjerg): We only need this for [BlockBody] reth-blockchain-tree = { path = "../blockchain-tree" } -reth-rlp.workspace = true reth-revm = { path = "../revm" } reth-trie = { path = "../trie", features = ["test-utils"] } +alloy-rlp.workspace = true itertools.workspace = true tokio = { workspace = true, features = ["rt", "sync", "macros"] } assert_matches.workspace = true 
diff --git a/crates/stages/benches/setup/mod.rs b/crates/stages/benches/setup/mod.rs index f61f7e273c..6bbd4746d0 100644 --- a/crates/stages/benches/setup/mod.rs +++ b/crates/stages/benches/setup/mod.rs @@ -12,7 +12,7 @@ use reth_interfaces::test_utils::{ random_eoa_account_range, }, }; -use reth_primitives::{Account, Address, SealedBlock, H256, MAINNET}; +use reth_primitives::{Account, Address, SealedBlock, B256, MAINNET}; use reth_provider::ProviderFactory; use reth_stages::{ stages::{AccountHashingStage, StorageHashingStage}, @@ -117,7 +117,7 @@ pub(crate) fn txs_testdata(num_blocks: u64) -> PathBuf { .into_iter() .collect(); - let mut blocks = random_block_range(&mut rng, 0..=num_blocks, H256::zero(), txs_range); + let mut blocks = random_block_range(&mut rng, 0..=num_blocks, B256::ZERO, txs_range); let (transitions, start_state) = random_changeset_range( &mut rng, diff --git a/crates/stages/src/lib.rs b/crates/stages/src/lib.rs index ba60047a9d..b718fa5ab1 100644 --- a/crates/stages/src/lib.rs +++ b/crates/stages/src/lib.rs @@ -19,7 +19,7 @@ //! # use reth_interfaces::consensus::Consensus; //! # use reth_interfaces::test_utils::{TestBodiesClient, TestConsensus, TestHeadersClient}; //! # use reth_revm::Factory; -//! # use reth_primitives::{PeerId, MAINNET, H256}; +//! # use reth_primitives::{PeerId, MAINNET, B256}; //! # use reth_stages::Pipeline; //! # use reth_stages::sets::DefaultStages; //! # use reth_stages::stages::HeaderSyncMode; @@ -31,11 +31,11 @@ //! # ); //! # let db = create_test_rw_db(); //! # let bodies_downloader = BodiesDownloaderBuilder::default().build( -//! # Arc::new(TestBodiesClient { responder: |_| Ok((PeerId::zero(), vec![]).into()) }), +//! # Arc::new(TestBodiesClient { responder: |_| Ok((PeerId::ZERO, vec![]).into()) }), //! # consensus.clone(), //! # db.clone() //! # ); -//! # let (tip_tx, tip_rx) = watch::channel(H256::default()); +//! # let (tip_tx, tip_rx) = watch::channel(B256::default()); //! # let factory = Factory::new(MAINNET.clone()); //! // Create a pipeline that can fully sync //! # let pipeline = @@ -54,7 +54,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![allow(clippy::result_large_err)] // TODO(danipopes): fix this #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] diff --git a/crates/stages/src/pipeline/builder.rs b/crates/stages/src/pipeline/builder.rs index eca3d9209a..b5a0a2d409 100644 --- a/crates/stages/src/pipeline/builder.rs +++ b/crates/stages/src/pipeline/builder.rs @@ -2,7 +2,7 @@ use std::sync::Arc; use crate::{pipeline::BoxedStage, MetricEventsSender, Pipeline, Stage, StageSet}; use reth_db::database::Database; -use reth_primitives::{stage::StageId, BlockNumber, ChainSpec, H256}; +use reth_primitives::{stage::StageId, BlockNumber, ChainSpec, B256}; use tokio::sync::watch; /// Builds a [`Pipeline`]. @@ -16,7 +16,7 @@ where /// The maximum block number to sync to. max_block: Option, /// A receiver for the current chain tip to sync to. - tip_tx: Option>, + tip_tx: Option>, metrics_tx: Option, } @@ -56,7 +56,7 @@ where } /// Set the tip sender. 
- pub fn with_tip_sender(mut self, tip_tx: watch::Sender) -> Self { + pub fn with_tip_sender(mut self, tip_tx: watch::Sender) -> Self { self.tip_tx = Some(tip_tx); self } diff --git a/crates/stages/src/pipeline/mod.rs b/crates/stages/src/pipeline/mod.rs index 8b617a178a..4fdfe4e6d2 100644 --- a/crates/stages/src/pipeline/mod.rs +++ b/crates/stages/src/pipeline/mod.rs @@ -7,7 +7,7 @@ use reth_db::database::Database; use reth_interfaces::executor::BlockExecutionError; use reth_primitives::{ constants::BEACON_CONSENSUS_REORG_UNWIND_DEPTH, listener::EventListeners, stage::StageId, - BlockNumber, ChainSpec, H256, + BlockNumber, ChainSpec, B256, }; use reth_provider::{ProviderFactory, StageCheckpointReader, StageCheckpointWriter}; use std::{pin::Pin, sync::Arc}; @@ -105,7 +105,7 @@ pub struct Pipeline { /// Keeps track of the progress of the pipeline. progress: PipelineProgress, /// A receiver for the current chain tip to sync to. - tip_tx: Option>, + tip_tx: Option>, metrics_tx: Option, } @@ -126,7 +126,7 @@ where /// Set tip for reverse sync. #[track_caller] - pub fn set_tip(&self, tip: H256) { + pub fn set_tip(&self, tip: B256) { let _ = self.tip_tx.as_ref().expect("tip sender is set").send(tip).map_err(|_| { warn!(target: "sync::pipeline", "Chain tip channel closed"); }); @@ -157,7 +157,7 @@ where /// Consume the pipeline and run it until it reaches the provided tip, if set. Return the /// pipeline and its result as a future. #[track_caller] - pub fn run_as_fut(mut self, tip: Option) -> PipelineFut { + pub fn run_as_fut(mut self, tip: Option) -> PipelineFut { // TODO: fix this in a follow up PR. ideally, consensus engine would be responsible for // updating metrics. let _ = self.register_metrics(); // ignore error diff --git a/crates/stages/src/stages/bodies.rs b/crates/stages/src/stages/bodies.rs index e1a003dce7..9991ae9deb 100644 --- a/crates/stages/src/stages/bodies.rs +++ b/crates/stages/src/stages/bodies.rs @@ -478,7 +478,7 @@ mod tests { TestConsensus, }, }; - use reth_primitives::{BlockBody, BlockNumber, SealedBlock, SealedHeader, TxNumber, H256}; + use reth_primitives::{BlockBody, BlockNumber, SealedBlock, SealedHeader, TxNumber, B256}; use std::{ collections::{HashMap, VecDeque}, ops::RangeInclusive, @@ -488,10 +488,10 @@ mod tests { }; /// The block hash of the genesis block. - pub(crate) const GENESIS_HASH: H256 = H256::zero(); + pub(crate) const GENESIS_HASH: B256 = B256::ZERO; /// A helper to create a collection of block bodies keyed by their hash. - pub(crate) fn body_by_hash(block: &SealedBlock) -> (H256, BlockBody) { + pub(crate) fn body_by_hash(block: &SealedBlock) -> (B256, BlockBody) { ( block.hash(), BlockBody { @@ -505,7 +505,7 @@ mod tests { /// A helper struct for running the [BodyStage]. 
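The tip plumbing above is a plain `tokio::sync::watch` channel of block hashes: the builder takes the sender via `with_tip_sender`, `Pipeline::set_tip` pushes new targets into it, and the headers stage (see `HeaderSyncMode::Tip` later in this diff) holds the receiver. A minimal wiring sketch with placeholder hashes:

```rust
use reth_primitives::B256;
use tokio::sync::watch;

fn main() {
    // Sender side lives with the pipeline/engine, receiver side with the headers stage.
    let (tip_tx, tip_rx) = watch::channel(B256::ZERO);

    // What `Pipeline::set_tip` effectively does: broadcast the new target hash.
    let _ = tip_tx.send(B256::repeat_byte(0x11));

    // The stage sees the most recent tip whenever it polls the receiver.
    assert_eq!(*tip_rx.borrow(), B256::repeat_byte(0x11));
}
```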
pub(crate) struct BodyTestRunner { pub(crate) consensus: Arc, - responses: HashMap, + responses: HashMap, tx: TestTransaction, batch_size: u64, } @@ -526,7 +526,7 @@ mod tests { self.batch_size = batch_size; } - pub(crate) fn set_responses(&mut self, responses: HashMap) { + pub(crate) fn set_responses(&mut self, responses: HashMap) { self.responses = responses; } } @@ -730,7 +730,7 @@ mod tests { fn get_block_bodies_with_priority( &self, - _hashes: Vec, + _hashes: Vec, _priority: Priority, ) -> Self::Output { panic!("Noop client should not be called") @@ -741,7 +741,7 @@ mod tests { #[derive(Debug)] pub(crate) struct TestBodyDownloader { db: Arc, - responses: HashMap, + responses: HashMap, headers: VecDeque, batch_size: u64, } @@ -749,7 +749,7 @@ mod tests { impl TestBodyDownloader { pub(crate) fn new( db: Arc, - responses: HashMap, + responses: HashMap, batch_size: u64, ) -> Self { Self { db, responses, headers: VecDeque::default(), batch_size } diff --git a/crates/stages/src/stages/execution.rs b/crates/stages/src/stages/execution.rs index cd66ed954c..038cd5907f 100644 --- a/crates/stages/src/stages/execution.rs +++ b/crates/stages/src/stages/execution.rs @@ -513,15 +513,15 @@ impl ExecutionStageThresholds { mod tests { use super::*; use crate::test_utils::TestTransaction; + use alloy_rlp::Decodable; use assert_matches::assert_matches; use reth_db::{models::AccountBeforeTx, test_utils::create_test_rw_db}; use reth_primitives::{ - hex_literal::hex, keccak256, stage::StageUnitCheckpoint, Account, Bytecode, - ChainSpecBuilder, PruneModes, SealedBlock, StorageEntry, H160, H256, MAINNET, U256, + address, hex_literal::hex, keccak256, stage::StageUnitCheckpoint, Account, Bytecode, + ChainSpecBuilder, PruneModes, SealedBlock, StorageEntry, B256, MAINNET, U256, }; use reth_provider::{AccountReader, BlockWriter, ProviderFactory, ReceiptProvider}; use reth_revm::Factory; - use reth_rlp::Decodable; use std::sync::Arc; fn stage() -> ExecutionStage { @@ -684,8 +684,8 @@ mod tests { let provider = factory.provider_rw().unwrap(); let db_tx = provider.tx_ref(); - let acc1 = H160(hex!("1000000000000000000000000000000000000000")); - let acc2 = H160(hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b")); + let acc1 = address!("1000000000000000000000000000000000000000"); + let acc2 = address!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); let code = hex!("5a465a905090036002900360015500"); let balance = U256::from(0x3635c9adc5dea00000u128); let code_hash = keccak256(code); @@ -728,16 +728,16 @@ mod tests { let provider = factory.provider().unwrap(); // check post state - let account1 = H160(hex!("1000000000000000000000000000000000000000")); + let account1 = address!("1000000000000000000000000000000000000000"); let account1_info = Account { balance: U256::ZERO, nonce: 0x00, bytecode_hash: Some(code_hash) }; - let account2 = H160(hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba")); + let account2 = address!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); let account2_info = Account { balance: U256::from(0x1bc16d674ece94bau128), nonce: 0x00, bytecode_hash: None, }; - let account3 = H160(hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b")); + let account3 = address!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); let account3_info = Account { balance: U256::from(0x3635c9adc5de996b46u128), nonce: 0x01, @@ -764,7 +764,7 @@ mod tests { // Get on dupsort would return only first value. This is good enough for this test. 
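The test rewrites above swap `H160(hex!("…"))` for the `address!` macro, which validates the literal at compile time. A small check that the two spellings agree, reusing one of the addresses from these tests:

```rust
use reth_primitives::{address, hex_literal::hex, Address};

fn main() {
    let a = address!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b");
    // Same 20 bytes as the older hex-literal construction used before this PR.
    assert_eq!(a, Address::from(hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b")));
}
```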
assert_eq!( provider.tx_ref().get::(account1), - Ok(Some(StorageEntry { key: H256::from_low_u64_be(1), value: U256::from(2) })), + Ok(Some(StorageEntry { key: B256::with_last_byte(1), value: U256::from(2) })), "Post changed of a account" ); } @@ -794,9 +794,9 @@ mod tests { let provider = factory.provider_rw().unwrap(); let db_tx = provider.tx_ref(); - let acc1 = H160(hex!("1000000000000000000000000000000000000000")); + let acc1 = address!("1000000000000000000000000000000000000000"); let acc1_info = Account { nonce: 0, balance: U256::ZERO, bytecode_hash: Some(code_hash) }; - let acc2 = H160(hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b")); + let acc2 = address!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); let acc2_info = Account { nonce: 0, balance, bytecode_hash: None }; db_tx.put::(acc1, acc1_info).unwrap(); @@ -840,7 +840,7 @@ mod tests { assert_eq!(provider.basic_account(acc1), Ok(Some(acc1_info)), "Pre changed of a account"); assert_eq!(provider.basic_account(acc2), Ok(Some(acc2_info)), "Post changed of a account"); - let miner_acc = H160(hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba")); + let miner_acc = address!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); assert_eq!(provider.basic_account(miner_acc), Ok(None), "Third account should be unwound"); assert_eq!(provider.receipt(0), Ok(None), "First receipt should be unwound"); @@ -861,9 +861,9 @@ mod tests { provider.commit().unwrap(); // variables - let caller_address = H160(hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b")); - let destroyed_address = H160(hex!("095e7baea6a6c7c4c2dfeb977efac326af552d87")); - let beneficiary_address = H160(hex!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba")); + let caller_address = address!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"); + let destroyed_address = address!("095e7baea6a6c7c4c2dfeb977efac326af552d87"); + let beneficiary_address = address!("2adc25665018aa1fe0e6bc666dac8fc2697ff9ba"); let code = hex!("73095e7baea6a6c7c4c2dfeb977efac326af552d8731ff00"); let balance = U256::from(0x0de0b6b3a7640000u64); @@ -890,14 +890,14 @@ mod tests { .tx_ref() .put::( destroyed_address, - StorageEntry { key: H256::zero(), value: U256::ZERO }, + StorageEntry { key: B256::ZERO, value: U256::ZERO }, ) .unwrap(); provider .tx_ref() .put::( destroyed_address, - StorageEntry { key: H256::from_low_u64_be(1), value: U256::from(1u64) }, + StorageEntry { key: B256::with_last_byte(1), value: U256::from(1u64) }, ) .unwrap(); @@ -969,11 +969,11 @@ mod tests { vec![ ( (block.number, destroyed_address).into(), - StorageEntry { key: H256::zero(), value: U256::ZERO } + StorageEntry { key: B256::ZERO, value: U256::ZERO } ), ( (block.number, destroyed_address).into(), - StorageEntry { key: H256::from_low_u64_be(1), value: U256::from(1u64) } + StorageEntry { key: B256::with_last_byte(1), value: U256::from(1u64) } ) ] ); diff --git a/crates/stages/src/stages/hashing_account.rs b/crates/stages/src/stages/hashing_account.rs index 5887fb8034..896bfc9762 100644 --- a/crates/stages/src/stages/hashing_account.rs +++ b/crates/stages/src/stages/hashing_account.rs @@ -87,12 +87,12 @@ impl AccountHashingStage { generators, generators::{random_block_range, random_eoa_account_range}, }; - use reth_primitives::{Account, H256, U256}; + use reth_primitives::{Account, B256, U256}; use reth_provider::BlockWriter; let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, opts.blocks.clone(), H256::zero(), opts.txs); + let blocks = random_block_range(&mut rng, opts.blocks.clone(), B256::ZERO, opts.txs); for block in blocks { 
provider.insert_block(block, None, None).unwrap(); diff --git a/crates/stages/src/stages/hashing_storage.rs b/crates/stages/src/stages/hashing_storage.rs index 040b6375bd..64c3d7bcca 100644 --- a/crates/stages/src/stages/hashing_storage.rs +++ b/crates/stages/src/stages/hashing_storage.rs @@ -240,7 +240,7 @@ mod tests { generators::{random_block_range, random_contract_account_range}, }; use reth_primitives::{ - stage::StageUnitCheckpoint, Address, SealedBlock, StorageEntry, H256, U256, + stage::StageUnitCheckpoint, Address, SealedBlock, StorageEntry, B256, U256, }; stage_test_suite_ext!(StorageHashingTestRunner, storage_hashing); @@ -492,7 +492,7 @@ mod tests { let n_accounts = 31; let mut accounts = random_contract_account_range(&mut rng, &mut (0..n_accounts)); - let blocks = random_block_range(&mut rng, stage_progress..=end, H256::zero(), 0..3); + let blocks = random_block_range(&mut rng, stage_progress..=end, B256::ZERO, 0..3); self.tx.insert_headers(blocks.iter().map(|block| &block.header))?; diff --git a/crates/stages/src/stages/headers.rs b/crates/stages/src/stages/headers.rs index a56e408ab9..a8460412de 100644 --- a/crates/stages/src/stages/headers.rs +++ b/crates/stages/src/stages/headers.rs @@ -17,7 +17,7 @@ use reth_primitives::{ stage::{ CheckpointBlockRange, EntitiesCheckpoint, HeadersCheckpoint, StageCheckpoint, StageId, }, - BlockHashOrNumber, BlockNumber, SealedHeader, H256, + BlockHashOrNumber, BlockNumber, SealedHeader, B256, }; use reth_provider::DatabaseProviderRW; use tokio::sync::watch; @@ -31,7 +31,7 @@ pub enum HeaderSyncMode { Continuous, /// A sync mode in which the stage polls the receiver for the next tip /// to download from. - Tip(watch::Receiver), + Tip(watch::Receiver), } /// The headers stage. @@ -385,8 +385,9 @@ mod tests { stage_test_suite, ExecuteStageTestRunner, StageTestRunner, UnwindStageTestRunner, }; use assert_matches::assert_matches; + use rand::Rng; use reth_interfaces::test_utils::{generators, generators::random_header}; - use reth_primitives::{stage::StageUnitCheckpoint, H256, MAINNET}; + use reth_primitives::{stage::StageUnitCheckpoint, B256, MAINNET}; use reth_provider::ProviderFactory; use test_runner::HeadersTestRunner; @@ -406,7 +407,7 @@ mod tests { pub(crate) struct HeadersTestRunner { pub(crate) client: TestHeadersClient, - channel: (watch::Sender, watch::Receiver), + channel: (watch::Sender, watch::Receiver), downloader_factory: Box D + Send + Sync + 'static>, tx: TestTransaction, } @@ -416,7 +417,7 @@ mod tests { let client = TestHeadersClient::default(); Self { client: client.clone(), - channel: watch::channel(H256::zero()), + channel: watch::channel(B256::ZERO), downloader_factory: Box::new(move || { TestHeaderDownloader::new( client.clone(), @@ -524,7 +525,7 @@ mod tests { let client = TestHeadersClient::default(); Self { client: client.clone(), - channel: watch::channel(H256::zero()), + channel: watch::channel(B256::ZERO), downloader_factory: Box::new(move || { ReverseHeadersDownloaderBuilder::default() .stream_batch_size(500) @@ -547,7 +548,7 @@ mod tests { Ok(()) } - pub(crate) fn send_tip(&self, tip: H256) { + pub(crate) fn send_tip(&self, tip: B256) { self.channel.0.send(tip).expect("failed to send tip"); } } @@ -604,7 +605,7 @@ mod tests { let mut rng = generators::rng(); - let consensus_tip = H256::random(); + let consensus_tip = rng.gen(); runner.send_tip(consensus_tip); // Genesis diff --git a/crates/stages/src/stages/index_account_history.rs b/crates/stages/src/stages/index_account_history.rs index b79af45d1e..7cdd2e936f 
100644 --- a/crates/stages/src/stages/index_account_history.rs +++ b/crates/stages/src/stages/index_account_history.rs @@ -100,9 +100,6 @@ impl Stage for IndexAccountHistoryStage { #[cfg(test)] mod tests { - use reth_provider::ProviderFactory; - use std::collections::BTreeMap; - use super::*; use crate::test_utils::{ stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, @@ -123,16 +120,18 @@ mod tests { generators, generators::{random_block_range, random_changeset_range, random_contract_account_range}, }; - use reth_primitives::{hex_literal::hex, Address, BlockNumber, PruneMode, H160, H256, MAINNET}; + use reth_primitives::{address, Address, BlockNumber, PruneMode, B256, MAINNET}; + use reth_provider::ProviderFactory; + use std::collections::BTreeMap; - const ADDRESS: H160 = H160(hex!("0000000000000000000000000000000000000001")); + const ADDRESS: Address = address!("0000000000000000000000000000000000000001"); fn acc() -> AccountBeforeTx { AccountBeforeTx { address: ADDRESS, info: None } } /// Shard for account - fn shard(shard_index: u64) -> ShardedKey { + fn shard(shard_index: u64) -> ShardedKey
{ ShardedKey { key: ADDRESS, highest_block_number: shard_index } } @@ -141,8 +140,8 @@ mod tests { } fn cast( - table: Vec<(ShardedKey, BlockNumberList)>, - ) -> BTreeMap, Vec> { + table: Vec<(ShardedKey
, BlockNumberList)>, + ) -> BTreeMap, Vec> { table .into_iter() .map(|(k, v)| { @@ -425,7 +424,7 @@ mod tests { .unwrap(); // run - let input = ExecInput { target: Some(100), ..Default::default() }; + let input = ExecInput { target: Some(20000), ..Default::default() }; let mut stage = IndexAccountHistoryStage { prune_modes: PruneModes { account_history: Some(PruneMode::Before(36)), @@ -436,7 +435,7 @@ mod tests { let factory = ProviderFactory::new(tx.tx.as_ref(), MAINNET.clone()); let provider = factory.provider_rw().unwrap(); let out = stage.execute(&provider, input).await.unwrap(); - assert_eq!(out, ExecOutput { checkpoint: StageCheckpoint::new(100), done: true }); + assert_eq!(out, ExecOutput { checkpoint: StageCheckpoint::new(20000), done: true }); provider.commit().unwrap(); // verify @@ -444,7 +443,7 @@ mod tests { assert_eq!(table, BTreeMap::from([(shard(u64::MAX), vec![36, 100])])); // unwind - unwind(&tx, 100, 0).await; + unwind(&tx, 20000, 0).await; // verify initial state let table = tx.table::().unwrap(); @@ -498,7 +497,7 @@ mod tests { .into_iter() .collect::>(); - let blocks = random_block_range(&mut rng, start..=end, H256::zero(), 0..3); + let blocks = random_block_range(&mut rng, start..=end, B256::ZERO, 0..3); let (transitions, _) = random_changeset_range( &mut rng, @@ -564,14 +563,14 @@ mod tests { address, *list.last().expect("Chuck does not return empty list") as BlockNumber, - ) as ShardedKey, + ) as ShardedKey
, list, ); }); if let Some(last_list) = last_chunk { result.insert( - ShardedKey::new(address, u64::MAX) as ShardedKey, + ShardedKey::new(address, u64::MAX) as ShardedKey
, last_list, ); }; diff --git a/crates/stages/src/stages/index_storage_history.rs b/crates/stages/src/stages/index_storage_history.rs index 8d03bb20d5..9d4145a25b 100644 --- a/crates/stages/src/stages/index_storage_history.rs +++ b/crates/stages/src/stages/index_storage_history.rs @@ -97,9 +97,6 @@ impl Stage for IndexStorageHistoryStage { #[cfg(test)] mod tests { - use reth_provider::ProviderFactory; - use std::collections::BTreeMap; - use super::*; use crate::test_utils::{ stage_test_suite_ext, ExecuteStageTestRunner, StageTestRunner, TestRunnerError, @@ -122,14 +119,16 @@ mod tests { generators::{random_block_range, random_changeset_range, random_contract_account_range}, }; use reth_primitives::{ - hex_literal::hex, Address, BlockNumber, PruneMode, StorageEntry, H160, H256, MAINNET, U256, + address, b256, Address, BlockNumber, PruneMode, StorageEntry, B256, MAINNET, U256, }; + use reth_provider::ProviderFactory; + use std::collections::BTreeMap; - const ADDRESS: H160 = H160(hex!("0000000000000000000000000000000000000001")); - const STORAGE_KEY: H256 = - H256(hex!("0000000000000000000000000000000000000000000000000000000000000001")); + const ADDRESS: Address = address!("0000000000000000000000000000000000000001"); + const STORAGE_KEY: B256 = + b256!("0000000000000000000000000000000000000000000000000000000000000001"); - fn storage(key: H256) -> StorageEntry { + fn storage(key: B256) -> StorageEntry { // Value is not used in indexing stage. StorageEntry { key, value: U256::ZERO } } @@ -438,7 +437,7 @@ mod tests { .unwrap(); // run - let input = ExecInput { target: Some(100), ..Default::default() }; + let input = ExecInput { target: Some(20000), ..Default::default() }; let mut stage = IndexStorageHistoryStage { prune_modes: PruneModes { storage_history: Some(PruneMode::Before(36)), @@ -449,7 +448,7 @@ mod tests { let factory = ProviderFactory::new(tx.tx.as_ref(), MAINNET.clone()); let provider = factory.provider_rw().unwrap(); let out = stage.execute(&provider, input).await.unwrap(); - assert_eq!(out, ExecOutput { checkpoint: StageCheckpoint::new(100), done: true }); + assert_eq!(out, ExecOutput { checkpoint: StageCheckpoint::new(20000), done: true }); provider.commit().unwrap(); // verify @@ -457,7 +456,7 @@ mod tests { assert_eq!(table, BTreeMap::from([(shard(u64::MAX), vec![36, 100]),])); // unwind - unwind(&tx, 100, 0).await; + unwind(&tx, 20000, 0).await; // verify initial state let table = tx.table::().unwrap(); @@ -511,7 +510,7 @@ mod tests { .into_iter() .collect::>(); - let blocks = random_block_range(&mut rng, start..=end, H256::zero(), 0..3); + let blocks = random_block_range(&mut rng, start..=end, B256::ZERO, 0..3); let (transitions, _) = random_changeset_range( &mut rng, @@ -552,7 +551,7 @@ mod tests { .walk_range(BlockNumberAddress::range(start_block..=end_block))? 
.try_fold( BTreeMap::new(), - |mut storages: BTreeMap<(Address, H256), Vec>, + |mut storages: BTreeMap<(Address, B256), Vec>, entry| -> Result<_, TestRunnerError> { let (index, storage) = entry?; diff --git a/crates/stages/src/stages/merkle.rs b/crates/stages/src/stages/merkle.rs index d5e510db5c..60d5f466dc 100644 --- a/crates/stages/src/stages/merkle.rs +++ b/crates/stages/src/stages/merkle.rs @@ -10,7 +10,7 @@ use reth_primitives::{ hex, stage::{EntitiesCheckpoint, MerkleCheckpoint, StageCheckpoint, StageId}, trie::StoredSubNode, - BlockNumber, SealedHeader, H256, + BlockNumber, SealedHeader, B256, }; use reth_provider::{ DatabaseProviderRW, HeaderProvider, ProviderError, StageCheckpointReader, StageCheckpointWriter, @@ -80,7 +80,7 @@ impl MerkleStage { /// Check that the computed state root matches the root in the expected header. fn validate_state_root( &self, - got: H256, + got: B256, expected: SealedHeader, target_block: BlockNumber, ) -> Result<(), StageError> { @@ -356,7 +356,7 @@ mod tests { }, }; use reth_primitives::{ - keccak256, stage::StageUnitCheckpoint, SealedBlock, StorageEntry, H256, U256, + keccak256, stage::StageUnitCheckpoint, SealedBlock, StorageEntry, B256, U256, }; use reth_trie::test_utils::{state_root, state_root_prehashed}; use std::collections::BTreeMap; @@ -574,7 +574,7 @@ mod tests { let mut storage_cursor = tx.cursor_dup_write::().unwrap(); - let mut tree: BTreeMap> = BTreeMap::new(); + let mut tree: BTreeMap> = BTreeMap::new(); let mut rev_changeset_walker = storage_changesets_cursor.walk_back(None).unwrap(); diff --git a/crates/stages/src/stages/mod.rs b/crates/stages/src/stages/mod.rs index deb25b3c06..00a8207815 100644 --- a/crates/stages/src/stages/mod.rs +++ b/crates/stages/src/stages/mod.rs @@ -45,6 +45,7 @@ mod tests { test_utils::TestTransaction, ExecInput, }; + use alloy_rlp::Decodable; use reth_db::{ cursor::DbCursorRO, mdbx::{cursor::Cursor, RW}, @@ -54,15 +55,14 @@ mod tests { }; use reth_interfaces::test_utils::generators::{self, random_block}; use reth_primitives::{ - hex_literal::hex, keccak256, Account, Bytecode, ChainSpecBuilder, PruneMode, PruneModes, - SealedBlock, H160, MAINNET, U256, + address, hex_literal::hex, keccak256, Account, Bytecode, ChainSpecBuilder, PruneMode, + PruneModes, SealedBlock, MAINNET, U256, }; use reth_provider::{ AccountExtReader, BlockWriter, DatabaseProviderRW, ProviderFactory, ReceiptProvider, StorageReader, }; use reth_revm::Factory; - use reth_rlp::Decodable; use std::sync::Arc; #[tokio::test] @@ -98,14 +98,14 @@ mod tests { provider .tx_ref() .put::( - H160(hex!("1000000000000000000000000000000000000000")), + address!("1000000000000000000000000000000000000000"), Account { nonce: 0, balance: U256::ZERO, bytecode_hash: Some(code_hash) }, ) .unwrap(); provider .tx_ref() .put::( - H160(hex!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b")), + address!("a94f5374fce5edbc8e2a8697c15331677e6ebf0b"), Account { nonce: 0, balance: U256::from(0x3635c9adc5dea00000u128), diff --git a/crates/stages/src/stages/sender_recovery.rs b/crates/stages/src/stages/sender_recovery.rs index 002f0b6708..200e4fc672 100644 --- a/crates/stages/src/stages/sender_recovery.rs +++ b/crates/stages/src/stages/sender_recovery.rs @@ -11,7 +11,7 @@ use reth_interfaces::consensus; use reth_primitives::{ keccak256, stage::{EntitiesCheckpoint, StageCheckpoint, StageId}, - PrunePart, TransactionSignedNoHash, TxNumber, H160, + Address, PrunePart, TransactionSignedNoHash, TxNumber, }; use reth_provider::{ BlockReader, DatabaseProviderRW, HeaderProvider, 
ProviderError, PruneCheckpointReader, @@ -191,7 +191,7 @@ impl Stage for SenderRecoveryStage { fn recover_sender( entry: Result<(RawKey, RawValue), DatabaseError>, rlp_buf: &mut Vec, -) -> Result<(u64, H160), Box> { +) -> Result<(u64, Address), Box> { let (tx_id, transaction) = entry.map_err(|e| Box::new(SenderRecoveryStageError::StageError(e.into())))?; let tx_id = tx_id.key().expect("key to be formated"); @@ -249,7 +249,7 @@ mod tests { }; use reth_primitives::{ stage::StageUnitCheckpoint, BlockNumber, PruneCheckpoint, PruneMode, SealedBlock, - TransactionSigned, H256, MAINNET, + TransactionSigned, B256, MAINNET, }; use reth_provider::{ProviderFactory, PruneCheckpointWriter, TransactionsProvider}; @@ -320,7 +320,7 @@ mod tests { // Manually seed once with full input range let seed = - random_block_range(&mut rng, stage_progress + 1..=previous_stage, H256::zero(), 0..4); // set tx count range high enough to hit the threshold + random_block_range(&mut rng, stage_progress + 1..=previous_stage, B256::ZERO, 0..4); // set tx count range high enough to hit the threshold runner.tx.insert_blocks(seed.iter(), None).expect("failed to seed execution"); let total_transactions = runner.tx.table::().unwrap().len() as u64; @@ -381,7 +381,7 @@ mod tests { let tx = TestTransaction::default(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, 0..=100, H256::zero(), 0..10); + let blocks = random_block_range(&mut rng, 0..=100, B256::ZERO, 0..10); tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); let max_pruned_block = 30; @@ -495,7 +495,7 @@ mod tests { let stage_progress = input.checkpoint().block_number; let end = input.target(); - let blocks = random_block_range(&mut rng, stage_progress..=end, H256::zero(), 0..2); + let blocks = random_block_range(&mut rng, stage_progress..=end, B256::ZERO, 0..2); self.tx.insert_blocks(blocks.iter(), None)?; Ok(blocks) } diff --git a/crates/stages/src/stages/tx_lookup.rs b/crates/stages/src/stages/tx_lookup.rs index d0bdacb6ea..bf101d41e7 100644 --- a/crates/stages/src/stages/tx_lookup.rs +++ b/crates/stages/src/stages/tx_lookup.rs @@ -12,7 +12,7 @@ use reth_interfaces::provider::ProviderError; use reth_primitives::{ keccak256, stage::{EntitiesCheckpoint, StageCheckpoint, StageId}, - PruneCheckpoint, PruneModes, PrunePart, TransactionSignedNoHash, TxNumber, H256, + PruneCheckpoint, PruneModes, PrunePart, TransactionSignedNoHash, TxNumber, B256, }; use reth_provider::{ BlockReader, DatabaseProviderRW, PruneCheckpointReader, PruneCheckpointWriter, @@ -203,7 +203,7 @@ impl Stage for TransactionLookupStage { fn calculate_hash( entry: Result<(TxNumber, TransactionSignedNoHash), DatabaseError>, rlp_buf: &mut Vec, -) -> Result<(H256, TxNumber), Box> { +) -> Result<(B256, TxNumber), Box> { let (tx_id, tx) = entry.map_err(|e| Box::new(e.into()))?; tx.transaction.encode_with_signature(&tx.signature, rlp_buf, false); Ok((keccak256(rlp_buf), tx_id)) @@ -240,7 +240,7 @@ mod tests { generators::{random_block, random_block_range}, }; use reth_primitives::{ - stage::StageUnitCheckpoint, BlockNumber, PruneCheckpoint, PruneMode, SealedBlock, H256, + stage::StageUnitCheckpoint, BlockNumber, PruneCheckpoint, PruneMode, SealedBlock, B256, MAINNET, }; use reth_provider::{ @@ -314,7 +314,7 @@ mod tests { // Seed only once with full input range let seed = - random_block_range(&mut rng, stage_progress + 1..=previous_stage, H256::zero(), 0..4); // set tx count range high enough to hit the threshold + random_block_range(&mut rng, stage_progress + 
1..=previous_stage, B256::ZERO, 0..4); // set tx count range high enough to hit the threshold runner.tx.insert_blocks(seed.iter(), None).expect("failed to seed execution"); let total_txs = runner.tx.table::().unwrap().len() as u64; @@ -379,7 +379,7 @@ mod tests { // Seed only once with full input range let seed = - random_block_range(&mut rng, stage_progress + 1..=previous_stage, H256::zero(), 0..2); + random_block_range(&mut rng, stage_progress + 1..=previous_stage, B256::ZERO, 0..2); runner.tx.insert_blocks(seed.iter(), None).expect("failed to seed execution"); runner.set_prune_modes(PruneModes { @@ -413,7 +413,7 @@ mod tests { let tx = TestTransaction::default(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, 0..=100, H256::zero(), 0..10); + let blocks = random_block_range(&mut rng, 0..=100, B256::ZERO, 0..10); tx.insert_blocks(blocks.iter(), None).expect("insert blocks"); let max_pruned_block = 30; @@ -541,7 +541,7 @@ mod tests { let end = input.target(); let mut rng = generators::rng(); - let blocks = random_block_range(&mut rng, stage_progress + 1..=end, H256::zero(), 0..2); + let blocks = random_block_range(&mut rng, stage_progress + 1..=end, B256::ZERO, 0..2); self.tx.insert_blocks(blocks.iter(), None)?; Ok(blocks) } diff --git a/crates/stages/src/test_utils/test_db.rs b/crates/stages/src/test_utils/test_db.rs index 9868f4a60b..b0d14e6483 100644 --- a/crates/stages/src/test_utils/test_db.rs +++ b/crates/stages/src/test_utils/test_db.rs @@ -12,7 +12,7 @@ use reth_db::{ use reth_interfaces::{test_utils::generators::ChangeSet, RethResult}; use reth_primitives::{ keccak256, Account, Address, BlockNumber, Receipt, SealedBlock, SealedHeader, StorageEntry, - TxHash, TxNumber, H256, MAINNET, U256, + TxHash, TxNumber, B256, MAINNET, U256, }; use reth_provider::{DatabaseProviderRO, DatabaseProviderRW, HistoryWriter, ProviderFactory}; use std::{ @@ -384,7 +384,7 @@ impl TestTransaction { I: IntoIterator, { let mut accounts = BTreeMap::>::new(); - let mut storages = BTreeMap::<(Address, H256), Vec>::new(); + let mut storages = BTreeMap::<(Address, B256), Vec>::new(); for (block, changeset) in changesets.into_iter().enumerate() { for (address, _, storage_entries) in changeset { diff --git a/crates/storage/codecs/derive/Cargo.toml b/crates/storage/codecs/derive/Cargo.toml index 12d14d51ab..4a3cea7141 100644 --- a/crates/storage/codecs/derive/Cargo.toml +++ b/crates/storage/codecs/derive/Cargo.toml @@ -22,13 +22,16 @@ proc-macro = true [dependencies] proc-macro2.workspace = true quote.workspace = true -syn = { version = "2.0", features = ["full"] } +syn = { version = "2.0", features = ["full", "extra-traits"] } convert_case = "0.6.0" # codecs serde = { workspace = true, default-features = false } parity-scale-codec = { version = "3.2.1", features = ["derive", "bytes"] } +[dev-dependencies] +pretty_assertions = "1.3.0" + [features] default = ["compact"] compact = [] diff --git a/crates/storage/codecs/derive/src/arbitrary.rs b/crates/storage/codecs/derive/src/arbitrary.rs index 82d35c6e62..0768aa7c70 100644 --- a/crates/storage/codecs/derive/src/arbitrary.rs +++ b/crates/storage/codecs/derive/src/arbitrary.rs @@ -30,7 +30,7 @@ pub fn maybe_generate_tests(args: TokenStream, ast: &DeriveInput) -> TokenStream } }); } else if arg.to_string() == "rlp" { - traits.push(quote! { use reth_rlp::{Encodable, Decodable}; }); + traits.push(quote! { use alloy_rlp::{Encodable, Decodable}; }); roundtrips.push(quote! 
{ { let mut buf = vec![]; @@ -64,7 +64,7 @@ pub fn maybe_generate_tests(args: TokenStream, ast: &DeriveInput) -> TokenStream // malformed rlp-header check let mut decode_buf = &mut buf.as_slice(); - let mut header = reth_rlp::Header::decode(decode_buf).expect("failed to decode header"); + let mut header = alloy_rlp::Header::decode(decode_buf).expect("failed to decode header"); header.payload_length+=1; let mut b = Vec::with_capacity(decode_buf.len()); header.encode(&mut b); diff --git a/crates/storage/codecs/derive/src/compact/generator.rs b/crates/storage/codecs/derive/src/compact/generator.rs index bdb81893b4..daa4cae875 100644 --- a/crates/storage/codecs/derive/src/compact/generator.rs +++ b/crates/storage/codecs/derive/src/compact/generator.rs @@ -52,13 +52,13 @@ pub fn generate_from_to(ident: &Ident, fields: &FieldList, is_zstd: bool) -> Tok /// Generates code to implement the `Compact` trait method `to_compact`. fn generate_from_compact(fields: &FieldList, ident: &Ident, is_zstd: bool) -> TokenStream2 { let mut lines = vec![]; - let mut known_types = vec!["H256", "H160", "Address", "Bloom", "Vec", "TxHash"]; + let mut known_types = vec!["B256", "Address", "Bloom", "Vec", "TxHash"]; - // Only types without `bytes::Bytes` should be added here. It's currently manually added, since - // it's hard to figure out with derive_macro which types have bytes::Bytes fields. + // Only types without `Bytes` should be added here. It's currently manually added, since + // it's hard to figure out with derive_macro which types have Bytes fields. // // This removes the requirement of the field to be placed last in the struct. - known_types.append(&mut vec![ + known_types.extend_from_slice(&[ "TransactionKind", "AccessList", "Signature", diff --git a/crates/storage/codecs/derive/src/compact/mod.rs b/crates/storage/codecs/derive/src/compact/mod.rs index cf27bb80bc..4bec0cce08 100644 --- a/crates/storage/codecs/derive/src/compact/mod.rs +++ b/crates/storage/codecs/derive/src/compact/mod.rs @@ -24,7 +24,7 @@ type FieldType = String; /// `Compact` has alternative functions that can be used as a workaround for type /// specialization of fixed sized types. /// -/// Example: `Vec` vs `Vec`. The first does not +/// Example: `Vec` vs `Vec`. The first does not /// require the len of the element, while the latter one does. type UseAlternative = bool; // Helper Alias type @@ -133,7 +133,7 @@ fn load_field(field: &syn::Field, fields: &mut FieldList, is_enum: bool) { } /// Since there's no impl specialization in rust stable atm, once we find we have a -/// Vec/Option we try to find out if it's a Vec/Option of a fixed size data type, e.g. `Vec`. +/// Vec/Option we try to find out if it's a Vec/Option of a fixed size data type, e.g. `Vec`. /// /// If so, we use another impl to code/decode its data. 
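The doc comment above explains the stand-in for impl specialization: when a derived struct holds a `Vec`/`Option` of one of the fixed-size types, the generated code routes through the `specialized_*` methods and skips the per-element length prefix. A minimal sketch of that size difference, assuming only the public `Compact` trait methods shown later in this diff (`to_compact`, `specialized_to_compact`) and that the codecs crate is imported as `reth_codecs`:

```rust
use reth_codecs::Compact;
use reth_primitives::B256;

fn main() {
    let hashes = vec![B256::ZERO, B256::ZERO];
    let mut generic_buf: Vec<u8> = Vec::new();
    let mut specialized_buf: Vec<u8> = Vec::new();

    // Generic Vec<T> path: varuint element count, then a length prefix per
    // element (the `compact_vec` test below advances 1 + 1 + 32 + 1 + 32 bytes).
    hashes.clone().to_compact(&mut generic_buf);

    // Specialized path for fixed-size elements: varuint count, then raw
    // 32-byte chunks with no per-element length.
    hashes.specialized_to_compact(&mut specialized_buf);

    assert!(specialized_buf.len() < generic_buf.len());
}
```

The `should_use_alt_impl` check that follows is what decides, at derive time, which of the two calls the generated code emits.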
fn should_use_alt_impl(ftype: &String, segment: &syn::PathSegment) -> bool { @@ -143,7 +143,7 @@ fn should_use_alt_impl(ftype: &String, segment: &syn::PathSegment) -> bool { if let (Some(path), 1) = (arg_path.path.segments.first(), arg_path.path.segments.len()) { - if ["H256", "H160", "Address", "Bloom", "TxHash"] + if ["B256", "Address", "Address", "Bloom", "TxHash"] .contains(&path.ident.to_string().as_str()) { return true @@ -177,6 +177,7 @@ pub fn is_flag_type(ftype: &str) -> bool { #[cfg(test)] mod tests { use super::*; + use pretty_assertions::assert_eq; use syn::parse2; #[test] @@ -189,10 +190,10 @@ mod tests { f_bool_t: bool, f_bool_f: bool, f_option_none: Option, - f_option_some: Option, + f_option_some: Option, f_option_some_u64: Option, f_vec_empty: Vec, - f_vec_some: Vec, + f_vec_some: Vec
, } }; @@ -210,8 +211,7 @@ mod tests { mod TestStruct_flags { use bytes::Buf; use modular_bitfield::prelude::*; - - #[doc="Fieldset that facilitates compacting the parent type. Used bytes: 2 | Unused bits: 1"] + #[doc = "Fieldset that facilitates compacting the parent type. Used bytes: 2 | Unused bits: 1"] #[bitfield] #[derive(Clone, Copy, Debug, Default)] pub struct TestStructFlags { @@ -226,7 +226,7 @@ mod tests { unused: B1, } impl TestStructFlags { - #[doc=r" Deserializes this fieldset and returns it, alongside the original slice in an advanced position."] + #[doc = r" Deserializes this fieldset and returns it, alongside the original slice in an advanced position."] pub fn from(mut buf: &[u8]) -> (Self, &[u8]) { ( TestStructFlags::from_bytes([buf.get_u8(), buf.get_u8(),]), @@ -311,6 +311,9 @@ mod tests { } }; - assert_eq!(output.to_string(), should_output.to_string()); + assert_eq!( + syn::parse2::(output).unwrap(), + syn::parse2::(should_output).unwrap() + ); } } diff --git a/crates/storage/codecs/derive/src/compact/structs.rs b/crates/storage/codecs/derive/src/compact/structs.rs index 1cbdd90457..899e589dba 100644 --- a/crates/storage/codecs/derive/src/compact/structs.rs +++ b/crates/storage/codecs/derive/src/compact/structs.rs @@ -75,7 +75,7 @@ impl<'a> StructHandler<'a> { let set_len_method = format_ident!("set_{name}_len"); let len = format_ident!("{name}_len"); - // H256 with #[maybe_zero] attribute for example + // B256 with #[maybe_zero] attribute for example if *is_compact && !is_flag_type(ftype) { let itype = format_ident!("{ftype}"); let set_bool_method = format_ident!("set_{name}"); @@ -139,15 +139,15 @@ impl<'a> StructHandler<'a> { If it's an alias type (which are not supported by proc_macro), be sure to add it to either `known_types` or `get_bit_size` lists in the derive crate." ); - if ftype == "bytes::Bytes" { + if ftype == "Bytes" { self.lines.push(quote! { - let mut #name = bytes::Bytes::new(); - (#name, buf) = bytes::Bytes::from_compact(buf, buf.len() as usize); + let mut #name = Bytes::new(); + (#name, buf) = Bytes::from_compact(buf, buf.len() as usize); }) } else { let ident_type = format_ident!("{ftype}"); if !is_flag_type(ftype) { - // It's a type that handles its own length requirements. (h256, Custom, ...) + // It's a type that handles its own length requirements. (B256, Custom, ...) self.lines.push(quote! 
{ let (#name, new_buf) = #ident_type::#from_compact_ident(buf, buf.len()); }) diff --git a/crates/storage/codecs/derive/src/lib.rs b/crates/storage/codecs/derive/src/lib.rs index 97c9f619a3..a06a111e70 100644 --- a/crates/storage/codecs/derive/src/lib.rs +++ b/crates/storage/codecs/derive/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] // TODO(danipopes): add these warnings // #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] diff --git a/crates/storage/codecs/src/lib.rs b/crates/storage/codecs/src/lib.rs index a7831414e6..2866e79fb3 100644 --- a/crates/storage/codecs/src/lib.rs +++ b/crates/storage/codecs/src/lib.rs @@ -3,26 +3,28 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] -#![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] -#![deny(unused_must_use, rust_2018_idioms)] -#![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] -use bytes::{Buf, Bytes}; -use revm_primitives::{B160 as H160, B256 as H256, U256}; +pub use codecs_derive::*; + +use bytes::Buf; +use revm_primitives::{ + alloy_primitives::{Bloom, B512}, + Address, Bytes, B256, U256, +}; pub use codecs_derive::*; /// Trait that implements the `Compact` codec. /// /// When deriving the trait for custom structs, be aware of certain limitations/recommendations: -/// * Works best with structs that only have native types (eg. u64, H256, U256). -/// * Fixed array types (H256, Address, Bloom) are not compacted. +/// * Works best with structs that only have native types (eg. u64, B256, U256). +/// * Fixed array types (B256, Address, Bloom) are not compacted. /// * Max size of `T` in `Option` or `Vec` shouldn't exceed `0xffff`. -/// * Any `bytes::Bytes` field **should be placed last**. +/// * Any `Bytes` field **should be placed last**. /// * Any other type which is not known to the derive module **should be placed last** in they -/// contain a `bytes::Bytes` field. +/// contain a `Bytes` field. /// /// The last two points make it easier to decode the data without saving the length on the /// `StructFlags`. It will fail compilation if it's not respected. If they're alias to known types, @@ -30,12 +32,13 @@ pub use codecs_derive::*; /// /// Regarding the `specialized_to/from_compact` methods: Mainly used as a workaround for not being /// able to specialize an impl over certain types like `Vec`/`Option` where `T` is a fixed -/// size array like `Vec`. +/// size array like `Vec`. pub trait Compact { /// Takes a buffer which can be written to. *Ideally*, it returns the length written to. fn to_compact(self, buf: &mut B) -> usize where B: bytes::BufMut + AsMut<[u8]>; + /// Takes a buffer which can be read from. Returns the object and `buf` with its internal cursor /// advanced (eg.`.advance(len)`). /// @@ -138,7 +141,7 @@ where (list, buf) } - /// To be used by fixed sized types like `Vec`. + /// To be used by fixed sized types like `Vec`. 
fn specialized_to_compact(self, buf: &mut B) -> usize where B: bytes::BufMut + AsMut<[u8]>, @@ -151,7 +154,7 @@ where 0 } - /// To be used by fixed sized types like `Vec`. + /// To be used by fixed sized types like `Vec`. fn specialized_from_compact(buf: &[u8], len: usize) -> (Self, &[u8]) { let (length, mut buf) = decode_varuint(buf); let mut list = Vec::with_capacity(length); @@ -206,7 +209,7 @@ where (Some(element), buf) } - /// To be used by fixed sized types like `Option`. + /// To be used by fixed sized types like `Option`. fn specialized_to_compact(self, buf: &mut B) -> usize where B: bytes::BufMut + AsMut<[u8]>, @@ -218,7 +221,7 @@ where 0 } - /// To be used by fixed sized types like `Option`. + /// To be used by fixed sized types like `Option`. fn specialized_from_compact(buf: &[u8], len: usize) -> (Self, &[u8]) { if len == 0 { return (None, buf) @@ -259,22 +262,23 @@ impl Compact for Bytes { B: bytes::BufMut + AsMut<[u8]>, { let len = self.len(); - buf.put(self); + buf.put(self.0); len } + fn from_compact(mut buf: &[u8], len: usize) -> (Self, &[u8]) { - (buf.copy_to_bytes(len), buf) + (buf.copy_to_bytes(len).into(), buf) } } -/// Implements the [`Compact`] trait for fixed size hash types like [`H256`]. +/// Implements the [`Compact`] trait for fixed size hash types like [`B256`]. #[macro_export] macro_rules! impl_hash_compact { ($($name:tt),+) => { $( impl Compact for $name { fn to_compact(self, buf: &mut B) -> usize where B: bytes::BufMut + AsMut<[u8]> { - buf.put_slice(&self.0); + buf.put_slice(self.as_slice()); std::mem::size_of::<$name>() } @@ -304,7 +308,7 @@ macro_rules! impl_hash_compact { }; } -impl_hash_compact!(H256, H160); +impl_hash_compact!(Address, B256, B512, Bloom); impl Compact for bool { /// `bool` vars go directly to the `StructFlags` and are not written to the buffer. @@ -350,12 +354,12 @@ fn decode_varuint(mut buf: &[u8]) -> (usize, &[u8]) { #[cfg(test)] mod tests { use super::*; - use revm_primitives::Address; + use revm_primitives::{Address, Bytes}; #[test] fn compact_bytes() { let arr = [1, 2, 3, 4, 5]; - let list = bytes::Bytes::copy_from_slice(&arr); + let list = Bytes::copy_from_slice(&arr); let mut buf = vec![]; assert_eq!(list.clone().to_compact(&mut buf), list.len()); @@ -363,33 +367,33 @@ mod tests { buf.push(1); assert_eq!(&buf[..arr.len()], &arr); - assert_eq!(bytes::Bytes::from_compact(&buf, list.len()), (list, vec![1].as_slice())); + assert_eq!(Bytes::from_compact(&buf, list.len()), (list, vec![1].as_slice())); } #[test] fn compact_address() { let mut buf = vec![]; - assert_eq!(Address::zero().to_compact(&mut buf), 20); + assert_eq!(Address::ZERO.to_compact(&mut buf), 20); assert_eq!(buf, vec![0; 20]); // Add some noise data. buf.push(1); // Address shouldn't care about the len passed, since it's not actually compacted. - assert_eq!(Address::from_compact(&buf, 1000), (Address::zero(), vec![1u8].as_slice())); + assert_eq!(Address::from_compact(&buf, 1000), (Address::ZERO, vec![1u8].as_slice())); } #[test] - fn compact_h256() { + fn compact_b256() { let mut buf = vec![]; - assert_eq!(H256::zero().to_compact(&mut buf), 32); + assert_eq!(B256::ZERO.to_compact(&mut buf), 32); assert_eq!(buf, vec![0; 32]); // Add some noise data. buf.push(1); - // H256 shouldn't care about the len passed, since it's not actually compacted. - assert_eq!(H256::from_compact(&buf, 1000), (H256::zero(), vec![1u8].as_slice())); + // B256 shouldn't care about the len passed, since it's not actually compacted. 
+ assert_eq!(B256::from_compact(&buf, 1000), (B256::ZERO, vec![1u8].as_slice())); } #[test] @@ -413,27 +417,27 @@ mod tests { #[test] fn compact_option() { - let opt = Some(H256::zero()); + let opt = Some(B256::ZERO); let mut buf = vec![]; - assert_eq!(None::.to_compact(&mut buf), 0); + assert_eq!(None::.to_compact(&mut buf), 0); assert_eq!(opt.to_compact(&mut buf), 1); assert_eq!(buf.len(), 1 + 32); - assert_eq!(Option::::from_compact(&buf, 1), (opt, vec![].as_slice())); + assert_eq!(Option::::from_compact(&buf, 1), (opt, vec![].as_slice())); // If `None`, it returns the slice at the same cursor position. - assert_eq!(Option::::from_compact(&buf, 0), (None, buf.as_slice())); + assert_eq!(Option::::from_compact(&buf, 0), (None, buf.as_slice())); let mut buf = vec![]; assert_eq!(opt.specialized_to_compact(&mut buf), 1); assert_eq!(buf.len(), 32); - assert_eq!(Option::::specialized_from_compact(&buf, 1), (opt, vec![].as_slice())); + assert_eq!(Option::::specialized_from_compact(&buf, 1), (opt, vec![].as_slice())); } #[test] fn compact_vec() { - let list = vec![H256::zero(), H256::zero()]; + let list = vec![B256::ZERO, B256::ZERO]; let mut buf = vec![]; // Vec doesn't return a total length @@ -445,7 +449,7 @@ mod tests { let mut remaining_buf = buf.as_slice(); remaining_buf.advance(1 + 1 + 32 + 1 + 32); - assert_eq!(Vec::::from_compact(&buf, 0), (list, remaining_buf)); + assert_eq!(Vec::::from_compact(&buf, 0), (list, remaining_buf)); assert_eq!(remaining_buf, &[1u8, 2]); } @@ -499,25 +503,25 @@ mod tests { f_u256: U256, f_bool_t: bool, f_bool_f: bool, - f_option_none: Option, - f_option_some: Option, + f_option_none: Option, + f_option_some: Option, f_option_some_u64: Option, - f_vec_empty: Vec, - f_vec_some: Vec, + f_vec_empty: Vec
, + f_vec_some: Vec
, } impl Default for TestStruct { fn default() -> Self { TestStruct { - f_u64: 1u64, // 4 bits | 1 byte - f_u256: U256::from(1u64), // 6 bits | 1 byte - f_bool_f: false, // 1 bit | 0 bytes - f_bool_t: true, // 1 bit | 0 bytes - f_option_none: None, // 1 bit | 0 bytes - f_option_some: Some(H256::zero()), // 1 bit | 32 bytes - f_option_some_u64: Some(0xffffu64), // 1 bit | 1 + 2 bytes - f_vec_empty: vec![], // 0 bits | 1 bytes - f_vec_some: vec![H160::zero(), H160::zero()], // 0 bits | 1 + 20*2 bytes + f_u64: 1u64, // 4 bits | 1 byte + f_u256: U256::from(1u64), // 6 bits | 1 byte + f_bool_f: false, // 1 bit | 0 bytes + f_bool_t: true, // 1 bit | 0 bytes + f_option_none: None, // 1 bit | 0 bytes + f_option_some: Some(B256::ZERO), // 1 bit | 32 bytes + f_option_some_u64: Some(0xffffu64), // 1 bit | 1 + 2 bytes + f_vec_empty: vec![], // 0 bits | 1 bytes + f_vec_some: vec![Address::ZERO, Address::ZERO], // 0 bits | 1 + 20*2 bytes } } } diff --git a/crates/storage/db/Cargo.toml b/crates/storage/db/Cargo.toml index 029039aa29..a04d895088 100644 --- a/crates/storage/db/Cargo.toml +++ b/crates/storage/db/Cargo.toml @@ -14,6 +14,7 @@ reth-primitives.workspace = true reth-interfaces.workspace = true reth-codecs = { path = "../codecs" } reth-libmdbx = { path = "../libmdbx-rs", optional = true, features = ["return-borrowed"] } +reth-nippy-jar = { path = "../nippy-jar" } # codecs serde = { workspace = true, default-features = false } @@ -42,6 +43,7 @@ tempfile = { version = "3.3.0", optional = true } parking_lot.workspace = true derive_more = "0.99" eyre = "0.6.8" +paste = "1.0" # arbitrary utils arbitrary = { workspace = true, features = ["derive"], optional = true } diff --git a/crates/storage/db/benches/utils.rs b/crates/storage/db/benches/utils.rs index 5951b73815..d5c558df60 100644 --- a/crates/storage/db/benches/utils.rs +++ b/crates/storage/db/benches/utils.rs @@ -6,20 +6,20 @@ use reth_db::{ transaction::{DbTx, DbTxMut}, DatabaseEnv, }; -use reth_primitives::fs; +use reth_primitives::{fs, Bytes}; use std::{path::Path, sync::Arc}; /// Path where the DB is initialized for benchmarks. -#[allow(unused)] +#[allow(dead_code)] const BENCH_DB_PATH: &str = "/tmp/reth-benches"; /// Used for RandomRead and RandomWrite benchmarks. -#[allow(unused)] +#[allow(dead_code)] const RANDOM_INDEXES: [usize; 10] = [23, 2, 42, 5, 3, 99, 54, 0, 33, 64]; /// Returns bench vectors in the format: `Vec<(Key, EncodedKey, Value, CompressedValue)>`. -#[allow(unused)] -fn load_vectors() -> Vec<(T::Key, bytes::Bytes, T::Value, bytes::Bytes)> +#[allow(dead_code)] +fn load_vectors() -> Vec<(T::Key, Bytes, T::Value, Bytes)> where T: Default, T::Key: Default + Clone + for<'de> serde::Deserialize<'de>, @@ -39,9 +39,9 @@ where .map(|(k, v)| { ( k.clone(), - bytes::Bytes::copy_from_slice(k.encode().as_ref()), + Bytes::copy_from_slice(k.encode().as_ref()), v.clone(), - bytes::Bytes::copy_from_slice(v.compress().as_ref()), + Bytes::copy_from_slice(v.compress().as_ref()), ) }) .collect::>() @@ -49,10 +49,10 @@ where /// Sets up a clear database at `bench_db_path`. 
#[allow(clippy::ptr_arg)] -#[allow(unused)] +#[allow(dead_code)] fn set_up_db( bench_db_path: &Path, - pair: &Vec<(::Key, bytes::Bytes, ::Value, bytes::Bytes)>, + pair: &Vec<(::Key, Bytes, ::Value, Bytes)>, ) -> DatabaseEnv where T: Table + Default, diff --git a/crates/storage/db/src/abstraction/table.rs b/crates/storage/db/src/abstraction/table.rs index 668bdf6998..81193bcc73 100644 --- a/crates/storage/db/src/abstraction/table.rs +++ b/crates/storage/db/src/abstraction/table.rs @@ -35,6 +35,11 @@ pub trait Compress: Send + Sync + Sized + Debug { pub trait Decompress: Send + Sync + Sized + Debug { /// Decompresses data coming from the database. fn decompress>(value: B) -> Result; + + /// Decompresses owned data coming from the database. + fn decompress_owned(value: Vec) -> Result { + Self::decompress(value) + } } /// Trait that will transform the data to be saved in the DB. diff --git a/crates/storage/db/src/implementation/mdbx/cursor.rs b/crates/storage/db/src/implementation/mdbx/cursor.rs index 18d8cef1e9..274908ffab 100644 --- a/crates/storage/db/src/implementation/mdbx/cursor.rs +++ b/crates/storage/db/src/implementation/mdbx/cursor.rs @@ -41,7 +41,7 @@ macro_rules! decode { }; } -/// Some types don't support compression (eg. H256), and we don't want to be copying them to the +/// Some types don't support compression (eg. B256), and we don't want to be copying them to the /// allocated buffer when we can just use their reference. macro_rules! compress_or_ref { ($self:expr, $value:expr) => { diff --git a/crates/storage/db/src/implementation/mdbx/mod.rs b/crates/storage/db/src/implementation/mdbx/mod.rs index b9cce24693..ea82ed4285 100644 --- a/crates/storage/db/src/implementation/mdbx/mod.rs +++ b/crates/storage/db/src/implementation/mdbx/mod.rs @@ -169,7 +169,7 @@ mod tests { }; use reth_interfaces::db::DatabaseWriteOperation; use reth_libmdbx::{NoWriteMap, WriteMap}; - use reth_primitives::{Account, Address, Header, IntegerList, StorageEntry, H160, H256, U256}; + use reth_primitives::{Account, Address, Header, IntegerList, StorageEntry, B256, U256}; use std::{path::Path, str::FromStr, sync::Arc}; use tempfile::TempDir; @@ -255,7 +255,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 2, 3] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -264,49 +264,49 @@ mod tests { // [1, 3) let mut walker = cursor.walk_range(1..3).unwrap(); - assert_eq!(walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((2, H256::zero())))); + assert_eq!(walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((2, B256::ZERO)))); assert_eq!(walker.next(), None); // next() returns None after walker is done assert_eq!(walker.next(), None); // [1, 2] let mut walker = cursor.walk_range(1..=2).unwrap(); - assert_eq!(walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((2, H256::zero())))); + assert_eq!(walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((2, B256::ZERO)))); // next() returns None after walker is done assert_eq!(walker.next(), None); // [1, ∞) let mut walker = cursor.walk_range(1..).unwrap(); - assert_eq!(walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((2, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((3, H256::zero())))); + assert_eq!(walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((2, 
B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((3, B256::ZERO)))); // next() returns None after walker is done assert_eq!(walker.next(), None); // [2, 4) let mut walker = cursor.walk_range(2..4).unwrap(); - assert_eq!(walker.next(), Some(Ok((2, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((3, H256::zero())))); + assert_eq!(walker.next(), Some(Ok((2, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((3, B256::ZERO)))); assert_eq!(walker.next(), None); // next() returns None after walker is done assert_eq!(walker.next(), None); // (∞, 3) let mut walker = cursor.walk_range(..3).unwrap(); - assert_eq!(walker.next(), Some(Ok((0, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((2, H256::zero())))); + assert_eq!(walker.next(), Some(Ok((0, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((2, B256::ZERO)))); // next() returns None after walker is done assert_eq!(walker.next(), None); // (∞, ∞) let mut walker = cursor.walk_range(..).unwrap(); - assert_eq!(walker.next(), Some(Ok((0, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((2, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((3, H256::zero())))); + assert_eq!(walker.next(), Some(Ok((0, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((2, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((3, B256::ZERO)))); // next() returns None after walker is done assert_eq!(walker.next(), None); } @@ -315,9 +315,9 @@ mod tests { fn db_cursor_walk_range_on_dup_table() { let db: Arc> = create_test_db(EnvKind::RW); - let address0 = Address::zero(); - let address1 = Address::from_low_u64_be(1); - let address2 = Address::from_low_u64_be(2); + let address0 = Address::ZERO; + let address1 = Address::with_last_byte(1); + let address2 = Address::with_last_byte(2); let tx = db.tx_mut().expect(ERROR_INIT_TX); tx.put::(0, AccountBeforeTx { address: address0, info: None }) @@ -361,7 +361,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 2, 3] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -389,7 +389,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 3] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -398,16 +398,16 @@ mod tests { let mut walker = Walker::new(&mut cursor, None); - assert_eq!(walker.next(), Some(Ok((0, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((3, H256::zero())))); + assert_eq!(walker.next(), Some(Ok((0, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((3, B256::ZERO)))); assert_eq!(walker.next(), None); // transform to ReverseWalker let mut reverse_walker = walker.rev(); - assert_eq!(reverse_walker.next(), Some(Ok((3, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((0, H256::zero())))); + assert_eq!(reverse_walker.next(), Some(Ok((3, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((0, B256::ZERO)))); 
assert_eq!(reverse_walker.next(), None); } @@ -419,7 +419,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 3] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -428,16 +428,16 @@ mod tests { let mut reverse_walker = ReverseWalker::new(&mut cursor, None); - assert_eq!(reverse_walker.next(), Some(Ok((3, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((0, H256::zero())))); + assert_eq!(reverse_walker.next(), Some(Ok((3, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((0, B256::ZERO)))); assert_eq!(reverse_walker.next(), None); // transform to Walker let mut walker = reverse_walker.forward(); - assert_eq!(walker.next(), Some(Ok((0, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(walker.next(), Some(Ok((3, H256::zero())))); + assert_eq!(walker.next(), Some(Ok((0, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(walker.next(), Some(Ok((3, B256::ZERO)))); assert_eq!(walker.next(), None); } @@ -449,7 +449,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 3] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -457,26 +457,26 @@ mod tests { let mut cursor = tx.cursor_read::().unwrap(); let mut reverse_walker = cursor.walk_back(Some(1)).unwrap(); - assert_eq!(reverse_walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((0, H256::zero())))); + assert_eq!(reverse_walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((0, B256::ZERO)))); assert_eq!(reverse_walker.next(), None); let mut reverse_walker = cursor.walk_back(Some(2)).unwrap(); - assert_eq!(reverse_walker.next(), Some(Ok((3, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((0, H256::zero())))); + assert_eq!(reverse_walker.next(), Some(Ok((3, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((0, B256::ZERO)))); assert_eq!(reverse_walker.next(), None); let mut reverse_walker = cursor.walk_back(Some(4)).unwrap(); - assert_eq!(reverse_walker.next(), Some(Ok((3, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((0, H256::zero())))); + assert_eq!(reverse_walker.next(), Some(Ok((3, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((0, B256::ZERO)))); assert_eq!(reverse_walker.next(), None); let mut reverse_walker = cursor.walk_back(None).unwrap(); - assert_eq!(reverse_walker.next(), Some(Ok((3, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((1, H256::zero())))); - assert_eq!(reverse_walker.next(), Some(Ok((0, H256::zero())))); + assert_eq!(reverse_walker.next(), Some(Ok((3, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((1, B256::ZERO)))); + assert_eq!(reverse_walker.next(), Some(Ok((0, B256::ZERO)))); assert_eq!(reverse_walker.next(), None); } @@ -488,7 +488,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 3] .into_iter() - 
.try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -501,9 +501,9 @@ mod tests { // Seek exact let exact = cursor.seek_exact(missing_key).unwrap(); assert_eq!(exact, None); - assert_eq!(cursor.current(), Ok(Some((missing_key + 1, H256::zero())))); - assert_eq!(cursor.prev(), Ok(Some((missing_key - 1, H256::zero())))); - assert_eq!(cursor.prev(), Ok(Some((missing_key - 2, H256::zero())))); + assert_eq!(cursor.current(), Ok(Some((missing_key + 1, B256::ZERO)))); + assert_eq!(cursor.prev(), Ok(Some((missing_key - 1, B256::ZERO)))); + assert_eq!(cursor.prev(), Ok(Some((missing_key - 2, B256::ZERO)))); } #[test] @@ -514,7 +514,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 3, 4, 5] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -523,12 +523,12 @@ mod tests { let mut cursor = tx.cursor_write::().unwrap(); // INSERT - assert_eq!(cursor.insert(key_to_insert, H256::zero()), Ok(())); - assert_eq!(cursor.current(), Ok(Some((key_to_insert, H256::zero())))); + assert_eq!(cursor.insert(key_to_insert, B256::ZERO), Ok(())); + assert_eq!(cursor.current(), Ok(Some((key_to_insert, B256::ZERO)))); // INSERT (failure) assert_eq!( - cursor.insert(key_to_insert, H256::zero()), + cursor.insert(key_to_insert, B256::ZERO), Err(DatabaseError::Write { code: -30799, operation: DatabaseWriteOperation::CursorInsert, @@ -536,7 +536,7 @@ mod tests { key: Box::from(key_to_insert.encode().as_ref()) }) ); - assert_eq!(cursor.current(), Ok(Some((key_to_insert, H256::zero())))); + assert_eq!(cursor.current(), Ok(Some((key_to_insert, B256::ZERO)))); tx.commit().expect(ERROR_COMMIT); @@ -555,8 +555,8 @@ mod tests { let mut dup_cursor = tx.cursor_dup_write::().unwrap(); let key = Address::random(); - let subkey1 = H256::random(); - let subkey2 = H256::random(); + let subkey1 = B256::random(); + let subkey2 = B256::random(); let entry1 = StorageEntry { key: subkey1, value: U256::ZERO }; assert!(dup_cursor.insert(key, entry1).is_ok()); @@ -571,9 +571,9 @@ mod tests { let db: Arc> = create_test_db(EnvKind::RW); let tx = db.tx_mut().expect(ERROR_INIT_TX); - let key1 = Address::from_low_u64_be(1); - let key2 = Address::from_low_u64_be(2); - let key3 = Address::from_low_u64_be(3); + let key1 = Address::with_last_byte(1); + let key2 = Address::with_last_byte(2); + let key3 = Address::with_last_byte(3); let mut cursor = tx.cursor_write::().unwrap(); assert!(cursor.insert(key1, Account::default()).is_ok()); @@ -602,7 +602,7 @@ mod tests { // PUT vec![0, 1, 3, 5, 7, 9] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -611,11 +611,11 @@ mod tests { // INSERT (cursor starts at last) cursor.last().unwrap(); - assert_eq!(cursor.current(), Ok(Some((9, H256::zero())))); + assert_eq!(cursor.current(), Ok(Some((9, B256::ZERO)))); for pos in (2..=8).step_by(2) { - assert_eq!(cursor.insert(pos, H256::zero()), Ok(())); - assert_eq!(cursor.current(), Ok(Some((pos, H256::zero())))); + assert_eq!(cursor.insert(pos, B256::ZERO), Ok(())); + assert_eq!(cursor.current(), Ok(Some((pos, B256::ZERO)))); } tx.commit().expect(ERROR_COMMIT); @@ -635,7 +635,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 2, 3, 4] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) 
+ .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -643,7 +643,7 @@ mod tests { let key_to_append = 5; let tx = db.tx_mut().expect(ERROR_INIT_TX); let mut cursor = tx.cursor_write::().unwrap(); - assert_eq!(cursor.append(key_to_append, H256::zero()), Ok(())); + assert_eq!(cursor.append(key_to_append, B256::ZERO), Ok(())); tx.commit().expect(ERROR_COMMIT); // Confirm the result @@ -662,7 +662,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); vec![0, 1, 3, 4, 5] .into_iter() - .try_for_each(|key| tx.put::(key, H256::zero())) + .try_for_each(|key| tx.put::(key, B256::ZERO)) .expect(ERROR_PUT); tx.commit().expect(ERROR_COMMIT); @@ -671,7 +671,7 @@ mod tests { let tx = db.tx_mut().expect(ERROR_INIT_TX); let mut cursor = tx.cursor_write::().unwrap(); assert_eq!( - cursor.append(key_to_append, H256::zero()), + cursor.append(key_to_append, B256::ZERO), Err(DatabaseError::Write { code: -30418, operation: DatabaseWriteOperation::CursorAppend, @@ -679,7 +679,7 @@ mod tests { key: Box::from(key_to_append.encode().as_ref()) }) ); - assert_eq!(cursor.current(), Ok(Some((5, H256::zero())))); // the end of table + assert_eq!(cursor.current(), Ok(Some((5, B256::ZERO)))); // the end of table tx.commit().expect(ERROR_COMMIT); // Confirm the result @@ -711,7 +711,7 @@ mod tests { assert_eq!(cursor.seek_exact(key), Ok(Some((key, account)))); let mut dup_cursor = tx.cursor_dup_write::().unwrap(); - let subkey = H256::random(); + let subkey = B256::random(); let value = U256::from(1); let entry1 = StorageEntry { key: subkey, value }; @@ -738,7 +738,7 @@ mod tests { .try_for_each(|val| { cursor.append( transition_id, - AccountBeforeTx { address: Address::from_low_u64_be(val), info: None }, + AccountBeforeTx { address: Address::with_last_byte(val), info: None }, ) }) .expect(ERROR_APPEND); @@ -751,7 +751,7 @@ mod tests { assert_eq!( cursor.append_dup( transition_id, - AccountBeforeTx { address: Address::from_low_u64_be(subkey_to_append), info: None } + AccountBeforeTx { address: Address::with_last_byte(subkey_to_append), info: None } ), Err(DatabaseError::Write { code: -30418, @@ -763,7 +763,7 @@ mod tests { assert_eq!( cursor.append( transition_id - 1, - AccountBeforeTx { address: Address::from_low_u64_be(subkey_to_append), info: None } + AccountBeforeTx { address: Address::with_last_byte(subkey_to_append), info: None } ), Err(DatabaseError::Write { code: -30418, @@ -775,7 +775,7 @@ mod tests { assert_eq!( cursor.append( transition_id, - AccountBeforeTx { address: Address::from_low_u64_be(subkey_to_append), info: None } + AccountBeforeTx { address: Address::with_last_byte(subkey_to_append), info: None } ), Ok(()) ); @@ -787,7 +787,7 @@ mod tests { let value = Account { nonce: 18446744073709551615, - bytecode_hash: Some(H256::random()), + bytecode_hash: Some(B256::random()), balance: U256::MAX, }; let key = Address::from_str("0xa2c122be93b0074270ebee7f6b7292c7deb45047") @@ -824,11 +824,11 @@ mod tests { env.update(|tx| tx.put::(key, value00).expect(ERROR_PUT)).unwrap(); // PUT (2,2) - let value22 = StorageEntry { key: H256::from_low_u64_be(2), value: U256::from(2) }; + let value22 = StorageEntry { key: B256::with_last_byte(2), value: U256::from(2) }; env.update(|tx| tx.put::(key, value22).expect(ERROR_PUT)).unwrap(); // PUT (1,1) - let value11 = StorageEntry { key: H256::from_low_u64_be(1), value: U256::from(1) }; + let value11 = StorageEntry { key: B256::with_last_byte(1), value: U256::from(1) }; env.update(|tx| tx.put::(key, 
value11).expect(ERROR_PUT)).unwrap(); // Iterate with cursor @@ -846,7 +846,7 @@ mod tests { { let tx = env.tx().expect(ERROR_INIT_TX); let mut cursor = tx.cursor_dup_read::().unwrap(); - let mut walker = cursor.walk_dup(Some(key), Some(H256::from_low_u64_be(1))).unwrap(); + let mut walker = cursor.walk_dup(Some(key), Some(B256::with_last_byte(1))).unwrap(); assert_eq!( (key, value11), walker @@ -870,11 +870,11 @@ mod tests { env.update(|tx| tx.put::(key1, value00).expect(ERROR_PUT)).unwrap(); // PUT key1 (1,1) - let value11 = StorageEntry { key: H256::from_low_u64_be(1), value: U256::from(1) }; + let value11 = StorageEntry { key: B256::with_last_byte(1), value: U256::from(1) }; env.update(|tx| tx.put::(key1, value11).expect(ERROR_PUT)).unwrap(); // PUT key2 (2,2) - let value22 = StorageEntry { key: H256::from_low_u64_be(2), value: U256::from(2) }; + let value22 = StorageEntry { key: B256::with_last_byte(2), value: U256::from(2) }; env.update(|tx| tx.put::(key2, value22).expect(ERROR_PUT)).unwrap(); // Iterate with walk_dup @@ -906,11 +906,11 @@ mod tests { #[test] fn dup_value_with_same_subkey() { let env = create_test_db::(EnvKind::RW); - let key1 = H160([0x11; 20]); - let key2 = H160([0x22; 20]); + let key1 = Address::new([0x11; 20]); + let key2 = Address::new([0x22; 20]); // PUT key1 (0,1) - let value01 = StorageEntry { key: H256::from_low_u64_be(0), value: U256::from(1) }; + let value01 = StorageEntry { key: B256::with_last_byte(0), value: U256::from(1) }; env.update(|tx| tx.put::(key1, value01).expect(ERROR_PUT)).unwrap(); // PUT key1 (0,0) @@ -918,7 +918,7 @@ mod tests { env.update(|tx| tx.put::(key1, value00).expect(ERROR_PUT)).unwrap(); // PUT key2 (2,2) - let value22 = StorageEntry { key: H256::from_low_u64_be(2), value: U256::from(2) }; + let value22 = StorageEntry { key: B256::with_last_byte(2), value: U256::from(2) }; env.update(|tx| tx.put::(key2, value22).expect(ERROR_PUT)).unwrap(); // Iterate with walk diff --git a/crates/storage/db/src/lib.rs b/crates/storage/db/src/lib.rs index cb705871d8..8cb3fe80c1 100644 --- a/crates/storage/db/src/lib.rs +++ b/crates/storage/db/src/lib.rs @@ -58,7 +58,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -68,6 +68,7 @@ pub mod abstraction; mod implementation; +pub mod snapshot; pub mod tables; mod utils; pub mod version; diff --git a/crates/storage/db/src/snapshot.rs b/crates/storage/db/src/snapshot.rs new file mode 100644 index 0000000000..cea6350d68 --- /dev/null +++ b/crates/storage/db/src/snapshot.rs @@ -0,0 +1,87 @@ +//! reth's snapshot creation from database tables + +use crate::{ + abstraction::cursor::DbCursorRO, + table::{Key, Table}, + transaction::DbTx, + RawKey, RawTable, +}; +use reth_interfaces::RethResult; +use reth_nippy_jar::{ColumnResult, NippyJar, PHFKey}; +use std::{error::Error as StdError, ops::RangeInclusive}; + +/// Macro that generates snapshot creation functions that take an arbitratry number of [`Table`] and +/// creates a [`NippyJar`] file out of their [`Table::Value`]. Each list of [`Table::Value`] from a +/// table is a column of values. 
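For intuition about what the generated snapshot functions do, namely turning each table's values over a key range into one column of a columnar file, here is a small, self-contained sketch that is not part of the patch and uses plain vectors in place of database cursors and `RawTable`; a "table" is just sorted `(key, value)` pairs:

    /// A toy "table" is sorted (key, value) pairs; a column is the values
    /// for every key inside the requested range, in key order.
    fn column_for_range(
        table: &[(u64, Vec<u8>)],
        range: std::ops::RangeInclusive<u64>,
    ) -> impl Iterator<Item = Vec<u8>> + '_ {
        table
            .iter()
            .filter(move |(key, _)| range.contains(key))
            .map(|(_, value)| value.clone())
    }

    fn main() {
        let headers = vec![(0u64, b"h0".to_vec()), (1, b"h1".to_vec()), (2, b"h2".to_vec())];
        let bodies = vec![(0u64, b"b0".to_vec()), (1, b"b1".to_vec()), (2, b"b2".to_vec())];

        // Two columns over the same range; both must yield the same number of rows,
        // which is why the macro's docs insist on an equal row count per table.
        let col1: Vec<_> = column_for_range(&headers, 0..=1).collect();
        let col2: Vec<_> = column_for_range(&bodies, 0..=1).collect();
        assert_eq!(col1.len(), col2.len());
    }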
+/// +/// Has membership filter set and compression dictionary support. +macro_rules! generate_snapshot_func { + ($(($($tbl:ident),+)),+ $(,)? ) => { + $( + paste::item! { + /// Creates a snapshot from specified tables. Each table's `Value` iterator represents a column. + /// + /// **Ensure the range contains the same number of rows.** + /// + /// * `tx`: Database transaction. + /// * `range`: Data range for columns in tables. + /// * `keys`: Iterator of keys (eg. `TxHash` or `BlockHash`) with length equal to `row_count` and ordered by future column insertion from `range`. + /// * `dict_compression_set`: Sets of column data for compression dictionaries. Max size is 2GB. Row count is independent. + /// * `row_count`: Total rows to add to `NippyJar`. Must match row count in `range`. + /// * `nippy_jar`: Snapshot object responsible for file generation. + #[allow(non_snake_case)] + pub fn []<'tx, + $($tbl: Table,)+ + K + > + ( + tx: &impl DbTx<'tx>, + range: RangeInclusive, + dict_compression_set: Option>>>, + keys: Option>>, + row_count: usize, + nippy_jar: &mut NippyJar + ) -> RethResult<()> + where K: Key + Copy + { + let range: RangeInclusive> = RawKey::new(*range.start())..=RawKey::new(*range.end()); + + // Create PHF and Filter if required + if let Some(keys) = keys { + nippy_jar.prepare_index(keys, row_count)?; + } + + // Create compression dictionaries if required + if let Some(data_sets) = dict_compression_set { + nippy_jar.prepare_compression(data_sets)?; + } + + // Creates the cursors for the columns + $( + let mut [< $tbl _cursor>] = tx.cursor_read::>()?; + let [< $tbl _iter>] = [< $tbl _cursor>] + .walk_range(range.clone())? + .into_iter() + .map(|row| + row + .map(|(_key, val)| val.into_value()) + .map_err(|e| Box::new(e) as Box) + ); + + )+ + + // Create the snapshot from the data + let col_iterators: Vec,_>>>> = vec![ + $(Box::new([< $tbl _iter>]),)+ + ]; + + nippy_jar.freeze(col_iterators, row_count as u64)?; + + Ok(()) + } + } + )+ + }; +} + +generate_snapshot_func!((T1), (T1, T2), (T1, T2, T3), (T1, T2, T3, T4),); diff --git a/crates/storage/db/src/tables/codecs/compact.rs b/crates/storage/db/src/tables/codecs/compact.rs index 882fe6b317..c420e31053 100644 --- a/crates/storage/db/src/tables/codecs/compact.rs +++ b/crates/storage/db/src/tables/codecs/compact.rs @@ -81,7 +81,7 @@ macro_rules! impl_compression_fixed_compact { }; } -impl_compression_fixed_compact!(H256, H160); +impl_compression_fixed_compact!(B256, Address); /// Adds wrapper structs for some primitive types so they can use StructFlags from Compact, when /// used as pure table values. diff --git a/crates/storage/db/src/tables/codecs/postcard.rs b/crates/storage/db/src/tables/codecs/postcard.rs index 13bd506de1..fe7d5b01ef 100644 --- a/crates/storage/db/src/tables/codecs/postcard.rs +++ b/crates/storage/db/src/tables/codecs/postcard.rs @@ -29,7 +29,7 @@ macro_rules! impl_postcard { } impl Decode for $name { - fn decode>(value: B) -> Result { + fn decode>(value: B) -> Result { from_bytes(&value.into()).map_err(|e| Error::Decode(e.into())) } } @@ -40,4 +40,4 @@ macro_rules! 
impl_postcard { type VecU8 = Vec; //#[cfg(feature = "bench-postcard")] -//impl_postcard!(VecU8, Receipt, H256, U256, H160, u8, u16, u64, Header, Account, Log, TxType); +//impl_postcard!(VecU8, Receipt, B256, U256, Address, u8, u16, u64, Header, Account, Log, TxType); diff --git a/crates/storage/db/src/tables/mod.rs b/crates/storage/db/src/tables/mod.rs index 1c3c148c23..6ee0bbb852 100644 --- a/crates/storage/db/src/tables/mod.rs +++ b/crates/storage/db/src/tables/mod.rs @@ -38,7 +38,7 @@ use reth_primitives::{ stage::StageCheckpoint, trie::{BranchNodeCompact, StorageTrieEntry, StoredNibbles, StoredNibblesSubKey}, Account, Address, BlockHash, BlockNumber, Bytecode, Header, IntegerList, PruneCheckpoint, - PrunePart, Receipt, StorageEntry, TransactionSignedNoHash, TxHash, TxNumber, H256, + PrunePart, Receipt, StorageEntry, TransactionSignedNoHash, TxHash, TxNumber, B256, }; /// Enum for the types of tables present in libmdbx. @@ -302,7 +302,7 @@ table!( /// There will be multiple accounts that have same bytecode /// So we would need to introduce reference counter. /// This will be small optimization on state. - ( Bytecodes ) H256 | Bytecode + ( Bytecodes ) B256 | Bytecode ); table!( @@ -312,7 +312,7 @@ table!( dupsort!( /// Stores the current value of a storage key. - ( PlainStorageState ) Address | [H256] StorageEntry + ( PlainStorageState ) Address | [B256] StorageEntry ); table!( @@ -370,7 +370,7 @@ dupsort!( /// Stores the state of a storage key before a certain transaction changed it. /// If [`StorageEntry::value`] is zero, this means storage was not existing /// and needs to be removed. - ( StorageChangeSet ) BlockNumberAddress | [H256] StorageEntry + ( StorageChangeSet ) BlockNumberAddress | [B256] StorageEntry ); table!( @@ -378,7 +378,7 @@ table!( /// This table is in preparation for merkelization and calculation of state root. /// We are saving whole account data as it is needed for partial update when /// part of storage is changed. Benefit for merkelization is that hashed addresses are sorted. - ( HashedAccount ) H256 | Account + ( HashedAccount ) B256 | Account ); dupsort!( @@ -386,7 +386,7 @@ dupsort!( /// hash of storage key `keccak256(key)`. /// This table is in preparation for merkelization and calculation of state root. /// Benefit for merklization is that hashed addresses/keys are sorted. - ( HashedStorage ) H256 | [H256] StorageEntry + ( HashedStorage ) B256 | [B256] StorageEntry ); table!( @@ -396,7 +396,7 @@ table!( dupsort!( /// From HashedAddress => NibblesSubKey => Intermediate value - ( StoragesTrie ) H256 | [StoredNibblesSubKey] StorageTrieEntry + ( StoragesTrie ) B256 | [StoredNibblesSubKey] StorageTrieEntry ); table!( diff --git a/crates/storage/db/src/tables/models/accounts.rs b/crates/storage/db/src/tables/models/accounts.rs index c82b474090..c3b8e60677 100644 --- a/crates/storage/db/src/tables/models/accounts.rs +++ b/crates/storage/db/src/tables/models/accounts.rs @@ -7,9 +7,8 @@ use crate::{ table::{Decode, Encode}, DatabaseError, }; -use bytes::Buf; use reth_codecs::{derive_arbitrary, Compact}; -use reth_primitives::{Account, Address, BlockNumber}; +use reth_primitives::{Account, Address, BlockNumber, Buf}; use serde::{Deserialize, Serialize}; /// Account as it is saved inside [`AccountChangeSet`][crate::tables::AccountChangeSet]. @@ -33,7 +32,7 @@ impl Compact for AccountBeforeTx { B: bytes::BufMut + AsMut<[u8]>, { // for now put full bytes and later compress it. 
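The `to_compact` change above writes the 20 address bytes verbatim before the optional account payload. A simplified, dependency-free sketch of that layout, using a plain `Vec<u8>` instead of `bytes::BufMut` and a stand-in type rather than the real `AccountBeforeTx`:

    /// Simplified stand-in for `AccountBeforeTx`: a 20-byte address plus an
    /// optional pre-state payload (`None` means the account did not exist).
    struct AccountBefore {
        address: [u8; 20],
        info: Option<Vec<u8>>,
    }

    impl AccountBefore {
        /// Address bytes first, then the payload (if any); the returned length
        /// tells the caller how many bytes were written.
        fn to_compact(&self, buf: &mut Vec<u8>) -> usize {
            buf.extend_from_slice(&self.address);
            let mut len = 20;
            if let Some(info) = &self.info {
                buf.extend_from_slice(info);
                len += info.len();
            }
            len
        }
    }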
- buf.put_slice(&self.address.to_fixed_bytes()[..]); + buf.put_slice(self.address.as_slice()); let mut acc_len = 0; if let Some(account) = self.info { @@ -74,7 +73,7 @@ impl BlockNumberAddress { /// /// Note: End is inclusive pub fn range(range: RangeInclusive) -> Range { - (*range.start(), Address::zero()).into()..(*range.end() + 1, Address::zero()).into() + (*range.start(), Address::ZERO).into()..(*range.end() + 1, Address::ZERO).into() } /// Return the transition id @@ -109,7 +108,7 @@ impl Encode for BlockNumberAddress { let mut buf = [0u8; 28]; buf[..8].copy_from_slice(&tx.to_be_bytes()); - buf[8..].copy_from_slice(address.as_bytes()); + buf[8..].copy_from_slice(address.as_slice()); buf } } @@ -141,7 +140,7 @@ mod test { let mut bytes = [0u8; 28]; bytes[..8].copy_from_slice(&num.to_be_bytes()); - bytes[8..].copy_from_slice(&hash.0); + bytes[8..].copy_from_slice(hash.as_slice()); let encoded = Encode::encode(key); assert_eq!(encoded, bytes); diff --git a/crates/storage/db/src/tables/models/blocks.rs b/crates/storage/db/src/tables/models/blocks.rs index 20b305c2f0..a03917aaa5 100644 --- a/crates/storage/db/src/tables/models/blocks.rs +++ b/crates/storage/db/src/tables/models/blocks.rs @@ -1,7 +1,7 @@ //! Block related models and types. use reth_codecs::{main_codec, Compact}; -use reth_primitives::{Header, TxNumber, Withdrawal, H256}; +use reth_primitives::{Header, TxNumber, Withdrawal, B256}; use std::ops::Range; /// Total number of transactions. @@ -84,7 +84,7 @@ pub struct StoredBlockWithdrawals { } /// Hash of the block header. Value for [`CanonicalHeaders`][crate::tables::CanonicalHeaders] -pub type HeaderHash = H256; +pub type HeaderHash = B256; #[cfg(test)] mod test { diff --git a/crates/storage/db/src/tables/models/mod.rs b/crates/storage/db/src/tables/models/mod.rs index 3bfd0fbfe9..a516570d83 100644 --- a/crates/storage/db/src/tables/models/mod.rs +++ b/crates/storage/db/src/tables/models/mod.rs @@ -6,7 +6,7 @@ use crate::{ use reth_codecs::Compact; use reth_primitives::{ trie::{StoredNibbles, StoredNibblesSubKey}, - Address, PrunePart, H256, + Address, PrunePart, B256, }; pub mod accounts; @@ -64,7 +64,7 @@ impl Decode for Vec { impl Encode for Address { type Encoded = [u8; 20]; fn encode(self) -> Self::Encoded { - self.to_fixed_bytes() + self.0 .0 } } @@ -74,16 +74,16 @@ impl Decode for Address { } } -impl Encode for H256 { +impl Encode for B256 { type Encoded = [u8; 32]; fn encode(self) -> Self::Encoded { - self.to_fixed_bytes() + self.0 } } -impl Decode for H256 { +impl Decode for B256 { fn decode>(value: B) -> Result { - Ok(H256::from_slice(value.as_ref())) + Ok(B256::from_slice(value.as_ref())) } } diff --git a/crates/storage/db/src/tables/models/storage_sharded_key.rs b/crates/storage/db/src/tables/models/storage_sharded_key.rs index 15e6736599..a5fd89b379 100644 --- a/crates/storage/db/src/tables/models/storage_sharded_key.rs +++ b/crates/storage/db/src/tables/models/storage_sharded_key.rs @@ -5,7 +5,7 @@ use crate::{ DatabaseError, }; use derive_more::AsRef; -use reth_primitives::{BlockNumber, H160, H256}; +use reth_primitives::{Address, BlockNumber, B256}; use serde::{Deserialize, Serialize}; use super::ShardedKey; @@ -24,21 +24,21 @@ pub const NUM_OF_INDICES_IN_SHARD: usize = 2_000; )] pub struct StorageShardedKey { /// Storage account address. - pub address: H160, + pub address: Address, /// Storage slot with highest transition id. 
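The `BlockNumberAddress` `Encode` impl above packs the key as 8 big-endian block-number bytes followed by the 20 address bytes, so byte-wise (lexicographic) order of encoded keys matches `(block_number, address)` order in the database. A standalone round-trip sketch with a raw `[u8; 20]` standing in for `Address`:

    fn encode_key(block_number: u64, address: [u8; 20]) -> [u8; 28] {
        let mut buf = [0u8; 28];
        buf[..8].copy_from_slice(&block_number.to_be_bytes());
        buf[8..].copy_from_slice(&address);
        buf
    }

    fn decode_key(buf: [u8; 28]) -> (u64, [u8; 20]) {
        let block_number = u64::from_be_bytes(buf[..8].try_into().unwrap());
        let mut address = [0u8; 20];
        address.copy_from_slice(&buf[8..]);
        (block_number, address)
    }

    fn main() {
        let key = encode_key(7, [0xaa; 20]);
        assert_eq!(decode_key(key), (7, [0xaa; 20]));
        // Big-endian block numbers keep the on-disk sort order consistent with
        // numeric order: any key for block 7 sorts before any key for block 8.
        assert!(encode_key(7, [0xff; 20]) < encode_key(8, [0x00; 20]));
    }

This is also why `BlockNumberAddress::range` can use `Address::ZERO` as the lower and (exclusive) upper bound for a whole block range.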
#[as_ref] - pub sharded_key: ShardedKey, + pub sharded_key: ShardedKey, } impl StorageShardedKey { /// Creates a new `StorageShardedKey`. - pub fn new(address: H160, storage_key: H256, highest_block_number: BlockNumber) -> Self { + pub fn new(address: Address, storage_key: B256, highest_block_number: BlockNumber) -> Self { Self { address, sharded_key: ShardedKey { key: storage_key, highest_block_number } } } /// Creates a new key with the highest block number set to maximum. /// This is useful when we want to search the last value for a given key. - pub fn last(address: H160, storage_key: H256) -> Self { + pub fn last(address: Address, storage_key: B256) -> Self { Self { address, sharded_key: ShardedKey { key: storage_key, highest_block_number: u64::MAX }, @@ -65,8 +65,8 @@ impl Decode for StorageShardedKey { let highest_tx_number = u64::from_be_bytes( value[tx_num_index..].try_into().map_err(|_| DatabaseError::DecodeError)?, ); - let address = H160::decode(&value[..20])?; - let storage_key = H256::decode(&value[20..52])?; + let address = Address::decode(&value[..20])?; + let storage_key = B256::decode(&value[20..52])?; Ok(Self { address, sharded_key: ShardedKey::new(storage_key, highest_tx_number) }) } diff --git a/crates/storage/db/src/tables/raw.rs b/crates/storage/db/src/tables/raw.rs index 00f5db2a14..6e1db5f68e 100644 --- a/crates/storage/db/src/tables/raw.rs +++ b/crates/storage/db/src/tables/raw.rs @@ -54,14 +54,21 @@ impl RawKey { pub fn new(key: K) -> Self { Self { key: K::encode(key).as_ref().to_vec(), _phantom: std::marker::PhantomData } } + /// Returns the decoded value. pub fn key(&self) -> Result { K::decode(&self.key) } + /// Returns the raw key as seen on the database. pub fn raw_key(&self) -> &Vec { &self.key } + + /// Consumes [`Self`] and returns the inner raw key. + pub fn into_key(self) -> Vec { + self.key + } } impl From for RawKey { @@ -105,14 +112,21 @@ impl RawValue { pub fn new(value: V) -> Self { Self { value: V::compress(value).as_ref().to_vec(), _phantom: std::marker::PhantomData } } + /// Returns the decompressed value. pub fn value(&self) -> Result { V::decompress(&self.value) } + /// Returns the raw value as seen on the database. - pub fn raw_value(&self) -> &Vec { + pub fn raw_value(&self) -> &[u8] { &self.value } + + /// Consumes [`Self`] and returns the inner raw value. 
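The `StorageShardedKey` decoder shown below in this hunk expects `address (20 bytes) | storage key (32 bytes) | highest block number (8 bytes, big-endian)`. A plain-array sketch of that split, assuming exactly 60 input bytes and stand-in types rather than the real `Address`/`B256`:

    struct ShardedKeyParts {
        address: [u8; 20],
        storage_key: [u8; 32],
        highest_block_number: u64,
    }

    fn decode_sharded_key(value: &[u8; 60]) -> ShardedKeyParts {
        let mut address = [0u8; 20];
        address.copy_from_slice(&value[..20]);

        let mut storage_key = [0u8; 32];
        storage_key.copy_from_slice(&value[20..52]);

        let highest_block_number = u64::from_be_bytes(value[52..].try_into().unwrap());

        ShardedKeyParts { address, storage_key, highest_block_number }
    }

Keeping the block number last (and big-endian) means all shards for one `(address, slot)` pair sort together, which is why `last` can seek the newest shard by setting the block number to `u64::MAX`.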
+ pub fn into_value(self) -> Vec { + self.value + } } impl AsRef<[u8]> for RawValue> { @@ -142,4 +156,8 @@ impl Decompress for RawValue { fn decompress>(value: B) -> Result { Ok(Self { value: value.as_ref().to_vec(), _phantom: std::marker::PhantomData }) } + + fn decompress_owned(value: Vec) -> Result { + Ok(Self { value, _phantom: std::marker::PhantomData }) + } } diff --git a/crates/storage/db/src/tables/utils.rs b/crates/storage/db/src/tables/utils.rs index 13bd1ce278..5e82eab626 100644 --- a/crates/storage/db/src/tables/utils.rs +++ b/crates/storage/db/src/tables/utils.rs @@ -54,7 +54,7 @@ where }; let value = match kv.1 { Cow::Borrowed(v) => Decompress::decompress(v)?, - Cow::Owned(v) => Decompress::decompress(v)?, + Cow::Owned(v) => Decompress::decompress_owned(v)?, }; Ok((key, value)) } @@ -68,7 +68,7 @@ where { Ok(match kv.1 { Cow::Borrowed(v) => Decompress::decompress(v)?, - Cow::Owned(v) => Decompress::decompress(v)?, + Cow::Owned(v) => Decompress::decompress_owned(v)?, }) } @@ -79,6 +79,6 @@ where { Ok(match value { Cow::Borrowed(v) => Decompress::decompress(v)?, - Cow::Owned(v) => Decompress::decompress(v)?, + Cow::Owned(v) => Decompress::decompress_owned(v)?, }) } diff --git a/crates/storage/libmdbx-rs/mdbx-sys/src/lib.rs b/crates/storage/libmdbx-rs/mdbx-sys/src/lib.rs index e9365d437a..27894fc1df 100644 --- a/crates/storage/libmdbx-rs/mdbx-sys/src/lib.rs +++ b/crates/storage/libmdbx-rs/mdbx-sys/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![allow(non_upper_case_globals, non_camel_case_types, non_snake_case, clippy::all)] #![cfg_attr(docsrs, feature(doc_cfg, doc_auto_cfg))] diff --git a/crates/storage/libmdbx-rs/src/lib.rs b/crates/storage/libmdbx-rs/src/lib.rs index 9f785e325e..399a815385 100644 --- a/crates/storage/libmdbx-rs/src/lib.rs +++ b/crates/storage/libmdbx-rs/src/lib.rs @@ -2,7 +2,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![allow(clippy::type_complexity)] // TODO(danipopes): add these warnings diff --git a/crates/storage/nippy-jar/src/cursor.rs b/crates/storage/nippy-jar/src/cursor.rs index 1ea0fea106..1fd7170c90 100644 --- a/crates/storage/nippy-jar/src/cursor.rs +++ b/crates/storage/nippy-jar/src/cursor.rs @@ -9,7 +9,7 @@ use sucds::int_vectors::Access; use zstd::bulk::Decompressor; /// Simple cursor implementation to retrieve data from [`NippyJar`]. -pub struct NippyJarCursor<'a, H> { +pub struct NippyJarCursor<'a, H = ()> { /// [`NippyJar`] which holds most of the required configuration to read from the file. jar: &'a NippyJar, /// Optional dictionary decompressors. diff --git a/crates/storage/nippy-jar/src/error.rs b/crates/storage/nippy-jar/src/error.rs index 86353d0a79..1ea1b0f4d0 100644 --- a/crates/storage/nippy-jar/src/error.rs +++ b/crates/storage/nippy-jar/src/error.rs @@ -3,6 +3,8 @@ use thiserror::Error; /// Errors associated with [`crate::NippyJar`]. 
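The `decompress_owned` additions above (in `raw.rs` and `tables/utils.rs`) let the owned `Cow` branch hand its buffer to the decoder instead of re-borrowing it, avoiding a copy for types such as `RawValue` that simply keep the bytes. A minimal sketch of that dispatch, with a hypothetical `Decompress`-like trait rather than the real one:

    use std::borrow::Cow;

    trait Decompress: Sized {
        fn decompress(value: &[u8]) -> Result<Self, String>;

        /// Default: fall back to the borrowing path; implementors that just
        /// store the bytes can override this and take ownership directly.
        fn decompress_owned(value: Vec<u8>) -> Result<Self, String> {
            Self::decompress(&value)
        }
    }

    struct RawBytes(Vec<u8>);

    impl Decompress for RawBytes {
        fn decompress(value: &[u8]) -> Result<Self, String> {
            Ok(RawBytes(value.to_vec())) // borrowed input: must copy
        }
        fn decompress_owned(value: Vec<u8>) -> Result<Self, String> {
            Ok(RawBytes(value)) // owned input: reuse the allocation
        }
    }

    fn decode<T: Decompress>(value: Cow<'_, [u8]>) -> Result<T, String> {
        match value {
            Cow::Borrowed(v) => T::decompress(v),
            Cow::Owned(v) => T::decompress_owned(v),
        }
    }

    fn main() {
        let owned: Cow<'_, [u8]> = Cow::Owned(vec![1, 2, 3]);
        let raw: RawBytes = decode(owned).unwrap();
        assert_eq!(raw.0, vec![1, 2, 3]);
    }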
#[derive(Debug, Error)] pub enum NippyJarError { + #[error(transparent)] + Internal(#[from] Box), #[error(transparent)] Disconnect(#[from] std::io::Error), #[error(transparent)] diff --git a/crates/storage/nippy-jar/src/lib.rs b/crates/storage/nippy-jar/src/lib.rs index 2626bbeb78..1e7bcd44ec 100644 --- a/crates/storage/nippy-jar/src/lib.rs +++ b/crates/storage/nippy-jar/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] // TODO(danipopes): add these warnings // #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] @@ -13,8 +13,8 @@ use serde::{Deserialize, Serialize}; use std::{ clone::Clone, + error::Error as StdError, fs::File, - hash::Hash, io::{Seek, Write}, marker::Sync, path::{Path, PathBuf}, @@ -32,6 +32,7 @@ pub mod compression; use compression::{Compression, Compressors}; pub mod phf; +pub use phf::PHFKey; use phf::{Fmph, Functions, GoFmph, PerfectHashingFunction}; mod error; @@ -45,6 +46,9 @@ const NIPPY_JAR_VERSION: usize = 1; /// A [`Row`] is a list of its selected column values. type Row = Vec>; +/// Alias type for a column value wrapped in `Result` +pub type ColumnResult = Result>; + /// `NippyJar` is a specialized storage format designed for immutable data. /// /// Data is organized into a columnar format, enabling column-based compression. Data retrieval @@ -106,6 +110,11 @@ impl NippyJar<()> { pub fn load_without_header(path: &Path) -> Result { NippyJar::<()>::load(path) } + + /// Whether this [`NippyJar`] uses a [`InclusionFilters`] and [`Functions`]. + pub fn uses_filters(&self) -> bool { + self.filter.is_some() && self.phf.is_some() + } } impl NippyJar @@ -210,19 +219,23 @@ where /// Prepares beforehand the offsets index for querying rows based on `values` (eg. transaction /// hash). Expects `values` to be sorted in the same way as the data that is going to be /// later on inserted. - pub fn prepare_index + Sync + Clone + Hash>( + /// + /// Currently collecting all items before acting on them. + pub fn prepare_index( &mut self, - values: &[T], + values: impl IntoIterator>, + row_count: usize, ) -> Result<(), NippyJarError> { - let mut offsets_index = vec![0; values.len()]; + let values = values.into_iter().collect::, _>>()?; + let mut offsets_index = vec![0; row_count]; // Builds perfect hashing function from the values if let Some(phf) = self.phf.as_mut() { - phf.set_keys(values)?; + phf.set_keys(&values)?; } if self.filter.is_some() || self.phf.is_some() { - for (row_num, v) in values.iter().enumerate() { + for (row_num, v) in values.into_iter().enumerate() { if let Some(filter) = self.filter.as_mut() { filter.add(v.as_ref())?; } @@ -242,7 +255,7 @@ where /// Writes all data and configuration to a file and the offset index to another. pub fn freeze( &mut self, - columns: Vec>>, + columns: Vec>>>, total_rows: u64, ) -> Result<(), NippyJarError> { let mut file = self.freeze_check(&columns)?; @@ -275,7 +288,7 @@ where offsets.push(file.stream_position()? 
as usize); match column_iter.next() { - Some(value) => { + Some(Ok(value)) => { if let Some(compression) = &self.compressor { // Special zstd case with dictionaries if let (Some(dict_compressors), Compressors::Zstd(_)) = @@ -300,6 +313,7 @@ where column_number as u64, )) } + Some(Err(err)) => return Err(err.into()), } iterators.push(column_iter); @@ -339,7 +353,7 @@ where /// Safety checks before creating and returning a [`File`] handle to write data to. fn freeze_check( &mut self, - columns: &Vec>>, + columns: &Vec>>>, ) -> Result { if columns.len() != self.columns { return Err(NippyJarError::ColumnLenMismatch(self.columns, columns.len())) @@ -384,10 +398,7 @@ impl PerfectHashingFunction for NippyJar where H: Send + Sync + Serialize + for<'a> Deserialize<'a>, { - fn set_keys + Sync + Clone + Hash>( - &mut self, - keys: &[T], - ) -> Result<(), NippyJarError> { + fn set_keys(&mut self, keys: &[T]) -> Result<(), NippyJarError> { self.phf.as_mut().ok_or(NippyJarError::PHFMissing)?.set_keys(keys) } @@ -402,6 +413,7 @@ mod tests { use rand::{rngs::SmallRng, seq::SliceRandom, RngCore, SeedableRng}; use std::collections::HashSet; + type ColumnResults = Vec>; type ColumnValues = Vec>; fn test_data(seed: Option) -> (ColumnValues, ColumnValues) { @@ -423,6 +435,10 @@ mod tests { (gen(), gen()) } + fn clone_with_result(col: &ColumnValues) -> ColumnResults> { + col.iter().map(|v| Ok(v.clone())).collect() + } + #[test] fn test_phf() { let (col1, col2) = test_data(None); @@ -455,8 +471,10 @@ mod tests { assert_eq!(indexes, collect_indexes(nippy)); // Ensure that loaded phf provides the same function outputs - nippy.prepare_index(&col1).unwrap(); - nippy.freeze(vec![col1.clone(), col2.clone()], num_rows).unwrap(); + nippy.prepare_index(clone_with_result(&col1), col1.len()).unwrap(); + nippy + .freeze(vec![clone_with_result(&col1), clone_with_result(&col2)], num_rows) + .unwrap(); let loaded_nippy = NippyJar::load_without_header(file_path.path()).unwrap(); assert_eq!(indexes, collect_indexes(&loaded_nippy)); }; @@ -504,7 +522,7 @@ mod tests { Err(NippyJarError::FilterMaxCapacity) )); - nippy.freeze(vec![col1.clone(), col2.clone()], num_rows).unwrap(); + nippy.freeze(vec![clone_with_result(&col1), clone_with_result(&col2)], num_rows).unwrap(); let loaded_nippy = NippyJar::load_without_header(file_path.path()).unwrap(); assert_eq!(nippy, loaded_nippy); @@ -540,16 +558,14 @@ mod tests { )); } - let data = vec![col1.clone(), col2.clone()]; - // If ZSTD is enabled, do not write to the file unless the column dictionaries have been // calculated. 
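The reworked `prepare_index` above accepts a fallible iterator of keys plus an explicit `row_count`, and (as its doc note says) currently materializes all keys before acting on them. The collect-into-`Result` idiom it relies on, shown in isolation and not tied to the real `NippyJar` types:

    type ColumnResult<T> = Result<T, Box<dyn std::error::Error>>;

    /// Collect a fallible key stream, stopping at the first error.
    fn collect_keys<T>(
        keys: impl IntoIterator<Item = ColumnResult<T>>,
    ) -> ColumnResult<Vec<T>> {
        keys.into_iter().collect::<Result<Vec<_>, _>>()
    }

    fn main() {
        let ok: Vec<ColumnResult<u64>> = vec![Ok(1), Ok(2), Ok(3)];
        assert_eq!(collect_keys(ok).unwrap(), vec![1, 2, 3]);

        let bad: Vec<ColumnResult<u64>> = vec![Ok(1), Err("cursor failed".into()), Ok(3)];
        assert!(collect_keys(bad).is_err());
    }

The same `ColumnResult` shape is what `freeze` now consumes per column, which is why a failing cursor walk aborts the whole snapshot write instead of producing a truncated file.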
assert!(matches!( - nippy.freeze(data.clone(), num_rows), + nippy.freeze(vec![clone_with_result(&col1), clone_with_result(&col2)], num_rows), Err(NippyJarError::CompressorNotReady) )); - nippy.prepare_compression(data.clone()).unwrap(); + nippy.prepare_compression(vec![col1.clone(), col2.clone()]).unwrap(); if let Some(Compressors::Zstd(zstd)) = &nippy.compressor { assert!(matches!( @@ -558,7 +574,7 @@ mod tests { )); } - nippy.freeze(data.clone(), num_rows).unwrap(); + nippy.freeze(vec![clone_with_result(&col1), clone_with_result(&col2)], num_rows).unwrap(); let mut loaded_nippy = NippyJar::load_without_header(file_path.path()).unwrap(); assert_eq!(nippy, loaded_nippy); @@ -578,7 +594,7 @@ mod tests { // Iterate over compressed values and compare let mut row_index = 0usize; while let Some(row) = cursor.next_row().unwrap() { - assert_eq!((&row[0], &row[1]), (&data[0][row_index], &data[1][row_index])); + assert_eq!((&row[0], &row[1]), (&col1[row_index], &col2[row_index])); row_index += 1; } } @@ -598,9 +614,7 @@ mod tests { NippyJar::new_without_header(num_columns, file_path.path()).with_zstd(false, 5000); assert!(nippy.compressor.is_some()); - let data = vec![col1.clone(), col2.clone()]; - - nippy.freeze(data.clone(), num_rows).unwrap(); + nippy.freeze(vec![clone_with_result(&col1), clone_with_result(&col2)], num_rows).unwrap(); let loaded_nippy = NippyJar::load_without_header(file_path.path()).unwrap(); assert_eq!(nippy, loaded_nippy); @@ -613,7 +627,7 @@ mod tests { // Iterate over compressed values and compare let mut row_index = 0usize; while let Some(row) = cursor.next_row().unwrap() { - assert_eq!((&row[0], &row[1]), (&data[0][row_index], &data[1][row_index])); + assert_eq!((&row[0], &row[1]), (&col1[row_index], &col2[row_index])); row_index += 1; } } else { @@ -629,6 +643,7 @@ mod tests { let num_columns = 2; let file_path = tempfile::NamedTempFile::new().unwrap(); let data = vec![col1.clone(), col2.clone()]; + let block_start = 500; #[derive(Serialize, Deserialize, Debug)] @@ -645,8 +660,10 @@ mod tests { .with_mphf(); nippy.prepare_compression(data.clone()).unwrap(); - nippy.prepare_index(&col1).unwrap(); - nippy.freeze(data.clone(), num_rows).unwrap(); + nippy.prepare_index(clone_with_result(&col1), col1.len()).unwrap(); + nippy + .freeze(vec![clone_with_result(&col1), clone_with_result(&col2)], num_rows) + .unwrap(); } // Read file @@ -710,8 +727,10 @@ mod tests { .with_mphf(); nippy.prepare_compression(data.clone()).unwrap(); - nippy.prepare_index(&col1).unwrap(); - nippy.freeze(data.clone(), num_rows).unwrap(); + nippy.prepare_index(clone_with_result(&col1), col1.len()).unwrap(); + nippy + .freeze(vec![clone_with_result(&col1), clone_with_result(&col2)], num_rows) + .unwrap(); } // Read file diff --git a/crates/storage/nippy-jar/src/phf/fmph.rs b/crates/storage/nippy-jar/src/phf/fmph.rs index 62740e48a2..e540bae64f 100644 --- a/crates/storage/nippy-jar/src/phf/fmph.rs +++ b/crates/storage/nippy-jar/src/phf/fmph.rs @@ -1,10 +1,9 @@ -use crate::{NippyJarError, PerfectHashingFunction}; +use crate::{NippyJarError, PHFKey, PerfectHashingFunction}; use ph::fmph::{BuildConf, Function}; use serde::{ de::Error as DeSerdeError, ser::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer, }; -use std::{clone::Clone, hash::Hash, marker::Sync}; /// Wrapper struct for [`Function`]. Implementation of the following [paper](https://dl.acm.org/doi/10.1145/3596453). 
#[derive(Default)] @@ -19,10 +18,7 @@ impl Fmph { } impl PerfectHashingFunction for Fmph { - fn set_keys + Sync + Clone + Hash>( - &mut self, - keys: &[T], - ) -> Result<(), NippyJarError> { + fn set_keys(&mut self, keys: &[T]) -> Result<(), NippyJarError> { self.function = Some(Function::from_slice_with_conf( keys, BuildConf { use_multiple_threads: true, ..Default::default() }, diff --git a/crates/storage/nippy-jar/src/phf/go_fmph.rs b/crates/storage/nippy-jar/src/phf/go_fmph.rs index fc7b8fa89e..fd244cd1fc 100644 --- a/crates/storage/nippy-jar/src/phf/go_fmph.rs +++ b/crates/storage/nippy-jar/src/phf/go_fmph.rs @@ -1,10 +1,9 @@ -use crate::{NippyJarError, PerfectHashingFunction}; +use crate::{NippyJarError, PHFKey, PerfectHashingFunction}; use ph::fmph::{GOBuildConf, GOFunction}; use serde::{ de::Error as DeSerdeError, ser::Error as SerdeError, Deserialize, Deserializer, Serialize, Serializer, }; -use std::{clone::Clone, hash::Hash, marker::Sync}; /// Wrapper struct for [`GOFunction`]. Implementation of the following [paper](https://dl.acm.org/doi/10.1145/3596453). #[derive(Default)] @@ -19,10 +18,7 @@ impl GoFmph { } impl PerfectHashingFunction for GoFmph { - fn set_keys + Sync + Clone + Hash>( - &mut self, - keys: &[T], - ) -> Result<(), NippyJarError> { + fn set_keys(&mut self, keys: &[T]) -> Result<(), NippyJarError> { self.function = Some(GOFunction::from_slice_with_conf( keys, GOBuildConf { use_multiple_threads: true, ..Default::default() }, diff --git a/crates/storage/nippy-jar/src/phf/mod.rs b/crates/storage/nippy-jar/src/phf/mod.rs index 84113181bc..d04d4fc2db 100644 --- a/crates/storage/nippy-jar/src/phf/mod.rs +++ b/crates/storage/nippy-jar/src/phf/mod.rs @@ -8,13 +8,14 @@ pub use fmph::Fmph; mod go_fmph; pub use go_fmph::GoFmph; +/// Trait alias for [`PerfectHashingFunction`] keys. +pub trait PHFKey: AsRef<[u8]> + Sync + Clone + Hash {} +impl + Sync + Clone + Hash> PHFKey for T {} + /// Trait to build and query a perfect hashing function. pub trait PerfectHashingFunction: Serialize + for<'a> Deserialize<'a> { /// Adds the key set and builds the perfect hashing function. - fn set_keys + Sync + Clone + Hash>( - &mut self, - keys: &[T], - ) -> Result<(), NippyJarError>; + fn set_keys(&mut self, keys: &[T]) -> Result<(), NippyJarError>; /// Get corresponding associated integer. There might be false positives. 
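`PHFKey`, introduced above, is the usual blanket-impl "trait alias" pattern: one named bound instead of repeating `AsRef<[u8]> + Sync + Clone + Hash` at every `set_keys` site. The mechanism in isolation, with a differently named trait so it is clearly only an illustration:

    use std::hash::Hash;

    /// Named bundle of bounds; the blanket impl makes every qualifying type a key.
    pub trait PhfKey: AsRef<[u8]> + Sync + Clone + Hash {}
    impl<T: AsRef<[u8]> + Sync + Clone + Hash> PhfKey for T {}

    /// Any function can now take the single bound instead of the full list.
    fn first_key_byte<T: PhfKey>(keys: &[T]) -> Option<u8> {
        keys.first().and_then(|k| k.as_ref().first().copied())
    }

    fn main() {
        let keys = vec![vec![1u8, 2, 3], vec![4, 5, 6]];
        assert_eq!(first_key_byte(&keys), Some(1));
    }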
fn get_index(&self, key: &[u8]) -> Result, NippyJarError>; @@ -29,10 +30,7 @@ pub enum Functions { } impl PerfectHashingFunction for Functions { - fn set_keys + Sync + Clone + Hash>( - &mut self, - keys: &[T], - ) -> Result<(), NippyJarError> { + fn set_keys(&mut self, keys: &[T]) -> Result<(), NippyJarError> { match self { Functions::Fmph(f) => f.set_keys(keys), Functions::GoFmph(f) => f.set_keys(keys), diff --git a/crates/storage/provider/Cargo.toml b/crates/storage/provider/Cargo.toml index ee3616e736..df0460580d 100644 --- a/crates/storage/provider/Cargo.toml +++ b/crates/storage/provider/Cargo.toml @@ -15,6 +15,9 @@ reth-interfaces.workspace = true reth-revm-primitives = { path = "../../revm/revm-primitives" } reth-db.workspace = true reth-trie = { path = "../../trie" } +reth-nippy-jar = { path = "../nippy-jar" } + +revm.workspace = true # async tokio = { workspace = true, features = ["sync", "macros", "rt-multi-thread"] } @@ -30,7 +33,7 @@ pin-project.workspace = true parking_lot.workspace = true # test-utils -reth-rlp = { workspace = true, optional = true } +alloy-rlp = { workspace = true, optional = true } # parallel utils rayon = "1.7" @@ -38,13 +41,14 @@ rayon = "1.7" [dev-dependencies] reth-db = { workspace = true, features = ["test-utils"] } reth-primitives = { workspace = true, features = ["arbitrary", "test-utils"] } -reth-rlp.workspace = true -revm.workspace = true reth-trie = { path = "../../trie", features = ["test-utils"] } reth-interfaces = { workspace = true, features = ["test-utils"] } + +alloy-rlp.workspace = true parking_lot.workspace = true tempfile = "3.3" assert_matches.workspace = true +rand.workspace = true [features] -test-utils = ["reth-rlp"] +test-utils = ["alloy-rlp"] diff --git a/crates/storage/provider/src/bundle_state/bundle_state_with_receipts.rs b/crates/storage/provider/src/bundle_state/bundle_state_with_receipts.rs index 5936cb9401..6ec5278514 100644 --- a/crates/storage/provider/src/bundle_state/bundle_state_with_receipts.rs +++ b/crates/storage/provider/src/bundle_state/bundle_state_with_receipts.rs @@ -1,3 +1,4 @@ +use crate::{StateChanges, StateReverts}; use reth_db::{ cursor::{DbCursorRO, DbCursorRW}, tables, @@ -5,21 +6,18 @@ use reth_db::{ }; use reth_interfaces::db::DatabaseError; use reth_primitives::{ - bloom::logs_bloom, keccak256, proofs::calculate_receipt_root_ref, Account, Address, - BlockNumber, Bloom, Bytecode, Log, Receipt, StorageEntry, H256, U256, -}; -use reth_revm_primitives::{ - db::states::BundleState, into_reth_acc, into_revm_acc, primitives::AccountInfo, + keccak256, logs_bloom, Account, Address, BlockNumber, Bloom, Bytecode, Log, Receipt, Receipts, + StorageEntry, B256, U256, }; +use reth_revm_primitives::{into_reth_acc, into_revm_acc}; use reth_trie::{ hashed_cursor::{HashedPostState, HashedPostStateCursorFactory, HashedStorage}, StateRoot, StateRootError, }; +use revm::{db::states::BundleState, primitives::AccountInfo}; use std::collections::HashMap; -pub use reth_revm_primitives::db::states::OriginalValuesKnown; - -use crate::{StateChanges, StateReverts}; +pub use revm::db::states::OriginalValuesKnown; /// Bundle state of post execution changes and reverts #[derive(Default, Debug, Clone, PartialEq, Eq)] @@ -30,15 +28,15 @@ pub struct BundleStateWithReceipts { /// Outer vector stores receipts for each block sequentially. /// The inner vector stores receipts ordered by transaction number. /// - /// If receipt is None it means it is pruned. - receipts: Vec>>, + /// If receipt is None it means it is pruned. 
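`Receipts` replaces the bare `Vec<Vec<Option<Receipt>>>` used in this struct before. Its actual definition lives in `reth_primitives` and is not part of this hunk, but judging by the call sites in the patch (`new`, `from_vec`, `receipt_vec`, `extend`, `root_slow`), a minimal generic newtype along these lines captures the shape; the root calculation is stubbed out here:

    /// Block-ordered receipts; the inner Vec is per-transaction, `None` = pruned.
    #[derive(Default, Debug, Clone, PartialEq, Eq)]
    struct Receipts<R> {
        receipt_vec: Vec<Vec<Option<R>>>,
    }

    impl<R> Receipts<R> {
        fn new() -> Self {
            Self { receipt_vec: Vec::new() }
        }

        fn from_vec(receipt_vec: Vec<Vec<Option<R>>>) -> Self {
            Self { receipt_vec }
        }

        fn extend(&mut self, other: Vec<Vec<Option<R>>>) {
            self.receipt_vec.extend(other);
        }

        /// Root of the receipts for one block; `None` if any receipt was pruned.
        /// (The real implementation builds a merkle root; a count stands in here.)
        fn root_slow(&self, index: usize) -> Option<usize> {
            let block = self.receipt_vec.get(index)?;
            block.iter().map(|r| r.as_ref()).collect::<Option<Vec<_>>>()?;
            Some(block.len())
        }
    }

    fn main() {
        let receipts: Receipts<&str> = Receipts::from_vec(vec![vec![Some("r0"), Some("r1")]]);
        assert_eq!(receipts.root_slow(0), Some(2));
    }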
+ receipts: Receipts, /// First block of bundle state. first_block: BlockNumber, } /// Type used to initialize revms bundle state. pub type BundleStateInit = - HashMap, Option, HashMap)>; + HashMap, Option, HashMap)>; /// Types used inside RevertsInit to initialize revms reverts. pub type AccountRevertInit = (Option>, Vec); @@ -48,11 +46,7 @@ pub type RevertsInit = HashMap> impl BundleStateWithReceipts { /// Create Bundle State. - pub fn new( - bundle: BundleState, - receipts: Vec>>, - first_block: BlockNumber, - ) -> Self { + pub fn new(bundle: BundleState, receipts: Receipts, first_block: BlockNumber) -> Self { Self { bundle, receipts, first_block } } @@ -60,8 +54,8 @@ impl BundleStateWithReceipts { pub fn new_init( state_init: BundleStateInit, revert_init: RevertsInit, - contracts_init: Vec<(H256, Bytecode)>, - receipts: Vec>>, + contracts_init: Vec<(B256, Bytecode)>, + receipts: Receipts, first_block: BlockNumber, ) -> Self { // sort reverts by block number @@ -122,7 +116,7 @@ impl BundleStateWithReceipts { } /// Return bytecode if known. - pub fn bytecode(&self, code_hash: &H256) -> Option { + pub fn bytecode(&self, code_hash: &B256) -> Option { self.bundle.bytecode(code_hash).map(Bytecode) } @@ -147,7 +141,7 @@ impl BundleStateWithReceipts { let mut hashed_storage = HashedStorage::new(account.status.was_destroyed()); for (key, value) in account.storage.iter() { - let hashed_key = keccak256(H256(key.to_be_bytes())); + let hashed_key = keccak256(B256::new(key.to_be_bytes())); if value.present_value == U256::ZERO { hashed_storage.insert_zero_valued_slot(hashed_key); } else { @@ -167,7 +161,7 @@ impl BundleStateWithReceipts { /// # Example /// /// ``` - /// use reth_primitives::{Account, U256}; + /// use reth_primitives::{Account, U256, Receipts}; /// use reth_provider::BundleStateWithReceipts; /// use reth_db::{test_utils::create_test_rw_db, database::Database}; /// use std::collections::HashMap; @@ -187,7 +181,7 @@ impl BundleStateWithReceipts { /// )]), /// HashMap::from([]), /// vec![], - /// vec![], + /// Receipts::new(), /// 0, /// ); /// @@ -202,7 +196,7 @@ impl BundleStateWithReceipts { pub fn state_root_slow<'a, 'tx, TX: DbTx<'tx>>( &self, tx: &'a TX, - ) -> Result { + ) -> Result { let hashed_post_state = self.hash_state_slow(); let (account_prefix_set, storage_prefix_set) = hashed_post_state.construct_prefix_sets(); let hashed_cursor_factory = HashedPostStateCursorFactory::new(tx, &hashed_post_state); @@ -239,15 +233,12 @@ impl BundleStateWithReceipts { /// Returns the receipt root for all recorded receipts. /// Note: this function calculated Bloom filters for every receipt and created merkle trees /// of receipt. This is a expensive operation. - pub fn receipts_root_slow(&self, block_number: BlockNumber) -> Option { - let index = self.block_number_to_index(block_number)?; - let block_receipts = - self.receipts[index].iter().map(Option::as_ref).collect::>>()?; - Some(calculate_receipt_root_ref(&block_receipts)) + pub fn receipts_root_slow(&self, block_number: BlockNumber) -> Option { + self.receipts.root_slow(self.block_number_to_index(block_number)?) } /// Return reference to receipts. 
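In `hash_state_slow` above, each storage slot (a `U256`) is converted to its 32-byte big-endian form, keccak-hashed, and then routed into either the zero-valued set or the non-zero map. A dependency-free sketch of that routing, with a stub `hash32` standing in for `keccak256` and `u128` standing in for `U256`:

    use std::collections::{BTreeMap, BTreeSet};

    /// Stand-in for keccak256: any 32-byte digest works for illustration.
    fn hash32(input: [u8; 32]) -> [u8; 32] {
        let mut out = input;
        out.reverse(); // placeholder "hash"
        out
    }

    #[derive(Default)]
    struct HashedStorage {
        zero_valued_slots: BTreeSet<[u8; 32]>,
        non_zero_valued_storage: BTreeMap<[u8; 32], u128>,
    }

    impl HashedStorage {
        fn insert_slot(&mut self, slot: u128, value: u128) {
            // U256 -> B256::new(key.to_be_bytes()) in the real code; u128 -> 32 BE bytes here.
            let mut key = [0u8; 32];
            key[16..].copy_from_slice(&slot.to_be_bytes());
            let hashed_key = hash32(key);

            if value == 0 {
                self.zero_valued_slots.insert(hashed_key);
            } else {
                self.non_zero_valued_storage.insert(hashed_key, value);
            }
        }
    }

    fn main() {
        let mut storage = HashedStorage::default();
        storage.insert_slot(1, 0); // cleared slot
        storage.insert_slot(2, 42); // live slot
        assert_eq!(storage.zero_valued_slots.len(), 1);
        assert_eq!(storage.non_zero_valued_storage.len(), 1);
    }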
- pub fn receipts(&self) -> &Vec>> { + pub fn receipts(&self) -> &Receipts { &self.receipts } @@ -325,7 +316,7 @@ impl BundleStateWithReceipts { // split is done as [0, num) and [num, len] let (_, this) = self.receipts.split_at(num_of_detached_block as usize); - self.receipts = this.to_vec().clone(); + self.receipts = Receipts::from_vec(this.to_vec().clone()); self.bundle.take_n_reverts(num_of_detached_block as usize); self.first_block = block_number + 1; @@ -340,7 +331,7 @@ impl BundleStateWithReceipts { /// In most cases this would be true. pub fn extend(&mut self, other: Self) { self.bundle.extend(other.bundle); - self.receipts.extend(other.receipts); + self.receipts.extend(other.receipts.receipt_vec); } /// Write bundle state to database. @@ -383,7 +374,7 @@ impl BundleStateWithReceipts { #[cfg(test)] mod tests { - use super::{StateChanges, StateReverts}; + use super::*; use crate::{AccountReader, BundleStateWithReceipts, ProviderFactory}; use reth_db::{ cursor::{DbCursorRO, DbDupCursorRO}, @@ -393,8 +384,8 @@ mod tests { transaction::DbTx, DatabaseEnv, }; - use reth_primitives::{Address, Receipt, StorageEntry, H256, MAINNET, U256}; - use reth_revm_primitives::{into_reth_acc, primitives::HashMap}; + use reth_primitives::{Address, Receipt, Receipts, StorageEntry, B256, MAINNET, U256}; + use reth_revm_primitives::into_reth_acc; use revm::{ db::{ states::{ @@ -404,7 +395,9 @@ mod tests { }, BundleState, }, - primitives::{Account, AccountInfo as RevmAccountInfo, AccountStatus, StorageSlot}, + primitives::{ + Account, AccountInfo as RevmAccountInfo, AccountStatus, HashMap, StorageSlot, + }, CacheState, DatabaseCommit, State, }; use std::sync::Arc; @@ -415,7 +408,7 @@ mod tests { let factory = ProviderFactory::new(db, MAINNET.clone()); let provider = factory.provider_rw().unwrap(); - let address_a = Address::zero(); + let address_a = Address::ZERO; let address_b = Address::repeat_byte(0xff); let account_a = RevmAccountInfo { balance: U256::from(1), nonce: 1, ..Default::default() }; @@ -558,7 +551,7 @@ mod tests { let factory = ProviderFactory::new(db, MAINNET.clone()); let provider = factory.provider_rw().unwrap(); - let address_a = Address::zero(); + let address_a = Address::ZERO; let address_b = Address::repeat_byte(0xff); let account_b = RevmAccountInfo { balance: U256::from(2), nonce: 2, ..Default::default() }; @@ -612,7 +605,7 @@ mod tests { state.merge_transitions(BundleRetention::Reverts); - BundleStateWithReceipts::new(state.take_bundle(), Vec::new(), 1) + BundleStateWithReceipts::new(state.take_bundle(), Receipts::new(), 1) .write_to_db(provider.tx_ref(), OriginalValuesKnown::Yes) .expect("Could not write bundle state to DB"); @@ -624,14 +617,14 @@ mod tests { assert_eq!( storage_cursor.seek_exact(address_a).unwrap(), - Some((address_a, StorageEntry { key: H256::zero(), value: U256::from(1) })), + Some((address_a, StorageEntry { key: B256::ZERO, value: U256::from(1) })), "Slot 0 for account A should be 1" ); assert_eq!( storage_cursor.next_dup().unwrap(), Some(( address_a, - StorageEntry { key: H256::from(U256::from(1).to_be_bytes()), value: U256::from(2) } + StorageEntry { key: B256::from(U256::from(1).to_be_bytes()), value: U256::from(2) } )), "Slot 1 for account A should be 2" ); @@ -645,7 +638,7 @@ mod tests { storage_cursor.seek_exact(address_b).unwrap(), Some(( address_b, - StorageEntry { key: H256::from(U256::from(1).to_be_bytes()), value: U256::from(2) } + StorageEntry { key: B256::from(U256::from(1).to_be_bytes()), value: U256::from(2) } )), "Slot 1 for account B should be 
2" ); @@ -664,7 +657,7 @@ mod tests { changeset_cursor.seek_exact(BlockNumberAddress((1, address_a))).unwrap(), Some(( BlockNumberAddress((1, address_a)), - StorageEntry { key: H256::zero(), value: U256::from(0) } + StorageEntry { key: B256::ZERO, value: U256::from(0) } )), "Slot 0 for account A should have changed from 0" ); @@ -672,7 +665,7 @@ mod tests { changeset_cursor.next_dup().unwrap(), Some(( BlockNumberAddress((1, address_a)), - StorageEntry { key: H256::from(U256::from(1).to_be_bytes()), value: U256::from(0) } + StorageEntry { key: B256::from(U256::from(1).to_be_bytes()), value: U256::from(0) } )), "Slot 1 for account A should have changed from 0" ); @@ -686,7 +679,7 @@ mod tests { changeset_cursor.seek_exact(BlockNumberAddress((1, address_b))).unwrap(), Some(( BlockNumberAddress((1, address_b)), - StorageEntry { key: H256::from(U256::from(1).to_be_bytes()), value: U256::from(1) } + StorageEntry { key: B256::from(U256::from(1).to_be_bytes()), value: U256::from(1) } )), "Slot 1 for account B should have changed from 1" ); @@ -712,7 +705,7 @@ mod tests { )])); state.merge_transitions(BundleRetention::Reverts); - BundleStateWithReceipts::new(state.take_bundle(), Vec::new(), 2) + BundleStateWithReceipts::new(state.take_bundle(), Receipts::new(), 2) .write_to_db(provider.tx_ref(), OriginalValuesKnown::Yes) .expect("Could not write bundle state to DB"); @@ -726,7 +719,7 @@ mod tests { changeset_cursor.seek_exact(BlockNumberAddress((2, address_a))).unwrap(), Some(( BlockNumberAddress((2, address_a)), - StorageEntry { key: H256::zero(), value: U256::from(1) } + StorageEntry { key: B256::ZERO, value: U256::from(1) } )), "Slot 0 for account A should have changed from 1 on deletion" ); @@ -734,7 +727,7 @@ mod tests { changeset_cursor.next_dup().unwrap(), Some(( BlockNumberAddress((2, address_a)), - StorageEntry { key: H256::from(U256::from(1).to_be_bytes()), value: U256::from(2) } + StorageEntry { key: B256::from(U256::from(1).to_be_bytes()), value: U256::from(2) } )), "Slot 1 for account A should have changed from 2 on deletion" ); @@ -779,7 +772,7 @@ mod tests { }, )])); init_state.merge_transitions(BundleRetention::Reverts); - BundleStateWithReceipts::new(init_state.take_bundle(), Vec::new(), 0) + BundleStateWithReceipts::new(init_state.take_bundle(), Receipts::new(), 0) .write_to_db(provider.tx_ref(), OriginalValuesKnown::Yes) .expect("Could not write init bundle state to DB"); @@ -926,7 +919,7 @@ mod tests { let bundle = state.take_bundle(); - BundleStateWithReceipts::new(bundle, Vec::new(), 1) + BundleStateWithReceipts::new(bundle, Receipts::new(), 1) .write_to_db(provider.tx_ref(), OriginalValuesKnown::Yes) .expect("Could not write bundle state to DB"); @@ -949,14 +942,14 @@ mod tests { storage_changes.next(), Some(Ok(( BlockNumberAddress((0, address1)), - StorageEntry { key: H256::from_low_u64_be(0), value: U256::ZERO } + StorageEntry { key: B256::with_last_byte(0), value: U256::ZERO } ))) ); assert_eq!( storage_changes.next(), Some(Ok(( BlockNumberAddress((0, address1)), - StorageEntry { key: H256::from_low_u64_be(1), value: U256::ZERO } + StorageEntry { key: B256::with_last_byte(1), value: U256::ZERO } ))) ); @@ -966,7 +959,7 @@ mod tests { storage_changes.next(), Some(Ok(( BlockNumberAddress((1, address1)), - StorageEntry { key: H256::from_low_u64_be(0), value: U256::from(1) } + StorageEntry { key: B256::with_last_byte(0), value: U256::from(1) } ))) ); @@ -977,14 +970,14 @@ mod tests { storage_changes.next(), Some(Ok(( BlockNumberAddress((2, address1)), - StorageEntry { key: 
H256::from_low_u64_be(0), value: U256::from(2) } + StorageEntry { key: B256::with_last_byte(0), value: U256::from(2) } ))) ); assert_eq!( storage_changes.next(), Some(Ok(( BlockNumberAddress((2, address1)), - StorageEntry { key: H256::from_low_u64_be(1), value: U256::from(2) } + StorageEntry { key: B256::with_last_byte(1), value: U256::from(2) } ))) ); @@ -999,21 +992,21 @@ mod tests { storage_changes.next(), Some(Ok(( BlockNumberAddress((4, address1)), - StorageEntry { key: H256::from_low_u64_be(0), value: U256::ZERO } + StorageEntry { key: B256::with_last_byte(0), value: U256::ZERO } ))) ); assert_eq!( storage_changes.next(), Some(Ok(( BlockNumberAddress((4, address1)), - StorageEntry { key: H256::from_low_u64_be(2), value: U256::ZERO } + StorageEntry { key: B256::with_last_byte(2), value: U256::ZERO } ))) ); assert_eq!( storage_changes.next(), Some(Ok(( BlockNumberAddress((4, address1)), - StorageEntry { key: H256::from_low_u64_be(6), value: U256::ZERO } + StorageEntry { key: B256::with_last_byte(6), value: U256::ZERO } ))) ); @@ -1025,21 +1018,21 @@ mod tests { storage_changes.next(), Some(Ok(( BlockNumberAddress((5, address1)), - StorageEntry { key: H256::from_low_u64_be(0), value: U256::from(2) } + StorageEntry { key: B256::with_last_byte(0), value: U256::from(2) } ))) ); assert_eq!( storage_changes.next(), Some(Ok(( BlockNumberAddress((5, address1)), - StorageEntry { key: H256::from_low_u64_be(2), value: U256::from(4) } + StorageEntry { key: B256::with_last_byte(2), value: U256::from(4) } ))) ); assert_eq!( storage_changes.next(), Some(Ok(( BlockNumberAddress((5, address1)), - StorageEntry { key: H256::from_low_u64_be(6), value: U256::from(6) } + StorageEntry { key: B256::with_last_byte(6), value: U256::from(6) } ))) ); @@ -1052,7 +1045,7 @@ mod tests { storage_changes.next(), Some(Ok(( BlockNumberAddress((7, address1)), - StorageEntry { key: H256::from_low_u64_be(0), value: U256::ZERO } + StorageEntry { key: B256::with_last_byte(0), value: U256::ZERO } ))) ); assert_eq!(storage_changes.next(), None); @@ -1092,7 +1085,7 @@ mod tests { }, )])); init_state.merge_transitions(BundleRetention::Reverts); - BundleStateWithReceipts::new(init_state.take_bundle(), Vec::new(), 0) + BundleStateWithReceipts::new(init_state.take_bundle(), Receipts::new(), 0) .write_to_db(provider.tx_ref(), OriginalValuesKnown::Yes) .expect("Could not write init bundle state to DB"); @@ -1139,7 +1132,7 @@ mod tests { // Commit block #1 changes to the database. 
state.merge_transitions(BundleRetention::Reverts); - BundleStateWithReceipts::new(state.take_bundle(), Vec::new(), 1) + BundleStateWithReceipts::new(state.take_bundle(), Receipts::new(), 1) .write_to_db(provider.tx_ref(), OriginalValuesKnown::Yes) .expect("Could not write bundle state to DB"); @@ -1154,14 +1147,14 @@ mod tests { storage_changes.next(), Some(Ok(( BlockNumberAddress((1, address1)), - StorageEntry { key: H256::from_low_u64_be(0), value: U256::from(1) } + StorageEntry { key: B256::with_last_byte(0), value: U256::from(1) } ))) ); assert_eq!( storage_changes.next(), Some(Ok(( BlockNumberAddress((1, address1)), - StorageEntry { key: H256::from_low_u64_be(1), value: U256::from(2) } + StorageEntry { key: B256::with_last_byte(1), value: U256::from(2) } ))) ); assert_eq!(storage_changes.next(), None); @@ -1171,7 +1164,7 @@ mod tests { fn revert_to_indices() { let base = BundleStateWithReceipts { bundle: BundleState::default(), - receipts: vec![vec![Some(Receipt::default()); 2]; 7], + receipts: Receipts::from_vec(vec![vec![Some(Receipt::default()); 2]; 7]), first_block: 10, }; diff --git a/crates/storage/provider/src/bundle_state/state_changes.rs b/crates/storage/provider/src/bundle_state/state_changes.rs index 750e50acef..4ab832b37d 100644 --- a/crates/storage/provider/src/bundle_state/state_changes.rs +++ b/crates/storage/provider/src/bundle_state/state_changes.rs @@ -6,10 +6,8 @@ use reth_db::{ }; use reth_interfaces::db::DatabaseError; use reth_primitives::{Bytecode, StorageEntry, U256}; -use reth_revm_primitives::{ - db::states::{PlainStorageChangeset, StateChangeset}, - into_reth_acc, -}; +use reth_revm_primitives::into_reth_acc; +use revm::db::states::{PlainStorageChangeset, StateChangeset}; /// A change to the state of the world. #[derive(Debug, Default)] @@ -62,7 +60,7 @@ impl StateChanges { if wipe_storage && storages_cursor.seek_exact(address)?.is_some() { storages_cursor.delete_current_duplicates()?; } - // cast storages to H256. + // cast storages to B256. let mut storage = storage .into_iter() .map(|(k, value)| StorageEntry { key: k.into(), value }) diff --git a/crates/storage/provider/src/bundle_state/state_reverts.rs b/crates/storage/provider/src/bundle_state/state_reverts.rs index e97c0855ae..f11e8f11bd 100644 --- a/crates/storage/provider/src/bundle_state/state_reverts.rs +++ b/crates/storage/provider/src/bundle_state/state_reverts.rs @@ -6,11 +6,9 @@ use reth_db::{ transaction::{DbTx, DbTxMut}, }; use reth_interfaces::db::DatabaseError; -use reth_primitives::{BlockNumber, StorageEntry, H256, U256}; -use reth_revm_primitives::{ - db::states::{PlainStateReverts, PlainStorageRevert, RevertToSlot}, - into_reth_acc, -}; +use reth_primitives::{BlockNumber, StorageEntry, B256, U256}; +use reth_revm_primitives::into_reth_acc; +use revm::db::states::{PlainStateReverts, PlainStorageRevert, RevertToSlot}; use std::iter::Peekable; /// Revert of the state. @@ -48,7 +46,7 @@ impl StateReverts { let mut storage = storage_revert .into_iter() - .map(|(k, v)| (H256(k.to_be_bytes()), v)) + .map(|(k, v)| (B256::new(k.to_be_bytes()), v)) .collect::>(); // sort storage slots by key. storage.par_sort_unstable_by_key(|a| a.0); @@ -102,8 +100,8 @@ struct StorageRevertsIter { impl StorageRevertsIter where - R: Iterator, - W: Iterator, + R: Iterator, + W: Iterator, { fn new( reverts: impl IntoIterator, @@ -113,22 +111,22 @@ where } /// Consume next revert and return it. 
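`StorageRevertsIter` combines explicit reverts with wiped-storage entries drawn from two iterators. Its `next` body is not shown in this hunk, so the following is only a plausible standalone sketch of a sorted merge over two `Peekable` streams, keeping the revert value when both sides carry the same key, and with `[u8; 32]`/`u128` standing in for `B256`/`U256`:

    use std::iter::Peekable;

    struct SortedMerge<R: Iterator, W: Iterator> {
        reverts: Peekable<R>,
        wiped: Peekable<W>,
    }

    impl<R, W> Iterator for SortedMerge<R, W>
    where
        R: Iterator<Item = ([u8; 32], u128)>,
        W: Iterator<Item = ([u8; 32], u128)>,
    {
        type Item = ([u8; 32], u128);

        fn next(&mut self) -> Option<Self::Item> {
            match (self.reverts.peek(), self.wiped.peek()) {
                (Some((rk, _)), Some((wk, _))) => {
                    if rk <= wk {
                        // On equal keys take the revert and drop the wiped duplicate.
                        if rk == wk {
                            self.wiped.next();
                        }
                        self.reverts.next()
                    } else {
                        self.wiped.next()
                    }
                }
                (Some(_), None) => self.reverts.next(),
                (None, _) => self.wiped.next(),
            }
        }
    }

    fn main() {
        let reverts = vec![([1u8; 32], 10u128), ([3u8; 32], 30)].into_iter().peekable();
        let wiped = vec![([2u8; 32], 0u128), ([3u8; 32], 0)].into_iter().peekable();
        let merged: Vec<_> = SortedMerge { reverts, wiped }.collect();
        assert_eq!(merged.len(), 3); // keys 1, 2, 3 with the revert winning on key 3
    }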
- fn next_revert(&mut self) -> Option<(H256, U256)> { + fn next_revert(&mut self) -> Option<(B256, U256)> { self.reverts.next().map(|(key, revert)| (key, revert.to_previous_value())) } /// Consume next wiped storage and return it. - fn next_wiped(&mut self) -> Option<(H256, U256)> { + fn next_wiped(&mut self) -> Option<(B256, U256)> { self.wiped.next() } } impl Iterator for StorageRevertsIter where - R: Iterator, - W: Iterator, + R: Iterator, + W: Iterator, { - type Item = (H256, U256); + type Item = (B256, U256); /// Iterate over storage reverts and wiped entries and return items in the sorted order. /// NOTE: The implementation assumes that inner iterators are already sorted. diff --git a/crates/storage/provider/src/chain.rs b/crates/storage/provider/src/chain.rs index bb97c25f27..db68598061 100644 --- a/crates/storage/provider/src/chain.rs +++ b/crates/storage/provider/src/chain.rs @@ -360,8 +360,8 @@ pub enum ChainSplit { #[cfg(test)] mod tests { use super::*; - use reth_primitives::{H160, H256}; - use reth_revm_primitives::{ + use reth_primitives::{Address, Receipts, B256}; + use revm::{ db::BundleState, primitives::{AccountInfo, HashMap}, }; @@ -369,10 +369,10 @@ mod tests { #[test] fn chain_append() { let block = SealedBlockWithSenders::default(); - let block1_hash = H256([0x01; 32]); - let block2_hash = H256([0x02; 32]); - let block3_hash = H256([0x03; 32]); - let block4_hash = H256([0x04; 32]); + let block1_hash = B256::new([0x01; 32]); + let block2_hash = B256::new([0x02; 32]); + let block3_hash = B256::new([0x03; 32]); + let block4_hash = B256::new([0x04; 32]); let mut block1 = block.clone(); let mut block2 = block.clone(); @@ -402,35 +402,45 @@ mod tests { fn test_number_split() { let block_state1 = BundleStateWithReceipts::new( BundleState::new( - vec![(H160([2; 20]), None, Some(AccountInfo::default()), HashMap::default())], - vec![vec![(H160([2; 20]), None, vec![])]], + vec![( + Address::new([2; 20]), + None, + Some(AccountInfo::default()), + HashMap::default(), + )], + vec![vec![(Address::new([2; 20]), None, vec![])]], vec![], ), - vec![vec![]], + Receipts::from_vec(vec![vec![]]), 1, ); let block_state2 = BundleStateWithReceipts::new( BundleState::new( - vec![(H160([3; 20]), None, Some(AccountInfo::default()), HashMap::default())], - vec![vec![(H160([3; 20]), None, vec![])]], + vec![( + Address::new([3; 20]), + None, + Some(AccountInfo::default()), + HashMap::default(), + )], + vec![vec![(Address::new([3; 20]), None, vec![])]], vec![], ), - vec![vec![]], + Receipts::from_vec(vec![vec![]]), 2, ); let mut block1 = SealedBlockWithSenders::default(); - let block1_hash = H256([15; 32]); + let block1_hash = B256::new([15; 32]); block1.number = 1; block1.hash = block1_hash; - block1.senders.push(H160([4; 20])); + block1.senders.push(Address::new([4; 20])); let mut block2 = SealedBlockWithSenders::default(); - let block2_hash = H256([16; 32]); + let block2_hash = B256::new([16; 32]); block2.number = 2; block2.hash = block2_hash; - block2.senders.push(H160([4; 20])); + block2.senders.push(Address::new([4; 20])); let mut block_state_extended = block_state1.clone(); block_state_extended.extend(block_state2.clone()); @@ -460,7 +470,7 @@ mod tests { // split at unknown block hash assert_eq!( - chain.clone().split(SplitAt::Hash(H256([100; 32]))), + chain.clone().split(SplitAt::Hash(B256::new([100; 32]))), ChainSplit::NoSplitPending(chain.clone()) ); diff --git a/crates/storage/provider/src/lib.rs b/crates/storage/provider/src/lib.rs index 9a5045d73a..02f0936641 100644 --- 
a/crates/storage/provider/src/lib.rs +++ b/crates/storage/provider/src/lib.rs @@ -7,7 +7,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/storage/provider/src/providers/bundle_state_provider.rs b/crates/storage/provider/src/providers/bundle_state_provider.rs index 63741919ca..0f6e049e76 100644 --- a/crates/storage/provider/src/providers/bundle_state_provider.rs +++ b/crates/storage/provider/src/providers/bundle_state_provider.rs @@ -3,7 +3,7 @@ use crate::{ StateProvider, StateRootProvider, }; use reth_interfaces::{provider::ProviderError, RethResult}; -use reth_primitives::{Account, Address, BlockNumber, Bytecode, Bytes, H256}; +use reth_primitives::{Account, Address, BlockNumber, Bytecode, Bytes, B256}; /// A state provider that either resolves to data in a wrapped [`crate::BundleStateWithReceipts`], /// or an underlying state provider. @@ -27,7 +27,7 @@ impl BundleStateProvider BlockHashReader for BundleStateProvider { - fn block_hash(&self, block_number: BlockNumber) -> RethResult> { + fn block_hash(&self, block_number: BlockNumber) -> RethResult> { let block_hash = self.post_state_data_provider.block_hash(block_number); if block_hash.is_some() { return Ok(block_hash) @@ -39,7 +39,7 @@ impl BlockHashReader &self, _start: BlockNumber, _end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { unimplemented!() } } @@ -59,7 +59,7 @@ impl AccountReader impl StateRootProvider for BundleStateProvider { - fn state_root(&self, post_state: &BundleStateWithReceipts) -> RethResult { + fn state_root(&self, post_state: &BundleStateWithReceipts) -> RethResult { let mut state = self.post_state_data_provider.state().clone(); state.extend(post_state.clone()); self.state_provider.state_root(&state) @@ -84,7 +84,7 @@ impl StateProvider self.state_provider.storage(account, storage_key) } - fn bytecode_by_hash(&self, code_hash: H256) -> RethResult> { + fn bytecode_by_hash(&self, code_hash: B256) -> RethResult> { if let Some(bytecode) = self.post_state_data_provider.state().bytecode(&code_hash) { return Ok(Some(bytecode)) } @@ -95,8 +95,8 @@ impl StateProvider fn proof( &self, _address: Address, - _keys: &[H256], - ) -> RethResult<(Vec, H256, Vec>)> { + _keys: &[B256], + ) -> RethResult<(Vec, B256, Vec>)> { Err(ProviderError::StateRootNotAvailableForHistoricalBlock.into()) } } diff --git a/crates/storage/provider/src/providers/database/mod.rs b/crates/storage/provider/src/providers/database/mod.rs index 993fb31961..f19c768f6b 100644 --- a/crates/storage/provider/src/providers/database/mod.rs +++ b/crates/storage/provider/src/providers/database/mod.rs @@ -6,21 +6,23 @@ use crate::{ TransactionsProvider, WithdrawalsProvider, }; use reth_db::{database::Database, init_db, models::StoredBlockBodyIndices, DatabaseEnv}; -use reth_interfaces::{RethError, RethResult}; +use reth_interfaces::{db::LogLevel, RethError, RethResult}; use reth_primitives::{ stage::{StageCheckpoint, StageId}, Address, Block, BlockHash, BlockHashOrNumber, BlockNumber, BlockWithSenders, ChainInfo, ChainSpec, Header, PruneCheckpoint, PrunePart, Receipt, SealedBlock, SealedHeader, TransactionMeta, 
TransactionSigned, TransactionSignedNoHash, TxHash, TxNumber, Withdrawal, - H256, U256, + B256, U256, +}; +use revm::primitives::{BlockEnv, CfgEnv}; +use std::{ + ops::{RangeBounds, RangeInclusive}, + sync::Arc, }; -use reth_revm_primitives::primitives::{BlockEnv, CfgEnv}; -use std::{ops::RangeBounds, sync::Arc}; use tracing::trace; mod provider; pub use provider::{DatabaseProvider, DatabaseProviderRO, DatabaseProviderRW}; -use reth_interfaces::db::LogLevel; /// A common provider that fetches data from a database. /// @@ -185,7 +187,7 @@ impl HeaderProvider for ProviderFactory { } impl BlockHashReader for ProviderFactory { - fn block_hash(&self, number: u64) -> RethResult> { + fn block_hash(&self, number: u64) -> RethResult> { self.provider()?.block_hash(number) } @@ -193,7 +195,7 @@ impl BlockHashReader for ProviderFactory { &self, start: BlockNumber, end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { self.provider()?.canonical_hashes_range(start, end) } } @@ -211,13 +213,13 @@ impl BlockNumReader for ProviderFactory { self.provider()?.last_block_number() } - fn block_number(&self, hash: H256) -> RethResult> { + fn block_number(&self, hash: B256) -> RethResult> { self.provider()?.block_number(hash) } } impl BlockReader for ProviderFactory { - fn find_block_by_hash(&self, hash: H256, source: BlockSource) -> RethResult> { + fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> RethResult> { self.provider()?.find_block_by_hash(hash, source) } @@ -247,6 +249,10 @@ impl BlockReader for ProviderFactory { fn block_with_senders(&self, number: BlockNumber) -> RethResult> { self.provider()?.block_with_senders(number) } + + fn block_range(&self, range: RangeInclusive) -> RethResult> { + self.provider()?.block_range(range) + } } impl TransactionsProvider for ProviderFactory { @@ -407,6 +413,7 @@ impl PruneCheckpointReader for ProviderFactory { mod tests { use super::ProviderFactory; use crate::{BlockHashReader, BlockNumReader, BlockWriter, TransactionsProvider}; + use alloy_rlp::Decodable; use assert_matches::assert_matches; use reth_db::{ tables, @@ -415,9 +422,8 @@ mod tests { }; use reth_interfaces::test_utils::{generators, generators::random_block}; use reth_primitives::{ - hex_literal::hex, ChainSpecBuilder, PruneMode, PruneModes, SealedBlock, TxNumber, H256, + hex_literal::hex, ChainSpecBuilder, PruneMode, PruneModes, SealedBlock, TxNumber, B256, }; - use reth_rlp::Decodable; use std::{ops::RangeInclusive, sync::Arc}; #[test] @@ -437,7 +443,7 @@ mod tests { let chain_info = provider.chain_info().expect("should be ok"); assert_eq!(chain_info.best_number, 0); - assert_eq!(chain_info.best_hash, H256::zero()); + assert_eq!(chain_info.best_hash, B256::ZERO); } #[test] diff --git a/crates/storage/provider/src/providers/database/provider.rs b/crates/storage/provider/src/providers/database/provider.rs index 17c6c97d15..ae19cf9e37 100644 --- a/crates/storage/provider/src/providers/database/provider.rs +++ b/crates/storage/provider/src/providers/database/provider.rs @@ -34,14 +34,14 @@ use reth_primitives::{ ChainInfo, ChainSpec, Hardfork, Head, Header, PruneCheckpoint, PruneModes, PrunePart, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, StorageEntry, TransactionMeta, TransactionSigned, TransactionSignedEcRecovered, TransactionSignedNoHash, TxHash, TxNumber, - Withdrawal, H256, U256, + Withdrawal, B256, U256, }; use reth_revm_primitives::{ config::revm_spec, env::{fill_block_env, fill_cfg_and_block_env, fill_cfg_env}, - primitives::{BlockEnv, CfgEnv, SpecId}, }; use 
reth_trie::{prefix_set::PrefixSetMut, StateRoot}; +use revm::primitives::{BlockEnv, CfgEnv, SpecId}; use std::{ collections::{hash_map, BTreeMap, BTreeSet, HashMap, HashSet}, fmt::Debug, @@ -368,7 +368,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> DatabaseProvider<'this, TX> { state, reverts, Vec::new(), - receipts, + reth_primitives::Receipts::from_vec(receipts), start_block_number, )) } @@ -930,7 +930,7 @@ impl<'this, TX: DbTx<'this>> HeaderProvider for DatabaseProvider<'this, TX> { } impl<'this, TX: DbTx<'this>> BlockHashReader for DatabaseProvider<'this, TX> { - fn block_hash(&self, number: u64) -> RethResult> { + fn block_hash(&self, number: u64) -> RethResult> { Ok(self.tx.get::(number)?) } @@ -938,7 +938,7 @@ impl<'this, TX: DbTx<'this>> BlockHashReader for DatabaseProvider<'this, TX> { &self, start: BlockNumber, end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { let range = start..end; let mut cursor = self.tx.cursor_read::()?; cursor @@ -966,13 +966,13 @@ impl<'this, TX: DbTx<'this>> BlockNumReader for DatabaseProvider<'this, TX> { Ok(self.tx.cursor_read::()?.last()?.unwrap_or_default().0) } - fn block_number(&self, hash: H256) -> RethResult> { + fn block_number(&self, hash: B256) -> RethResult> { Ok(self.tx.get::(hash)?) } } impl<'this, TX: DbTx<'this>> BlockReader for DatabaseProvider<'this, TX> { - fn find_block_by_hash(&self, hash: H256, source: BlockSource) -> RethResult> { + fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> RethResult> { if source.is_database() { self.block(hash.into()) } else { @@ -1086,6 +1086,64 @@ impl<'this, TX: DbTx<'this>> BlockReader for DatabaseProvider<'this, TX> { Ok(Some(Block { header, body, ommers, withdrawals }.with_senders(senders))) } + + fn block_range(&self, range: RangeInclusive) -> RethResult> { + if range.is_empty() { + return Ok(Vec::new()) + } + + let len = range.end().saturating_sub(*range.start()) as usize; + let mut blocks = Vec::with_capacity(len); + + let mut headers_cursor = self.tx.cursor_read::()?; + let mut ommers_cursor = self.tx.cursor_read::()?; + let mut withdrawals_cursor = self.tx.cursor_read::()?; + let mut block_body_cursor = self.tx.cursor_read::()?; + let mut tx_cursor = self.tx.cursor_read::()?; + + for num in range { + if let Some((_, header)) = headers_cursor.seek_exact(num)? { + // If the body indices are not found, this means that the transactions either do + // not exist in the database yet, or they do exist but are + // not indexed. If they exist but are not indexed, we don't + // have enough information to return the block anyway, so + // we skip the block. + if let Some((_, block_body_indices)) = block_body_cursor.seek_exact(num)? { + let tx_range = block_body_indices.tx_num_range(); + let body = if tx_range.is_empty() { + Vec::new() + } else { + tx_cursor + .walk_range(tx_range)? + .map(|result| result.map(|(_, tx)| tx.into())) + .collect::, _>>()? + }; + + // If we are past shanghai, then all blocks should have a withdrawal list, + // even if empty + let withdrawals = + if self.chain_spec.is_shanghai_active_at_timestamp(header.timestamp) { + Some( + withdrawals_cursor + .seek_exact(num)?
+ .map(|(_, w)| w.withdrawals) + .unwrap_or_default(), + ) + } else { + None + }; + let ommers = if self.chain_spec.final_paris_total_difficulty(num).is_some() { + Vec::new() + } else { + ommers_cursor.seek_exact(num)?.map(|(_, o)| o.ommers).unwrap_or_default() + }; + + blocks.push(Block { header, body, ommers, withdrawals }); + } + } + } + Ok(blocks) + } } impl<'this, TX: DbTx<'this>> TransactionsProvider for DatabaseProvider<'this, TX> { @@ -1416,7 +1474,7 @@ impl<'this, TX: DbTxMut<'this>> StageCheckpointWriter for DatabaseProvider<'this impl<'this, TX: DbTx<'this>> StorageReader for DatabaseProvider<'this, TX> { fn plainstate_storages( &self, - addresses_with_keys: impl IntoIterator)>, + addresses_with_keys: impl IntoIterator)>, ) -> RethResult)>> { let mut plain_storage = self.tx.cursor_dup_read::()?; @@ -1440,13 +1498,13 @@ impl<'this, TX: DbTx<'this>> StorageReader for DatabaseProvider<'this, TX> { fn changed_storages_with_range( &self, range: RangeInclusive, - ) -> RethResult>> { + ) -> RethResult>> { self.tx .cursor_read::()? .walk_range(BlockNumberAddress::range(range))? // fold all storages and save its old state so we can remove it from HashedStorage // it is needed as it is dup table. - .try_fold(BTreeMap::new(), |mut accounts: BTreeMap>, entry| { + .try_fold(BTreeMap::new(), |mut accounts: BTreeMap>, entry| { let (BlockNumberAddress((_, address)), storage_entry) = entry?; accounts.entry(address).or_default().insert(storage_entry.key); Ok(accounts) @@ -1456,13 +1514,13 @@ impl<'this, TX: DbTx<'this>> StorageReader for DatabaseProvider<'this, TX> { fn changed_storages_and_blocks_with_range( &self, range: RangeInclusive, - ) -> RethResult>> { + ) -> RethResult>> { let mut changeset_cursor = self.tx.cursor_read::()?; let storage_changeset_lists = changeset_cursor.walk_range(BlockNumberAddress::range(range))?.try_fold( BTreeMap::new(), - |mut storages: BTreeMap<(Address, H256), Vec>, entry| -> RethResult<_> { + |mut storages: BTreeMap<(Address, B256), Vec>, entry| -> RethResult<_> { let (index, storage) = entry?; storages .entry((index.address(), storage.key)) @@ -1480,12 +1538,12 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HashingWriter for DatabaseProvider fn insert_hashes( &self, range: RangeInclusive, - end_block_hash: H256, - expected_state_root: H256, + end_block_hash: B256, + expected_state_root: B256, ) -> RethResult<()> { // Initialize prefix sets. let mut account_prefix_set = PrefixSetMut::default(); - let mut storage_prefix_set: HashMap = HashMap::default(); + let mut storage_prefix_set: HashMap = HashMap::default(); let mut destroyed_accounts = HashSet::default(); // storage hashing stage @@ -1546,7 +1604,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HashingWriter for DatabaseProvider fn unwind_storage_hashing( &self, range: Range, - ) -> RethResult>> { + ) -> RethResult>> { let mut hashed_storage = self.tx.cursor_dup_write::()?; // Aggregate all block changesets and make list of accounts that have been changed. 
@@ -1560,7 +1618,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HashingWriter for DatabaseProvider // fold all account to get the old balance/nonces and account that needs to be removed .fold( BTreeMap::new(), - |mut accounts: BTreeMap<(Address, H256), U256>, + |mut accounts: BTreeMap<(Address, B256), U256>, (BlockNumberAddress((_, address)), storage_entry)| { accounts.insert((address, storage_entry.key), storage_entry.value); accounts @@ -1572,7 +1630,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HashingWriter for DatabaseProvider .map(|((address, key), value)| ((keccak256(address), keccak256(key)), value)) .collect::>(); - let mut hashed_storage_keys: HashMap> = HashMap::default(); + let mut hashed_storage_keys: HashMap> = HashMap::default(); for (hashed_address, hashed_slot) in hashed_storages.keys() { hashed_storage_keys.entry(*hashed_address).or_default().insert(*hashed_slot); } @@ -1601,7 +1659,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HashingWriter for DatabaseProvider fn insert_storage_for_hashing( &self, storages: impl IntoIterator)>, - ) -> RethResult>> { + ) -> RethResult>> { // hash values let hashed_storages = storages.into_iter().fold(BTreeMap::new(), |mut map, (address, storage)| { @@ -1644,7 +1702,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HashingWriter for DatabaseProvider fn unwind_account_hashing( &self, range: RangeInclusive, - ) -> RethResult>> { + ) -> RethResult>> { let mut hashed_accounts_cursor = self.tx.cursor_write::()?; // Aggregate all block changesets and make a list of accounts that have been changed. @@ -1687,12 +1745,12 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HashingWriter for DatabaseProvider fn insert_account_for_hashing( &self, accounts: impl IntoIterator)>, - ) -> RethResult>> { + ) -> RethResult>> { let mut hashed_accounts_cursor = self.tx.cursor_write::()?; let hashed_accounts = accounts.into_iter().fold( BTreeMap::new(), - |mut map: BTreeMap>, (address, account)| { + |mut map: BTreeMap>, (address, account)| { map.insert(keccak256(address), account); map }, @@ -1730,7 +1788,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HistoryWriter for DatabaseProvider fn insert_storage_history_index( &self, - storage_transitions: BTreeMap<(Address, H256), Vec>, + storage_transitions: BTreeMap<(Address, B256), Vec>, ) -> RethResult<()> { self.append_history_index::<_, tables::StorageHistory>( storage_transitions, @@ -1765,7 +1823,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> HistoryWriter for DatabaseProvider // fold all storages and get last block number .fold( BTreeMap::new(), - |mut accounts: BTreeMap<(Address, H256), u64>, (index, storage)| { + |mut accounts: BTreeMap<(Address, B256), u64>, (index, storage)| { // we just need address and lowest block number. accounts.insert((index.address(), storage.key), index.block_number()); accounts @@ -1855,7 +1913,7 @@ impl<'this, TX: DbTxMut<'this> + DbTx<'this>> BlockExecutionWriter for DatabaseP // Initialize prefix sets. let mut account_prefix_set = PrefixSetMut::default(); - let mut storage_prefix_set: HashMap = HashMap::default(); + let mut storage_prefix_set: HashMap = HashMap::default(); let mut destroyed_accounts = HashSet::default(); // Unwind account hashes. Add changed accounts to account prefix set. 
diff --git a/crates/storage/provider/src/providers/mod.rs b/crates/storage/provider/src/providers/mod.rs index 123b0cc28f..5d0e5eebd1 100644 --- a/crates/storage/provider/src/providers/mod.rs +++ b/crates/storage/provider/src/providers/mod.rs @@ -17,24 +17,27 @@ use reth_primitives::{ Account, Address, Block, BlockHash, BlockHashOrNumber, BlockId, BlockNumHash, BlockNumber, BlockNumberOrTag, BlockWithSenders, ChainInfo, ChainSpec, Header, PruneCheckpoint, PrunePart, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, TransactionMeta, TransactionSigned, - TransactionSignedNoHash, TxHash, TxNumber, Withdrawal, H256, U256, -}; -use reth_revm_primitives::primitives::{BlockEnv, CfgEnv}; -pub use state::{ - historical::{HistoricalStateProvider, HistoricalStateProviderRef}, - latest::{LatestStateProvider, LatestStateProviderRef}, + TransactionSignedNoHash, TxHash, TxNumber, Withdrawal, B256, U256, }; +use revm::primitives::{BlockEnv, CfgEnv}; use std::{ collections::{BTreeMap, HashSet}, - ops::RangeBounds, + ops::{RangeBounds, RangeInclusive}, sync::Arc, time::Instant, }; use tracing::trace; +pub use state::{ + historical::{HistoricalStateProvider, HistoricalStateProviderRef}, + latest::{LatestStateProvider, LatestStateProviderRef}, +}; + mod bundle_state_provider; mod chain_info; mod database; +mod snapshot; +pub use snapshot::SnapshotProvider; mod state; use crate::{providers::chain_info::ChainInfoTracker, traits::BlockSource}; pub use bundle_state_provider::BundleStateProvider; @@ -154,7 +157,7 @@ where DB: Database, Tree: Send + Sync, { - fn block_hash(&self, number: u64) -> RethResult> { + fn block_hash(&self, number: u64) -> RethResult> { self.database.provider()?.block_hash(number) } @@ -162,7 +165,7 @@ where &self, start: BlockNumber, end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { self.database.provider()?.canonical_hashes_range(start, end) } } @@ -184,7 +187,7 @@ where self.database.provider()?.last_block_number() } - fn block_number(&self, hash: H256) -> RethResult> { + fn block_number(&self, hash: B256) -> RethResult> { self.database.provider()?.block_number(hash) } } @@ -212,7 +215,7 @@ where DB: Database, Tree: BlockchainTreeViewer + Send + Sync, { - fn find_block_by_hash(&self, hash: H256, source: BlockSource) -> RethResult> { + fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> RethResult> { let block = match source { BlockSource::Any => { // check database first @@ -266,6 +269,10 @@ where fn block_with_senders(&self, number: BlockNumber) -> RethResult> { self.database.provider()?.block_with_senders(number) } + + fn block_range(&self, range: RangeInclusive) -> RethResult> { + self.database.provider()?.block_range(range) + } } impl TransactionsProvider for BlockchainProvider @@ -525,7 +532,7 @@ where self.latest() } - fn pending_state_by_hash(&self, block_hash: H256) -> RethResult>> { + fn pending_state_by_hash(&self, block_hash: B256) -> RethResult>> { if let Some(state) = self.tree.find_pending_state_provider(block_hash) { return Ok(Some(self.pending_with_provider(state)?)) } diff --git a/crates/storage/provider/src/providers/snapshot.rs b/crates/storage/provider/src/providers/snapshot.rs new file mode 100644 index 0000000000..e63f3f8f79 --- /dev/null +++ b/crates/storage/provider/src/providers/snapshot.rs @@ -0,0 +1,205 @@ +use crate::HeaderProvider; +use reth_db::{ + table::{Decompress, Table}, + HeaderTD, +}; +use reth_interfaces::RethResult; +use reth_nippy_jar::{NippyJar, NippyJarCursor}; +use reth_primitives::{BlockHash, BlockNumber, 
Header, SealedHeader, U256}; +use std::ops::RangeBounds; + +/// SnapshotProvider +/// +/// WIP Rudimentary impl just for tests +/// TODO: should be able to walk through snapshot files/block_ranges +/// TODO: Arc over NippyJars and/or NippyJarCursors (LRU) +#[derive(Debug)] +pub struct SnapshotProvider<'a> { + /// NippyJar + pub jar: &'a NippyJar, +} + +impl<'a> SnapshotProvider<'a> { + fn cursor(&self) -> NippyJarCursor<'a> { + NippyJarCursor::new(self.jar, None).unwrap() + } +} + +impl<'a> HeaderProvider for SnapshotProvider<'a> { + fn header(&self, block_hash: &BlockHash) -> RethResult> { + // WIP + let mut cursor = self.cursor(); + + let header = Header::decompress( + &cursor.row_by_key_with_cols::<0b01, 2>(&block_hash.0).unwrap().unwrap()[0], + ) + .unwrap(); + + if &header.hash_slow() == block_hash { + return Ok(Some(header)) + } else { + // check next snapshot + } + Ok(None) + } + + fn header_by_number(&self, _num: BlockNumber) -> RethResult> { + unimplemented!(); + } + + fn header_td(&self, block_hash: &BlockHash) -> RethResult> { + // WIP + let mut cursor = self.cursor(); + + let row = cursor.row_by_key_with_cols::<0b11, 2>(&block_hash.0).unwrap().unwrap(); + + let header = Header::decompress(&row[0]).unwrap(); + let td = ::Value::decompress(&row[1]).unwrap(); + + if &header.hash_slow() == block_hash { + return Ok(Some(td.0)) + } else { + // check next snapshot + } + Ok(None) + } + + fn header_td_by_number(&self, _number: BlockNumber) -> RethResult> { + unimplemented!(); + } + + fn headers_range(&self, _range: impl RangeBounds) -> RethResult> { + unimplemented!(); + } + + fn sealed_headers_range( + &self, + _range: impl RangeBounds, + ) -> RethResult> { + unimplemented!(); + } + + fn sealed_header(&self, _number: BlockNumber) -> RethResult> { + unimplemented!(); + } +} + +#[cfg(test)] +mod test { + use super::*; + use crate::ProviderFactory; + use rand::{self, seq::SliceRandom}; + use reth_db::{ + cursor::DbCursorRO, + database::Database, + snapshot::create_snapshot_T1_T2, + test_utils::create_test_rw_db, + transaction::{DbTx, DbTxMut}, + CanonicalHeaders, DatabaseError, HeaderNumbers, HeaderTD, Headers, RawTable, + }; + use reth_interfaces::test_utils::generators::{self, random_header_range}; + use reth_nippy_jar::NippyJar; + use reth_primitives::{B256, MAINNET}; + + #[test] + fn test_snap() { + // Ranges + let row_count = 100u64; + let range = 0..=(row_count - 1); + + // Data sources + let db = create_test_rw_db(); + let factory = ProviderFactory::new(&db, MAINNET.clone()); + let snap_file = tempfile::NamedTempFile::new().unwrap(); + + // Setup data + let mut headers = random_header_range( + &mut generators::rng(), + *range.start()..(*range.end() + 1), + B256::random(), + ); + + db.update(|tx| -> std::result::Result<(), DatabaseError> { + let mut td = U256::ZERO; + for header in headers.clone() { + td += header.header.difficulty; + let hash = header.hash(); + + tx.put::(header.number, hash)?; + tx.put::(header.number, header.clone().unseal())?; + tx.put::(header.number, td.into())?; + tx.put::(hash, header.number)?; + } + Ok(()) + }) + .unwrap() + .unwrap(); + + // Create Snapshot + { + let with_compression = true; + let with_filter = true; + + let mut nippy_jar = NippyJar::new_without_header(2, snap_file.path()); + + if with_compression { + nippy_jar = nippy_jar.with_zstd(false, 0); + } + + if with_filter { + nippy_jar = nippy_jar.with_cuckoo_filter(row_count as usize + 10).with_mphf(); + } + + let tx = db.tx().unwrap(); + + // Hacky type inference.
TODO fix + let mut none_vec = Some(vec![vec![vec![0u8]].into_iter()]); + let _ = none_vec.take(); + + // Generate list of hashes for filters & PHF + let mut cursor = tx.cursor_read::>().unwrap(); + let hashes = cursor + .walk(None) + .unwrap() + .map(|row| row.map(|(_key, value)| value.into_value()).map_err(|e| e.into())); + + create_snapshot_T1_T2::( + &tx, + range, + none_vec, + Some(hashes), + row_count as usize, + &mut nippy_jar, + ) + .unwrap(); + } + + // Use providers to query Header data and compare if it matches + { + let jar = NippyJar::load_without_header(snap_file.path()).unwrap(); + + let db_provider = factory.provider().unwrap(); + let snap_provider = SnapshotProvider { jar: &jar }; + + assert!(!headers.is_empty()); + + // Shuffled for chaos. + headers.shuffle(&mut generators::rng()); + + for header in headers { + let header_hash = header.hash(); + let header = header.unseal(); + + // Compare Header + assert_eq!(header, db_provider.header(&header_hash).unwrap().unwrap()); + assert_eq!(header, snap_provider.header(&header_hash).unwrap().unwrap()); + + // Compare HeaderTD + assert_eq!( + db_provider.header_td(&header_hash).unwrap().unwrap(), + snap_provider.header_td(&header_hash).unwrap().unwrap() + ); + } + } + } +} diff --git a/crates/storage/provider/src/providers/state/historical.rs b/crates/storage/provider/src/providers/state/historical.rs index e63ab53b9a..e10e845f6a 100644 --- a/crates/storage/provider/src/providers/state/historical.rs +++ b/crates/storage/provider/src/providers/state/historical.rs @@ -12,7 +12,7 @@ use reth_db::{ }; use reth_interfaces::RethResult; use reth_primitives::{ - Account, Address, BlockNumber, Bytecode, Bytes, StorageKey, StorageValue, H256, + Account, Address, BlockNumber, Bytecode, Bytes, StorageKey, StorageValue, B256, }; use std::marker::PhantomData; @@ -183,7 +183,7 @@ impl<'a, 'b, TX: DbTx<'a>> AccountReader for HistoricalStateProviderRef<'a, 'b, impl<'a, 'b, TX: DbTx<'a>> BlockHashReader for HistoricalStateProviderRef<'a, 'b, TX> { /// Get block hash by number. 
- fn block_hash(&self, number: u64) -> RethResult> { + fn block_hash(&self, number: u64) -> RethResult> { self.tx.get::(number).map_err(Into::into) } @@ -191,7 +191,7 @@ impl<'a, 'b, TX: DbTx<'a>> BlockHashReader for HistoricalStateProviderRef<'a, 'b &self, start: BlockNumber, end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { let range = start..end; self.tx .cursor_read::() @@ -206,7 +206,7 @@ impl<'a, 'b, TX: DbTx<'a>> BlockHashReader for HistoricalStateProviderRef<'a, 'b } impl<'a, 'b, TX: DbTx<'a>> StateRootProvider for HistoricalStateProviderRef<'a, 'b, TX> { - fn state_root(&self, _post_state: &BundleStateWithReceipts) -> RethResult { + fn state_root(&self, _post_state: &BundleStateWithReceipts) -> RethResult { Err(ProviderError::StateRootNotAvailableForHistoricalBlock.into()) } } @@ -243,7 +243,7 @@ impl<'a, 'b, TX: DbTx<'a>> StateProvider for HistoricalStateProviderRef<'a, 'b, } /// Get account code by its hash - fn bytecode_by_hash(&self, code_hash: H256) -> RethResult> { + fn bytecode_by_hash(&self, code_hash: B256) -> RethResult> { self.tx.get::(code_hash).map_err(Into::into) } @@ -251,8 +251,8 @@ impl<'a, 'b, TX: DbTx<'a>> StateProvider for HistoricalStateProviderRef<'a, 'b, fn proof( &self, _address: Address, - _keys: &[H256], - ) -> RethResult<(Vec, H256, Vec>)> { + _keys: &[B256], + ) -> RethResult<(Vec, B256, Vec>)> { Err(ProviderError::StateRootNotAvailableForHistoricalBlock.into()) } } @@ -357,12 +357,11 @@ mod tests { BlockNumberList, }; use reth_interfaces::provider::ProviderError; - use reth_primitives::{hex_literal::hex, Account, StorageEntry, H160, H256, U256}; + use reth_primitives::{address, b256, Account, Address, StorageEntry, B256, U256}; - const ADDRESS: H160 = H160(hex!("0000000000000000000000000000000000000001")); - const HIGHER_ADDRESS: H160 = H160(hex!("0000000000000000000000000000000000000005")); - const STORAGE: H256 = - H256(hex!("0000000000000000000000000000000000000000000000000000000000000001")); + const ADDRESS: Address = address!("0000000000000000000000000000000000000001"); + const HIGHER_ADDRESS: Address = address!("0000000000000000000000000000000000000005"); + const STORAGE: B256 = b256!("0000000000000000000000000000000000000000000000000000000000000001"); fn assert_state_provider() {} #[allow(unused)] diff --git a/crates/storage/provider/src/providers/state/latest.rs b/crates/storage/provider/src/providers/state/latest.rs index a1759b6ffe..8c7247bb3f 100644 --- a/crates/storage/provider/src/providers/state/latest.rs +++ b/crates/storage/provider/src/providers/state/latest.rs @@ -9,7 +9,7 @@ use reth_db::{ }; use reth_interfaces::{provider::ProviderError, RethError, RethResult}; use reth_primitives::{ - keccak256, Account, Address, BlockNumber, Bytecode, Bytes, StorageKey, StorageValue, H256, + keccak256, Account, Address, BlockNumber, Bytecode, Bytes, StorageKey, StorageValue, B256, }; use std::marker::PhantomData; @@ -38,7 +38,7 @@ impl<'a, 'b, TX: DbTx<'a>> AccountReader for LatestStateProviderRef<'a, 'b, TX> impl<'a, 'b, TX: DbTx<'a>> BlockHashReader for LatestStateProviderRef<'a, 'b, TX> { /// Get block hash by number. 
- fn block_hash(&self, number: u64) -> RethResult> { + fn block_hash(&self, number: u64) -> RethResult> { self.db.get::(number).map_err(Into::into) } @@ -46,7 +46,7 @@ impl<'a, 'b, TX: DbTx<'a>> BlockHashReader for LatestStateProviderRef<'a, 'b, TX &self, start: BlockNumber, end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { let range = start..end; self.db .cursor_read::() @@ -61,7 +61,7 @@ impl<'a, 'b, TX: DbTx<'a>> BlockHashReader for LatestStateProviderRef<'a, 'b, TX } impl<'a, 'b, TX: DbTx<'a>> StateRootProvider for LatestStateProviderRef<'a, 'b, TX> { - fn state_root(&self, bundle_state: &BundleStateWithReceipts) -> RethResult { + fn state_root(&self, bundle_state: &BundleStateWithReceipts) -> RethResult { bundle_state.state_root_slow(self.db).map_err(|err| RethError::Database(err.into())) } } @@ -83,15 +83,15 @@ impl<'a, 'b, TX: DbTx<'a>> StateProvider for LatestStateProviderRef<'a, 'b, TX> } /// Get account code by its hash - fn bytecode_by_hash(&self, code_hash: H256) -> RethResult> { + fn bytecode_by_hash(&self, code_hash: B256) -> RethResult> { self.db.get::(code_hash).map_err(Into::into) } fn proof( &self, address: Address, - _keys: &[H256], - ) -> RethResult<(Vec, H256, Vec>)> { + _keys: &[B256], + ) -> RethResult<(Vec, B256, Vec>)> { let _hashed_address = keccak256(address); let _root = self .db diff --git a/crates/storage/provider/src/providers/state/macros.rs b/crates/storage/provider/src/providers/state/macros.rs index 81eb4fe2fb..5313ebf980 100644 --- a/crates/storage/provider/src/providers/state/macros.rs +++ b/crates/storage/provider/src/providers/state/macros.rs @@ -31,19 +31,19 @@ macro_rules! delegate_provider_impls { $crate::providers::state::macros::delegate_impls_to_as_ref!( for $target => StateRootProvider $(where [$($generics)*])? { - fn state_root(&self, state: &crate::BundleStateWithReceipts) -> reth_interfaces::RethResult; + fn state_root(&self, state: &crate::BundleStateWithReceipts) -> reth_interfaces::RethResult; } AccountReader $(where [$($generics)*])? { fn basic_account(&self, address: reth_primitives::Address) -> reth_interfaces::RethResult>; } BlockHashReader $(where [$($generics)*])? { - fn block_hash(&self, number: u64) -> reth_interfaces::RethResult>; - fn canonical_hashes_range(&self, start: reth_primitives::BlockNumber, end: reth_primitives::BlockNumber) -> reth_interfaces::RethResult>; + fn block_hash(&self, number: u64) -> reth_interfaces::RethResult>; + fn canonical_hashes_range(&self, start: reth_primitives::BlockNumber, end: reth_primitives::BlockNumber) -> reth_interfaces::RethResult>; } StateProvider $(where [$($generics)*])?{ fn storage(&self, account: reth_primitives::Address, storage_key: reth_primitives::StorageKey) -> reth_interfaces::RethResult>; - fn proof(&self, address: reth_primitives::Address, keys: &[reth_primitives::H256]) -> reth_interfaces::RethResult<(Vec, reth_primitives::H256, Vec>)>; - fn bytecode_by_hash(&self, code_hash: reth_primitives::H256) -> reth_interfaces::RethResult>; + fn proof(&self, address: reth_primitives::Address, keys: &[reth_primitives::B256]) -> reth_interfaces::RethResult<(Vec, reth_primitives::B256, Vec>)>; + fn bytecode_by_hash(&self, code_hash: reth_primitives::B256) -> reth_interfaces::RethResult>; } ); } diff --git a/crates/storage/provider/src/test_utils/blocks.rs b/crates/storage/provider/src/test_utils/blocks.rs index e978e979cb..53321ebd60 100644 --- a/crates/storage/provider/src/test_utils/blocks.rs +++ b/crates/storage/provider/src/test_utils/blocks.rs @@ -1,18 +1,18 @@ //! 
Dummy blocks and data for tests use crate::{BundleStateWithReceipts, DatabaseProviderRW}; +use alloy_rlp::Decodable; use reth_db::{database::Database, models::StoredBlockBodyIndices, tables}; use reth_primitives::{ - hex_literal::hex, Account, BlockNumber, Bytes, Header, Log, Receipt, SealedBlock, - SealedBlockWithSenders, StorageEntry, TxType, Withdrawal, H160, H256, U256, + b256, hex_literal::hex, Account, Address, BlockNumber, Bytes, Header, Log, Receipt, Receipts, + SealedBlock, SealedBlockWithSenders, StorageEntry, TxType, Withdrawal, B256, U256, }; -use reth_rlp::Decodable; use std::collections::HashMap; /// Assert genesis block pub fn assert_genesis_block(provider: &DatabaseProviderRW<'_, DB>, g: SealedBlock) { let n = g.number; - let h = H256::zero(); + let h = B256::ZERO; let tx = provider; // check if all tables are empty @@ -78,7 +78,7 @@ impl Default for BlockChainTestData { pub fn genesis() -> SealedBlock { SealedBlock { header: Header { number: 0, difficulty: U256::from(1), ..Default::default() } - .seal(H256::zero()), + .seal(B256::ZERO), body: vec![], ommers: vec![], withdrawals: Some(vec![]), @@ -92,15 +92,14 @@ fn block1(number: BlockNumber) -> (SealedBlockWithSenders, BundleStateWithReceip block.withdrawals = Some(vec![Withdrawal::default()]); let mut header = block.header.clone().unseal(); header.number = number; - header.state_root = - H256(hex!("5d035ccb3e75a9057452ff060b773b213ec1fc353426174068edfc3971a0b6bd")); - header.parent_hash = H256::zero(); + header.state_root = b256!("5d035ccb3e75a9057452ff060b773b213ec1fc353426174068edfc3971a0b6bd"); + header.parent_hash = B256::ZERO; block.header = header.seal_slow(); // block changes - let account1: H160 = [0x60; 20].into(); - let account2: H160 = [0x61; 20].into(); - let slot: H256 = H256::from_low_u64_be(5); + let account1: Address = [0x60; 20].into(); + let account2: Address = [0x61; 20].into(); + let slot: B256 = B256::with_last_byte(5); let bundle = BundleStateWithReceipts::new_init( HashMap::from([ @@ -129,41 +128,40 @@ fn block1(number: BlockNumber) -> (SealedBlockWithSenders, BundleStateWithReceip ]), )]), vec![], - vec![vec![Some(Receipt { + Receipts::from_vec(vec![vec![Some(Receipt { tx_type: TxType::EIP2930, success: true, cumulative_gas_used: 300, logs: vec![Log { - address: H160([0x60; 20]), - topics: vec![H256::from_low_u64_be(1), H256::from_low_u64_be(2)], + address: Address::new([0x60; 20]), + topics: vec![B256::with_last_byte(1), B256::with_last_byte(2)], data: Bytes::default(), }], - })]], + })]]), number, ); - (SealedBlockWithSenders { block, senders: vec![H160([0x30; 20])] }, bundle) + (SealedBlockWithSenders { block, senders: vec![Address::new([0x30; 20])] }, bundle) } /// Block two that points to block 1 fn block2( number: BlockNumber, - parent_hash: H256, + parent_hash: B256, ) -> (SealedBlockWithSenders, BundleStateWithReceipts) { let mut block_rlp = 
hex!("f9025ff901f7a0c86e8cc0310ae7c531c758678ddbfd16fc51c8cef8cec650b032de9869e8b94fa01dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347942adc25665018aa1fe0e6bc666dac8fc2697ff9baa050554882fbbda2c2fd93fdc466db9946ea262a67f7a76cc169e714f105ab583da00967f09ef1dfed20c0eacfaa94d5cd4002eda3242ac47eae68972d07b106d192a0e3c8b47fbfc94667ef4cceb17e5cc21e3b1eebd442cebb27f07562b33836290db90100000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008302000001830f42408238108203e800a00000000000000000000000000000000000000000000000000000000000000000880000000000000000f862f860800a83061a8094095e7baea6a6c7c4c2dfeb977efac326af552d8780801ba072ed817487b84ba367d15d2f039b5fc5f087d0a8882fbdf73e8cb49357e1ce30a0403d800545b8fc544f92ce8124e2255f8c3c6af93f28243a120585d4c4c6a2a3c0").as_slice(); let mut block = SealedBlock::decode(&mut block_rlp).unwrap(); block.withdrawals = Some(vec![Withdrawal::default()]); let mut header = block.header.clone().unseal(); header.number = number; - header.state_root = - H256(hex!("90101a13dd059fa5cca99ed93d1dc23657f63626c5b8f993a2ccbdf7446b64f8")); + header.state_root = b256!("90101a13dd059fa5cca99ed93d1dc23657f63626c5b8f993a2ccbdf7446b64f8"); // parent_hash points to block1 hash header.parent_hash = parent_hash; block.header = header.seal_slow(); // block changes - let account: H160 = [0x60; 20].into(); - let slot: H256 = H256::from_low_u64_be(5); + let account: Address = [0x60; 20].into(); + let slot: B256 = B256::with_last_byte(5); let bundle = BundleStateWithReceipts::new_init( HashMap::from([( @@ -185,17 +183,17 @@ fn block2( )]), )]), vec![], - vec![vec![Some(Receipt { + Receipts::from_vec(vec![vec![Some(Receipt { tx_type: TxType::EIP1559, success: false, cumulative_gas_used: 400, logs: vec![Log { - address: H160([0x61; 20]), - topics: vec![H256::from_low_u64_be(3), H256::from_low_u64_be(4)], + address: Address::new([0x61; 20]), + topics: vec![B256::with_last_byte(3), B256::with_last_byte(4)], data: Bytes::default(), }], - })]], + })]]), number, ); - (SealedBlockWithSenders { block, senders: vec![H160([0x31; 20])] }, bundle) + (SealedBlockWithSenders { block, senders: vec![Address::new([0x31; 20])] }, bundle) } diff --git a/crates/storage/provider/src/test_utils/mock.rs b/crates/storage/provider/src/test_utils/mock.rs index fdf938ed37..4ee0ccc56b 100644 --- a/crates/storage/provider/src/test_utils/mock.rs +++ b/crates/storage/provider/src/test_utils/mock.rs @@ -13,12 +13,12 @@ use reth_primitives::{ keccak256, Account, Address, Block, BlockHash, BlockHashOrNumber, BlockId, BlockNumber, BlockWithSenders, Bytecode, Bytes, ChainInfo, ChainSpec, Header, Receipt, SealedBlock, SealedHeader, StorageKey, StorageValue, TransactionMeta, TransactionSigned, - TransactionSignedNoHash, TxHash, TxNumber, H256, U256, + TransactionSignedNoHash, TxHash, TxNumber, B256, U256, }; -use reth_revm_primitives::primitives::{BlockEnv, CfgEnv}; +use revm::primitives::{BlockEnv, CfgEnv}; use std::{ collections::{BTreeMap, HashMap}, - ops::RangeBounds, + ops::{RangeBounds, RangeInclusive}, sync::Arc, }; @@ -26,9 +26,9 @@ use std::{ #[derive(Debug, 
Clone)] pub struct MockEthProvider { /// Local block store - pub blocks: Arc>>, + pub blocks: Arc>>, /// Local header store - pub headers: Arc>>, + pub headers: Arc>>, /// Local account store pub accounts: Arc>>, /// Local chain spec @@ -68,7 +68,7 @@ impl ExtendedAccount { pub fn with_bytecode(mut self, bytecode: Bytes) -> Self { let hash = keccak256(&bytecode); self.account.bytecode_hash = Some(hash); - self.bytecode = Some(Bytecode::new_raw(bytecode.into())); + self.bytecode = Some(Bytecode::new_raw(bytecode)); self } @@ -85,13 +85,13 @@ impl ExtendedAccount { impl MockEthProvider { /// Add block to local block store - pub fn add_block(&self, hash: H256, block: Block) { + pub fn add_block(&self, hash: B256, block: Block) { self.add_header(hash, block.header.clone()); self.blocks.lock().insert(hash, block); } /// Add multiple blocks to local block store - pub fn extend_blocks(&self, iter: impl IntoIterator) { + pub fn extend_blocks(&self, iter: impl IntoIterator) { for (hash, block) in iter.into_iter() { self.add_header(hash, block.header.clone()); self.add_block(hash, block) @@ -99,12 +99,12 @@ impl MockEthProvider { } /// Add header to local header store - pub fn add_header(&self, hash: H256, header: Header) { + pub fn add_header(&self, hash: B256, header: Header) { self.headers.lock().insert(hash, header); } /// Add multiple headers to local header store - pub fn extend_headers(&self, iter: impl IntoIterator) { + pub fn extend_headers(&self, iter: impl IntoIterator) { for (hash, header) in iter.into_iter() { self.add_header(hash, header) } @@ -339,7 +339,7 @@ impl ReceiptProvider for MockEthProvider { impl ReceiptProviderIdExt for MockEthProvider {} impl BlockHashReader for MockEthProvider { - fn block_hash(&self, number: u64) -> RethResult> { + fn block_hash(&self, number: u64) -> RethResult> { let lock = self.blocks.lock(); let hash = lock.iter().find_map(|(hash, b)| (b.number == number).then_some(*hash)); @@ -350,7 +350,7 @@ impl BlockHashReader for MockEthProvider { &self, start: BlockNumber, end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { let range = start..end; let lock = self.blocks.lock(); @@ -387,7 +387,7 @@ impl BlockNumReader for MockEthProvider { self.best_block_number() } - fn block_number(&self, hash: H256) -> RethResult> { + fn block_number(&self, hash: B256) -> RethResult> { let lock = self.blocks.lock(); let num = lock.iter().find_map(|(h, b)| (*h == hash).then_some(b.number)); Ok(num) @@ -409,7 +409,7 @@ impl BlockIdReader for MockEthProvider { } impl BlockReader for MockEthProvider { - fn find_block_by_hash(&self, hash: H256, _source: BlockSource) -> RethResult> { + fn find_block_by_hash(&self, hash: B256, _source: BlockSource) -> RethResult> { self.block(hash.into()) } @@ -440,6 +440,10 @@ impl BlockReader for MockEthProvider { fn block_with_senders(&self, _number: BlockNumber) -> RethResult> { Ok(None) } + + fn block_range(&self, _range: RangeInclusive) -> RethResult> { + Ok(vec![]) + } } impl BlockReaderIdExt for MockEthProvider { @@ -476,7 +480,7 @@ impl AccountReader for MockEthProvider { } impl StateRootProvider for MockEthProvider { - fn state_root(&self, _state: &BundleStateWithReceipts) -> RethResult { + fn state_root(&self, _state: &BundleStateWithReceipts) -> RethResult { todo!() } } @@ -491,7 +495,7 @@ impl StateProvider for MockEthProvider { Ok(lock.get(&account).and_then(|account| account.storage.get(&storage_key)).cloned()) } - fn bytecode_by_hash(&self, code_hash: H256) -> RethResult> { + fn bytecode_by_hash(&self, code_hash: B256) -> 
RethResult> { let lock = self.accounts.lock(); Ok(lock.values().find_map(|account| { match (account.account.bytecode_hash.as_ref(), account.bytecode.as_ref()) { @@ -506,8 +510,8 @@ impl StateProvider for MockEthProvider { fn proof( &self, _address: Address, - _keys: &[H256], - ) -> RethResult<(Vec, H256, Vec>)> { + _keys: &[B256], + ) -> RethResult<(Vec, B256, Vec>)> { todo!() } } @@ -577,7 +581,7 @@ impl StateProviderFactory for MockEthProvider { todo!() } - fn pending_state_by_hash(&self, _block_hash: H256) -> RethResult>> { + fn pending_state_by_hash(&self, _block_hash: B256) -> RethResult>> { todo!() } @@ -610,7 +614,7 @@ impl StateProviderFactory for Arc { todo!() } - fn pending_state_by_hash(&self, _block_hash: H256) -> RethResult>> { + fn pending_state_by_hash(&self, _block_hash: B256) -> RethResult>> { todo!() } diff --git a/crates/storage/provider/src/test_utils/noop.rs b/crates/storage/provider/src/test_utils/noop.rs index 31501f71c1..df50c209c7 100644 --- a/crates/storage/provider/src/test_utils/noop.rs +++ b/crates/storage/provider/src/test_utils/noop.rs @@ -13,10 +13,13 @@ use reth_primitives::{ Account, Address, Block, BlockHash, BlockHashOrNumber, BlockId, BlockNumber, Bytecode, Bytes, ChainInfo, ChainSpec, Header, PruneCheckpoint, PrunePart, Receipt, SealedBlock, SealedHeader, StorageKey, StorageValue, TransactionMeta, TransactionSigned, TransactionSignedNoHash, TxHash, - TxNumber, H256, KECCAK_EMPTY, MAINNET, U256, + TxNumber, B256, KECCAK_EMPTY, MAINNET, U256, +}; +use revm::primitives::{BlockEnv, CfgEnv}; +use std::{ + ops::{RangeBounds, RangeInclusive}, + sync::Arc, }; -use reth_revm_primitives::primitives::{BlockEnv, CfgEnv}; -use std::{ops::RangeBounds, sync::Arc}; /// Supports various api interfaces for testing purposes. #[derive(Debug, Clone, Default, Copy)] @@ -31,7 +34,7 @@ impl ChainSpecProvider for NoopProvider { /// Noop implementation for testing purposes impl BlockHashReader for NoopProvider { - fn block_hash(&self, _number: u64) -> RethResult> { + fn block_hash(&self, _number: u64) -> RethResult> { Ok(None) } @@ -39,7 +42,7 @@ impl BlockHashReader for NoopProvider { &self, _start: BlockNumber, _end: BlockNumber, - ) -> RethResult> { + ) -> RethResult> { Ok(vec![]) } } @@ -57,13 +60,13 @@ impl BlockNumReader for NoopProvider { Ok(0) } - fn block_number(&self, _hash: H256) -> RethResult> { + fn block_number(&self, _hash: B256) -> RethResult> { Ok(None) } } impl BlockReader for NoopProvider { - fn find_block_by_hash(&self, hash: H256, _source: BlockSource) -> RethResult> { + fn find_block_by_hash(&self, hash: B256, _source: BlockSource) -> RethResult> { self.block(hash.into()) } @@ -93,6 +96,10 @@ impl BlockReader for NoopProvider { ) -> RethResult> { Ok(None) } + + fn block_range(&self, _range: RangeInclusive) -> RethResult> { + Ok(vec![]) + } } impl BlockReaderIdExt for NoopProvider { @@ -253,7 +260,7 @@ impl ChangeSetReader for NoopProvider { } impl StateRootProvider for NoopProvider { - fn state_root(&self, _state: &BundleStateWithReceipts) -> RethResult { + fn state_root(&self, _state: &BundleStateWithReceipts) -> RethResult { todo!() } } @@ -267,15 +274,15 @@ impl StateProvider for NoopProvider { Ok(None) } - fn bytecode_by_hash(&self, _code_hash: H256) -> RethResult> { + fn bytecode_by_hash(&self, _code_hash: B256) -> RethResult> { Ok(None) } fn proof( &self, _address: Address, - _keys: &[H256], - ) -> RethResult<(Vec, H256, Vec>)> { + _keys: &[B256], + ) -> RethResult<(Vec, B256, Vec>)> { Ok((vec![], KECCAK_EMPTY, vec![])) } } @@ -345,7 +352,7 @@ impl 
StateProviderFactory for NoopProvider { Ok(Box::new(*self)) } - fn pending_state_by_hash(&self, _block_hash: H256) -> RethResult>> { + fn pending_state_by_hash(&self, _block_hash: B256) -> RethResult>> { Ok(Some(Box::new(*self))) } diff --git a/crates/storage/provider/src/traits/block.rs b/crates/storage/provider/src/traits/block.rs index 4ef539418d..122955f455 100644 --- a/crates/storage/provider/src/traits/block.rs +++ b/crates/storage/provider/src/traits/block.rs @@ -8,7 +8,7 @@ use reth_interfaces::RethResult; use reth_primitives::{ Address, Block, BlockHashOrNumber, BlockId, BlockNumber, BlockNumberOrTag, BlockWithSenders, ChainSpec, Header, PruneModes, Receipt, SealedBlock, SealedBlockWithSenders, SealedHeader, - H256, + B256, }; use std::ops::RangeInclusive; @@ -62,7 +62,7 @@ pub trait BlockReader: /// Note: this only operates on the hash because the number might be ambiguous. /// /// Returns `None` if block is not found. - fn find_block_by_hash(&self, hash: H256, source: BlockSource) -> RethResult>; + fn find_block_by_hash(&self, hash: B256, source: BlockSource) -> RethResult>; /// Returns the block with given id from the database. /// @@ -86,7 +86,7 @@ pub trait BlockReader: /// Returns the block with matching hash from the database. /// /// Returns `None` if block is not found. - fn block_by_hash(&self, hash: H256) -> RethResult> { + fn block_by_hash(&self, hash: B256) -> RethResult> { self.block(hash.into()) } @@ -106,6 +106,11 @@ pub trait BlockReader: /// /// Returns `None` if block is not found. fn block_with_senders(&self, number: BlockNumber) -> RethResult>; + + /// Returns all blocks in the given inclusive range. + /// + /// Note: returns only available blocks + fn block_range(&self, range: RangeInclusive) -> RethResult>; } /// Trait extension for `BlockReader`, for types that implement `BlockId` conversion. diff --git a/crates/storage/provider/src/traits/block_hash.rs b/crates/storage/provider/src/traits/block_hash.rs index cd3ee9ccee..e7b9e91c03 100644 --- a/crates/storage/provider/src/traits/block_hash.rs +++ b/crates/storage/provider/src/traits/block_hash.rs @@ -1,17 +1,17 @@ use auto_impl::auto_impl; use reth_interfaces::RethResult; -use reth_primitives::{BlockHashOrNumber, BlockNumber, H256}; +use reth_primitives::{BlockHashOrNumber, BlockNumber, B256}; /// Client trait for fetching block hashes by number. #[auto_impl(&, Arc, Box)] pub trait BlockHashReader: Send + Sync { /// Get the hash of the block with the given number. Returns `None` if no block with this number /// exists. - fn block_hash(&self, number: BlockNumber) -> RethResult>; + fn block_hash(&self, number: BlockNumber) -> RethResult>; /// Get the hash of the block with the given number. Returns `None` if no block with this number /// exists. 
- fn convert_block_hash(&self, hash_or_number: BlockHashOrNumber) -> RethResult> { + fn convert_block_hash(&self, hash_or_number: BlockHashOrNumber) -> RethResult> { match hash_or_number { BlockHashOrNumber::Hash(hash) => Ok(Some(hash)), BlockHashOrNumber::Number(num) => self.block_hash(num), @@ -24,5 +24,5 @@ pub trait BlockHashReader: Send + Sync { /// /// Note: The range is `start..end`, so the expected result is `[start..end)` fn canonical_hashes_range(&self, start: BlockNumber, end: BlockNumber) - -> RethResult>; + -> RethResult>; } diff --git a/crates/storage/provider/src/traits/block_id.rs b/crates/storage/provider/src/traits/block_id.rs index e29441d91f..ee7f548807 100644 --- a/crates/storage/provider/src/traits/block_id.rs +++ b/crates/storage/provider/src/traits/block_id.rs @@ -1,6 +1,6 @@ use super::BlockHashReader; use reth_interfaces::{provider::ProviderError, RethResult}; -use reth_primitives::{BlockHashOrNumber, BlockId, BlockNumber, BlockNumberOrTag, ChainInfo, H256}; +use reth_primitives::{BlockHashOrNumber, BlockId, BlockNumber, BlockNumberOrTag, ChainInfo, B256}; /// Client trait for getting important block numbers (such as the latest block number), converting /// block hashes to numbers, and fetching a block hash from its block number. @@ -18,7 +18,7 @@ pub trait BlockNumReader: BlockHashReader + Send + Sync { fn last_block_number(&self) -> RethResult; /// Gets the `BlockNumber` for the given hash. Returns `None` if no block with this hash exists. - fn block_number(&self, hash: H256) -> RethResult>; + fn block_number(&self, hash: B256) -> RethResult>; /// Gets the block number for the given `BlockHashOrNumber`. Returns `None` if no block with /// this hash exists. If the `BlockHashOrNumber` is a `Number`, it is returned as is. @@ -31,7 +31,7 @@ pub trait BlockNumReader: BlockHashReader + Send + Sync { /// Gets the block hash for the given `BlockHashOrNumber`. Returns `None` if no block with this /// number exists. If the `BlockHashOrNumber` is a `Hash`, it is returned as is. - fn convert_number(&self, id: BlockHashOrNumber) -> RethResult> { + fn convert_number(&self, id: BlockHashOrNumber) -> RethResult> { match id { BlockHashOrNumber::Hash(hash) => Ok(Some(hash)), BlockHashOrNumber::Number(num) => self.block_hash(num), @@ -74,7 +74,7 @@ pub trait BlockIdReader: BlockNumReader + Send + Sync { } /// Get the hash of the block by matching the given id. - fn block_hash_for_id(&self, block_id: BlockId) -> RethResult> { + fn block_hash_for_id(&self, block_id: BlockId) -> RethResult> { match block_id { BlockId::Hash(hash) => Ok(Some(hash.into())), BlockId::Number(num) => { @@ -124,12 +124,12 @@ pub trait BlockIdReader: BlockNumReader + Send + Sync { } /// Get the safe block hash. - fn safe_block_hash(&self) -> RethResult> { + fn safe_block_hash(&self) -> RethResult> { self.safe_block_num_hash().map(|res_opt| res_opt.map(|num_hash| num_hash.hash)) } /// Get the finalized block hash. 
- fn finalized_block_hash(&self) -> RethResult> { + fn finalized_block_hash(&self) -> RethResult> { self.finalized_block_num_hash().map(|res_opt| res_opt.map(|num_hash| num_hash.hash)) } } diff --git a/crates/storage/provider/src/traits/evm_env.rs b/crates/storage/provider/src/traits/evm_env.rs index 96bf51781d..23c13440bc 100644 --- a/crates/storage/provider/src/traits/evm_env.rs +++ b/crates/storage/provider/src/traits/evm_env.rs @@ -1,9 +1,9 @@ use reth_interfaces::RethResult; use reth_primitives::{BlockHashOrNumber, Header}; -use reth_revm_primitives::primitives::{BlockEnv, CfgEnv}; +use revm::primitives::{BlockEnv, CfgEnv}; /// A provider type that knows chain specific information required to configure an -/// [Env](reth_revm_primitives::primitives::Env) +/// [Env](revm::primitives::Env). /// /// This type is mainly used to provide required data to configure the EVM environment. #[auto_impl::auto_impl(&, Arc)] diff --git a/crates/storage/provider/src/traits/hashing.rs b/crates/storage/provider/src/traits/hashing.rs index ca1786ee07..dfeeabd186 100644 --- a/crates/storage/provider/src/traits/hashing.rs +++ b/crates/storage/provider/src/traits/hashing.rs @@ -1,7 +1,7 @@ use auto_impl::auto_impl; use reth_db::models::BlockNumberAddress; use reth_interfaces::RethResult; -use reth_primitives::{Account, Address, BlockNumber, StorageEntry, H256}; +use reth_primitives::{Account, Address, BlockNumber, StorageEntry, B256}; use std::{ collections::{BTreeMap, BTreeSet, HashMap}, ops::{Range, RangeInclusive}, @@ -18,7 +18,7 @@ pub trait HashingWriter: Send + Sync { fn unwind_account_hashing( &self, range: RangeInclusive, - ) -> RethResult>>; + ) -> RethResult>>; /// Inserts all accounts into [reth_db::tables::AccountHistory] table. /// @@ -28,7 +28,7 @@ pub trait HashingWriter: Send + Sync { fn insert_account_for_hashing( &self, accounts: impl IntoIterator)>, - ) -> RethResult>>; + ) -> RethResult>>; /// Unwind and clear storage hashing /// @@ -38,7 +38,7 @@ pub trait HashingWriter: Send + Sync { fn unwind_storage_hashing( &self, range: Range, - ) -> RethResult>>; + ) -> RethResult>>; /// Iterates over storages and inserts them to hashing table. /// @@ -48,7 +48,7 @@ pub trait HashingWriter: Send + Sync { fn insert_storage_for_hashing( &self, storages: impl IntoIterator)>, - ) -> RethResult>>; + ) -> RethResult>>; /// Calculate the hashes of all changed accounts and storages, and finally calculate the state /// root. @@ -59,7 +59,7 @@ pub trait HashingWriter: Send + Sync { fn insert_hashes( &self, range: RangeInclusive, - end_block_hash: H256, - expected_state_root: H256, + end_block_hash: B256, + expected_state_root: B256, ) -> RethResult<()>; } diff --git a/crates/storage/provider/src/traits/history.rs b/crates/storage/provider/src/traits/history.rs index 58a56a2b20..bd563fde94 100644 --- a/crates/storage/provider/src/traits/history.rs +++ b/crates/storage/provider/src/traits/history.rs @@ -1,7 +1,7 @@ use auto_impl::auto_impl; use reth_db::models::BlockNumberAddress; use reth_interfaces::RethResult; -use reth_primitives::{Address, BlockNumber, H256}; +use reth_primitives::{Address, BlockNumber, B256}; use std::{ collections::BTreeMap, ops::{Range, RangeInclusive}, @@ -33,7 +33,7 @@ pub trait HistoryWriter: Send + Sync { /// Insert storage change index to database. 
Used inside StorageHistoryIndex stage fn insert_storage_history_index( &self, - storage_transitions: BTreeMap<(Address, H256), Vec>, + storage_transitions: BTreeMap<(Address, B256), Vec>, ) -> RethResult<()>; /// Read account/storage changesets and update account/storage history indices. diff --git a/crates/storage/provider/src/traits/state.rs b/crates/storage/provider/src/traits/state.rs index 3aa8cbad7b..e7e572b344 100644 --- a/crates/storage/provider/src/traits/state.rs +++ b/crates/storage/provider/src/traits/state.rs @@ -4,7 +4,7 @@ use auto_impl::auto_impl; use reth_interfaces::{provider::ProviderError, RethResult}; use reth_primitives::{ Address, BlockHash, BlockId, BlockNumHash, BlockNumber, BlockNumberOrTag, Bytecode, Bytes, - StorageKey, StorageValue, H256, KECCAK_EMPTY, U256, + StorageKey, StorageValue, B256, KECCAK_EMPTY, U256, }; /// Type alias of boxed [StateProvider]. @@ -21,14 +21,14 @@ pub trait StateProvider: BlockHashReader + AccountReader + StateRootProvider + S ) -> RethResult>; /// Get account code by its hash - fn bytecode_by_hash(&self, code_hash: H256) -> RethResult>; + fn bytecode_by_hash(&self, code_hash: B256) -> RethResult>; /// Get account and storage proofs. fn proof( &self, address: Address, - keys: &[H256], - ) -> RethResult<(Vec, H256, Vec>)>; + keys: &[B256], + ) -> RethResult<(Vec, B256, Vec>)>; /// Get account code by its address. /// @@ -178,7 +178,7 @@ pub trait StateProviderFactory: BlockIdReader + Send + Sync { /// Represents the state at the block that extends the canonical chain. /// /// If the block couldn't be found, returns `None`. - fn pending_state_by_hash(&self, block_hash: H256) -> RethResult>>; + fn pending_state_by_hash(&self, block_hash: B256) -> RethResult>>; /// Return a [StateProvider] that contains post state data provider. /// Used to inspect or execute transaction on the pending state. @@ -234,5 +234,5 @@ pub trait BundleStateDataProvider: Send + Sync { #[auto_impl[Box,&, Arc]] pub trait StateRootProvider: Send + Sync { /// Returns the state root of the BundleState on top of the current state. - fn state_root(&self, post_state: &BundleStateWithReceipts) -> RethResult; + fn state_root(&self, post_state: &BundleStateWithReceipts) -> RethResult; } diff --git a/crates/storage/provider/src/traits/storage.rs b/crates/storage/provider/src/traits/storage.rs index bb740fe1c2..764ea1c017 100644 --- a/crates/storage/provider/src/traits/storage.rs +++ b/crates/storage/provider/src/traits/storage.rs @@ -5,7 +5,7 @@ use std::{ use auto_impl::auto_impl; use reth_interfaces::RethResult; -use reth_primitives::{Address, BlockNumber, StorageEntry, H256}; +use reth_primitives::{Address, BlockNumber, StorageEntry, B256}; /// Storage reader #[auto_impl(&, Arc, Box)] @@ -13,14 +13,14 @@ pub trait StorageReader: Send + Sync { /// Get plainstate storages for addresses and storage keys. fn plainstate_storages( &self, - addresses_with_keys: impl IntoIterator)>, + addresses_with_keys: impl IntoIterator)>, ) -> RethResult)>>; /// Iterate over storage changesets and return all storage slots that were changed. fn changed_storages_with_range( &self, range: RangeInclusive, - ) -> RethResult>>; + ) -> RethResult>>; /// Iterate over storage changesets and return all storage slots that were changed alongside /// each specific set of blocks. 
@@ -29,5 +29,5 @@ pub trait StorageReader: Send + Sync { fn changed_storages_and_blocks_with_range( &self, range: RangeInclusive, - ) -> RethResult>>; + ) -> RethResult>>; } diff --git a/crates/tasks/src/lib.rs b/crates/tasks/src/lib.rs index 05721ac4e5..220c6aea5d 100644 --- a/crates/tasks/src/lib.rs +++ b/crates/tasks/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/tracing/src/lib.rs b/crates/tracing/src/lib.rs index 9deb46b257..61481013ff 100644 --- a/crates/tracing/src/lib.rs +++ b/crates/tracing/src/lib.rs @@ -11,7 +11,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/transaction-pool/Cargo.toml b/crates/transaction-pool/Cargo.toml index 0ac9379d2a..b99a02bf9c 100644 --- a/crates/transaction-pool/Cargo.toml +++ b/crates/transaction-pool/Cargo.toml @@ -21,9 +21,10 @@ normal = [ reth-primitives.workspace = true reth-provider.workspace = true reth-interfaces.workspace = true -reth-rlp.workspace = true reth-tasks.workspace = true +alloy-rlp.workspace = true + # async/futures async-trait.workspace = true futures-util.workspace = true diff --git a/crates/transaction-pool/src/blobstore/mem.rs b/crates/transaction-pool/src/blobstore/mem.rs index 2a2d2fd861..67528fe44c 100644 --- a/crates/transaction-pool/src/blobstore/mem.rs +++ b/crates/transaction-pool/src/blobstore/mem.rs @@ -1,6 +1,6 @@ use crate::blobstore::{BlobStore, BlobStoreError, BlobTransactionSidecar}; use parking_lot::RwLock; -use reth_primitives::H256; +use reth_primitives::B256; use std::{ collections::HashMap, sync::{atomic::AtomicUsize, Arc}, @@ -15,7 +15,7 @@ pub struct InMemoryBlobStore { #[derive(Debug, Default)] struct InMemoryBlobStoreInner { /// Storage for all blob data. 
- store: RwLock>, + store: RwLock>, data_size: AtomicUsize, num_blobs: AtomicUsize, } @@ -37,14 +37,14 @@ impl InMemoryBlobStoreInner { } impl BlobStore for InMemoryBlobStore { - fn insert(&self, tx: H256, data: BlobTransactionSidecar) -> Result<(), BlobStoreError> { + fn insert(&self, tx: B256, data: BlobTransactionSidecar) -> Result<(), BlobStoreError> { let mut store = self.inner.store.write(); self.inner.add_size(insert_size(&mut store, tx, data)); self.inner.update_len(store.len()); Ok(()) } - fn insert_all(&self, txs: Vec<(H256, BlobTransactionSidecar)>) -> Result<(), BlobStoreError> { + fn insert_all(&self, txs: Vec<(B256, BlobTransactionSidecar)>) -> Result<(), BlobStoreError> { if txs.is_empty() { return Ok(()) } @@ -59,7 +59,7 @@ impl BlobStore for InMemoryBlobStore { Ok(()) } - fn delete(&self, tx: H256) -> Result<(), BlobStoreError> { + fn delete(&self, tx: B256) -> Result<(), BlobStoreError> { let mut store = self.inner.store.write(); let sub = remove_size(&mut store, &tx); self.inner.sub_size(sub); @@ -67,7 +67,7 @@ impl BlobStore for InMemoryBlobStore { Ok(()) } - fn delete_all(&self, txs: Vec) -> Result<(), BlobStoreError> { + fn delete_all(&self, txs: Vec) -> Result<(), BlobStoreError> { if txs.is_empty() { return Ok(()) } @@ -82,15 +82,15 @@ impl BlobStore for InMemoryBlobStore { } // Retrieves the decoded blob data for the given transaction hash. - fn get(&self, tx: H256) -> Result, BlobStoreError> { + fn get(&self, tx: B256) -> Result, BlobStoreError> { let store = self.inner.store.read(); Ok(store.get(&tx).cloned()) } fn get_all( &self, - txs: Vec, - ) -> Result, BlobStoreError> { + txs: Vec, + ) -> Result, BlobStoreError> { let mut items = Vec::with_capacity(txs.len()); let store = self.inner.store.read(); for tx in txs { @@ -102,7 +102,7 @@ impl BlobStore for InMemoryBlobStore { Ok(items) } - fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { + fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { let mut items = Vec::with_capacity(txs.len()); let store = self.inner.store.read(); for tx in txs { @@ -127,7 +127,7 @@ impl BlobStore for InMemoryBlobStore { /// Removes the given blob from the store and returns the size of the blob that was removed. #[inline] -fn remove_size(store: &mut HashMap, tx: &H256) -> usize { +fn remove_size(store: &mut HashMap, tx: &B256) -> usize { store.remove(tx).map(|rem| rem.size()).unwrap_or_default() } @@ -136,8 +136,8 @@ fn remove_size(store: &mut HashMap, tx: &H256) -> /// We don't need to handle the size updates for replacements because transactions are unique. #[inline] fn insert_size( - store: &mut HashMap, - tx: H256, + store: &mut HashMap, + tx: B256, blob: BlobTransactionSidecar, ) -> usize { let add = blob.size(); diff --git a/crates/transaction-pool/src/blobstore/mod.rs b/crates/transaction-pool/src/blobstore/mod.rs index ce2e875bf9..1a09fe4613 100644 --- a/crates/transaction-pool/src/blobstore/mod.rs +++ b/crates/transaction-pool/src/blobstore/mod.rs @@ -2,7 +2,7 @@ pub use mem::InMemoryBlobStore; pub use noop::NoopBlobStore; -use reth_primitives::{BlobTransactionSidecar, H256}; +use reth_primitives::{BlobTransactionSidecar, B256}; use std::fmt; pub use tracker::{BlobStoreCanonTracker, BlobStoreUpdates}; @@ -18,19 +18,19 @@ mod tracker; /// Note: this is Clone because it is expected to be wrapped in an Arc. 
pub trait BlobStore: fmt::Debug + Send + Sync + 'static { /// Inserts the blob sidecar into the store - fn insert(&self, tx: H256, data: BlobTransactionSidecar) -> Result<(), BlobStoreError>; + fn insert(&self, tx: B256, data: BlobTransactionSidecar) -> Result<(), BlobStoreError>; /// Inserts multiple blob sidecars into the store - fn insert_all(&self, txs: Vec<(H256, BlobTransactionSidecar)>) -> Result<(), BlobStoreError>; + fn insert_all(&self, txs: Vec<(B256, BlobTransactionSidecar)>) -> Result<(), BlobStoreError>; /// Deletes the blob sidecar from the store - fn delete(&self, tx: H256) -> Result<(), BlobStoreError>; + fn delete(&self, tx: B256) -> Result<(), BlobStoreError>; /// Deletes multiple blob sidecars from the store - fn delete_all(&self, txs: Vec) -> Result<(), BlobStoreError>; + fn delete_all(&self, txs: Vec) -> Result<(), BlobStoreError>; /// Retrieves the decoded blob data for the given transaction hash. - fn get(&self, tx: H256) -> Result, BlobStoreError>; + fn get(&self, tx: B256) -> Result, BlobStoreError>; /// Retrieves all decoded blob data for the given transaction hashes. /// @@ -38,14 +38,14 @@ pub trait BlobStore: fmt::Debug + Send + Sync + 'static { /// If there's no blob it will not be returned. fn get_all( &self, - txs: Vec, - ) -> Result, BlobStoreError>; + txs: Vec, + ) -> Result, BlobStoreError>; /// Returns the exact [BlobTransactionSidecar] for the given transaction hashes in the order /// they were requested. /// /// Returns an error if any of the blobs are not found in the blob store. - fn get_exact(&self, txs: Vec) -> Result, BlobStoreError>; + fn get_exact(&self, txs: Vec) -> Result, BlobStoreError>; /// Data size of all transactions in the blob store. fn data_size_hint(&self) -> Option; @@ -59,10 +59,10 @@ pub trait BlobStore: fmt::Debug + Send + Sync + 'static { pub enum BlobStoreError { /// Thrown if the blob sidecar is not found for a given transaction hash but was required. #[error("blob sidecar not found for transaction {0:?}")] - MissingSidecar(H256), + MissingSidecar(B256), /// Failed to decode the stored blob data. #[error("failed to decode blob data: {0}")] - DecodeError(#[from] reth_rlp::DecodeError), + DecodeError(#[from] alloy_rlp::Error), /// Other implementation specific error. 
#[error(transparent)] Other(Box), diff --git a/crates/transaction-pool/src/blobstore/noop.rs b/crates/transaction-pool/src/blobstore/noop.rs index 431d34cc8f..b3d4915dd1 100644 --- a/crates/transaction-pool/src/blobstore/noop.rs +++ b/crates/transaction-pool/src/blobstore/noop.rs @@ -1,5 +1,5 @@ use crate::blobstore::{BlobStore, BlobStoreError, BlobTransactionSidecar}; -use reth_primitives::H256; +use reth_primitives::B256; /// A blobstore implementation that does nothing #[derive(Clone, Copy, Debug, PartialOrd, PartialEq, Default)] @@ -7,34 +7,34 @@ use reth_primitives::H256; pub struct NoopBlobStore; impl BlobStore for NoopBlobStore { - fn insert(&self, _tx: H256, _data: BlobTransactionSidecar) -> Result<(), BlobStoreError> { + fn insert(&self, _tx: B256, _data: BlobTransactionSidecar) -> Result<(), BlobStoreError> { Ok(()) } - fn insert_all(&self, _txs: Vec<(H256, BlobTransactionSidecar)>) -> Result<(), BlobStoreError> { + fn insert_all(&self, _txs: Vec<(B256, BlobTransactionSidecar)>) -> Result<(), BlobStoreError> { Ok(()) } - fn delete(&self, _tx: H256) -> Result<(), BlobStoreError> { + fn delete(&self, _tx: B256) -> Result<(), BlobStoreError> { Ok(()) } - fn delete_all(&self, _txs: Vec) -> Result<(), BlobStoreError> { + fn delete_all(&self, _txs: Vec) -> Result<(), BlobStoreError> { Ok(()) } - fn get(&self, _tx: H256) -> Result, BlobStoreError> { + fn get(&self, _tx: B256) -> Result, BlobStoreError> { Ok(None) } fn get_all( &self, - _txs: Vec, - ) -> Result, BlobStoreError> { + _txs: Vec, + ) -> Result, BlobStoreError> { Ok(vec![]) } - fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { + fn get_exact(&self, txs: Vec) -> Result, BlobStoreError> { if txs.is_empty() { return Ok(vec![]) } diff --git a/crates/transaction-pool/src/blobstore/tracker.rs b/crates/transaction-pool/src/blobstore/tracker.rs index 20461e1126..a3dd30bca0 100644 --- a/crates/transaction-pool/src/blobstore/tracker.rs +++ b/crates/transaction-pool/src/blobstore/tracker.rs @@ -1,6 +1,6 @@ //! Support for maintaining the blob pool. -use reth_primitives::{BlockNumber, H256}; +use reth_primitives::{BlockNumber, B256}; use reth_provider::chain::ChainBlocks; use std::collections::BTreeMap; @@ -8,7 +8,7 @@ use std::collections::BTreeMap; #[derive(Debug, Default, Eq, PartialEq)] pub struct BlobStoreCanonTracker { /// Keeps track of the blob transactions included in blocks. - blob_txs_in_blocks: BTreeMap>, + blob_txs_in_blocks: BTreeMap>, } impl BlobStoreCanonTracker { @@ -16,7 +16,7 @@ impl BlobStoreCanonTracker { pub fn add_block( &mut self, block_number: BlockNumber, - blob_txs: impl IntoIterator, + blob_txs: impl IntoIterator, ) { self.blob_txs_in_blocks.insert(block_number, blob_txs.into_iter().collect()); } @@ -24,7 +24,7 @@ impl BlobStoreCanonTracker { /// Adds all blocks to the tracked list of blocks. pub fn add_blocks( &mut self, - blocks: impl IntoIterator)>, + blocks: impl IntoIterator)>, ) { for (block_number, blob_txs) in blocks { self.add_block(block_number, blob_txs); @@ -66,7 +66,7 @@ pub enum BlobStoreUpdates { /// No updates. None, /// Delete the given finalized transactions from the blob store. 
- Finalized(Vec), + Finalized(Vec), } #[cfg(test)] @@ -77,9 +77,9 @@ mod tests { fn test_finalized_tracker() { let mut tracker = BlobStoreCanonTracker::default(); - let block1 = vec![H256::random()]; - let block2 = vec![H256::random()]; - let block3 = vec![H256::random()]; + let block1 = vec![B256::random()]; + let block2 = vec![B256::random()]; + let block3 = vec![B256::random()]; tracker.add_block(1, block1.clone()); tracker.add_block(2, block2.clone()); tracker.add_block(3, block3.clone()); diff --git a/crates/transaction-pool/src/lib.rs b/crates/transaction-pool/src/lib.rs index 09955b1e38..c2bab47832 100644 --- a/crates/transaction-pool/src/lib.rs +++ b/crates/transaction-pool/src/lib.rs @@ -138,7 +138,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] @@ -154,6 +154,7 @@ use std::{ }; use tokio::sync::mpsc::Receiver; use tracing::{instrument, trace}; +use traits::BestTransactionsAttributes; pub use crate::{ blobstore::{BlobStore, BlobStoreError}, @@ -419,6 +420,13 @@ where self.pool.best_transactions_with_base_fee(base_fee) } + fn best_transactions_with_attributes( + &self, + best_transactions_attributes: BestTransactionsAttributes, + ) -> Box>>> { + self.pool.best_transactions_with_attributes(best_transactions_attributes) + } + fn pending_transactions(&self) -> Vec>> { self.pool.pending_transactions() } diff --git a/crates/transaction-pool/src/noop.rs b/crates/transaction-pool/src/noop.rs index 0f313644d3..0ba41a6fab 100644 --- a/crates/transaction-pool/src/noop.rs +++ b/crates/transaction-pool/src/noop.rs @@ -6,7 +6,10 @@ use crate::{ blobstore::BlobStoreError, error::PoolError, - traits::{GetPooledTransactionLimit, NewBlobSidecar, TransactionListenerKind}, + traits::{ + BestTransactionsAttributes, GetPooledTransactionLimit, NewBlobSidecar, + TransactionListenerKind, + }, validate::ValidTransaction, AllPoolTransactions, AllTransactionsEvents, BestTransactions, BlockInfo, EthPooledTransaction, NewTransactionEvent, PoolResult, PoolSize, PoolTransaction, PropagatedTransactions, @@ -144,6 +147,13 @@ impl TransactionPool for NoopTransactionPool { Box::new(std::iter::empty()) } + fn best_transactions_with_attributes( + &self, + _: BestTransactionsAttributes, + ) -> Box>>> { + Box::new(std::iter::empty()) + } + fn pending_transactions(&self) -> Vec>> { vec![] } diff --git a/crates/transaction-pool/src/pool/best.rs b/crates/transaction-pool/src/pool/best.rs index fccb4c55ba..663274e76c 100644 --- a/crates/transaction-pool/src/pool/best.rs +++ b/crates/transaction-pool/src/pool/best.rs @@ -2,7 +2,7 @@ use crate::{ identifier::TransactionId, pool::pending::PendingTransaction, PoolTransaction, TransactionOrdering, ValidPoolTransaction, }; -use reth_primitives::H256 as TxHash; +use reth_primitives::B256 as TxHash; use std::{ collections::{BTreeMap, BTreeSet, HashSet}, sync::Arc, diff --git a/crates/transaction-pool/src/pool/blob.rs b/crates/transaction-pool/src/pool/blob.rs index 8ee1fb5cac..65845f9659 100644 --- a/crates/transaction-pool/src/pool/blob.rs +++ b/crates/transaction-pool/src/pool/blob.rs @@ -1,6 +1,7 @@ #![allow(dead_code, unused)] use crate::{ - identifier::TransactionId, 
pool::size::SizeTracker, PoolTransaction, ValidPoolTransaction, + identifier::TransactionId, pool::size::SizeTracker, traits::BestTransactionsAttributes, + PoolTransaction, ValidPoolTransaction, }; use std::{ cmp::Ordering, @@ -76,6 +77,14 @@ impl BlobTransactions { Some(tx) } + /// Returns all transactions that satisfy the given basefee and blob_fee. + pub(crate) fn satisfy_attributes( + &self, + best_transactions_attributes: BestTransactionsAttributes, + ) -> Vec>> { + Vec::new() + } + fn next_id(&mut self) -> u64 { let id = self.submission_id; self.submission_id = self.submission_id.wrapping_add(1); diff --git a/crates/transaction-pool/src/pool/events.rs b/crates/transaction-pool/src/pool/events.rs index 3556337a6d..c3a67b8bd0 100644 --- a/crates/transaction-pool/src/pool/events.rs +++ b/crates/transaction-pool/src/pool/events.rs @@ -1,5 +1,5 @@ use crate::{traits::PropagateKind, PoolTransaction, ValidPoolTransaction}; -use reth_primitives::{TxHash, H256}; +use reth_primitives::{TxHash, B256}; use std::sync::Arc; #[cfg(feature = "serde")] @@ -17,7 +17,7 @@ pub enum FullTransactionEvent { /// The hash of the mined transaction. tx_hash: TxHash, /// The hash of the mined block that contains the transaction. - block_hash: H256, + block_hash: B256, }, /// Transaction has been replaced by the transaction belonging to the hash. /// @@ -63,7 +63,7 @@ pub enum TransactionEvent { /// Transaction has been added to the queued pool. Queued, /// Transaction has been included in the block belonging to this hash. - Mined(H256), + Mined(B256), /// Transaction has been replaced by the transaction belonging to the hash. /// /// E.g. same (sender + nonce) pair diff --git a/crates/transaction-pool/src/pool/listener.rs b/crates/transaction-pool/src/pool/listener.rs index c64fa71f5e..aca64c9cd8 100644 --- a/crates/transaction-pool/src/pool/listener.rs +++ b/crates/transaction-pool/src/pool/listener.rs @@ -6,7 +6,7 @@ use crate::{ PoolTransaction, ValidPoolTransaction, }; use futures_util::Stream; -use reth_primitives::{TxHash, H256}; +use reth_primitives::{TxHash, B256}; use std::{ collections::{hash_map::Entry, HashMap}, pin::Pin, @@ -166,7 +166,7 @@ impl PoolEventBroadcast { } /// Notify listeners that the transaction was mined - pub(crate) fn mined(&mut self, tx: &TxHash, block_hash: H256) { + pub(crate) fn mined(&mut self, tx: &TxHash, block_hash: B256) { self.broadcast_event( tx, TransactionEvent::Mined(block_hash), diff --git a/crates/transaction-pool/src/pool/mod.rs b/crates/transaction-pool/src/pool/mod.rs index d4823b3d4f..c34d4b28da 100644 --- a/crates/transaction-pool/src/pool/mod.rs +++ b/crates/transaction-pool/src/pool/mod.rs @@ -74,8 +74,8 @@ use crate::{ txpool::{SenderInfo, TxPool}, }, traits::{ - AllPoolTransactions, BlockInfo, NewTransactionEvent, PoolSize, PoolTransaction, - PropagatedTransactions, TransactionOrigin, + AllPoolTransactions, BestTransactionsAttributes, BlockInfo, NewTransactionEvent, PoolSize, + PoolTransaction, PropagatedTransactions, TransactionOrigin, }, validate::{TransactionValidationOutcome, ValidPoolTransaction}, CanonicalStateUpdate, ChangedAccount, PoolConfig, TransactionOrdering, TransactionValidator, @@ -84,7 +84,7 @@ use best::BestTransactions; use parking_lot::{Mutex, RwLock}; use reth_primitives::{ Address, BlobTransaction, BlobTransactionSidecar, IntoRecoveredTransaction, - PooledTransactionsElement, TransactionSigned, TxHash, H256, + PooledTransactionsElement, TransactionSigned, TxHash, B256, }; use std::{ collections::{HashMap, HashSet}, @@ -106,8 +106,8 @@ 
use crate::{ traits::{GetPooledTransactionLimit, NewBlobSidecar, TransactionListenerKind}, validate::ValidTransaction, }; +use alloy_rlp::Encodable; pub use listener::{AllTransactionsEvents, TransactionEvents}; -use reth_rlp::Encodable; mod best; mod blob; @@ -664,6 +664,16 @@ where self.pool.read().best_transactions_with_base_fee(base_fee) } + /// Returns an iterator that yields transactions that are ready to be included in the block with + /// the given base fee and optional blob fee attributes. + pub(crate) fn best_transactions_with_attributes( + &self, + best_transactions_attributes: BestTransactionsAttributes, + ) -> Box>>> + { + self.pool.read().best_transactions_with_attributes(best_transactions_attributes) + } + /// Returns all transactions from the pending sub-pool pub(crate) fn pending_transactions(&self) -> Vec>> { self.pool.read().pending_transactions() @@ -857,7 +867,7 @@ impl AddedPendingTransaction { pub(crate) fn pending_transactions( &self, kind: TransactionListenerKind, - ) -> impl Iterator + '_ { + ) -> impl Iterator + '_ { let iter = std::iter::once(&self.transaction).chain(self.promoted.iter()); PendingTransactionIter { kind, iter } } @@ -878,7 +888,7 @@ where Iter: Iterator>>, T: PoolTransaction + 'a, { - type Item = H256; + type Item = B256; fn next(&mut self) -> Option { loop { @@ -934,7 +944,7 @@ impl AddedTransaction { } /// Returns the hash of the replaced transaction if it is a blob transaction. - pub(crate) fn replaced_blob_transaction(&self) -> Option { + pub(crate) fn replaced_blob_transaction(&self) -> Option { self.replaced().filter(|tx| tx.transaction.is_eip4844()).map(|tx| *tx.transaction.hash()) } @@ -981,7 +991,7 @@ impl AddedTransaction { #[derive(Debug)] pub(crate) struct OnNewCanonicalStateOutcome { /// Hash of the block. - pub(crate) block_hash: H256, + pub(crate) block_hash: B256, /// All mined transactions. pub(crate) mined: Vec, /// Transactions promoted to the ready queue. @@ -999,7 +1009,7 @@ impl OnNewCanonicalStateOutcome { pub(crate) fn pending_transactions( &self, kind: TransactionListenerKind, - ) -> impl Iterator + '_ { + ) -> impl Iterator + '_ { let iter = self.promoted.iter(); PendingTransactionIter { kind, iter } } diff --git a/crates/transaction-pool/src/pool/state.rs b/crates/transaction-pool/src/pool/state.rs index d0282311e1..27a7f14869 100644 --- a/crates/transaction-pool/src/pool/state.rs +++ b/crates/transaction-pool/src/pool/state.rs @@ -5,31 +5,37 @@ bitflags::bitflags! { #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Default, PartialOrd, Ord)] pub(crate) struct TxState: u8 { /// Set to `1` if all ancestor transactions are pending. - const NO_PARKED_ANCESTORS = 0b100000; + const NO_PARKED_ANCESTORS = 0b10000000; /// Set to `1` of the transaction is either the next transaction of the sender (on chain nonce == tx.nonce) or all prior transactions are also present in the pool. - const NO_NONCE_GAPS = 0b010000; + const NO_NONCE_GAPS = 0b01000000; /// Bit derived from the sender's balance. /// /// Set to `1` if the sender's balance can cover the maximum cost for this transaction (`feeCap * gasLimit + value`). /// This includes cumulative costs of prior transactions, which ensures that the sender has enough funds for all max cost of prior transactions. - const ENOUGH_BALANCE = 0b001000; + const ENOUGH_BALANCE = 0b00100000; /// Bit set to true if the transaction has a lower gas limit than the block's gas limit - const NOT_TOO_MUCH_GAS = 0b000100; + const NOT_TOO_MUCH_GAS = 0b00010000; /// Covers the Dynamic fee requirement. 
/// /// Set to 1 if `maxBlobFeePerGas` of the transaction meets the requirement of the pending block. - const ENOUGH_FEE_CAP_BLOCK = 0b000010; + const ENOUGH_FEE_CAP_BLOCK = 0b00001000; /// Covers the dynamic blob fee requirement, only relevant for EIP-4844 blob transactions /// /// Set to 1 if `maxBlobFeePer` of the transaction meets the requirement of the pending block. - const ENOUGH_BLOB_FEE_CAP_BLOCK = 0b000001; + const ENOUGH_BLOB_FEE_CAP_BLOCK = 0b00000100; - const PENDING_POOL_BITS = Self::NO_PARKED_ANCESTORS.bits()| Self::NO_NONCE_GAPS.bits() | Self::ENOUGH_BALANCE.bits() | Self::NOT_TOO_MUCH_GAS.bits() | Self::ENOUGH_FEE_CAP_BLOCK.bits() | Self::ENOUGH_BLOB_FEE_CAP_BLOCK.bits(); + /// Marks whether the transaction is a blob transaction + /// + /// We track this as part of the state for simplicity, since blob transactions are handled differently and are mutually exclusive with normal transactions. + const BLOB_TRANSACTION = 0b00000010; + + const PENDING_POOL_BITS = Self::NO_PARKED_ANCESTORS.bits() | Self::NO_NONCE_GAPS.bits() | Self::ENOUGH_BALANCE.bits() | Self::NOT_TOO_MUCH_GAS.bits() | Self::ENOUGH_FEE_CAP_BLOCK.bits() | Self::ENOUGH_BLOB_FEE_CAP_BLOCK.bits(); const BASE_FEE_POOL_BITS = Self::NO_PARKED_ANCESTORS.bits() | Self::NO_NONCE_GAPS.bits() | Self::ENOUGH_BALANCE.bits() | Self::NOT_TOO_MUCH_GAS.bits(); const QUEUED_POOL_BITS = Self::NO_PARKED_ANCESTORS.bits(); + const BLOB_POOL_BITS = Self::BLOB_TRANSACTION.bits(); } } @@ -40,11 +46,18 @@ impl TxState { /// - _No_ parked ancestors /// - enough balance /// - enough fee cap + /// - enough blob fee cap #[inline] pub(crate) fn is_pending(&self) -> bool { self.bits() >= TxState::PENDING_POOL_BITS.bits() } + /// Whether this transaction is a blob transaction. + #[inline] + pub(crate) fn is_blob(&self) -> bool { + self.contains(TxState::BLOB_TRANSACTION) + } + /// Returns `true` if the transaction has a nonce gap. #[inline] pub(crate) fn has_nonce_gap(&self) -> bool { @@ -62,6 +75,8 @@ pub enum SubPool { /// The base-fee sub-pool contains transactions that are not ready to be included in the next /// block because they don't meet the base fee requirement. BaseFee, + /// The blob sub-pool contains all blob transactions that are __not__ pending. + Blob, /// The pending sub-pool contains transactions that are ready to be included in the next block. Pending, } @@ -87,6 +102,12 @@ impl SubPool { matches!(self, SubPool::BaseFee) } + /// Whether this transaction is in the blob pool. + #[inline] + pub fn is_blob(&self) -> bool { + matches!(self, SubPool::Blob) + } + /// Returns whether this is a promotion depending on the current sub-pool location. 
#[inline] pub fn is_promoted(&self, other: SubPool) -> bool { @@ -99,6 +120,10 @@ impl From for SubPool { if value.is_pending() { return SubPool::Pending } + if value.is_blob() { + // all _non-pending_ blob transactions are in the blob sub-pool + return SubPool::Blob + } if value.bits() < TxState::BASE_FEE_POOL_BITS.bits() { return SubPool::Queued } @@ -115,6 +140,7 @@ mod tests { assert!(SubPool::BaseFee.is_promoted(SubPool::Queued)); assert!(SubPool::Pending.is_promoted(SubPool::BaseFee)); assert!(SubPool::Pending.is_promoted(SubPool::Queued)); + assert!(SubPool::Pending.is_promoted(SubPool::Blob)); assert!(!SubPool::BaseFee.is_promoted(SubPool::Pending)); assert!(!SubPool::Queued.is_promoted(SubPool::BaseFee)); } @@ -144,14 +170,25 @@ mod tests { assert_eq!(SubPool::Pending, state.into()); assert!(state.is_pending()); - let bits = 0b111111; + let bits = 0b11111100; let state = TxState::from_bits(bits).unwrap(); assert_eq!(SubPool::Pending, state.into()); assert!(state.is_pending()); - let bits = 0b111111; + let bits = 0b11111110; let state = TxState::from_bits(bits).unwrap(); assert_eq!(SubPool::Pending, state.into()); assert!(state.is_pending()); } + + #[test] + fn test_blob() { + let mut state = TxState::PENDING_POOL_BITS; + state.insert(TxState::BLOB_TRANSACTION); + assert!(state.is_pending()); + + state.remove(TxState::ENOUGH_BLOB_FEE_CAP_BLOCK); + assert!(state.is_blob()); + assert!(!state.is_pending()); + } } diff --git a/crates/transaction-pool/src/pool/txpool.rs b/crates/transaction-pool/src/pool/txpool.rs index f1043df833..f94fc6bdd2 100644 --- a/crates/transaction-pool/src/pool/txpool.rs +++ b/crates/transaction-pool/src/pool/txpool.rs @@ -13,7 +13,7 @@ use crate::{ update::{Destination, PoolUpdate}, AddedPendingTransaction, AddedTransaction, OnNewCanonicalStateOutcome, }, - traits::{BlockInfo, PoolSize}, + traits::{BestTransactionsAttributes, BlockInfo, PoolSize}, PoolConfig, PoolResult, PoolTransaction, PriceBumpConfig, TransactionOrdering, ValidPoolTransaction, U256, }; @@ -22,7 +22,7 @@ use reth_primitives::{ constants::{ eip4844::BLOB_TX_MIN_BLOB_GASPRICE, ETHEREUM_BLOCK_GAS_LIMIT, MIN_PROTOCOL_BASE_FEE, }, - Address, TxHash, H256, + Address, TxHash, B256, }; use std::{ cmp::Ordering, @@ -88,7 +88,6 @@ pub struct TxPool { /// be moved to pending if the base fee changes in their favor (decreases) in future blocks. basefee_pool: ParkedPool>, /// All blob transactions in the pool - #[allow(unused)] blob_transactions: BlobTransactions, /// All transactions in the pool. all_transactions: AllTransactions, @@ -245,6 +244,38 @@ impl TxPool { } } + /// Returns an iterator that yields transactions that are ready to be included in the block with + /// the given base fee and optional blob fee. 
+ pub(crate) fn best_transactions_with_attributes( + &self, + best_transactions_attributes: BestTransactionsAttributes, + ) -> Box>>> + { + match best_transactions_attributes.basefee.cmp(&self.all_transactions.pending_basefee) { + Ordering::Equal => { + // fee unchanged, nothing to shift + Box::new(self.best_transactions()) + } + Ordering::Greater => { + // base fee increased, we only need to enforce this on the pending pool + Box::new(self.pending_pool.best_with_basefee(best_transactions_attributes.basefee)) + } + Ordering::Less => { + // base fee decreased, we need to move transactions from the basefee pool to the + // pending pool and satisfy blob fee transactions as well + let unlocked_with_blob = + self.blob_transactions.satisfy_attributes(best_transactions_attributes); + + Box::new( + self.pending_pool.best_with_unlocked( + unlocked_with_blob, + self.all_transactions.pending_basefee, + ), + ) + } + } + } + /// Returns all transactions from the pending sub-pool pub(crate) fn pending_transactions(&self) -> Vec>> { self.pending_pool.all().collect() @@ -269,6 +300,7 @@ impl TxPool { SubPool::Queued => self.queued_pool.contains(id), SubPool::Pending => self.pending_pool.contains(id), SubPool::BaseFee => self.basefee_pool.contains(id), + SubPool::Blob => self.blob_transactions.contains(id), } } @@ -530,7 +562,7 @@ impl TxPool { /// This includes the total set of transactions and the subpool it currently resides in. fn remove_transaction_by_hash( &mut self, - tx_hash: &H256, + tx_hash: &B256, ) -> Option>> { let (tx, pool) = self.all_transactions.remove_transaction_by_hash(tx_hash)?; self.remove_from_subpool(pool, tx.id()) @@ -543,7 +575,7 @@ impl TxPool { /// [Self::on_canonical_state_change] fn prune_transaction_by_hash( &mut self, - tx_hash: &H256, + tx_hash: &B256, ) -> Option>> { let (tx, pool) = self.all_transactions.remove_transaction_by_hash(tx_hash)?; self.prune_from_subpool(pool, tx.id()) @@ -561,6 +593,7 @@ impl TxPool { SubPool::Queued => self.queued_pool.remove_transaction(tx), SubPool::Pending => self.pending_pool.remove_transaction(tx), SubPool::BaseFee => self.basefee_pool.remove_transaction(tx), + SubPool::Blob => self.blob_transactions.remove_transaction(tx), } } @@ -572,9 +605,10 @@ impl TxPool { tx: &TransactionId, ) -> Option>> { match pool { - SubPool::Queued => self.queued_pool.remove_transaction(tx), SubPool::Pending => self.pending_pool.prune_transaction(tx), + SubPool::Queued => self.queued_pool.remove_transaction(tx), SubPool::BaseFee => self.basefee_pool.remove_transaction(tx), + SubPool::Blob => self.blob_transactions.remove_transaction(tx), } } @@ -619,6 +653,9 @@ impl TxPool { SubPool::BaseFee => { self.basefee_pool.add_transaction(tx); } + SubPool::Blob => { + self.blob_transactions.add_transaction(tx); + } } } @@ -774,7 +811,7 @@ pub(crate) struct AllTransactions { /// The current block number the pool keeps track of. last_seen_block_number: u64, /// The current block hash the pool keeps track of. - last_seen_block_hash: H256, + last_seen_block_hash: B256, /// Expected base fee for the pending block. pending_basefee: u64, /// Expected blob fee for the pending block. @@ -1084,7 +1121,7 @@ impl AllTransactions { /// Removes a transaction from the set using its hash. 
pub(crate) fn remove_transaction_by_hash( &mut self, - tx_hash: &H256, + tx_hash: &B256, ) -> Option<(Arc>, SubPool)> { let tx = self.by_hash.remove(tx_hash)?; let internal = self.txs.remove(&tx.transaction_id)?; @@ -1324,6 +1361,8 @@ impl AllTransactions { // before attempting to insert a blob transaction, we need to ensure that additional // constraints are met that only apply to blob transactions if transaction.is_eip4844() { + state.insert(TxState::BLOB_TRANSACTION); + transaction = self.ensure_valid_blob_transaction(transaction, on_chain_balance, ancestor)?; let blob_fee_cap = transaction.transaction.max_fee_per_blob_gas().unwrap_or_default(); diff --git a/crates/transaction-pool/src/test_utils/mock.rs b/crates/transaction-pool/src/test_utils/mock.rs index 82bc1e0ba0..547fe093db 100644 --- a/crates/transaction-pool/src/test_utils/mock.rs +++ b/crates/transaction-pool/src/test_utils/mock.rs @@ -16,7 +16,7 @@ use reth_primitives::{ hex, Address, FromRecoveredPooledTransaction, FromRecoveredTransaction, IntoRecoveredTransaction, PooledTransactionsElementEcRecovered, Signature, Transaction, TransactionKind, TransactionSigned, TransactionSignedEcRecovered, TxEip1559, TxEip2930, - TxEip4844, TxHash, TxLegacy, TxType, EIP1559_TX_TYPE_ID, EIP4844_TX_TYPE_ID, H256, + TxEip4844, TxHash, TxLegacy, TxType, B256, EIP1559_TX_TYPE_ID, EIP4844_TX_TYPE_ID, LEGACY_TX_TYPE_ID, U128, U256, }; use std::{ops::Range, sync::Arc, time::Instant}; @@ -84,7 +84,7 @@ macro_rules! make_setters_getters { #[derive(Debug, Clone, Eq, PartialEq)] pub enum MockTransaction { Legacy { - hash: H256, + hash: B256, sender: Address, nonce: u64, gas_price: u128, @@ -93,7 +93,7 @@ pub enum MockTransaction { value: U256, }, Eip1559 { - hash: H256, + hash: B256, sender: Address, nonce: u64, max_fee_per_gas: u128, @@ -103,7 +103,7 @@ pub enum MockTransaction { value: U256, }, Eip4844 { - hash: H256, + hash: B256, sender: Address, nonce: u64, max_fee_per_gas: u128, @@ -120,7 +120,7 @@ pub enum MockTransaction { impl MockTransaction { make_setters_getters! 
{ nonce => u64; - hash => H256; + hash => B256; sender => Address; gas_limit => u64; value => U256 @@ -129,7 +129,7 @@ impl MockTransaction { /// Returns a new legacy transaction with random address and hash and empty values pub fn legacy() -> Self { MockTransaction::Legacy { - hash: H256::random(), + hash: B256::random(), sender: Address::random(), nonce: 0, gas_price: 0, @@ -142,7 +142,7 @@ impl MockTransaction { /// Returns a new EIP1559 transaction with random address and hash and empty values pub fn eip1559() -> Self { MockTransaction::Eip1559 { - hash: H256::random(), + hash: B256::random(), sender: Address::random(), nonce: 0, max_fee_per_gas: MIN_PROTOCOL_BASE_FEE as u128, @@ -156,7 +156,7 @@ impl MockTransaction { /// Returns a new EIP4844 transaction with random address and hash and empty values pub fn eip4844() -> Self { MockTransaction::Eip4844 { - hash: H256::random(), + hash: B256::random(), sender: Address::random(), nonce: 0, max_fee_per_gas: MIN_PROTOCOL_BASE_FEE as u128, @@ -283,19 +283,19 @@ impl MockTransaction { /// Returns a clone with a decreased nonce pub fn prev(&self) -> Self { - let mut next = self.clone().with_hash(H256::random()); + let mut next = self.clone().with_hash(B256::random()); next.with_nonce(self.get_nonce() - 1) } /// Returns a clone with an increased nonce pub fn next(&self) -> Self { - let mut next = self.clone().with_hash(H256::random()); + let mut next = self.clone().with_hash(B256::random()); next.with_nonce(self.get_nonce() + 1) } /// Returns a clone with an increased nonce pub fn skip(&self, skip: u64) -> Self { - let mut next = self.clone().with_hash(H256::random()); + let mut next = self.clone().with_hash(B256::random()); next.with_nonce(self.get_nonce() + skip + 1) } @@ -307,7 +307,7 @@ impl MockTransaction { /// Sets a new random hash pub fn rng_hash(mut self) -> Self { - self.with_hash(H256::random()) + self.with_hash(B256::random()) } /// Returns a new transaction with a higher gas price +1 @@ -578,7 +578,7 @@ impl IntoRecoveredTransaction for MockTransaction { gas_price: self.get_gas_price(), gas_limit: self.get_gas_limit(), to: TransactionKind::Call(Address::from_slice( - &hex::decode("d3e8763675e4c425df46cc3b5c0f6cbdac396046").unwrap()[..], + &hex!("d3e8763675e4c425df46cc3b5c0f6cbdac396046")[..], )), value: 693361000000000u64.into(), input: Default::default(), @@ -600,7 +600,7 @@ impl proptest::arbitrary::Arbitrary for MockTransaction { fn arbitrary_with(_: Self::Parameters) -> Self::Strategy { use proptest::prelude::{any, Strategy}; - any::<(Transaction, Address, H256)>() + any::<(Transaction, Address, B256)>() .prop_map(|(tx, sender, tx_hash)| match &tx { Transaction::Legacy(TxLegacy { nonce, diff --git a/crates/transaction-pool/src/test_utils/pool.rs b/crates/transaction-pool/src/test_utils/pool.rs index c8d79fb16e..545b90d0cf 100644 --- a/crates/transaction-pool/src/test_utils/pool.rs +++ b/crates/transaction-pool/src/test_utils/pool.rs @@ -168,7 +168,8 @@ pub struct MockSimulatorConfig { impl MockSimulatorConfig { /// Generates a set of random addresses pub fn addresses(&self, rng: &mut impl rand::Rng) -> Vec
{ - std::iter::repeat_with(|| Address::random_using(rng)).take(self.num_senders).collect() + let _ = rng.gen::(); // TODO(dani): ::random_with + std::iter::repeat_with(Address::random).take(self.num_senders).collect() } } diff --git a/crates/transaction-pool/src/traits.rs b/crates/transaction-pool/src/traits.rs index a90c1071c6..606bc5b690 100644 --- a/crates/transaction-pool/src/traits.rs +++ b/crates/transaction-pool/src/traits.rs @@ -4,15 +4,15 @@ use crate::{ validate::ValidPoolTransaction, AllTransactionsEvents, }; +use alloy_rlp::Encodable; use futures_util::{ready, Stream}; use reth_primitives::{ Address, BlobTransactionSidecar, BlobTransactionValidationError, FromRecoveredPooledTransaction, FromRecoveredTransaction, IntoRecoveredTransaction, PeerId, PooledTransactionsElement, PooledTransactionsElementEcRecovered, SealedBlock, Transaction, - TransactionKind, TransactionSignedEcRecovered, TxEip4844, TxHash, EIP1559_TX_TYPE_ID, - EIP4844_TX_TYPE_ID, H256, U256, + TransactionKind, TransactionSignedEcRecovered, TxEip4844, TxHash, B256, EIP1559_TX_TYPE_ID, + EIP4844_TX_TYPE_ID, U256, }; -use reth_rlp::Encodable; use std::{ collections::{HashMap, HashSet}, fmt, @@ -235,6 +235,15 @@ pub trait TransactionPool: Send + Sync + Clone { base_fee: u64, ) -> Box>>>; + /// Returns an iterator that yields transactions that are ready for block production with the + /// given base fee and optional blob fee attributes. + /// + /// Consumer: Block production + fn best_transactions_with_attributes( + &self, + best_transactions_attributes: BestTransactionsAttributes, + ) -> Box>>>; + /// Returns all transactions that can be included in the next block. /// /// This is primarily used for the `txpool_` RPC namespace: which distinguishes between `pending` and `queued` transactions, where `pending` are transactions ready for inclusion in the next block and `queued` are transactions that are ready for inclusion in future blocks. @@ -342,10 +351,10 @@ pub trait TransactionPoolExt: TransactionPool { fn update_accounts(&self, accounts: Vec); /// Deletes the blob sidecar for the given transaction from the blob store - fn delete_blob(&self, tx: H256); + fn delete_blob(&self, tx: B256); /// Deletes multiple blob sidecars from the blob store - fn delete_blobs(&self, txs: Vec); + fn delete_blobs(&self, txs: Vec); } /// Determines what kind of new transactions should be emitted by a stream of transactions. @@ -515,7 +524,7 @@ pub struct CanonicalStateUpdate<'a> { /// A set of changed accounts across a range of blocks. pub changed_accounts: Vec, /// All mined transactions in the block range. - pub mined_transactions: Vec, + pub mined_transactions: Vec, } impl<'a> CanonicalStateUpdate<'a> { @@ -525,7 +534,7 @@ impl<'a> CanonicalStateUpdate<'a> { } /// Returns the hash of the tip block. - pub fn hash(&self) -> H256 { + pub fn hash(&self) -> B256 { self.new_tip.hash } @@ -622,6 +631,35 @@ impl BestTransactions for std::iter::Empty { fn set_skip_blobs(&mut self, _skip_blobs: bool) {} } +/// A Helper type that bundles best transactions attributes together. +#[derive(Debug, Copy, Clone, PartialEq, Eq)] +pub struct BestTransactionsAttributes { + /// The base fee attribute for best transactions. + pub basefee: u64, + /// The blob fee attribute for best transactions. + pub blob_fee: Option, +} + +// === impl BestTransactionsAttributes === + +impl BestTransactionsAttributes { + /// Creates a new `BestTransactionsAttributes` with the given basefee and blob fee. 
+ pub fn new(basefee: u64, blob_fee: Option) -> Self { + Self { basefee, blob_fee } + } + + /// Creates a new `BestTransactionsAttributes` with the given basefee. + pub fn base_fee(basefee: u64) -> Self { + Self::new(basefee, None) + } + + /// Sets the given blob fee. + pub fn with_blob_fee(mut self, blob_fee: u64) -> Self { + self.blob_fee = Some(blob_fee); + self + } +} + /// Trait for transaction types used inside the pool pub trait PoolTransaction: fmt::Debug @@ -990,7 +1028,7 @@ impl PoolSize { #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub struct BlockInfo { /// Hash for the currently tracked block. - pub last_seen_block_hash: H256, + pub last_seen_block_hash: B256, /// Current the currently tracked block. pub last_seen_block_number: u64, /// Currently enforced base fee: the threshold for the basefee sub-pool. diff --git a/crates/transaction-pool/src/validate/mod.rs b/crates/transaction-pool/src/validate/mod.rs index e53828b7e9..9fcf59b3d3 100644 --- a/crates/transaction-pool/src/validate/mod.rs +++ b/crates/transaction-pool/src/validate/mod.rs @@ -7,7 +7,7 @@ use crate::{ }; use reth_primitives::{ Address, BlobTransactionSidecar, IntoRecoveredTransaction, SealedBlock, TransactionKind, - TransactionSignedEcRecovered, TxHash, H256, U256, + TransactionSignedEcRecovered, TxHash, B256, U256, }; use std::{fmt, time::Instant}; @@ -114,7 +114,7 @@ impl ValidTransaction { /// Returns the hash of the transaction. #[inline] - pub(crate) fn hash(&self) -> &H256 { + pub(crate) fn hash(&self) -> &B256 { self.transaction().hash() } diff --git a/crates/trie/Cargo.toml b/crates/trie/Cargo.toml index 9879bb3dca..9fd45bfa11 100644 --- a/crates/trie/Cargo.toml +++ b/crates/trie/Cargo.toml @@ -14,8 +14,9 @@ Merkle trie implementation # reth reth-primitives.workspace = true reth-interfaces.workspace = true -reth-rlp.workspace = true -reth-db.workspace = true +reth-db = { path = "../storage/db" } + +alloy-rlp.workspace = true # tokio tokio = { workspace = true, default-features = false, features = ["sync"] } @@ -24,7 +25,6 @@ tokio = { workspace = true, default-features = false, features = ["sync"] } tracing.workspace = true # misc -hex = "0.4" thiserror.workspace = true derive_more = "0.99" diff --git a/crates/trie/src/account.rs b/crates/trie/src/account.rs index 88307168a0..6804cb7a77 100644 --- a/crates/trie/src/account.rs +++ b/crates/trie/src/account.rs @@ -1,5 +1,5 @@ -use reth_primitives::{proofs::EMPTY_ROOT, Account, H256, KECCAK_EMPTY, U256}; -use reth_rlp::{RlpDecodable, RlpEncodable}; +use alloy_rlp::{RlpDecodable, RlpEncodable}; +use reth_primitives::{proofs::EMPTY_ROOT, Account, B256, KECCAK_EMPTY, U256}; /// An Ethereum account as represented in the trie. #[derive(Clone, Copy, Debug, PartialEq, Eq, Default, RlpEncodable, RlpDecodable)] @@ -9,9 +9,9 @@ pub struct EthAccount { /// Account balance. balance: U256, /// Account's storage root. - storage_root: H256, + storage_root: B256, /// Hash of the account's bytecode. - code_hash: H256, + code_hash: B256, } impl From for EthAccount { @@ -27,13 +27,13 @@ impl From for EthAccount { impl EthAccount { /// Set storage root on account. - pub fn with_storage_root(mut self, storage_root: H256) -> Self { + pub fn with_storage_root(mut self, storage_root: B256) -> Self { self.storage_root = storage_root; self } /// Get account's storage root. 
- pub fn storage_root(&self) -> H256 { + pub fn storage_root(&self) -> B256 { self.storage_root } } diff --git a/crates/trie/src/errors.rs b/crates/trie/src/errors.rs index 9999de96e6..4b8563062a 100644 --- a/crates/trie/src/errors.rs +++ b/crates/trie/src/errors.rs @@ -1,4 +1,4 @@ -use reth_primitives::H256; +use reth_primitives::B256; use thiserror::Error; /// State root error. @@ -36,7 +36,7 @@ pub enum ProofError { #[error( "Expected leaf account with key greater or equal to {0:?} is missing from the database" )] - LeafAccountMissing(H256), + LeafAccountMissing(B256), /// Storage root error. #[error(transparent)] StorageRootError(#[from] StorageRootError), diff --git a/crates/trie/src/hashed_cursor/default.rs b/crates/trie/src/hashed_cursor/default.rs index f5fb60c652..139fba9654 100644 --- a/crates/trie/src/hashed_cursor/default.rs +++ b/crates/trie/src/hashed_cursor/default.rs @@ -4,7 +4,7 @@ use reth_db::{ tables, transaction::{DbTx, DbTxGAT}, }; -use reth_primitives::{Account, StorageEntry, H256}; +use reth_primitives::{Account, StorageEntry, B256}; impl<'a, 'tx, TX: DbTx<'tx>> HashedCursorFactory<'a> for TX { type AccountCursor = >::Cursor where Self: 'a; @@ -23,11 +23,11 @@ impl<'tx, C> HashedAccountCursor for C where C: DbCursorRO<'tx, tables::HashedAccount>, { - fn seek(&mut self, key: H256) -> Result, reth_db::DatabaseError> { + fn seek(&mut self, key: B256) -> Result, reth_db::DatabaseError> { self.seek(key) } - fn next(&mut self) -> Result, reth_db::DatabaseError> { + fn next(&mut self) -> Result, reth_db::DatabaseError> { self.next() } } @@ -36,14 +36,14 @@ impl<'tx, C> HashedStorageCursor for C where C: DbCursorRO<'tx, tables::HashedStorage> + DbDupCursorRO<'tx, tables::HashedStorage>, { - fn is_storage_empty(&mut self, key: H256) -> Result { + fn is_storage_empty(&mut self, key: B256) -> Result { Ok(self.seek_exact(key)?.is_none()) } fn seek( &mut self, - key: H256, - subkey: H256, + key: B256, + subkey: B256, ) -> Result, reth_db::DatabaseError> { self.seek_by_key_subkey(key, subkey) } diff --git a/crates/trie/src/hashed_cursor/mod.rs b/crates/trie/src/hashed_cursor/mod.rs index 6d8910785a..7f4054b571 100644 --- a/crates/trie/src/hashed_cursor/mod.rs +++ b/crates/trie/src/hashed_cursor/mod.rs @@ -1,4 +1,4 @@ -use reth_primitives::{Account, StorageEntry, H256}; +use reth_primitives::{Account, StorageEntry, B256}; /// Default implementation of the hashed state cursor traits. mod default; @@ -28,22 +28,22 @@ pub trait HashedCursorFactory<'a> { /// The cursor for iterating over hashed accounts. pub trait HashedAccountCursor { /// Seek an entry greater or equal to the given key and position the cursor there. - fn seek(&mut self, key: H256) -> Result, reth_db::DatabaseError>; + fn seek(&mut self, key: B256) -> Result, reth_db::DatabaseError>; /// Move the cursor to the next entry and return it. - fn next(&mut self) -> Result, reth_db::DatabaseError>; + fn next(&mut self) -> Result, reth_db::DatabaseError>; } /// The cursor for iterating over hashed storage entries. pub trait HashedStorageCursor { /// Returns `true` if there are no entries for a given key. - fn is_storage_empty(&mut self, key: H256) -> Result; + fn is_storage_empty(&mut self, key: B256) -> Result; /// Seek an entry greater or equal to the given key/subkey and position the cursor there. fn seek( &mut self, - key: H256, - subkey: H256, + key: B256, + subkey: B256, ) -> Result, reth_db::DatabaseError>; /// Move the cursor to the next entry and return it. 
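The two cursor traits above define a simple seek/next contract over the hashed account and storage tables; the post-state cursors in the next file overlay in-memory changes on top of a database cursor that follows the same contract. A toy, self-contained analogue of the account cursor over a sorted map, for orientation only (it is not the real trait impl; the elided return types in the trait are assumed to be Option<(B256, Account)> wrapped in Result<_, reth_db::DatabaseError>):

use reth_primitives::{Account, B256};
use std::{collections::BTreeMap, ops::Bound};

// Sketch: mirrors HashedAccountCursor's seek/next semantics without the trait or the
// DatabaseError plumbing.
struct MemAccountCursor {
    accounts: BTreeMap<B256, Account>,
    // The last key returned; de facto the cursor position, as in the real cursors.
    last: Option<B256>,
}

impl MemAccountCursor {
    // Seek the first entry with a key greater or equal to `key` and position the cursor there.
    fn seek(&mut self, key: B256) -> Option<(B256, Account)> {
        let found = self.accounts.range(key..).next().map(|(k, v)| (*k, *v));
        self.last = found.map(|(k, _)| k);
        found
    }

    // Move the cursor to the entry strictly after the memoized key and return it.
    fn next(&mut self) -> Option<(B256, Account)> {
        let start = self.last?;
        let found = self
            .accounts
            .range((Bound::Excluded(start), Bound::Unbounded))
            .next()
            .map(|(k, v)| (*k, *v));
        if let Some((key, _)) = found {
            self.last = Some(key);
        }
        found
    }
}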
diff --git a/crates/trie/src/hashed_cursor/post_state.rs b/crates/trie/src/hashed_cursor/post_state.rs index 58150d5e0a..0357877c45 100644 --- a/crates/trie/src/hashed_cursor/post_state.rs +++ b/crates/trie/src/hashed_cursor/post_state.rs @@ -5,16 +5,16 @@ use reth_db::{ tables, transaction::{DbTx, DbTxGAT}, }; -use reth_primitives::{trie::Nibbles, Account, StorageEntry, H256, U256}; +use reth_primitives::{trie::Nibbles, Account, StorageEntry, B256, U256}; use std::collections::{HashMap, HashSet}; /// The post state account storage with hashed slots. #[derive(Debug, Clone, Eq, PartialEq)] pub struct HashedStorage { /// Hashed storage slots with non-zero. - non_zero_valued_storage: Vec<(H256, U256)>, + non_zero_valued_storage: Vec<(B256, U256)>, /// Slots that have been zero valued. - zero_valued_slots: HashSet, + zero_valued_slots: HashSet, /// Whether the storage was wiped or not. wiped: bool, /// Whether the storage entries were sorted or not. @@ -41,14 +41,14 @@ impl HashedStorage { } /// Insert non zero-valued storage entry. - pub fn insert_non_zero_valued_storage(&mut self, slot: H256, value: U256) { + pub fn insert_non_zero_valued_storage(&mut self, slot: B256, value: U256) { debug_assert!(value != U256::ZERO, "value cannot be zero"); self.non_zero_valued_storage.push((slot, value)); self.sorted = false; } /// Insert zero-valued storage slot. - pub fn insert_zero_valued_slot(&mut self, slot: H256) { + pub fn insert_zero_valued_slot(&mut self, slot: B256) { self.zero_valued_slots.insert(slot); } } @@ -57,11 +57,11 @@ impl HashedStorage { #[derive(Debug, Clone, Eq, PartialEq)] pub struct HashedPostState { /// Map of hashed addresses to account info. - accounts: Vec<(H256, Account)>, + accounts: Vec<(B256, Account)>, /// Set of cleared accounts. - cleared_accounts: HashSet, + cleared_accounts: HashSet, /// Map of hashed addresses to hashed storage. - storages: HashMap, + storages: HashMap, /// Whether the account and storage entries were sorted or not. sorted: bool, } @@ -97,18 +97,18 @@ impl HashedPostState { } /// Insert non-empty account info. - pub fn insert_account(&mut self, hashed_address: H256, account: Account) { + pub fn insert_account(&mut self, hashed_address: B256, account: Account) { self.accounts.push((hashed_address, account)); self.sorted = false; } /// Insert cleared hashed account key. - pub fn insert_cleared_account(&mut self, hashed_address: H256) { + pub fn insert_cleared_account(&mut self, hashed_address: B256) { self.cleared_accounts.insert(hashed_address); } /// Insert hashed storage entry. - pub fn insert_hashed_storage(&mut self, hashed_address: H256, hashed_storage: HashedStorage) { + pub fn insert_hashed_storage(&mut self, hashed_address: B256, hashed_storage: HashedStorage) { self.sorted &= hashed_storage.sorted; self.storages.insert(hashed_address, hashed_storage); } @@ -116,10 +116,10 @@ impl HashedPostState { /// Construct (PrefixSet)[PrefixSet] from hashed post state. /// The prefix sets contain the hashed account and storage keys that have been changed in the /// post state. - pub fn construct_prefix_sets(&self) -> (PrefixSet, HashMap) { + pub fn construct_prefix_sets(&self) -> (PrefixSet, HashMap) { // Initialize prefix sets. let mut account_prefix_set = PrefixSetMut::default(); - let mut storage_prefix_set: HashMap = HashMap::default(); + let mut storage_prefix_set: HashMap = HashMap::default(); // Populate account prefix set. 
for (hashed_address, _) in &self.accounts { @@ -194,7 +194,7 @@ pub struct HashedPostStateAccountCursor<'b, C> { post_state_account_index: usize, /// The last hashed account key that was returned by the cursor. /// De facto, this is a current cursor position. - last_account: Option, + last_account: Option, } impl<'b, C> HashedPostStateAccountCursor<'b, C> { @@ -208,7 +208,7 @@ impl<'b, C> HashedPostStateAccountCursor<'b, C> { /// /// This function only checks the post state, not the database, because the latter does not /// store destroyed accounts. - fn is_account_cleared(&self, account: &H256) -> bool { + fn is_account_cleared(&self, account: &B256) -> bool { self.post_state.cleared_accounts.contains(account) } @@ -217,9 +217,9 @@ impl<'b, C> HashedPostStateAccountCursor<'b, C> { /// Given the next post state and database entries, return the smallest of the two. /// If the account keys are the same, the post state entry is given precedence. fn next_account( - post_state_item: Option<&(H256, Account)>, - db_item: Option<(H256, Account)>, - ) -> Option<(H256, Account)> { + post_state_item: Option<&(B256, Account)>, + db_item: Option<(B256, Account)>, + ) -> Option<(B256, Account)> { match (post_state_item, db_item) { // If both are not empty, return the smallest of the two // Post state is given precedence if keys are equal @@ -254,7 +254,7 @@ where /// /// The returned account key is memoized and the cursor remains positioned at that key until /// [HashedAccountCursor::seek] or [HashedAccountCursor::next] are called. - fn seek(&mut self, key: H256) -> Result, reth_db::DatabaseError> { + fn seek(&mut self, key: B256) -> Result, reth_db::DatabaseError> { debug_assert!(self.post_state.sorted, "`HashedPostState` must be pre-sorted"); self.last_account = None; @@ -300,7 +300,7 @@ where /// /// NOTE: This function will not return any entry unless [HashedAccountCursor::seek] has been /// called. - fn next(&mut self) -> Result, reth_db::DatabaseError> { + fn next(&mut self) -> Result, reth_db::DatabaseError> { debug_assert!(self.post_state.sorted, "`HashedPostState` must be pre-sorted"); let last_account = match self.last_account.as_ref() { @@ -343,10 +343,10 @@ pub struct HashedPostStateStorageCursor<'b, C> { /// The post state index where the cursor is currently at. post_state_storage_index: usize, /// The current hashed account key. - account: Option, + account: Option, /// The last slot that has been returned by the cursor. /// De facto, this is the cursor's position for the given account key. - last_slot: Option, + last_slot: Option, } impl<'b, C> HashedPostStateStorageCursor<'b, C> { @@ -357,7 +357,7 @@ impl<'b, C> HashedPostStateStorageCursor<'b, C> { /// Returns `true` if the storage for the given /// The database is not checked since it already has no wiped storage entries. - fn is_db_storage_wiped(&self, account: &H256) -> bool { + fn is_db_storage_wiped(&self, account: &B256) -> bool { match self.post_state.storages.get(account) { Some(storage) => storage.wiped, None => false, @@ -366,7 +366,7 @@ impl<'b, C> HashedPostStateStorageCursor<'b, C> { /// Check if the slot was zeroed out in the post state. /// The database is not checked since it already has no zero-valued slots. 
- fn is_slot_zero_valued(&self, account: &H256, slot: &H256) -> bool { + fn is_slot_zero_valued(&self, account: &B256, slot: &B256) -> bool { self.post_state .storages .get(account) @@ -379,7 +379,7 @@ impl<'b, C> HashedPostStateStorageCursor<'b, C> { /// Given the next post state and database entries, return the smallest of the two. /// If the storage keys are the same, the post state entry is given precedence. fn next_slot( - post_state_item: Option<&(H256, U256)>, + post_state_item: Option<&(B256, U256)>, db_item: Option, ) -> Option { match (post_state_item, db_item) { @@ -412,7 +412,7 @@ where /// /// This function should be called before attempting to call [HashedStorageCursor::seek] or /// [HashedStorageCursor::next]. - fn is_storage_empty(&mut self, key: H256) -> Result { + fn is_storage_empty(&mut self, key: B256) -> Result { let is_empty = match self.post_state.storages.get(&key) { Some(storage) => { // If the storage has been wiped at any point @@ -428,8 +428,8 @@ where /// Seek the next account storage entry for a given hashed key pair. fn seek( &mut self, - account: H256, - subkey: H256, + account: B256, + subkey: B256, ) -> Result, reth_db::DatabaseError> { if self.account.map_or(true, |acc| acc != account) { self.account = Some(account); @@ -546,13 +546,13 @@ mod tests { fn assert_account_cursor_order<'a, 'b>( factory: &'a impl HashedCursorFactory<'b>, - mut expected: impl Iterator, + mut expected: impl Iterator, ) where 'a: 'b, { let mut cursor = factory.hashed_account_cursor().unwrap(); - let first_account = cursor.seek(H256::default()).unwrap(); + let first_account = cursor.seek(B256::default()).unwrap(); assert_eq!(first_account, expected.next()); for expected in expected { @@ -565,7 +565,7 @@ mod tests { fn assert_storage_cursor_order<'a, 'b>( factory: &'a impl HashedCursorFactory<'b>, - expected: impl Iterator)>, + expected: impl Iterator)>, ) where 'a: 'b, { @@ -574,7 +574,7 @@ mod tests { for (account, storage) in expected { let mut expected_storage = storage.into_iter(); - let first_storage = cursor.seek(account, H256::default()).unwrap(); + let first_storage = cursor.seek(account, B256::default()).unwrap(); assert_eq!(first_storage.map(|e| (e.key, e.value)), expected_storage.next()); for expected_entry in expected_storage { @@ -589,7 +589,7 @@ mod tests { #[test] fn post_state_only_accounts() { let accounts = - Vec::from_iter((1..11).map(|key| (H256::from_low_u64_be(key), Account::default()))); + Vec::from_iter((1..11).map(|key| (B256::with_last_byte(key), Account::default()))); let mut hashed_post_state = HashedPostState::default(); for (hashed_address, account) in &accounts { @@ -607,7 +607,7 @@ mod tests { #[test] fn db_only_accounts() { let accounts = - Vec::from_iter((1..11).map(|key| (H256::from_low_u64_be(key), Account::default()))); + Vec::from_iter((1..11).map(|key| (B256::with_last_byte(key), Account::default()))); let db = create_test_rw_db(); db.update(|tx| { @@ -627,18 +627,18 @@ mod tests { fn account_cursor_correct_order() { // odd keys are in post state, even keys are in db let accounts = - Vec::from_iter((1..111).map(|key| (H256::from_low_u64_be(key), Account::default()))); + Vec::from_iter((1..111).map(|key| (B256::with_last_byte(key), Account::default()))); let db = create_test_rw_db(); db.update(|tx| { - for (key, account) in accounts.iter().filter(|x| x.0.to_low_u64_be() % 2 == 0) { + for (key, account) in accounts.iter().filter(|x| x.0[31] % 2 == 0) { tx.put::(*key, *account).unwrap(); } }) .unwrap(); let mut hashed_post_state = 
HashedPostState::default(); - for (hashed_address, account) in accounts.iter().filter(|x| x.0.to_low_u64_be() % 2 != 0) { + for (hashed_address, account) in accounts.iter().filter(|x| x.0[31] % 2 != 0) { hashed_post_state.insert_account(*hashed_address, *account); } hashed_post_state.sort(); @@ -652,20 +652,20 @@ mod tests { fn removed_accounts_are_discarded() { // odd keys are in post state, even keys are in db let accounts = - Vec::from_iter((1..111).map(|key| (H256::from_low_u64_be(key), Account::default()))); + Vec::from_iter((1..111).map(|key| (B256::with_last_byte(key), Account::default()))); // accounts 5, 9, 11 should be considered removed from post state - let removed_keys = Vec::from_iter([5, 9, 11].into_iter().map(H256::from_low_u64_be)); + let removed_keys = Vec::from_iter([5, 9, 11].into_iter().map(B256::with_last_byte)); let db = create_test_rw_db(); db.update(|tx| { - for (key, account) in accounts.iter().filter(|x| x.0.to_low_u64_be() % 2 == 0) { + for (key, account) in accounts.iter().filter(|x| x.0[31] % 2 == 0) { tx.put::(*key, *account).unwrap(); } }) .unwrap(); let mut hashed_post_state = HashedPostState::default(); - for (hashed_address, account) in accounts.iter().filter(|x| x.0.to_low_u64_be() % 2 != 0) { + for (hashed_address, account) in accounts.iter().filter(|x| x.0[31] % 2 != 0) { if removed_keys.contains(hashed_address) { hashed_post_state.insert_cleared_account(*hashed_address); } else { @@ -682,10 +682,9 @@ mod tests { #[test] fn post_state_accounts_take_precedence() { - let accounts = - Vec::from_iter((1..10).map(|key| { - (H256::from_low_u64_be(key), Account { nonce: key, ..Default::default() }) - })); + let accounts = Vec::from_iter((1..10).map(|key| { + (B256::with_last_byte(key), Account { nonce: key as u64, ..Default::default() }) + })); let db = create_test_rw_db(); db.update(|tx| { @@ -709,7 +708,7 @@ mod tests { #[test] fn fuzz_hashed_account_cursor() { - proptest!(ProptestConfig::with_cases(10), |(db_accounts: BTreeMap, post_state_accounts: BTreeMap>)| { + proptest!(ProptestConfig::with_cases(10), |(db_accounts: BTreeMap, post_state_accounts: BTreeMap>)| { let db = create_test_rw_db(); db.update(|tx| { for (key, account) in db_accounts.iter() { @@ -747,7 +746,7 @@ mod tests { #[test] fn storage_is_empty() { - let address = H256::random(); + let address = B256::random(); let db = create_test_rw_db(); // empty from the get go @@ -760,7 +759,7 @@ mod tests { } let db_storage = - BTreeMap::from_iter((0..10).map(|key| (H256::from_low_u64_be(key), U256::from(key)))); + BTreeMap::from_iter((0..10).map(|key| (B256::with_last_byte(key), U256::from(key)))); db.update(|tx| { for (slot, value) in db_storage.iter() { // insert zero value accounts to the database @@ -800,7 +799,7 @@ mod tests { { let wiped = true; let mut hashed_storage = HashedStorage::new(wiped); - hashed_storage.insert_zero_valued_slot(H256::random()); + hashed_storage.insert_zero_valued_slot(B256::random()); let mut hashed_post_state = HashedPostState::default(); hashed_post_state.insert_hashed_storage(address, hashed_storage); @@ -815,7 +814,7 @@ mod tests { { let wiped = true; let mut hashed_storage = HashedStorage::new(wiped); - hashed_storage.insert_non_zero_valued_storage(H256::random(), U256::from(1)); + hashed_storage.insert_non_zero_valued_storage(B256::random(), U256::from(1)); let mut hashed_post_state = HashedPostState::default(); hashed_post_state.insert_hashed_storage(address, hashed_storage); @@ -829,11 +828,11 @@ mod tests { #[test] fn storage_cursor_correct_order() { - let 
address = H256::random(); + let address = B256::random(); let db_storage = - BTreeMap::from_iter((1..11).map(|key| (H256::from_low_u64_be(key), U256::from(key)))); + BTreeMap::from_iter((1..11).map(|key| (B256::with_last_byte(key), U256::from(key)))); let post_state_storage = - BTreeMap::from_iter((11..21).map(|key| (H256::from_low_u64_be(key), U256::from(key)))); + BTreeMap::from_iter((11..21).map(|key| (B256::with_last_byte(key), U256::from(key)))); let db = create_test_rw_db(); db.update(|tx| { @@ -867,11 +866,11 @@ mod tests { #[test] fn zero_value_storage_entries_are_discarded() { - let address = H256::random(); + let address = B256::random(); let db_storage = - BTreeMap::from_iter((0..10).map(|key| (H256::from_low_u64_be(key), U256::from(key)))); // every even number is changed to zero value + BTreeMap::from_iter((0..10).map(|key| (B256::with_last_byte(key), U256::from(key)))); // every even number is changed to zero value let post_state_storage = BTreeMap::from_iter((0..10).map(|key| { - (H256::from_low_u64_be(key), if key % 2 == 0 { U256::ZERO } else { U256::from(key) }) + (B256::with_last_byte(key), if key % 2 == 0 { U256::ZERO } else { U256::from(key) }) })); let db = create_test_rw_db(); @@ -910,11 +909,11 @@ mod tests { #[test] fn wiped_storage_is_discarded() { - let address = H256::random(); + let address = B256::random(); let db_storage = - BTreeMap::from_iter((1..11).map(|key| (H256::from_low_u64_be(key), U256::from(key)))); + BTreeMap::from_iter((1..11).map(|key| (B256::with_last_byte(key), U256::from(key)))); let post_state_storage = - BTreeMap::from_iter((11..21).map(|key| (H256::from_low_u64_be(key), U256::from(key)))); + BTreeMap::from_iter((11..21).map(|key| (B256::with_last_byte(key), U256::from(key)))); let db = create_test_rw_db(); db.update(|tx| { @@ -944,9 +943,9 @@ mod tests { #[test] fn post_state_storages_take_precedence() { - let address = H256::random(); + let address = B256::random(); let storage = - BTreeMap::from_iter((1..10).map(|key| (H256::from_low_u64_be(key), U256::from(key)))); + BTreeMap::from_iter((1..10).map(|key| (B256::with_last_byte(key), U256::from(key)))); let db = create_test_rw_db(); db.update(|tx| { @@ -981,8 +980,8 @@ mod tests { fn fuzz_hashed_storage_cursor() { proptest!(ProptestConfig::with_cases(10), |( - db_storages: BTreeMap>, - post_state_storages: BTreeMap)> + db_storages: BTreeMap>, + post_state_storages: BTreeMap)> )| { let db = create_test_rw_db(); diff --git a/crates/trie/src/lib.rs b/crates/trie/src/lib.rs index 98a200a23c..436c9f9ca1 100644 --- a/crates/trie/src/lib.rs +++ b/crates/trie/src/lib.rs @@ -9,7 +9,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = "https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/crates/trie/src/prefix_set/loader.rs b/crates/trie/src/prefix_set/loader.rs index 3ee8b211a3..8474300263 100644 --- a/crates/trie/src/prefix_set/loader.rs +++ b/crates/trie/src/prefix_set/loader.rs @@ -7,7 +7,7 @@ use reth_db::{ transaction::DbTx, DatabaseError, }; -use reth_primitives::{keccak256, trie::Nibbles, BlockNumber, StorageEntry, H256}; +use reth_primitives::{keccak256, trie::Nibbles, BlockNumber, StorageEntry, B256}; use std::{ 
collections::{HashMap, HashSet}, ops::RangeInclusive, @@ -19,9 +19,9 @@ pub struct LoadedPrefixSets { /// The account prefix set pub account_prefix_set: PrefixSetMut, /// The mapping of hashed account key to the corresponding storage prefix set - pub storage_prefix_sets: HashMap, + pub storage_prefix_sets: HashMap, /// The account keys of destroyed accounts - pub destroyed_accounts: HashSet, + pub destroyed_accounts: HashSet, } /// A wrapper around a database transaction that loads prefix sets within a given block range. diff --git a/crates/trie/src/progress.rs b/crates/trie/src/progress.rs index 36b96643dc..fdd7186a91 100644 --- a/crates/trie/src/progress.rs +++ b/crates/trie/src/progress.rs @@ -2,14 +2,14 @@ use crate::{trie_cursor::CursorSubNode, updates::TrieUpdates}; use reth_primitives::{ stage::MerkleCheckpoint, trie::{hash_builder::HashBuilder, Nibbles}, - H256, + B256, }; /// The progress of the state root computation. #[derive(Debug)] pub enum StateRootProgress { /// The complete state root computation with updates and computed root. - Complete(H256, usize, TrieUpdates), + Complete(B256, usize, TrieUpdates), /// The intermediate progress of state root computation. /// Contains the walker stack, the hash builder and the trie updates. Progress(Box, usize, TrieUpdates), @@ -23,7 +23,7 @@ pub struct IntermediateStateRootState { /// Previously recorded walker stack. pub walker_stack: Vec, /// The last hashed account key processed. - pub last_account_key: H256, + pub last_account_key: B256, /// The last walker key processed. pub last_walker_key: Nibbles, } diff --git a/crates/trie/src/proof.rs b/crates/trie/src/proof.rs index d76f79ebdb..9690fbb085 100644 --- a/crates/trie/src/proof.rs +++ b/crates/trie/src/proof.rs @@ -6,6 +6,7 @@ use crate::{ walker::TrieWalker, ProofError, StorageRoot, }; +use alloy_rlp::Encodable; use reth_db::{cursor::DbCursorRO, tables, transaction::DbTx}; use reth_primitives::{ keccak256, @@ -13,9 +14,8 @@ use reth_primitives::{ nodes::{rlp_hash, BranchNode, LeafNode, CHILD_INDEX_RANGE}, BranchNodeCompact, HashBuilder, Nibbles, }, - Address, Bytes, H256, + Address, Bytes, B256, }; -use reth_rlp::Encodable; /// A struct for generating merkle proofs. /// @@ -102,7 +102,7 @@ where &self, trie_cursor: &mut AccountTrieCursor, proof_restorer: &mut ProofRestorer<'a, 'b, TX, H>, - hashed_address: H256, + hashed_address: B256, ) -> Result, ProofError> { let mut intermediate_proofs = Vec::new(); @@ -202,19 +202,19 @@ where child_key_to_seek.resize(32, 0); let leaf_node_rlp = - self.restore_leaf_node(H256::from_slice(&child_key_to_seek), child_key.len())?; + self.restore_leaf_node(B256::from_slice(&child_key_to_seek), child_key.len())?; branch_node_stack.push(leaf_node_rlp.to_vec()); } } self.node_rlp_buf.clear(); BranchNode::new(&branch_node_stack).rlp(node.state_mask, &mut self.node_rlp_buf); - Ok(Bytes::from(self.node_rlp_buf.as_slice())) + Ok(Bytes::copy_from_slice(self.node_rlp_buf.as_slice())) } /// Restore leaf node. /// The leaf nodes are always encoded as `RLP(node) or RLP(keccak(RLP(node)))`. - fn restore_leaf_node(&mut self, seek_key: H256, slice_at: usize) -> Result { + fn restore_leaf_node(&mut self, seek_key: B256, slice_at: usize) -> Result { let (hashed_address, account) = self .hashed_account_cursor .seek(seek_key)? @@ -240,7 +240,7 @@ where /// The target node might be missing from the trie. 
fn restore_target_leaf_node( &mut self, - seek_key: H256, + seek_key: B256, slice_at: usize, ) -> Result, ProofError> { let (hashed_address, account) = match self.hashed_account_cursor.seek(seek_key)? { @@ -261,7 +261,7 @@ where self.node_rlp_buf.clear(); leaf_node.rlp(&mut self.node_rlp_buf); - Ok(Some(Bytes::from(self.node_rlp_buf.as_slice()))) + Ok(Some(Bytes::copy_from_slice(self.node_rlp_buf.as_slice()))) } } diff --git a/crates/trie/src/test_utils.rs b/crates/trie/src/test_utils.rs index d3be29e2bb..bd16f04cd3 100644 --- a/crates/trie/src/test_utils.rs +++ b/crates/trie/src/test_utils.rs @@ -1,15 +1,15 @@ use crate::account::EthAccount; -use reth_primitives::{proofs::triehash::KeccakHasher, Account, Address, H256, U256}; -use reth_rlp::{encode_fixed_size, Encodable}; +use alloy_rlp::{encode_fixed_size, Encodable}; +use reth_primitives::{proofs::triehash::KeccakHasher, Account, Address, B256, U256}; /// Re-export of [triehash]. pub use triehash; /// Compute the state root of a given set of accounts using [triehash::sec_trie_root]. -pub fn state_root(accounts: I) -> H256 +pub fn state_root(accounts: I) -> B256 where I: Iterator, - S: IntoIterator, + S: IntoIterator, { let encoded_accounts = accounts.map(|(address, (account, storage))| { let storage_root = storage_root(storage.into_iter()); @@ -22,17 +22,17 @@ where } /// Compute the storage root for a given account using [triehash::sec_trie_root]. -pub fn storage_root>(storage: I) -> H256 { +pub fn storage_root>(storage: I) -> B256 { let encoded_storage = storage.map(|(k, v)| (k, encode_fixed_size(&v).to_vec())); triehash::sec_trie_root::(encoded_storage) } /// Compute the state root of a given set of accounts with prehashed keys using /// [triehash::trie_root]. -pub fn state_root_prehashed(accounts: I) -> H256 +pub fn state_root_prehashed(accounts: I) -> B256 where - I: Iterator, - S: IntoIterator, + I: Iterator, + S: IntoIterator, { let encoded_accounts = accounts.map(|(address, (account, storage))| { let storage_root = storage_root_prehashed(storage.into_iter()); @@ -45,7 +45,7 @@ where } /// Compute the storage root for a given account with prehashed slots using [triehash::trie_root]. -pub fn storage_root_prehashed>(storage: I) -> H256 { +pub fn storage_root_prehashed>(storage: I) -> B256 { let encoded_storage = storage.map(|(k, v)| (k, encode_fixed_size(&v).to_vec())); triehash::trie_root::(encoded_storage) } diff --git a/crates/trie/src/trie.rs b/crates/trie/src/trie.rs index 87f0f90265..0df91a687f 100644 --- a/crates/trie/src/trie.rs +++ b/crates/trie/src/trie.rs @@ -8,14 +8,14 @@ use crate::{ walker::TrieWalker, StateRootError, StorageRootError, }; +use alloy_rlp::Encodable; use reth_db::{tables, transaction::DbTx}; use reth_primitives::{ keccak256, proofs::EMPTY_ROOT, trie::{HashBuilder, Nibbles}, - Address, BlockNumber, StorageEntry, H256, + Address, BlockNumber, StorageEntry, B256, }; -use reth_rlp::Encodable; use std::{ collections::{HashMap, HashSet}, ops::RangeInclusive, @@ -32,9 +32,9 @@ pub struct StateRoot<'a, 'b, TX, H> { pub changed_account_prefixes: PrefixSet, /// A map containing storage changes with the hashed address as key and a set of storage key /// prefixes as the value. - pub changed_storage_prefixes: HashMap, + pub changed_storage_prefixes: HashMap, /// A map containing keys of accounts that were destroyed. - pub destroyed_accounts: HashSet, + pub destroyed_accounts: HashSet, /// Previous intermediate state. 
previous_state: Option, /// The number of updates after which the intermediate progress should be returned. @@ -49,13 +49,13 @@ impl<'a, 'b, TX, H> StateRoot<'a, 'b, TX, H> { } /// Set the changed storage prefixes. - pub fn with_changed_storage_prefixes(mut self, prefixes: HashMap) -> Self { + pub fn with_changed_storage_prefixes(mut self, prefixes: HashMap) -> Self { self.changed_storage_prefixes = prefixes; self } /// Set the destroyed accounts. - pub fn with_destroyed_accounts(mut self, accounts: HashSet) -> Self { + pub fn with_destroyed_accounts(mut self, accounts: HashSet) -> Self { self.destroyed_accounts = accounts; self } @@ -144,7 +144,7 @@ where pub fn incremental_root( tx: &'a TX, range: RangeInclusive, - ) -> Result { + ) -> Result { tracing::debug!(target: "loader", "incremental state root"); Self::incremental_root_calculator(tx, range)?.root() } @@ -160,7 +160,7 @@ where pub fn incremental_root_with_updates( tx: &'a TX, range: RangeInclusive, - ) -> Result<(H256, TrieUpdates), StateRootError> { + ) -> Result<(B256, TrieUpdates), StateRootError> { tracing::debug!(target: "loader", "incremental state root"); Self::incremental_root_calculator(tx, range)?.root_with_updates() } @@ -193,7 +193,7 @@ where /// # Returns /// /// The intermediate progress of state root computation and the trie updates. - pub fn root_with_updates(self) -> Result<(H256, TrieUpdates), StateRootError> { + pub fn root_with_updates(self) -> Result<(B256, TrieUpdates), StateRootError> { match self.with_no_threshold().calculate(true)? { StateRootProgress::Complete(root, _, updates) => Ok((root, updates)), StateRootProgress::Progress(..) => unreachable!(), // unreachable threshold @@ -206,7 +206,7 @@ where /// # Returns /// /// The state root hash. - pub fn root(self) -> Result { + pub fn root(self) -> Result { match self.calculate(false)? { StateRootProgress::Complete(root, _, _) => Ok(root), StateRootProgress::Progress(..) => unreachable!(), // update retenion is disabled @@ -376,7 +376,7 @@ pub struct StorageRoot<'a, 'b, TX, H> { /// The factory for hashed cursors. pub hashed_cursor_factory: &'b H, /// The hashed address of an account. - pub hashed_address: H256, + pub hashed_address: B256, /// The set of storage slot prefixes that have changed. pub changed_prefixes: PrefixSet, } @@ -391,7 +391,7 @@ where } /// Creates a new storage root calculator given a hashed address. - pub fn new_hashed(tx: &'a TX, hashed_address: H256) -> Self { + pub fn new_hashed(tx: &'a TX, hashed_address: B256) -> Self { Self { tx, hashed_address, @@ -411,7 +411,7 @@ impl<'a, 'b, TX, H> StorageRoot<'a, 'b, TX, H> { pub fn new_hashed_with_factory( tx: &'a TX, hashed_cursor_factory: &'b H, - hashed_address: H256, + hashed_address: B256, ) -> Self { Self { tx, @@ -451,7 +451,7 @@ where /// # Returns /// /// The storage root and storage trie updates for a given address. - pub fn root_with_updates(&self) -> Result<(H256, usize, TrieUpdates), StorageRootError> { + pub fn root_with_updates(&self) -> Result<(B256, usize, TrieUpdates), StorageRootError> { self.calculate(true) } @@ -460,7 +460,7 @@ where /// # Returns /// /// The storage root. 
- pub fn root(&self) -> Result { + pub fn root(&self) -> Result { let (root, _, _) = self.calculate(false)?; Ok(root) } @@ -468,7 +468,7 @@ where fn calculate( &self, retain_updates: bool, - ) -> Result<(H256, usize, TrieUpdates), StorageRootError> { + ) -> Result<(B256, usize, TrieUpdates), StorageRootError> { tracing::debug!(target: "trie::storage_root", hashed_address = ?self.hashed_address, "calculating storage root"); let mut hashed_storage_cursor = self.hashed_cursor_factory.hashed_storage_cursor()?; @@ -515,7 +515,7 @@ where } } hash_builder - .add_leaf(storage_key_nibbles, reth_rlp::encode_fixed_size(&value).as_ref()); + .add_leaf(storage_key_nibbles, alloy_rlp::encode_fixed_size(&value).as_ref()); storage = hashed_storage_cursor.next()?; } } @@ -554,7 +554,7 @@ mod tests { keccak256, proofs::triehash::KeccakHasher, trie::{BranchNodeCompact, TrieMask}, - Account, Address, H256, MAINNET, U256, + Account, Address, B256, MAINNET, U256, }; use reth_provider::{DatabaseProviderRW, ProviderFactory}; use std::{collections::BTreeMap, ops::Mul, str::FromStr}; @@ -563,7 +563,7 @@ mod tests { tx: &TX, address: Address, account: Account, - storage: &BTreeMap, + storage: &BTreeMap, ) { let hashed_address = keccak256(address); tx.put::(hashed_address, account).unwrap(); @@ -572,8 +572,8 @@ mod tests { fn insert_storage<'a, TX: DbTxMut<'a>>( tx: &TX, - hashed_address: H256, - storage: &BTreeMap, + hashed_address: B256, + storage: &BTreeMap, ) { for (k, v) in storage { tx.put::( @@ -588,11 +588,11 @@ mod tests { let db = create_test_rw_db(); let factory = ProviderFactory::new(db.as_ref(), MAINNET.clone()); let tx = factory.provider_rw().unwrap(); - let hashed_address = H256::from_low_u64_be(1); + let hashed_address = B256::with_last_byte(1); let mut hashed_storage_cursor = tx.tx_ref().cursor_dup_write::().unwrap(); - let data = inputs.iter().map(|x| H256::from_str(x).unwrap()); + let data = inputs.iter().map(|x| B256::from_str(x).unwrap()); let value = U256::from(0); for key in data { hashed_storage_cursor.upsert(hashed_address, StorageEntry { key, value }).unwrap(); @@ -603,7 +603,7 @@ mod tests { StorageRoot::new_hashed(tx.tx_ref(), hashed_address).root_with_updates().unwrap(); // 1. 
Some state transition happens, update the hashed storage to the new value - let modified_key = H256::from_str(modified).unwrap(); + let modified_key = B256::from_str(modified).unwrap(); let value = U256::from(1); if hashed_storage_cursor.seek_by_key_subkey(hashed_address, modified_key).unwrap().is_some() { @@ -648,7 +648,7 @@ mod tests { #[test] fn arbitrary_storage_root() { - proptest!(ProptestConfig::with_cases(10), |(item: (Address, std::collections::BTreeMap))| { + proptest!(ProptestConfig::with_cases(10), |(item: (Address, std::collections::BTreeMap))| { let (address, storage) = item; let hashed_address = keccak256(address); @@ -679,7 +679,7 @@ mod tests { Address::random(), ( Account { nonce: 0, balance: U256::from(0), bytecode_hash: None }, - BTreeMap::from([(H256::from_low_u64_be(0x4), U256::from(12))]), + BTreeMap::from([(B256::with_last_byte(0x4), U256::from(12))]), ), ), ( @@ -698,8 +698,8 @@ mod tests { bytecode_hash: Some(keccak256("test")), }, BTreeMap::from([ - (H256::zero(), U256::from(3)), - (H256::from_low_u64_be(2), U256::from(1)), + (B256::ZERO, U256::from(3)), + (B256::with_last_byte(2), U256::from(1)), ]), ), ), @@ -737,10 +737,8 @@ mod tests { let tx = factory.provider_rw().unwrap(); let address = Address::random(); - let storage = BTreeMap::from([ - (H256::zero(), U256::from(3)), - (H256::from_low_u64_be(2), U256::from(1)), - ]); + let storage = + BTreeMap::from([(B256::ZERO, U256::from(3)), (B256::with_last_byte(2), U256::from(1))]); let code = "el buen fla"; let account = Account { @@ -758,7 +756,7 @@ mod tests { assert_eq!(storage_root(storage.into_iter()), got); } - type State = BTreeMap)>; + type State = BTreeMap)>; #[test] fn arbitrary_state_root() { @@ -830,7 +828,7 @@ mod tests { assert_eq!(expected, got); } - fn encode_account(account: Account, storage_root: Option) -> Vec { + fn encode_account(account: Account, storage_root: Option) -> Vec { let mut account = EthAccount::from(account); if let Some(storage_root) = storage_root { account = account.with_storage_root(storage_root); @@ -858,7 +856,7 @@ mod tests { ("3000000000000000000000000000000000000000000000000000000000E00000", 0x127a89), ("3000000000000000000000000000000000000000000000000000000000E00001", 0x05), ] - .map(|(slot, val)| (H256::from_str(slot).unwrap(), U256::from(val))), + .map(|(slot, val)| (B256::from_str(slot).unwrap(), U256::from(val))), ); let mut hashed_storage_cursor = @@ -884,7 +882,7 @@ mod tests { ("3000000000000000000000000000000000000000000000000000000000E00000", 0x127a89), ("3000000000000000000000000000000000000000000000000000000000E00001", 0x05), ] - .map(|(slot, val)| (H256::from_str(slot).unwrap(), U256::from(val))), + .map(|(slot, val)| (B256::from_str(slot).unwrap(), U256::from(val))), ); let db = create_test_rw_db(); @@ -900,7 +898,7 @@ mod tests { // Insert first account let key1 = - H256::from_str("b000000000000000000000000000000000000000000000000000000000000000") + B256::from_str("b000000000000000000000000000000000000000000000000000000000000000") .unwrap(); let account1 = Account { nonce: 0, balance: U256::from(3).mul(ether), bytecode_hash: None }; hashed_account_cursor.upsert(key1, account1).unwrap(); @@ -921,7 +919,7 @@ mod tests { assert_eq!(key3[0], 0xB0); assert_eq!(key3[1], 0x41); let code_hash = - H256::from_str("5be74cad16203c4905c068b012a2e9fb6d19d036c410f16fd177f337541440dd") + B256::from_str("5be74cad16203c4905c068b012a2e9fb6d19d036c410f16fd177f337541440dd") .unwrap(); let account3 = Account { nonce: 0, balance: U256::from(2).mul(ether), bytecode_hash: 
Some(code_hash) }; @@ -944,7 +942,7 @@ mod tests { ); let key4a = - H256::from_str("B1A0000000000000000000000000000000000000000000000000000000000000") + B256::from_str("B1A0000000000000000000000000000000000000000000000000000000000000") .unwrap(); let account4a = Account { nonce: 0, balance: U256::from(4).mul(ether), ..Default::default() }; @@ -952,7 +950,7 @@ mod tests { hash_builder.add_leaf(Nibbles::unpack(key4a), &encode_account(account4a, None)); let key5 = - H256::from_str("B310000000000000000000000000000000000000000000000000000000000000") + B256::from_str("B310000000000000000000000000000000000000000000000000000000000000") .unwrap(); let account5 = Account { nonce: 0, balance: U256::from(8).mul(ether), ..Default::default() }; @@ -960,7 +958,7 @@ mod tests { hash_builder.add_leaf(Nibbles::unpack(key5), &encode_account(account5, None)); let key6 = - H256::from_str("B340000000000000000000000000000000000000000000000000000000000000") + B256::from_str("B340000000000000000000000000000000000000000000000000000000000000") .unwrap(); let account6 = Account { nonce: 0, balance: U256::from(1).mul(ether), ..Default::default() }; @@ -969,9 +967,9 @@ mod tests { // Populate account & storage trie DB tables let expected_root = - H256::from_str("72861041bc90cd2f93777956f058a545412b56de79af5eb6b8075fe2eabbe015") + B256::from_str("72861041bc90cd2f93777956f058a545412b56de79af5eb6b8075fe2eabbe015") .unwrap(); - let computed_expected_root: H256 = triehash::trie_root::([ + let computed_expected_root: B256 = triehash::trie_root::([ (key1, encode_account(account1, None)), (key2, encode_account(account2, None)), (key3, encode_account(account3, Some(account3_storage_root))), @@ -1048,7 +1046,7 @@ mod tests { prefix_set.insert(Nibbles::unpack(key4b)); let expected_state_root = - H256::from_str("8e263cd4eefb0c3cbbb14e5541a66a755cad25bcfab1e10dd9d706263e811b28") + B256::from_str("8e263cd4eefb0c3cbbb14e5541a66a755cad25bcfab1e10dd9d706263e811b28") .unwrap(); let (root, trie_updates) = StateRoot::new(tx.tx_ref()) @@ -1093,7 +1091,7 @@ mod tests { let mut account_prefix_set = PrefixSetMut::default(); account_prefix_set.insert(Nibbles::unpack(account.0)); - let computed_expected_root: H256 = triehash::trie_root::([ + let computed_expected_root: B256 = triehash::trie_root::([ (key1, encode_account(account1, None)), // DELETED: (key2, encode_account(account2, None)), (key3, encode_account(account3, Some(account3_storage_root))), @@ -1150,7 +1148,7 @@ mod tests { account_prefix_set.insert(Nibbles::unpack(account2.0)); account_prefix_set.insert(Nibbles::unpack(account3.0)); - let computed_expected_root: H256 = triehash::trie_root::([ + let computed_expected_root: B256 = triehash::trie_root::([ (key1, encode_account(account1, None)), // DELETED: (key2, encode_account(account2, None)), // DELETED: (key3, encode_account(account3, Some(account3_storage_root))), @@ -1247,7 +1245,7 @@ mod tests { cases: 128, ..ProptestConfig::default() })] #[test] - fn fuzz_state_root_incremental(account_changes: [BTreeMap; 5]) { + fn fuzz_state_root_incremental(account_changes: [BTreeMap; 5]) { tokio::runtime::Runtime::new().unwrap().block_on(async { let db = create_test_rw_db(); let factory = ProviderFactory::new(db.as_ref(), MAINNET.clone()); @@ -1287,7 +1285,7 @@ mod tests { let factory = ProviderFactory::new(db.as_ref(), MAINNET.clone()); let tx = factory.provider_rw().unwrap(); - let hashed_address = H256::random(); + let hashed_address = B256::random(); let (expected_root, expected_updates) = extension_node_storage_trie(&tx, 
hashed_address); let (got, _, updates) = @@ -1311,8 +1309,8 @@ mod tests { fn extension_node_storage_trie( tx: &DatabaseProviderRW<'_, &DatabaseEnv>, - hashed_address: H256, - ) -> (H256, HashMap) { + hashed_address: B256, + ) -> (B256, HashMap) { let value = U256::from(1); let mut hashed_storage = tx.tx_ref().cursor_write::().unwrap(); @@ -1327,8 +1325,10 @@ mod tests { hex!("30af8f0000000000000000000000000000000000000000000000000000000000"), hex!("3100000000000000000000000000000000000000000000000000000000000000"), ] { - hashed_storage.upsert(hashed_address, StorageEntry { key: H256(key), value }).unwrap(); - hb.add_leaf(Nibbles::unpack(key), &reth_rlp::encode_fixed_size(&value)); + hashed_storage + .upsert(hashed_address, StorageEntry { key: B256::new(key), value }) + .unwrap(); + hb.add_leaf(Nibbles::unpack(key), &alloy_rlp::encode_fixed_size(&value)); } let root = hb.root(); @@ -1336,9 +1336,9 @@ mod tests { (root, updates) } - fn extension_node_trie(tx: &DatabaseProviderRW<'_, &DatabaseEnv>) -> H256 { + fn extension_node_trie(tx: &DatabaseProviderRW<'_, &DatabaseEnv>) -> B256 { let a = - Account { nonce: 0, balance: U256::from(1u64), bytecode_hash: Some(H256::random()) }; + Account { nonce: 0, balance: U256::from(1u64), bytecode_hash: Some(B256::random()) }; let val = encode_account(a, None); let mut hashed_accounts = tx.tx_ref().cursor_write::().unwrap(); @@ -1352,7 +1352,7 @@ mod tests { hex!("30af8f0000000000000000000000000000000000000000000000000000000000"), hex!("3100000000000000000000000000000000000000000000000000000000000000"), ] { - hashed_accounts.upsert(H256(key), a).unwrap(); + hashed_accounts.upsert(B256::new(key), a).unwrap(); hb.add_leaf(Nibbles::unpack(key), &val); } diff --git a/crates/trie/src/trie_cursor/storage_cursor.rs b/crates/trie/src/trie_cursor/storage_cursor.rs index 61091e7741..53c84448c7 100644 --- a/crates/trie/src/trie_cursor/storage_cursor.rs +++ b/crates/trie/src/trie_cursor/storage_cursor.rs @@ -6,7 +6,7 @@ use reth_db::{ }; use reth_primitives::{ trie::{BranchNodeCompact, StoredNibblesSubKey}, - H256, + B256, }; /// A cursor over the storage trie. @@ -14,12 +14,12 @@ use reth_primitives::{ pub struct StorageTrieCursor { /// The underlying cursor. pub cursor: C, - hashed_address: H256, + hashed_address: B256, } impl StorageTrieCursor { /// Create a new storage trie cursor. - pub fn new(cursor: C, hashed_address: H256) -> Self { + pub fn new(cursor: C, hashed_address: B256) -> Self { Self { cursor, hashed_address } } } @@ -75,9 +75,9 @@ mod tests { let provider = factory.provider_rw().unwrap(); let mut cursor = provider.tx_ref().cursor_dup_write::().unwrap(); - let hashed_address = H256::random(); + let hashed_address = B256::random(); let key = vec![0x2, 0x3]; - let value = BranchNodeCompact::new(1, 1, 1, vec![H256::random()], None); + let value = BranchNodeCompact::new(1, 1, 1, vec![B256::random()], None); cursor .upsert( diff --git a/crates/trie/src/trie_cursor/subnode.rs b/crates/trie/src/trie_cursor/subnode.rs index fb82dc5333..a0e30dd937 100644 --- a/crates/trie/src/trie_cursor/subnode.rs +++ b/crates/trie/src/trie_cursor/subnode.rs @@ -1,6 +1,6 @@ use reth_primitives::{ trie::{nodes::CHILD_INDEX_RANGE, BranchNodeCompact, Nibbles, StoredSubNode}, - H256, + B256, }; /// Cursor for iterating over a subtrie. @@ -106,7 +106,7 @@ impl CursorSubNode { } /// Returns the root hash of the current node, if it has one. 
- pub fn hash(&self) -> Option { + pub fn hash(&self) -> Option { if self.hash_flag() { let node = self.node.as_ref().unwrap(); match self.nibble { diff --git a/crates/trie/src/updates.rs b/crates/trie/src/updates.rs index f270f18543..2cd6578ee5 100644 --- a/crates/trie/src/updates.rs +++ b/crates/trie/src/updates.rs @@ -6,7 +6,7 @@ use reth_db::{ }; use reth_primitives::{ trie::{BranchNodeCompact, Nibbles, StorageTrieEntry, StoredNibbles, StoredNibblesSubKey}, - H256, + B256, }; use std::collections::{hash_map::IntoIter, HashMap}; @@ -16,9 +16,9 @@ pub enum TrieKey { /// A node in the account trie. AccountNode(StoredNibbles), /// A node in the storage trie. - StorageNode(H256, StoredNibblesSubKey), + StorageNode(B256, StoredNibblesSubKey), /// Storage trie of an account. - StorageTrie(H256), + StorageTrie(B256), } /// The operation to perform on the trie. @@ -88,7 +88,7 @@ impl TrieUpdates { #[allow(clippy::mutable_key_type)] pub fn extend_with_storage_updates( &mut self, - hashed_address: H256, + hashed_address: B256, updates: HashMap, ) { self.extend(updates.into_iter().map(|(nibbles, node)| { diff --git a/crates/trie/src/walker.rs b/crates/trie/src/walker.rs index c3073b3643..e9832d61bd 100644 --- a/crates/trie/src/walker.rs +++ b/crates/trie/src/walker.rs @@ -6,7 +6,7 @@ use crate::{ use reth_db::{table::Key, DatabaseError}; use reth_primitives::{ trie::{BranchNodeCompact, Nibbles}, - H256, + B256, }; use std::marker::PhantomData; @@ -216,7 +216,7 @@ impl<'a, K: Key + From>, C: TrieCursor> TrieWalker<'a, K, C> { } /// Returns the current hash in the trie if any. - pub fn hash(&self) -> Option { + pub fn hash(&self) -> Option { self.stack.last().and_then(|n| n.hash()) } @@ -226,7 +226,7 @@ impl<'a, K: Key + From>, C: TrieCursor> TrieWalker<'a, K, C> { } /// Returns the next unprocessed key in the trie. 
- pub fn next_unprocessed_key(&self) -> Option { + pub fn next_unprocessed_key(&self) -> Option { self.key() .as_ref() .and_then(|key| { @@ -238,7 +238,7 @@ impl<'a, K: Key + From>, C: TrieCursor> TrieWalker<'a, K, C> { }) .map(|mut key| { key.resize(32, 0); - H256::from_slice(key.as_slice()) + B256::from_slice(key.as_slice()) }) } @@ -302,7 +302,7 @@ mod tests { let account_trie = AccountTrieCursor::new(account_cursor); test_cursor(account_trie, &expected); - let hashed_address = H256::random(); + let hashed_address = B256::random(); let mut storage_cursor = tx.tx_ref().cursor_dup_write::().unwrap(); for (k, v) in &inputs { storage_cursor @@ -351,7 +351,7 @@ mod tests { 0b00100, 0, vec![], - Some(H256::random()), + Some(B256::random()), ), ), ( @@ -361,13 +361,13 @@ mod tests { 0b00010, 0, 0b00010, - vec![H256::random()], + vec![B256::random()], None, ), ), ]; - let hashed_address = H256::random(); + let hashed_address = B256::random(); for (k, v) in nodes { cursor.upsert(hashed_address, StorageTrieEntry { nibbles: k.into(), node: v }).unwrap(); } diff --git a/deny.toml b/deny.toml index 927db548b8..10e5a73136 100644 --- a/deny.toml +++ b/deny.toml @@ -95,8 +95,10 @@ unknown-registry = "warn" # in the allow list is encountered unknown-git = "deny" allow-git = [ - "https://github.com/bluealloy/revm", - "https://github.com/boa-dev/boa", + # TODO: remove, see ./Cargo.toml + "https://github.com/Evalir/revm", + # "https://github.com/bluealloy/revm", + "https://github.com/ethereum/c-kzg-4844", "https://github.com/sigp/discv5", "https://github.com/stevefan1999-personal/rust-igd", diff --git a/docs/crates/eth-wire.md b/docs/crates/eth-wire.md index 0270668d3f..dfc85e37db 100644 --- a/docs/crates/eth-wire.md +++ b/docs/crates/eth-wire.md @@ -65,7 +65,7 @@ These traits are defined as follows: [Crate: crates/rlp](https://github.com/paradigmxyz/reth/tree/1563506aea09049a85e5cc72c2894f3f7a371581/crates/rlp) ```rust, ignore pub trait Decodable: Sized { - fn decode(buf: &mut &[u8]) -> Result; + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result; } pub trait Encodable { fn encode(&self, out: &mut dyn BufMut); @@ -127,7 +127,7 @@ impl Encodable for TransactionSigned { } impl Decodable for TransactionSigned { - fn decode(buf: &mut &[u8]) -> Result { + fn decode(buf: &mut &[u8]) -> alloy_rlp::Result { // Implementation omitted for brevity //... } diff --git a/docs/crates/network.md b/docs/crates/network.md index edfa3515b0..8da43254f4 100644 --- a/docs/crates/network.md +++ b/docs/crates/network.md @@ -129,7 +129,7 @@ pub struct NetworkConfig { /// The id of the network pub chain: Chain, /// Genesis hash of the network - pub genesis_hash: H256, + pub genesis_hash: B256, /// The [`ForkFilter`] to use at launch for authenticating sessions. /// /// See also @@ -279,7 +279,7 @@ pub struct StateFetcher { HashMap>>>, /// Currently active [`GetBlockBodies`] requests inflight_bodies_requests: - HashMap, PeerRequestResult>>>, + HashMap, PeerRequestResult>>>, /// The list of _available_ peers for requests. peers: HashMap, /// The handle to the peers manager @@ -309,7 +309,7 @@ pub struct NetworkState { /// Network discovery. discovery: Discovery, /// The genesis hash of the network we're on - genesis_hash: H256, + genesis_hash: B256, /// The type that handles requests. /// /// The fetcher streams RLPx related requests on a per-peer basis to this type. 
This type will @@ -334,7 +334,7 @@ impl HeadersClient for FetchClient { } impl BodiesClient for FetchClient { - async fn get_block_bodies(&self, request: Vec) -> PeerRequestResult> { + async fn get_block_bodies(&self, request: Vec) -> PeerRequestResult> { let (response, rx) = oneshot::channel(); self.request_tx.send(DownloadRequest::GetBlockBodies { request, response })?; rx.await? @@ -602,7 +602,7 @@ The `GetBlockBodies` payload is simpler, it just contains a vector of requested ```rust,ignore pub struct GetBlockBodies( /// The block hashes to request bodies for. - pub Vec, + pub Vec, ); ``` @@ -826,7 +826,7 @@ Begins by inserting a `Peer` into `TransactionsManager.peers` by `peer_id`, whic ```rust,ignore struct Peer { /// Keeps track of transactions that we know the peer has seen. - transactions: LruCache, + transactions: LruCache, /// A communication channel directly to the session task. request_tx: PeerRequestSender, } @@ -878,7 +878,7 @@ Next in the `poll` method, `TransactionsCommand`s sent through the `Transactions [File: crates/net/network/src/transactions.rs](https://github.com/paradigmxyz/reth/blob/1563506aea09049a85e5cc72c2894f3f7a371581/crates/net/network/src/transactions.rs) ```rust,ignore enum TransactionsCommand { - PropagateHash(H256), + PropagateHash(B256), } ``` diff --git a/docs/crates/stages.md b/docs/crates/stages.md index f9fcc33cd6..1ea64aaab1 100644 --- a/docs/crates/stages.md +++ b/docs/crates/stages.md @@ -132,7 +132,7 @@ Following a successful `BodyStage`, the `SenderRecoveryStage` starts to execute. [File: crates/primitives/src/transaction/signature.rs](https://github.com/paradigmxyz/reth/blob/1563506aea09049a85e5cc72c2894f3f7a371581/crates/primitives/src/transaction/signature.rs) ```rust,ignore -pub(crate) fn recover_signer(&self, hash: H256) -> Option
<Address> { +pub(crate) fn recover_signer(&self, hash: B256) -> Option<Address>
{ let mut sig: [u8; 65] = [0; 65]; sig[0..32].copy_from_slice(&self.r.to_be_bytes::<32>()); diff --git a/docs/design/database.md b/docs/design/database.md index 45f9d2a139..4c51b613e0 100644 --- a/docs/design/database.md +++ b/docs/design/database.md @@ -31,10 +31,10 @@ Below, you can see the table design that implements this scheme: erDiagram CanonicalHeaders { u64 BlockNumber "PK" - H256 HeaderHash "Value for CanonicalHeaders" + B256 HeaderHash "Value for CanonicalHeaders" } HeaderNumbers { - H256 BlockHash "PK" + B256 BlockHash "PK" u64 BlockNumber } Headers { @@ -59,7 +59,7 @@ Transactions { TransactionSignedNoHash Data } TxHashNumber { - H256 TxHash "PK" + B256 TxHash "PK" u64 TxNumber } TransactionBlock { @@ -71,7 +71,7 @@ Receipts { Receipt Data } Bytecodes { - H256 CodeHash "PK" + B256 CodeHash "PK" Bytes Code } PlainAccountState { @@ -80,36 +80,36 @@ PlainAccountState { } PlainStorageState { Address Account "PK" - H256 StorageKey "PK" + B256 StorageKey "PK" U256 StorageValue } AccountHistory { - H256 Account "PK" + B256 Account "PK" BlockNumberList BlockNumberList "List of transitions where account was changed" } StorageHistory { - H256 Account "PK" - H256 StorageKey "PK" + B256 Account "PK" + B256 StorageKey "PK" BlockNumberList BlockNumberList "List of transitions where account storage entry was changed" } AccountChangeSet { u64 BlockNumber "PK" - H256 Account "PK" + B256 Account "PK" ChangeSet AccountChangeSet "Account before transition" } StorageChangeSet { u64 BlockNumber "PK" - H256 Account "PK" - H256 StorageKey "PK" + B256 Account "PK" + B256 StorageKey "PK" ChangeSet StorageChangeSet "Storage entry before transition" } HashedAccount { - H256 HashedAddress "PK" + B256 HashedAddress "PK" Account Data } HashedStorage { - H256 HashedAddress "PK" - H256 HashedStorageKey "PK" + B256 HashedAddress "PK" + B256 HashedStorageKey "PK" U256 StorageValue } AccountsTrie { @@ -117,7 +117,7 @@ AccountsTrie { BranchNodeCompact Node } StoragesTrie { - H256 HashedAddress "PK" + B256 HashedAddress "PK" StoredNibblesSubKey NibblesSubKey "PK" StorageTrieEntry Node } diff --git a/examples/db-access.rs b/examples/db-access.rs index 60089dba75..289f895bec 100644 --- a/examples/db-access.rs +++ b/examples/db-access.rs @@ -1,5 +1,5 @@ use reth_db::open_db_read_only; -use reth_primitives::{Address, ChainSpecBuilder, H256, U256}; +use reth_primitives::{Address, ChainSpecBuilder, B256, U256}; use reth_provider::{ AccountReader, BlockReader, BlockSource, HeaderProvider, ProviderFactory, ReceiptProvider, StateProvider, TransactionsProvider, @@ -180,7 +180,7 @@ fn receipts_provider_example(provider: T) -> eyre::Result<()> { let address = Address::random(); - let storage_key = H256::random(); + let storage_key = B256::random(); // Can get account / storage state with simple point queries let _account = provider.basic_account(address)?; diff --git a/examples/rpc-db/src/main.rs b/examples/rpc-db/src/main.rs index 54aa243acc..aa97f82b62 100644 --- a/examples/rpc-db/src/main.rs +++ b/examples/rpc-db/src/main.rs @@ -20,23 +20,14 @@ use reth::{ use reth::rpc::builder::{ RethRpcModule, RpcModuleBuilder, RpcServerConfig, TransportRpcModuleConfig, }; - -// Code which we'd ideally like to not need to import if you're only spinning up -// read-only parts of the API and do not require access to pending state or to -// EVM sims -use reth::{ - beacon_consensus::BeaconConsensus, - blockchain_tree::{ - BlockchainTree, BlockchainTreeConfig, ShareableBlockchainTree, TreeExternals, - }, - revm::Factory as ExecutionFactory, 
-}; - // Configuring the network parts, ideally also wouldn't ned to think about this. -use reth::{providers::test_utils::TestCanonStateSubscriptions, tasks::TokioTaskExecutor}; +use myrpc_ext::{MyRpcExt, MyRpcExtApiServer}; +use reth::{ + blockchain_tree::noop::NoopBlockchainTree, providers::test_utils::TestCanonStateSubscriptions, + tasks::TokioTaskExecutor, +}; use std::{path::Path, sync::Arc}; -use myrpc_ext::{MyRpcExt, MyRpcExtApiServer}; // Custom rpc extension pub mod myrpc_ext; @@ -47,26 +38,10 @@ async fn main() -> eyre::Result<()> { let spec = Arc::new(ChainSpecBuilder::mainnet().build()); let factory = ProviderFactory::new(db.clone(), spec.clone()); - // 2. Setup blcokchain tree to be able to receive live notifs - // TODO: Make this easier to configure - let provider = { - let consensus = Arc::new(BeaconConsensus::new(spec.clone())); - let exec_factory = ExecutionFactory::new(spec.clone()); - - let externals = TreeExternals::new(db.clone(), consensus, exec_factory, spec.clone()); - let tree_config = BlockchainTreeConfig::default(); - let (canon_state_notification_sender, _receiver) = - tokio::sync::broadcast::channel(tree_config.max_reorg_depth() as usize * 2); - - let tree = ShareableBlockchainTree::new(BlockchainTree::new( - externals, - canon_state_notification_sender, - tree_config, - None, - )?); - - BlockchainProvider::new(factory, tree)? - }; + // 2. Setup the blockchain provider using only the database provider and a noop for the tree to + // satisfy trait bounds. Tree is not used in this example since we are only operating on the + // disk and don't handle new blocks/live sync etc, which is done by the blockchain tree. + let provider = BlockchainProvider::new(factory, NoopBlockchainTree::default())?; let rpc_builder = RpcModuleBuilder::default() .with_provider(provider.clone()) diff --git a/testing/ef-tests/Cargo.toml b/testing/ef-tests/Cargo.toml index 7ca4f599b8..a47990e7c9 100644 --- a/testing/ef-tests/Cargo.toml +++ b/testing/ef-tests/Cargo.toml @@ -16,9 +16,10 @@ reth-primitives.workspace = true reth-db = { workspace = true, features = ["mdbx", "test-utils"] } reth-provider.workspace = true reth-stages = { path = "../../crates/stages" } -reth-rlp.workspace = true reth-interfaces.workspace = true reth-revm = { path = "../../crates/revm" } + +alloy-rlp.workspace = true tokio = "1.28.1" walkdir = "2.3.3" serde = "1.0.163" diff --git a/testing/ef-tests/src/cases/blockchain_test.rs b/testing/ef-tests/src/cases/blockchain_test.rs index d240885e72..6b5c10aba0 100644 --- a/testing/ef-tests/src/cases/blockchain_test.rs +++ b/testing/ef-tests/src/cases/blockchain_test.rs @@ -4,10 +4,10 @@ use crate::{ models::{BlockchainTest, ForkSpec, RootOrState}, Case, Error, Suite, }; +use alloy_rlp::Decodable; use reth_db::test_utils::create_test_rw_db; use reth_primitives::{BlockBody, SealedBlock}; use reth_provider::{BlockWriter, ProviderFactory}; -use reth_rlp::Decodable; use reth_stages::{stages::ExecutionStage, ExecInput, Stage}; use std::{collections::BTreeMap, fs, path::Path, sync::Arc}; diff --git a/testing/ef-tests/src/lib.rs b/testing/ef-tests/src/lib.rs index c6b743a909..24180d5cce 100644 --- a/testing/ef-tests/src/lib.rs +++ b/testing/ef-tests/src/lib.rs @@ -3,7 +3,7 @@ #![doc( html_logo_url = "https://raw.githubusercontent.com/paradigmxyz/reth/main/assets/reth-docs.png", html_favicon_url = "https://avatars0.githubusercontent.com/u/97369466?s=256", - issue_tracker_base_url = "https://github.com/paradigmxzy/reth/issues/" + issue_tracker_base_url = 
"https://github.com/paradigmxyz/reth/issues/" )] #![warn(missing_debug_implementations, missing_docs, unreachable_pub, rustdoc::all)] #![deny(unused_must_use, rust_2018_idioms)] diff --git a/testing/ef-tests/src/models.rs b/testing/ef-tests/src/models.rs index 5171f4fe99..89c644e5f7 100644 --- a/testing/ef-tests/src/models.rs +++ b/testing/ef-tests/src/models.rs @@ -7,9 +7,9 @@ use reth_db::{ transaction::{DbTx, DbTxMut}, }; use reth_primitives::{ - keccak256, Account as RethAccount, Address, BigEndianHash, Bloom, Bytecode, Bytes, ChainSpec, - ChainSpecBuilder, Header as RethHeader, JsonU256, SealedHeader, StorageEntry, Withdrawal, H160, - H256, H64, U256, + keccak256, Account as RethAccount, Address, Bloom, Bytecode, Bytes, ChainSpec, + ChainSpecBuilder, Header as RethHeader, JsonU256, SealedHeader, StorageEntry, Withdrawal, B256, + B64, U256, }; use serde::{self, Deserialize}; use std::{collections::BTreeMap, ops::Deref}; @@ -30,7 +30,7 @@ pub struct BlockchainTest { /// The test pre-state. pub pre: State, /// Hash of the best block. - pub lastblockhash: H256, + pub lastblockhash: B256, /// Network spec. pub network: ForkSpec, #[serde(default)] @@ -55,35 +55,35 @@ pub struct Header { /// Gas used. pub gas_used: JsonU256, /// Block Hash. - pub hash: H256, + pub hash: B256, /// Mix hash. - pub mix_hash: H256, + pub mix_hash: B256, /// Seal nonce. - pub nonce: H64, + pub nonce: B64, /// Block number. pub number: JsonU256, /// Parent hash. - pub parent_hash: H256, + pub parent_hash: B256, /// Receipt trie. - pub receipt_trie: H256, + pub receipt_trie: B256, /// State root. - pub state_root: H256, + pub state_root: B256, /// Timestamp. pub timestamp: JsonU256, /// Transactions trie. - pub transactions_trie: H256, + pub transactions_trie: B256, /// Uncle hash. - pub uncle_hash: H256, + pub uncle_hash: B256, /// Base fee per gas. pub base_fee_per_gas: Option, /// Withdrawals root. - pub withdrawals_root: Option, + pub withdrawals_root: Option, /// Blob gas used. pub blob_gas_used: Option, /// Excess blob gas. pub excess_blob_gas: Option, /// Parent beacon block root. - pub parent_beacon_block_root: Option, + pub parent_beacon_block_root: Option, } impl From
<Header> for SealedHeader { @@ -96,7 +96,7 @@ impl From<Header>
for SealedHeader { gas_limit: value.gas_limit.0.to::(), gas_used: value.gas_used.0.to::(), mix_hash: value.mix_hash, - nonce: value.nonce.into_uint().as_u64(), + nonce: u64::from_be_bytes(value.nonce.0), number: value.number.0.to::(), timestamp: value.timestamp.0.to::(), transactions_root: value.transactions_trie, @@ -164,12 +164,12 @@ impl State { }, )?; if let Some(code_hash) = code_hash { - tx.put::(code_hash, Bytecode::new_raw(account.code.0.clone()))?; + tx.put::(code_hash, Bytecode::new_raw(account.code.clone()))?; } account.storage.iter().try_for_each(|(k, v)| { tx.put::( address, - StorageEntry { key: H256::from_slice(&k.0.to_be_bytes::<32>()), value: v.0 }, + StorageEntry { key: B256::from_slice(&k.0.to_be_bytes::<32>()), value: v.0 }, ) })?; } @@ -191,7 +191,7 @@ impl Deref for State { #[serde(untagged)] pub enum RootOrState { /// If state is too big, only state root is present - Root(H256), + Root(B256), /// State State(BTreeMap), } @@ -238,7 +238,7 @@ impl Account { let mut storage_cursor = tx.cursor_dup_read::()?; for (slot, value) in self.storage.iter() { if let Some(entry) = - storage_cursor.seek_by_key_subkey(address, H256(slot.0.to_be_bytes()))? + storage_cursor.seek_by_key_subkey(address, B256::new(slot.0.to_be_bytes()))? { if U256::from_be_bytes(entry.key.0) == slot.0 { assert_equal( @@ -396,7 +396,7 @@ pub struct Transaction { /// Max priority fee per gas pub max_priority_fee_per_gas: Option, /// Transaction hash. - pub hash: Option, + pub hash: Option, } /// Access list item @@ -404,9 +404,9 @@ pub struct Transaction { #[serde(rename_all = "camelCase")] pub struct AccessListItem { /// Account address - pub address: H160, + pub address: Address, /// Storage key. - pub storage_keys: Vec, + pub storage_keys: Vec, } /// Access list. diff --git a/testing/ef-tests/src/result.rs b/testing/ef-tests/src/result.rs index 9be3289c15..2709bcab54 100644 --- a/testing/ef-tests/src/result.rs +++ b/testing/ef-tests/src/result.rs @@ -46,7 +46,7 @@ pub enum Error { RethError(#[from] RethError), /// An error occurred while decoding RLP. #[error("An error occurred deserializing RLP")] - RlpDecodeError(#[from] reth_rlp::DecodeError), + RlpDecodeError(#[from] alloy_rlp::Error), } /// The result of running a test.