Merge branch 'main' into zkbesu

# Conflicts:
#	.github/workflows/codeql.yml
#	.github/workflows/gradle-wrapper-validation.yml
#	.github/workflows/pr-checklist-on-open.yml
#	.github/workflows/release.yml
#	.github/workflows/sonarcloud.yml
#	build.gradle
This commit is contained in:
Fabio Di Fabio
2024-02-01 14:29:33 +01:00
153 changed files with 9206 additions and 2994 deletions

114
.github/workflows/acceptance-tests.yml vendored Normal file
View File

@@ -0,0 +1,114 @@
name: acceptance-tests
on:
  pull_request:
  pull_request_review:
    types: [submitted]

env:
  GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false"
  total-runners: 16

jobs:
  shouldRun:
    name: checks to ensure we should run
    # necessary because there is no single PR approved event, need to check all comments/approvals/denials
    runs-on: ubuntu-22.04
    outputs:
      shouldRun: ${{steps.shouldRun.outputs.result}}
    steps:
      - name: required check
        id: shouldRun
        uses: actions/github-script@v7.0.1
        env:
          # fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
          RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
        with:
          script: |
            const { RELEVANT_SHA } = process.env;
            const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
              owner: context.repo.owner,
              repo: context.repo.repo,
              ref: RELEVANT_SHA,
            });
            const acceptanceTested = statuses && statuses.filter(({ context }) => context === 'acceptance-tests');
            // Array.find returns an element (or undefined), never a number, so the old
            // "find(...) > 0" comparison was always false; some() yields the intended boolean.
            const alreadyRun = acceptanceTested && acceptanceTested.some(({ state }) => state === 'success');
            const { data: reviews } = await github.rest.pulls.listReviews({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: context.issue.number,
            });
            const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
            // inside github-script "github" is the Octokit client; the triggering actor lives on "context"
            const shouldRun = !alreadyRun && context.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
            console.log("tests should be run = %j", shouldRun);
            console.log("alreadyRun = %j", alreadyRun);
            console.log("approvingReviews = %j", approvingReviews.length);
            return shouldRun;

  acceptanceTestEthereum:
    runs-on: ubuntu-22.04
    name: "Acceptance Runner"
    needs: shouldRun
    permissions:
      statuses: write
      checks: write
    if: ${{ needs.shouldRun.outputs.shouldRun == 'true'}}
    strategy:
      fail-fast: true
      matrix:
        runner_index: [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15]
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: get acceptance test report
        uses: dawidd6/action-download-artifact@v2
        with:
          branch: main
          name_is_regexp: true
          name: 'acceptance-node-\d*\d-test-results'
          path: tmp/junit-xml-reports-downloaded
          if_no_artifact_found: true
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: Split tests
        id: split-tests
        uses: r7kamura/split-tests-by-timings@v0
        with:
          reports: tmp/junit-xml-reports-downloaded
          glob: 'acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/**/*Test.java'
          total: ${{env.total-runners}}
          index: ${{ matrix.runner_index }}
      - name: write out test list
        run: echo "${{ steps.split-tests.outputs.paths }}" >> testList.txt
      - name: format gradle args
        # regex means: first truncate file paths to align with package name, then swap path delimiter with package delimiter,
        # then drop file extension, then insert --tests option between each.
        run: cat testList.txt | sed -e 's@acceptance-tests/tests/src/test/java/@--tests\ @g;s@/@.@g;s/\.java//g' > gradleArgs.txt
      - name: run acceptance tests
        run: ./gradlew acceptanceTest `cat gradleArgs.txt` -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
      - name: cleanup tempfiles
        run: rm testList.txt gradleArgs.txt
      - name: Upload Acceptance Test Results
        uses: actions/upload-artifact@v3.1.0
        with:
          name: acceptance-node-${{matrix.runner_index}}-test-results
          path: 'acceptance-tests/tests/build/test-results/acceptanceTest/TEST-*.xml'
      - name: Publish Test Report
        uses: mikepenz/action-junit-report@v4
        if: (success() || failure()) # always run even if the build step fails
        with:
          report_paths: 'acceptance-tests/tests/build/test-results/acceptanceTest/TEST-*.xml'

  # aggregation job: a single required status for branch protection
  acceptance-tests:
    runs-on: ubuntu-22.04
    needs: [ acceptanceTestEthereum ]
    permissions:
      checks: write
      statuses: write
    steps:
      - name: consolidation
        run: echo "consolidating statuses"

76
.github/workflows/artifacts.yml vendored Normal file
View File

@@ -0,0 +1,76 @@
name: artifacts
on:
  release:
    types:
      - prereleased

jobs:
  artifacts:
    runs-on: ubuntu-22.04
    permissions:
      contents: write
    steps:
      - name: checkout
        uses: actions/checkout@v4.1.1
      - name: Set up JDK 17
        uses: actions/setup-java@v4.0.0
        with:
          distribution: 'temurin'
          java-version: '17'
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: assemble distributions
        run:
          ./gradlew -Prelease.releaseVersion=${{github.ref_name}} assemble -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
      - name: hashes
        id: hashes
        # capture sha256 of both distribution archives for the release notes body below
        run: |
          cd build/distributions
          echo "zipSha=$(shasum -a 256 besu*.zip)" >> $GITHUB_OUTPUT
          echo "tarSha=$(shasum -a 256 besu*.tar.gz)" >> $GITHUB_OUTPUT
      - name: upload tarball
        uses: actions/upload-artifact@v3
        with:
          path: 'build/distributions/besu*.tar.gz'
          name: besu-${{ github.ref_name }}.tar.gz
      - name: upload zipfile
        uses: actions/upload-artifact@v3
        with:
          path: 'build/distributions/besu*.zip'
          name: besu-${{ github.ref_name }}.zip
      - name: Upload Release assets
        uses: softprops/action-gh-release@v1
        with:
          append_body: true
          files: |
            build/distributions/besu*.tar.gz
            build/distributions/besu*.zip
          body: |
            ${{steps.hashes.outputs.tarSha}}
            ${{steps.hashes.outputs.zipSha}}

  # smoke-test the zip distribution on Windows before the release is finalized
  testWindows:
    runs-on: windows-2022
    needs: artifacts
    timeout-minutes: 10
    if: ${{ github.actor != 'dependabot[bot]' }}
    steps:
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: adopt
          java-version: 17
      - name: Download zip
        uses: actions/download-artifact@v3
        with:
          name: besu-${{ github.ref_name }}.zip
      - name: test Besu
        run: |
          unzip besu-*.zip -d besu-tmp
          cd besu-tmp
          mv besu-* ../besu
          cd ..
          besu\bin\besu.bat --help
          besu\bin\besu.bat --version

113
.github/workflows/docker.yml vendored Normal file
View File

@@ -0,0 +1,113 @@
name: docker
on:
  release:
    types:
      - prereleased

env:
  registry: ghcr.io

jobs:
  hadolint:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: hadoLint_openj9-jdk_17
        run: docker run --rm -i hadolint/hadolint < docker/openj9-jdk-17/Dockerfile
      - name: hadoLint_openjdk_17
        run: docker run --rm -i hadolint/hadolint < docker/openjdk-17/Dockerfile
      - name: hadoLint_openjdk_17_debug
        run: docker run --rm -i hadolint/hadolint < docker/openjdk-17-debug/Dockerfile
      - name: hadoLint_openjdk_latest
        run: docker run --rm -i hadolint/hadolint < docker/openjdk-latest/Dockerfile
      - name: hadoLint_graalvm
        run: docker run --rm -i hadolint/hadolint < docker/graalvm/Dockerfile

  buildDocker:
    needs: hadolint
    permissions:
      contents: read
      packages: write
    strategy:
      fail-fast: false
      matrix:
        platform:
          - ubuntu-22.04
          - [self-hosted, ARM64]
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Prepare
        id: prep
        # NOTE(review): when matrix.platform is the array form it does not equal the
        # amd64 label, so the else branch selects arm64 — intentional, if indirect.
        run: |
          platform=${{ matrix.platform }}
          if [ "$platform" = 'ubuntu-22.04' ]; then
            echo "PLATFORM_PAIR=linux-amd64" >> $GITHUB_OUTPUT
            echo "ARCH=amd64" >> $GITHUB_OUTPUT
          else
            echo "PLATFORM_PAIR=linux-arm64" >> $GITHUB_OUTPUT
            echo "ARCH=arm64" >> $GITHUB_OUTPUT
          fi
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: short sha
        id: shortSha
        run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: install goss
        run: |
          mkdir -p docker/reports
          curl -L https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }}
      - name: build and test docker
        uses: gradle/gradle-build-action@v2.12.0
        env:
          architecture: ${{ steps.prep.outputs.ARCH }}
        with:
          arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }}
      - name: login to ghcr
        uses: docker/login-action@v3.0.0
        with:
          registry: ${{ env.registry }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: publish
        env:
          architecture: ${{ steps.prep.outputs.ARCH }}
        run: ./gradlew --no-daemon dockerUpload -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }}

  # stitch the per-arch images into one multi-arch manifest
  multiArch:
    needs: buildDocker
    runs-on: ubuntu-22.04
    permissions:
      contents: read
      packages: write
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: login to ghcr
        uses: docker/login-action@v3.0.0
        with:
          registry: ${{ env.registry }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: multi-arch docker
        run: ./gradlew manifestDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }}

73
.github/workflows/integration-tests.yml vendored Normal file
View File

@@ -0,0 +1,73 @@
name: integration-tests
on:
  pull_request:
  pull_request_review:
    types:
      - submitted

env:
  GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false"

jobs:
  shouldRun:
    name: checks to ensure we should run
    runs-on: ubuntu-22.04
    outputs:
      shouldRun: ${{steps.shouldRun.outputs.result}}
    steps:
      - name: required check
        id: shouldRun
        uses: actions/github-script@v7.0.1
        env:
          # fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
          RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
        with:
          script: |
            const { RELEVANT_SHA } = process.env;
            const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
              owner: context.repo.owner,
              repo: context.repo.repo,
              ref: RELEVANT_SHA,
            });
            const intTested = statuses && statuses.filter(({ context }) => context === 'integration-tests');
            // Array.find returns an element (or undefined), never a number, so the old
            // "find(...) > 0" comparison was always false; some() yields the intended boolean.
            const alreadyRun = intTested && intTested.some(({ state }) => state === 'success');
            const { data: reviews } = await github.rest.pulls.listReviews({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: context.issue.number,
            });
            const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
            // inside github-script "github" is the Octokit client; the triggering actor lives on "context"
            const shouldRun = !alreadyRun && context.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
            console.log("tests should be run = %j", shouldRun);
            console.log("alreadyRun = %j", alreadyRun);
            console.log("approvingReviews = %j", approvingReviews.length);
            return shouldRun;

  integration-tests:
    runs-on: ubuntu-22.04
    needs: shouldRun
    if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
    permissions:
      statuses: write
      checks: write
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: run integration tests
        run: ./gradlew integrationTest compileJmh -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
      - name: Publish Test Report
        uses: mikepenz/action-junit-report@v4
        if: (success() || failure())
        with:
          report_paths: '**/build/test-results/integrationTest/TEST-*.xml'

121
.github/workflows/nightly.yml vendored Normal file
View File

@@ -0,0 +1,121 @@
name: nightly
on:
  workflow_dispatch:
  schedule:
    # * is a special character in YAML so you have to quote this string
    # expression evaluates to midnight every night
    - cron: '0 0 * * *'

env:
  nightly-tag: develop
  registry: ghcr.io

jobs:
  hadolint:
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: hadoLint_openj9-jdk_17
        run: docker run --rm -i hadolint/hadolint < docker/openj9-jdk-17/Dockerfile
      - name: hadoLint_openjdk_17
        run: docker run --rm -i hadolint/hadolint < docker/openjdk-17/Dockerfile
      - name: hadoLint_openjdk_17_debug
        run: docker run --rm -i hadolint/hadolint < docker/openjdk-17-debug/Dockerfile
      - name: hadoLint_openjdk_latest
        run: docker run --rm -i hadolint/hadolint < docker/openjdk-latest/Dockerfile
      - name: hadoLint_graalvm
        run: docker run --rm -i hadolint/hadolint < docker/graalvm/Dockerfile

  buildDocker:
    needs: hadolint
    permissions:
      contents: read
      packages: write
    strategy:
      fail-fast: false
      matrix:
        platform:
          - ubuntu-22.04
          - [self-hosted, ARM64]
    runs-on: ${{ matrix.platform }}
    steps:
      - name: Prepare
        id: prep
        # NOTE(review): when matrix.platform is the array form it does not equal the
        # amd64 label, so the else branch selects arm64 — intentional, if indirect.
        run: |
          platform=${{ matrix.platform }}
          if [ "$platform" = 'ubuntu-22.04' ]; then
            echo "PLATFORM_PAIR=linux-amd64" >> $GITHUB_OUTPUT
            echo "ARCH=amd64" >> $GITHUB_OUTPUT
          else
            echo "PLATFORM_PAIR=linux-arm64" >> $GITHUB_OUTPUT
            echo "ARCH=arm64" >> $GITHUB_OUTPUT
          fi
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: short sha
        id: shortSha
        run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: build image
        uses: gradle/gradle-build-action@v2.12.0
        with:
          arguments: distDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main
      - name: install goss
        run: |
          mkdir -p docker/reports
          curl -L https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }}
      - name: test docker
        uses: gradle/gradle-build-action@v2.12.0
        env:
          architecture: ${{ steps.prep.outputs.ARCH }}
        with:
          arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main
      - name: login to ghcr
        uses: docker/login-action@v3.0.0
        with:
          registry: ${{ env.registry }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: publish
        env:
          architecture: ${{ steps.prep.outputs.ARCH }}
        run: ./gradlew --no-daemon dockerUpload -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main

  # stitch the per-arch images into one multi-arch manifest
  multiArch:
    permissions:
      contents: read
      packages: write
    needs: buildDocker
    runs-on: ubuntu-22.04
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      # step was labelled "Login to DockerHub" but the registry is ghcr.io
      - name: login to ghcr
        uses: docker/login-action@v3.0.0
        with:
          registry: ${{ env.registry }}
          username: ${{ github.actor }}
          password: ${{ secrets.GITHUB_TOKEN }}
      - name: multi-arch docker
        run: ./gradlew manifestDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main

View File

@@ -0,0 +1,49 @@
name: parallel-unit-tests
# experimental work in progress - trying to figure out how to split tests across multi-modules by runtime
on:
  workflow_dispatch:

env:
  GRADLE_OPTS: "-Dorg.gradle.daemon=false"
  total-runners: 4

jobs:
  junit:
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        runner_index:
          - 0
          - 1
          - 2
          - 3
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Split tests
        id: split-tests
        uses: chaosaffe/split-tests@v1-alpha.1
        with:
          glob: '**/src/test/java/**/*.java'
          split-total: ${{ env.total-runners }}
          split-index: ${{ matrix.runner_index }}
          line-count: true
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: adopt
          java-version: 17
          cache: gradle
      - name: write out test list
        run: echo "${{ steps.split-tests.outputs.test-suite }}" >> testList.txt
      - name: debug testfile paths
        run: cat testList.txt
      - name: format gradle args
        # regex means: truncate file paths to align with package name, replacing with tests switch, then drop file extension,
        # then swap path delimiter with package delimiter
        run: cat testList.txt | sed -e 's/[^ ]*src\/test\/java\//--tests\ /g' -e 's/\.java//g' -e 's/\//\./g' >> gradleArgs.txt
      - name: debug test class list
        run: cat gradleArgs.txt
      - name: run unit tests
        run: ./gradlew test `cat gradleArgs.txt`

103
.github/workflows/pre-review.yml vendored Normal file
View File

@@ -0,0 +1,103 @@
name: pre-review
on:
  pull_request:
  workflow_dispatch:

permissions:
  statuses: write
  checks: write

jobs:
  repolint:
    name: "Repository Linting"
    runs-on: ubuntu-22.04
    container: ghcr.io/todogroup/repolinter:v0.11.2
    steps:
      - name: Checkout Code
        uses: actions/checkout@v4.1.1
      - name: Lint Repo
        run: bundle exec /app/bin/repolinter.js --rulesetUrl https://raw.githubusercontent.com/hyperledger-labs/hyperledger-community-management-tools/main/repo_structure/repolint.json --format markdown

  gradle-wrapper:
    name: "Gradle Wrapper Validation"
    runs-on: ubuntu-22.04
    steps:
      - uses: actions/checkout@v4.1.1
      - uses: gradle/wrapper-validation-action@v1.1.0

  spotless:
    runs-on: ubuntu-22.04
    if: ${{ github.actor != 'dependabot[bot]' }}
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: Setup Gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: run spotless
        run: ./gradlew spotlessCheck -Dorg.gradle.parallel=true -Dorg.gradle.caching=true

  compile:
    runs-on: ubuntu-22.04
    timeout-minutes: 30
    needs: [spotless, gradle-wrapper, repolint]
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: Setup Gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: Gradle Compile
        run: ./gradlew build -x test -x spotlessCheck -Dorg.gradle.parallel=true -Dorg.gradle.caching=true

  unitTests:
    env:
      GRADLEW_UNIT_TEST_ARGS: ${{matrix.gradle_args}}
    runs-on: ubuntu-22.04
    needs: [ compile ]
    permissions:
      checks: write
      statuses: write
    strategy:
      fail-fast: true
      matrix:
        gradle_args:
          - "test -x besu:test -x consensus:test -x crypto:test -x ethereum:eth:test -x ethereum:api:test -x ethereum:core:test"
          - "besu:test consensus:test crypto:test"
          - "ethereum:api:testBonsai"
          - "ethereum:api:testForest"
          - "ethereum:api:testRemainder"
          - "ethereum:core:test"
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: Setup Gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: run unit tests
        id: unitTest
        run: ./gradlew $GRADLEW_UNIT_TEST_ARGS -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
      - name: Publish Test Report
        uses: mikepenz/action-junit-report@v4
        if: success() || failure() # always run even if the build step fails
        with:
          report_paths: '**/test-results/**/TEST-*.xml'
          annotate_only: true

  # aggregation job: a single required status for branch protection
  pre-review:
    runs-on: ubuntu-22.04
    needs: [unitTests]
    permissions:
      checks: write
      statuses: write
    steps:
      - name: consolidation
        run: echo "consolidating statuses"

147
.github/workflows/reference-tests.yml vendored Normal file
View File

@@ -0,0 +1,147 @@
name: reference-tests
on:
  pull_request:
  pull_request_review:
    types:
      - submitted

env:
  GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false"
  total-runners: 6

jobs:
  shouldRun:
    name: checks to ensure we should run
    # necessary because there is no single PR approved event, need to check all comments/approvals/denials
    # might also be a job running, and additional approvals
    runs-on: ubuntu-22.04
    outputs:
      shouldRun: ${{steps.shouldRun.outputs.result}}
    steps:
      - name: required check
        id: shouldRun
        uses: actions/github-script@v7.0.1
        env:
          # fun fact, this changes based on incoming event, it will be different when we run this on pushes to main
          RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }}
        with:
          script: |
            const { RELEVANT_SHA } = process.env;
            const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({
              owner: context.repo.owner,
              repo: context.repo.repo,
              ref: RELEVANT_SHA,
            });
            const refTested = statuses && statuses.filter(({ context }) => context === 'reference-tests');
            // Array.find returns an element (or undefined), never a number, so the old
            // "find(...) > 0" comparison was always false; some() yields the intended boolean.
            const alreadyRun = refTested && refTested.some(({ state }) => state === 'success');
            const { data: reviews } = await github.rest.pulls.listReviews({
              owner: context.repo.owner,
              repo: context.repo.repo,
              pull_number: context.issue.number,
            });
            const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED');
            // inside github-script "github" is the Octokit client; the triggering actor lives on "context"
            const shouldRun = !alreadyRun && context.actor != 'dependabot[bot]' && (approvingReviews.length > 0);
            console.log("tests should be run = %j", shouldRun);
            console.log("alreadyRun = %j", alreadyRun);
            console.log("approvingReviews = %j", approvingReviews.length);
            return shouldRun;

  prepareReferenceTestEthereum:
    runs-on: ubuntu-22.04
    needs: shouldRun
    if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
        with:
          submodules: recursive
          set-safe-directory: true
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: temurin
          java-version: 17
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: execute generate reference tests
        run: ./gradlew ethereum:referencetests:blockchainReferenceTests ethereum:referencetests:generalstateReferenceTests ethereum:referencetests:generalstateRegressionReferenceTests -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
      - name: store generated tests
        uses: actions/upload-artifact@v3
        with:
          name: 'reference-tests'
          path: 'ethereum/referencetests/build/generated/sources/reference-test/**/*.java'

  referenceTestEthereum:
    runs-on: ubuntu-22.04
    permissions:
      statuses: write
      checks: write
    # shouldRun must be listed in needs for the if-expression below to see its
    # outputs; without it needs.shouldRun evaluates empty and the job never runs.
    needs:
      - shouldRun
      - prepareReferenceTestEthereum
    if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }}
    strategy:
      fail-fast: true
      matrix:
        runner_index: [0,1,2,3,4,5]
    steps:
      - name: Checkout Repo
        uses: actions/checkout@v4.1.1
        with:
          submodules: recursive
      - name: Set up Java
        uses: actions/setup-java@v4.0.0
        with:
          distribution: adopt-openj9
          java-version: 17
      - name: retrieve generated tests
        uses: actions/download-artifact@v3.0.2
        with:
          name: 'reference-tests'
          path: 'ethereum/referencetests/build/generated/sources/reference-test/'
      - name: get reference test report
        uses: dawidd6/action-download-artifact@v2
        with:
          branch: main
          name_is_regexp: true
          name: 'reference-test-node-\d*\d-results'
          path: tmp/ref-xml-reports-downloaded
          if_no_artifact_found: true
      - name: setup gradle
        uses: gradle/gradle-build-action@v2.12.0
      - name: Split tests
        id: split-tests
        uses: r7kamura/split-tests-by-timings@v0
        with:
          reports: tmp/ref-xml-reports-downloaded
          glob: 'ethereum/referencetests/build/generated/sources/reference-test/**/*.java'
          total: ${{env.total-runners}}
          index: ${{ matrix.runner_index }}
      - name: compose gradle args
        run: echo ${{ steps.split-tests.outputs.paths }} | sed -e 's/^.*java\///' -e 's@/@.@g' -e 's/\.java//' -e 's/^/--tests /' > refTestArgs.txt
      - name: run reference tests
        run: ./gradlew ethereum:referenceTests:referenceTests `cat refTestArgs.txt` -Dorg.gradle.parallel=true -Dorg.gradle.caching=true
      - name: Upload Test Report
        uses: actions/upload-artifact@v3
        if: always() # always run even if the previous step fails
        with:
          name: reference-test-node-${{matrix.runner_index}}-results
          path: '**/build/test-results/referenceTests/TEST-*.xml'
      - name: Publish Test Report
        uses: mikepenz/action-junit-report@v4
        if: success() || failure() # always run even if the build step fails
        with:
          # aligned with the upload step above ("referenceTests", plural); the old
          # singular "referenceTest" glob matched nothing — TODO confirm against the
          # gradle task's actual output directory
          report_paths: '**/build/test-results/referenceTests/TEST-*.xml'

  # aggregation job: a single required status for branch protection
  reference-tests:
    runs-on: ubuntu-22.04
    needs: [ referenceTestEthereum ]
    permissions:
      checks: write
      statuses: write
    steps:
      - name: consolidation
        run: echo "consolidating statuses"

View File

@@ -3,14 +3,27 @@
## 24.1.2-SNAPSHOT
### Breaking Changes
- The `trace-filter` method in JSON-RPC API now has a default block range limit of 1000, adjustable with `--rpc-max-trace-filter-range` [#6446](https://github.com/hyperledger/besu/pull/6446)
- Following the OpenMetrics convention, the updated Prometheus client adds the `_total` suffix to every metrics of type counter, with the effect that some existing metrics have been renamed to have this suffix. If you are using the official Besu Grafana dashboard [(available here)](https://grafana.com/grafana/dashboards/16455-besu-full/), just update it to the latest revision, that accepts the old and the new name of the affected metrics. If you have a custom dashboard or use the metrics in other ways, then you need to manually update it to support the new naming.
- The `trace-filter` method in JSON-RPC API now has a default block range limit of 1000, adjustable with `--rpc-max-trace-filter-range` (thanks @alyokaz) [#6446](https://github.com/hyperledger/besu/pull/6446)
- Requesting the Ethereum Node Record (ENR) to acquire the fork id from bonded peers is now enabled by default, so the following change has been made [#5628](https://github.com/hyperledger/besu/pull/5628):
- `--Xfilter-on-enr-fork-id` has been removed. To disable the feature use `--filter-on-enr-fork-id=false`.
- `--engine-jwt-enabled` has been removed. Use `--engine-jwt-disabled` instead. [#6491](https://github.com/hyperledger/besu/pull/6491)
### Deprecations
- `--Xsnapsync-synchronizer-flat-db-healing-enabled` is deprecated (always enabled). [#6499](https://github.com/hyperledger/besu/pull/6499)
### Additions and Improvements
- Upgrade Prometheus and Opentelemetry dependencies [#6422](https://github.com/hyperledger/besu/pull/6422)
- Add `OperationTracer.tracePrepareTransaction`, where the sender account has not yet been altered[#6453](https://github.com/hyperledger/besu/pull/6453)
- Improve the high spec flag by limiting it to a few column families [#6354](https://github.com/hyperledger/besu/pull/6354)
- Log blob count when importing a block via Engine API [#6466](https://github.com/hyperledger/besu/pull/6466)
- Introduce `--Xbonsai-limit-trie-logs-enabled` experimental feature which by default will only retain the latest 512 trie logs, saving about 3GB per week in database growth [#5390](https://github.com/hyperledger/besu/issues/5390)
- Introduce `besu storage x-trie-log prune` experimental offline subcommand which will prune all redundant trie logs except the latest 512 [#6303](https://github.com/hyperledger/besu/pull/6303)
- Github Actions based build.
- Introduce caching mechanism to optimize Keccak hash calculations for account storage slots during block processing [#6452](https://github.com/hyperledger/besu/pull/6452)
- Added configuration options for `pragueTime` to genesis file for Prague fork development [#6473](https://github.com/hyperledger/besu/pull/6473)
- Moving trielog storage to RocksDB's blobdb to improve write amplications [#6289](https://github.com/hyperledger/besu/pull/6289)
### Bug fixes
- Fix the way an advertised host configured with `--p2p-host` is treated when communicating with the originator of a PING packet [#6225](https://github.com/hyperledger/besu/pull/6225)
@@ -22,11 +35,9 @@
### Breaking Changes
- New `EXECUTION_HALTED` error returned if there is an error executing or simulating a transaction, with the reason for execution being halted. Replaces the generic `INTERNAL_ERROR` return code in certain cases which some applications may be checking for [#6343](https://github.com/hyperledger/besu/pull/6343)
- The Besu Docker images with `openjdk-latest` tags since 23.10.3 were incorrectly using UID 1001 instead of 1000 for the container's `besu` user. The user now uses 1000 again. Containers created from or migrated to images using UID 1001 will need to chown their persistent database files to UID 1000 [#6360](https://github.com/hyperledger/besu/pull/6360)
- The Besu Docker images with `openjdk-latest` tags since 23.10.3 were incorrectly using UID 1001 instead of 1000 for the container's `besu` user. The user now uses 1000 again. Containers created from or migrated to images using UID 1001 will need to chown their persistent database files to UID 1000 (thanks @h4l) [#6360](https://github.com/hyperledger/besu/pull/6360)
- The deprecated `--privacy-onchain-groups-enabled` option has now been removed. Use the `--privacy-flexible-groups-enabled` option instead. [#6411](https://github.com/hyperledger/besu/pull/6411)
- Requesting the Ethereum Node Record (ENR) to acquire the fork id from bonded peers is now enabled by default, so the following change has been made [#5628](https://github.com/hyperledger/besu/pull/5628):
- `--Xfilter-on-enr-fork-id` has been removed. To disable the feature use `--filter-on-enr-fork-id=false`.
- The time that can be spent selecting transactions during block creation is not capped at 5 seconds for PoS and PoW networks, and for PoA networks, at 75% of the block period specified in the genesis, this to prevent possible DoS in case a single transaction is taking too long to execute, and to have a stable block production rate, but it could be a breaking change if an existing network used to have transactions that takes more time to executed that the newly introduced limit, if it is mandatory for these network to keep processing these long processing transaction, then the default value of `block-txs-selection-max-time` or `poa-block-txs-selection-max-time` needs to be tuned accordingly.
- The time that can be spent selecting transactions during block creation is now capped at 5 seconds for PoS and PoW networks, and for PoA networks, at 75% of the block period specified in the genesis. This is to prevent possible DoS attacks in case a single transaction is taking too long to execute, and to have a stable block production rate. This could be a breaking change if an existing network needs to accept transactions that take more time to execute than the newly introduced limit. If it is mandatory for these networks to keep processing these long processing transaction, then the default value of `block-txs-selection-max-time` or `poa-block-txs-selection-max-time` needs to be tuned accordingly. [#6423](https://github.com/hyperledger/besu/pull/6423)
### Deprecations
@@ -40,8 +51,7 @@
- Upgrade Mockito [#6397](https://github.com/hyperledger/besu/pull/6397)
- Upgrade `tech.pegasys.discovery:discovery` [#6414](https://github.com/hyperledger/besu/pull/6414)
- Options to tune the max allowed time that can be spent selecting transactions during block creation are now stable [#6423](https://github.com/hyperledger/besu/pull/6423)
- Introduce `--Xbonsai-limit-trie-logs-enabled` experimental feature which by default will only retain the latest 512 trie logs, saving about 3GB per week in database growth [#5390](https://github.com/hyperledger/besu/issues/5390)
- Introduce `besu storage x-trie-log prune` experimental offline subcommand which will prune all redundant trie logs except the latest 512 [#6303](https://github.com/hyperledger/besu/pull/6303)
- Support for "pending" in `qbft_getValidatorsByBlockNumber` [#6436](https://github.com/hyperledger/besu/pull/6436)
### Bug fixes
- INTERNAL_ERROR from `eth_estimateGas` JSON/RPC calls [#6344](https://github.com/hyperledger/besu/issues/6344)
@@ -49,8 +59,18 @@
- Fluent EVM API definition for Tangerine Whistle had incorrect code size validation configured [#6382](https://github.com/hyperledger/besu/pull/6382)
- Correct mining beneficiary for Clique networks in TraceServiceImpl [#6390](https://github.com/hyperledger/besu/pull/6390)
- Fix to gas limit delta calculations used in block production. Besu should now increment or decrement the block gas limit towards its target correctly (thanks @arbora) #6425
- Ensure Backward Sync waits for initial sync before starting a session [#6455](https://github.com/hyperledger/besu/issues/6455)
- Silence the noisy DNS query errors [#6458](https://github.com/hyperledger/besu/issues/6458)
### Download Links
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.1/besu-24.1.1.zip / sha256 e23c5b790180756964a70dcdd575ee2ed2c2efa79af00bce956d23bd2f7dc67c
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.1/besu-24.1.1.tar.gz / sha256 4b0ddd5a25be2df5d2324bff935785eb63e4e3a5f421614ea690bacb5b9cb344
### Errata
Note, due to a CI race with the release job, the initial published version of 24.1.1 were overwritten by artifacts generated from the same sources, but differ in their embedded timestamps. The initial SHAs are noted here but are deprecated:
~~https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.1/besu-24.1.1.zip / sha256 b6b64f939e0bb4937ce90fc647e0a7073ce3e359c10352b502059955070a60c6
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.1/besu-24.1.1.tar.gz / sha256 cfcae04c30769bf338b0740ac65870f9346d3469931bb46cdba3b2f65d311e7a~~
## 24.1.0
@@ -73,8 +93,8 @@
- mitigation for trielog failure [#6315](https://github.com/hyperledger/besu/pull/6315)
### Download Links
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.0/besu-24.1.0.zip / sha256 TBA
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.0/besu-24.1.0.tar.gz / sha256 TBA
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.0/besu-24.1.0.zip / sha256 d36c8aeef70f0a516d4c26d3bc696c3e2a671e515c9e6e9475a31fe759e39f64
https://hyperledger.jfrog.io/artifactory/besu-binaries/besu/24.1.0/besu-24.1.0.tar.gz / sha256 602b04c0729a7b17361d1f0b39f4ce6a2ebe47932165add666560fe594d9ca99
## 23.10.3-hotfix

View File

@@ -19,6 +19,7 @@ import org.hyperledger.besu.datatypes.Hash;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.RpcErrorType;
import org.hyperledger.besu.ethereum.privacy.PrivateTransaction;
import org.hyperledger.besu.tests.acceptance.dsl.condition.Condition;
import org.hyperledger.besu.tests.acceptance.dsl.privacy.condition.PrivateCondition;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.Transaction;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.privacy.PrivacyTransactions;
@@ -112,4 +113,8 @@ public class PrivConditions {
final Transaction<?> transaction, final RpcErrorType error) {
return new ExpectJsonRpcError(transaction, error);
}
public PrivateCondition syncingStatus(final boolean isSyncing) {
return new PrivateSyncingStatusCondition(transactions.syncing(), isSyncing);
}
}

View File

@@ -0,0 +1,40 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.tests.acceptance.dsl.condition.priv;
import static org.assertj.core.api.Assertions.assertThat;
import org.hyperledger.besu.tests.acceptance.dsl.WaitUtils;
import org.hyperledger.besu.tests.acceptance.dsl.privacy.PrivacyNode;
import org.hyperledger.besu.tests.acceptance.dsl.privacy.condition.PrivateCondition;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.privacy.PrivSyncingTransactions;
/**
 * Acceptance-test condition that polls a privacy node's eth_syncing status until it matches an
 * expected boolean value.
 */
public class PrivateSyncingStatusCondition implements PrivateCondition {

  // Transaction used to query the node's current syncing status.
  private final PrivSyncingTransactions syncingQuery;
  // The syncing status the node is expected to eventually report.
  private final boolean expectedSyncingStatus;

  public PrivateSyncingStatusCondition(
      final PrivSyncingTransactions transaction, final boolean syncingStatus) {
    this.syncingQuery = transaction;
    this.expectedSyncingStatus = syncingStatus;
  }

  @Override
  public void verify(final PrivacyNode node) {
    // Poll for up to 10 seconds until the reported status equals the expected one.
    WaitUtils.waitFor(
        10, () -> assertThat(node.execute(syncingQuery)).isEqualTo(expectedSyncingStatus));
  }
}

View File

@@ -0,0 +1,40 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.tests.acceptance.dsl.transaction.privacy;
import static org.assertj.core.api.Assertions.assertThat;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.NodeRequests;
import org.hyperledger.besu.tests.acceptance.dsl.transaction.Transaction;
import java.io.IOException;
import org.web3j.protocol.core.methods.response.EthSyncing;
/**
 * Acceptance-test transaction that issues an eth_syncing request against a node and reports the
 * result as a boolean.
 */
public class PrivSyncingTransactions implements Transaction<Boolean> {

  PrivSyncingTransactions() {}

  @Override
  public Boolean execute(final NodeRequests node) {
    final EthSyncing syncingResponse;
    try {
      syncingResponse = node.eth().ethSyncing().send();
    } catch (final IOException e) {
      // Surface transport failures as unchecked exceptions, as the Transaction interface
      // does not declare checked exceptions.
      throw new RuntimeException(e);
    }
    assertThat(syncingResponse).isNotNull();
    return syncingResponse.isSyncing();
  }
}

View File

@@ -62,4 +62,8 @@ public class PrivacyTransactions {
public PrivGetTransactionReceiptTransaction getTransactionReceipt(final Hash transactionHash) {
return new PrivGetTransactionReceiptTransaction(transactionHash);
}
public PrivSyncingTransactions syncing() {
return new PrivSyncingTransactions();
}
}

View File

@@ -111,6 +111,9 @@ public class BftPrivacyClusterAcceptanceTest extends PrivacyAcceptanceTestBase {
charlie = createNode(containerNetwork, "node3", 2);
privacyCluster.start(alice, bob, charlie);
alice.verify(priv.syncingStatus(false));
bob.verify(priv.syncingStatus(false));
charlie.verify(priv.syncingStatus(false));
}
private PrivacyNode createNode(

View File

@@ -51,6 +51,7 @@ public class DeployPrivateSmartContractAcceptanceTest extends ParameterizedEncla
restriction == UNRESTRICTED);
privacyCluster.start(minerNode);
minerNode.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -94,6 +94,9 @@ public class EnclaveErrorAcceptanceTest extends PrivacyAcceptanceTestBase {
"0xBB");
privacyCluster.start(alice, bob);
alice.verify(priv.syncingStatus(false));
bob.verify(priv.syncingStatus(false));
final byte[] wrongPublicKeyBytes =
EnclaveEncryptorType.EC.equals(enclaveEncryptorType)
? getSECP256r1PublicKeyByteArray()

View File

@@ -111,6 +111,10 @@ public class FlexiblePrivacyAcceptanceTest extends FlexiblePrivacyAcceptanceTest
enclaveType,
Optional.of(containerNetwork));
privacyCluster.start(alice, bob, charlie);
alice.verify(priv.syncingStatus(false));
bob.verify(priv.syncingStatus(false));
charlie.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -93,6 +93,8 @@ public class PluginPrivacySigningAcceptanceTest extends PrivacyAcceptanceTestBas
Optional.empty());
privacyCluster.start(minerNode);
minerNode.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -76,6 +76,7 @@ public class PrivCallAcceptanceTest extends ParameterizedEnclaveTestBase {
restriction == UNRESTRICTED);
privacyCluster.start(minerNode);
minerNode.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -76,6 +76,9 @@ public class PrivDebugGetStateRootFlexibleGroupAcceptanceTest
Optional.of(containerNetwork));
privacyCluster.start(aliceNode, bobNode);
aliceNode.verify(priv.syncingStatus(false));
bobNode.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -72,6 +72,9 @@ public class PrivDebugGetStateRootOffchainGroupAcceptanceTest extends Parameteri
"0xBB");
privacyCluster.start(aliceNode, bobNode);
aliceNode.verify(priv.syncingStatus(false));
bobNode.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -55,6 +55,7 @@ public class PrivGetCodeAcceptanceTest extends ParameterizedEnclaveTestBase {
restriction == UNRESTRICTED);
privacyCluster.start(alice);
alice.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -65,6 +65,7 @@ public class PrivGetLogsAcceptanceTest extends ParameterizedEnclaveTestBase {
restriction == UNRESTRICTED);
privacyCluster.start(node);
node.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -75,6 +75,9 @@ public class PrivGetPrivateTransactionAcceptanceTest extends ParameterizedEnclav
"0xBB");
privacyCluster.start(alice, bob);
alice.verify(priv.syncingStatus(false));
bob.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -108,6 +108,10 @@ public class PrivacyClusterAcceptanceTest extends PrivacyAcceptanceTestBase {
false,
false);
privacyCluster.start(alice, bob, charlie);
alice.verify(priv.syncingStatus(false));
bob.verify(priv.syncingStatus(false));
charlie.verify(priv.syncingStatus(false));
}
@After

View File

@@ -94,6 +94,10 @@ public class PrivacyGroupAcceptanceTest extends PrivacyAcceptanceTestBase {
false,
false);
privacyCluster.start(alice, bob, charlie);
alice.verify(priv.syncingStatus(false));
bob.verify(priv.syncingStatus(false));
charlie.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -61,6 +61,8 @@ public class PrivacyReceiptAcceptanceTest extends ParameterizedEnclaveTestBase {
restriction == UNRESTRICTED,
"0xAA");
privacyCluster.start(alice);
alice.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -75,6 +75,9 @@ public class PrivateContractPublicStateAcceptanceTest extends ParameterizedEncla
restriction == UNRESTRICTED);
privacyCluster.start(minerNode, transactionNode);
minerNode.verify(priv.syncingStatus(false));
transactionNode.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -60,6 +60,7 @@ public class PrivateGenesisAcceptanceTest extends ParameterizedEnclaveTestBase {
"AA");
privacyCluster.start(alice);
alice.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -60,6 +60,7 @@ public class PrivateLogFilterAcceptanceTest extends ParameterizedEnclaveTestBase
restriction == UNRESTRICTED);
privacyCluster.start(node);
node.verify(priv.syncingStatus(false));
}
@Test

View File

@@ -4,8 +4,8 @@
"method": "engine_forkchoiceUpdatedV3",
"params": [
{
"headBlockHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42",
"safeBlockHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42",
"headBlockHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"safeBlockHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
},
{
@@ -24,11 +24,11 @@
"result": {
"payloadStatus": {
"status": "VALID",
"latestValidHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42",
"latestValidHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"validationError": null
},
"payloadId": "0x282643c14de2dfef"
"payloadId": "0x282643d459a6f711"
}
},
"statusCode" : 200
"statusCode": 200
}

View File

@@ -3,7 +3,7 @@
"jsonrpc": "2.0",
"method": "engine_getPayloadV3",
"params": [
"0x282643c14de2dfef"
"0x282643d459a6f711"
],
"id": 67
},
@@ -12,7 +12,7 @@
"id": 67,
"result": {
"executionPayload": {
"parentHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42",
"parentHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
@@ -28,8 +28,8 @@
"withdrawals": [],
"blockNumber": "0x1",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"blockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"blobGasUsed": "0x0"
"blobGasUsed": "0x0",
"blockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315"
},
"blockValue": "0x0",
"blobsBundle": {

View File

@@ -4,7 +4,7 @@
"method": "engine_newPayloadV3",
"params": [
{
"parentHash": "0x26118cf71453320edcebbc4ebb34af5b578087a32385b80108bf691fa23efc42",
"parentHash": "0x78a301e0d846bd169889c9755c9aa4ce2972dfc4bd63de61f3303887d3e81f98",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
@@ -17,7 +17,7 @@
"transactions": [],
"withdrawals": [],
"blockNumber": "0x1",
"blockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"blockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"excessBlobGas": "0x0",
"blobGasUsed": "0x0"
@@ -32,7 +32,7 @@
"id": 67,
"result": {
"status": "VALID",
"latestValidHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null
}
},

View File

@@ -4,9 +4,9 @@
"method": "engine_forkchoiceUpdatedV3",
"params": [
{
"headBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"safeBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"finalizedBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356"
"headBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"safeBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"finalizedBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315"
},
null
],
@@ -18,7 +18,7 @@
"result": {
"payloadStatus": {
"status": "VALID",
"latestValidHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null
},
"payloadId": null

View File

@@ -4,8 +4,8 @@
"method": "engine_forkchoiceUpdatedV3",
"params": [
{
"headBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"safeBlockHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"headBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"safeBlockHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
},
{
@@ -24,10 +24,10 @@
"result": {
"payloadStatus": {
"status": "VALID",
"latestValidHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"latestValidHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"validationError": null
},
"payloadId": "0x282643b9c2d2a4df"
"payloadId": "0x282643b909febddf"
}
},
"statusCode": 200

View File

@@ -3,7 +3,7 @@
"jsonrpc": "2.0",
"method": "engine_getPayloadV6110",
"params": [
"0x282643b9c2d2a4df"
"0x282643b909febddf"
],
"id": 67
},
@@ -12,7 +12,7 @@
"id": 67,
"result": {
"executionPayload": {
"parentHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"parentHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x9b8c4a9a86cb49252075c0db2f0e72fb1e49350a0f70ea36f26f700201961e62",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
@@ -28,9 +28,9 @@
"withdrawals": [],
"depositReceipts": [],
"blockNumber": "0x2",
"blockHash": "0xf6c3f1180ba58d6ea4c69c9328c7afb1fda41df06c368741c1f8310567879de7",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"blobGasUsed": "0x0"
"blockHash": "0xc8255831601171a628ef17f6601d3d1d30ff9b382e77592ed1af32354f6dafbb",
"blobGasUsed": "0x0",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"
},
"blockValue": "0x0",
"blobsBundle": {

View File

@@ -4,7 +4,7 @@
"method": "engine_newPayloadV6110",
"params": [
{
"parentHash": "0x45811fa27a100ce9035e5e086b9669275041a4ec0ebbd920be028fd7b0aa2356",
"parentHash": "0x1dd4f141551d53ce393845e2873754e43396101a8ebc0fd0eeb2e6798a591315",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x14208ac0e218167936e220b72d5d5887a963cb858ea2f2d268518f014a3da3fa",
"logsBloom": "0x10000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000400000000000000000000000020000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000008000000000000000000000000",
@@ -23,7 +23,7 @@
{"amount":"0x773594000","index":"0x0","pubkey":"0x96a96086cff07df17668f35f7418ef8798079167e3f4f9b72ecde17b28226137cf454ab1dd20ef5d924786ab3483c2f9","signature":"0xb1acdb2c4d3df3f1b8d3bfd33421660df358d84d78d16c4603551935f4b67643373e7eb63dcb16ec359be0ec41fee33b03a16e80745f2374ff1d3c352508ac5d857c6476d3c3bcf7e6ca37427c9209f17be3af5264c0e2132b3dd1156c28b4e9","withdrawalCredentials":"0x003f5102dabe0a27b1746098d1dc17a5d3fbd478759fea9287e4e419b3c3cef2"}
],
"blockNumber": "0x2",
"blockHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a",
"blockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"receiptsRoot": "0x79ee3424eb720a3ad4b1c5a372bb8160580cbe4d893778660f34213c685627a9",
"blobGasUsed": "0x0"
},
@@ -37,7 +37,7 @@
"id": 67,
"result": {
"status": "VALID",
"latestValidHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a",
"latestValidHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"validationError": null
}
},

View File

@@ -4,8 +4,8 @@
"method": "engine_forkchoiceUpdatedV3",
"params": [
{
"headBlockHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a",
"safeBlockHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a",
"headBlockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"safeBlockHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"finalizedBlockHash": "0x0000000000000000000000000000000000000000000000000000000000000000"
},
{
@@ -24,10 +24,10 @@
"result": {
"payloadStatus": {
"status": "VALID",
"latestValidHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a",
"latestValidHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"validationError": null
},
"payloadId": "0x282643daa04b7631"
"payloadId": "0x282643db882670cf"
}
},
"statusCode" : 200

View File

@@ -3,7 +3,7 @@
"jsonrpc": "2.0",
"method": "engine_getPayloadV6110",
"params": [
"0x282643daa04b7631"
"0x282643db882670cf"
],
"id": 67
},
@@ -12,7 +12,7 @@
"id": 67,
"result": {
"executionPayload": {
"parentHash": "0xb3b483867217b83b1e4a2f95c84d2da30cbff12eb8636f2becbcc05f4507fa7a",
"parentHash": "0xddb65a684b9b8980b6231ee0e388566c10a9c4583bbddf16f8d68bbc0b8ed965",
"feeRecipient": "0xa94f5374fce5edbc8e2a8697c15331677e6ebf0b",
"stateRoot": "0x14208ac0e218167936e220b72d5d5887a963cb858ea2f2d268518f014a3da3fa",
"logsBloom": "0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000",
@@ -28,7 +28,7 @@
"withdrawals": [],
"depositReceipts": [],
"blockNumber": "0x3",
"blockHash": "0xa28bf4db3363ce5b67848eb2ad52dbfead62ddb2287ae7eed36daa002528d1af",
"blockHash": "0xf1e7093b5d229885caab11a3acb95412af80f9077b742020a8014cf81c8c75f2",
"receiptsRoot": "0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421",
"blobGasUsed": "0x0"
},

View File

@@ -690,7 +690,7 @@ public class RunnerBuilder {
.timestampForks(besuController.getGenesisConfigOptions().getForkBlockTimestamps())
.allConnectionsSupplier(ethPeers::getAllConnections)
.allActiveConnectionsSupplier(ethPeers::getAllActiveConnections)
.peersLowerBound(ethPeers.getPeerLowerBound())
.maxPeers(ethPeers.getMaxPeers())
.build();
};

File diff suppressed because it is too large Load Diff

View File

@@ -14,8 +14,13 @@
*/
package org.hyperledger.besu.cli.config;
import org.apache.commons.lang3.StringUtils;
/** Enum for profile names. Each profile corresponds to a configuration file. */
public enum ProfileName {
/** The 'MINIMALIST_STAKER' profile */
MINIMALIST_STAKER("profiles/minimalist-staker.toml"),
/** The 'DEV' profile. Corresponds to the 'profiles/dev.toml' configuration file. */
DEV("profiles/dev.toml");
@@ -38,4 +43,9 @@ public enum ProfileName {
public String getConfigFile() {
return configFile;
}
@Override
public String toString() {
return StringUtils.capitalize(name().replaceAll("_", " ").toLowerCase());
}
}

View File

@@ -0,0 +1,142 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options.stable;
import static java.util.Arrays.asList;
import org.hyperledger.besu.cli.util.CommandLineUtils;
import org.hyperledger.besu.ethereum.api.ApiConfiguration;
import org.hyperledger.besu.ethereum.api.ImmutableApiConfiguration;
import org.hyperledger.besu.ethereum.core.MiningParameters;
import org.slf4j.Logger;
import picocli.CommandLine;
/**
* Handles configuration options for the API in Besu, including gas price settings, RPC log range,
* and trace filter range.
*/
/**
 * Handles configuration options for the API in Besu, including gas price settings, RPC log range,
 * and trace filter range.
 */
public class ApiConfigurationOptions {

  // Sample size, in blocks, used when computing eth_gasPrice.
  @CommandLine.Option(
      names = {"--api-gas-price-blocks"},
      description = "Number of blocks to consider for eth_gasPrice (default: ${DEFAULT-VALUE})")
  private final Long apiGasPriceBlocks = 100L;

  @CommandLine.Option(
      names = {"--api-gas-price-percentile"},
      description = "Percentile value to measure for eth_gasPrice (default: ${DEFAULT-VALUE})")
  private final Double apiGasPricePercentile = 50.0;

  @CommandLine.Option(
      names = {"--api-gas-price-max"},
      description = "Maximum gas price for eth_gasPrice (default: ${DEFAULT-VALUE})")
  private final Long apiGasPriceMax = 500_000_000_000L;

  @CommandLine.Option(
      names = {"--api-gas-and-priority-fee-limiting-enabled"},
      hidden = true,
      description =
          "Set to enable gas price and minimum priority fee limit in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})")
  private final Boolean apiGasAndPriorityFeeLimitingEnabled = false;

  @CommandLine.Option(
      names = {"--api-gas-and-priority-fee-lower-bound-coefficient"},
      hidden = true,
      description =
          "Coefficient for setting the lower limit of gas price and minimum priority fee in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})")
  private final Long apiGasAndPriorityFeeLowerBoundCoefficient =
      ApiConfiguration.DEFAULT_LOWER_BOUND_GAS_AND_PRIORITY_FEE_COEFFICIENT;

  @CommandLine.Option(
      names = {"--api-gas-and-priority-fee-upper-bound-coefficient"},
      hidden = true,
      description =
          "Coefficient for setting the upper limit of gas price and minimum priority fee in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})")
  private final Long apiGasAndPriorityFeeUpperBoundCoefficient =
      ApiConfiguration.DEFAULT_UPPER_BOUND_GAS_AND_PRIORITY_FEE_COEFFICIENT;

  @CommandLine.Option(
      names = {"--rpc-max-logs-range"},
      description =
          "Specifies the maximum number of blocks to retrieve logs from via RPC. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})")
  private final Long rpcMaxLogsRange = 5000L;

  @CommandLine.Option(
      names = {"--rpc-gas-cap"},
      description =
          "Specifies the gasLimit cap for transaction simulation RPC methods. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})")
  private final Long rpcGasCap = 0L;

  // Fix: the original description used "$DEFAULT-VALUE" (missing braces), which picocli does
  // not interpolate, so the literal text was shown in help output instead of the default.
  @CommandLine.Option(
      names = {"--rpc-max-trace-filter-range"},
      description =
          "Specifies the maximum number of blocks for the trace_filter method. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})")
  private final Long maxTraceFilterRange = 1000L;

  /**
   * Validates the API options.
   *
   * @param commandLine CommandLine instance
   * @param logger Logger instance
   * @throws CommandLine.ParameterException if the lower-bound coefficient exceeds the upper bound
   */
  public void validate(final CommandLine commandLine, final Logger logger) {
    if (apiGasAndPriorityFeeLimitingEnabled) {
      // The lower bound must not exceed the upper bound, otherwise the limiting range is empty.
      if (apiGasAndPriorityFeeLowerBoundCoefficient > apiGasAndPriorityFeeUpperBoundCoefficient) {
        throw new CommandLine.ParameterException(
            commandLine,
            "--api-gas-and-priority-fee-lower-bound-coefficient cannot be greater than the value of --api-gas-and-priority-fee-upper-bound-coefficient");
      }
    }
    checkApiOptionsDependencies(commandLine, logger);
  }

  /**
   * Flags the bound-coefficient options when they are supplied while limiting is disabled
   * (handling delegated to {@code CommandLineUtils.checkOptionDependencies}).
   */
  private void checkApiOptionsDependencies(final CommandLine commandLine, final Logger logger) {
    CommandLineUtils.checkOptionDependencies(
        logger,
        commandLine,
        "--api-gas-and-priority-fee-limiting-enabled",
        !apiGasAndPriorityFeeLimitingEnabled,
        asList(
            "--api-gas-and-priority-fee-upper-bound-coefficient",
            "--api-gas-and-priority-fee-lower-bound-coefficient"));
  }

  /**
   * Creates an ApiConfiguration based on the provided options.
   *
   * @param miningParameters The mining parameters; supplies the minimum gas price used as the
   *     eth_gasPrice floor
   * @return An ApiConfiguration instance
   */
  public ApiConfiguration apiConfiguration(final MiningParameters miningParameters) {
    var builder =
        ImmutableApiConfiguration.builder()
            .gasPriceBlocks(apiGasPriceBlocks)
            .gasPricePercentile(apiGasPricePercentile)
            .gasPriceMinSupplier(
                miningParameters.getMinTransactionGasPrice().getAsBigInteger()::longValueExact)
            .gasPriceMax(apiGasPriceMax)
            .maxLogsRange(rpcMaxLogsRange)
            .gasCap(rpcGasCap)
            .isGasAndPriorityFeeLimitingEnabled(apiGasAndPriorityFeeLimitingEnabled)
            .maxTraceFilterRange(maxTraceFilterRange);
    // The bound coefficients are only meaningful when limiting is enabled.
    if (apiGasAndPriorityFeeLimitingEnabled) {
      builder
          .lowerBoundGasAndPriorityFeeCoefficient(apiGasAndPriorityFeeLowerBoundCoefficient)
          .upperBoundGasAndPriorityFeeCoefficient(apiGasAndPriorityFeeUpperBoundCoefficient);
    }
    return builder.build();
  }
}

View File

@@ -74,7 +74,7 @@ public class DataStorageOptions implements CLIOptions<DataStorageConfiguration>
@CommandLine.Option(
hidden = true,
names = {BONSAI_LIMIT_TRIE_LOGS_ENABLED},
names = {BONSAI_LIMIT_TRIE_LOGS_ENABLED, "--Xbonsai-trie-log-pruning-enabled"},
description =
"Limit the number of trie logs that are retained. (default: ${DEFAULT-VALUE})")
private boolean bonsaiLimitTrieLogsEnabled = DEFAULT_BONSAI_LIMIT_TRIE_LOGS_ENABLED;

View File

@@ -0,0 +1,112 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options.stable;
import static java.util.Arrays.asList;
import static org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration.DEFAULT_GRAPHQL_HTTP_PORT;
import org.hyperledger.besu.cli.DefaultCommandValues;
import org.hyperledger.besu.cli.custom.CorsAllowedOriginsProperty;
import org.hyperledger.besu.cli.util.CommandLineUtils;
import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration;
import java.util.List;
import com.google.common.base.Strings;
import org.slf4j.Logger;
import picocli.CommandLine;
/** Handles configuration options for the GraphQL HTTP service in Besu. */
/** Handles configuration options for the GraphQL HTTP service in Besu. */
public class GraphQlOptions {

  @CommandLine.Option(
      names = {"--graphql-http-enabled"},
      description = "Set to start the GraphQL HTTP service (default: ${DEFAULT-VALUE})")
  private final Boolean isGraphQLHttpEnabled = false;

  @SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
  @CommandLine.Option(
      names = {"--graphql-http-host"},
      paramLabel = DefaultCommandValues.MANDATORY_HOST_FORMAT_HELP,
      description = "Host for GraphQL HTTP to listen on (default: ${DEFAULT-VALUE})",
      arity = "1")
  private String graphQLHttpHost;

  @CommandLine.Option(
      names = {"--graphql-http-port"},
      paramLabel = DefaultCommandValues.MANDATORY_PORT_FORMAT_HELP,
      description = "Port for GraphQL HTTP to listen on (default: ${DEFAULT-VALUE})",
      arity = "1")
  private final Integer graphQLHttpPort = DEFAULT_GRAPHQL_HTTP_PORT;

  @CommandLine.Option(
      names = {"--graphql-http-cors-origins"},
      description = "Comma separated origin domain URLs for CORS validation (default: none)")
  private final CorsAllowedOriginsProperty graphQLHttpCorsAllowedOrigins =
      new CorsAllowedOriginsProperty();

  /**
   * Validates the GraphQL HTTP options.
   *
   * @param logger Logger instance
   * @param commandLine CommandLine instance
   */
  public void validate(final Logger logger, final CommandLine commandLine) {
    // Flags GraphQL-specific options supplied while the service itself is disabled
    // (handling delegated to CommandLineUtils.checkOptionDependencies).
    CommandLineUtils.checkOptionDependencies(
        logger,
        commandLine,
        "--graphql-http-enabled",
        !isGraphQLHttpEnabled,
        asList("--graphql-http-cors-origins", "--graphql-http-host", "--graphql-http-port"));
  }

  /**
   * Creates a GraphQLConfiguration based on the provided options.
   *
   * <p>Fix: the timeout parameter was previously misspelled {@code timoutSec}; renamed for
   * clarity (positional call sites are unaffected).
   *
   * @param hostsAllowlist List of hosts allowed
   * @param defaultHostAddress Default host address, used when no --graphql-http-host was given
   * @param timeoutSec HTTP timeout in seconds
   * @return A GraphQLConfiguration instance
   */
  public GraphQLConfiguration graphQLConfiguration(
      final List<String> hostsAllowlist, final String defaultHostAddress, final Long timeoutSec) {
    final GraphQLConfiguration graphQLConfiguration = GraphQLConfiguration.createDefault();
    graphQLConfiguration.setEnabled(isGraphQLHttpEnabled);
    graphQLConfiguration.setHost(
        Strings.isNullOrEmpty(graphQLHttpHost) ? defaultHostAddress : graphQLHttpHost);
    graphQLConfiguration.setPort(graphQLHttpPort);
    graphQLConfiguration.setHostsAllowlist(hostsAllowlist);
    graphQLConfiguration.setCorsAllowedDomains(graphQLHttpCorsAllowedOrigins);
    graphQLConfiguration.setHttpTimeoutSec(timeoutSec);
    return graphQLConfiguration;
  }

  /**
   * Checks if GraphQL over HTTP is enabled.
   *
   * @return true if enabled, false otherwise
   */
  public Boolean isGraphQLHttpEnabled() {
    return isGraphQLHttpEnabled;
  }

  /**
   * Returns the port for GraphQL over HTTP.
   *
   * @return The port number
   */
  public Integer getGraphQLHttpPort() {
    return graphQLHttpPort;
  }
}

View File

@@ -0,0 +1,492 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options.stable;
import static java.util.Arrays.asList;
import static org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration.DEFAULT_JSON_RPC_PORT;
import static org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration.DEFAULT_PRETTY_JSON_ENABLED;
import static org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis.DEFAULT_RPC_APIS;
import static org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis.VALID_APIS;
import org.hyperledger.besu.cli.DefaultCommandValues;
import org.hyperledger.besu.cli.custom.CorsAllowedOriginsProperty;
import org.hyperledger.besu.cli.custom.RpcAuthFileValidator;
import org.hyperledger.besu.cli.util.CommandLineUtils;
import org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration;
import org.hyperledger.besu.ethereum.api.jsonrpc.RpcMethod;
import org.hyperledger.besu.ethereum.api.jsonrpc.authentication.JwtAlgorithm;
import org.hyperledger.besu.ethereum.api.tls.FileBasedPasswordProvider;
import org.hyperledger.besu.ethereum.api.tls.TlsClientAuthConfiguration;
import org.hyperledger.besu.ethereum.api.tls.TlsConfiguration;
import java.io.File;
import java.nio.file.Path;
import java.security.KeyManagementException;
import java.security.NoSuchAlgorithmException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.function.Predicate;
import java.util.stream.Collectors;
import javax.net.ssl.SSLContext;
import javax.net.ssl.SSLEngine;
import com.google.common.base.Strings;
import org.slf4j.Logger;
import picocli.CommandLine;
/**
* Handles configuration options for the JSON-RPC HTTP service, including validation and creation of
* a JSON-RPC configuration.
*/
public class JsonRpcHttpOptions {
@CommandLine.Option(
names = {"--rpc-http-enabled"},
description = "Set to start the JSON-RPC HTTP service (default: ${DEFAULT-VALUE})")
private final Boolean isRpcHttpEnabled = false;
@SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
@CommandLine.Option(
names = {"--rpc-http-host"},
paramLabel = DefaultCommandValues.MANDATORY_HOST_FORMAT_HELP,
description = "Host for JSON-RPC HTTP to listen on (default: ${DEFAULT-VALUE})",
arity = "1")
private String rpcHttpHost;
@CommandLine.Option(
names = {"--rpc-http-port"},
paramLabel = DefaultCommandValues.MANDATORY_PORT_FORMAT_HELP,
description = "Port for JSON-RPC HTTP to listen on (default: ${DEFAULT-VALUE})",
arity = "1")
private final Integer rpcHttpPort = DEFAULT_JSON_RPC_PORT;
@CommandLine.Option(
names = {"--rpc-http-max-active-connections"},
description =
"Maximum number of HTTP connections allowed for JSON-RPC (default: ${DEFAULT-VALUE}). Once this limit is reached, incoming connections will be rejected.",
arity = "1")
private final Integer rpcHttpMaxConnections = DefaultCommandValues.DEFAULT_HTTP_MAX_CONNECTIONS;
// A list of origins URLs that are accepted by the JsonRpcHttpServer (CORS)
@CommandLine.Option(
names = {"--rpc-http-cors-origins"},
description = "Comma separated origin domain URLs for CORS validation (default: none)")
private final CorsAllowedOriginsProperty rpcHttpCorsAllowedOrigins =
new CorsAllowedOriginsProperty();
@CommandLine.Option(
names = {"--rpc-http-api", "--rpc-http-apis"},
paramLabel = "<api name>",
split = " {0,1}, {0,1}",
arity = "1..*",
description =
"Comma separated list of APIs to enable on JSON-RPC HTTP service (default: ${DEFAULT-VALUE})")
private final List<String> rpcHttpApis = DEFAULT_RPC_APIS;
@CommandLine.Option(
names = {"--rpc-http-api-method-no-auth", "--rpc-http-api-methods-no-auth"},
paramLabel = "<api name>",
split = " {0,1}, {0,1}",
arity = "1..*",
description =
"Comma separated list of API methods to exclude from RPC authentication services, RPC HTTP authentication must be enabled")
private final List<String> rpcHttpApiMethodsNoAuth = new ArrayList<String>();
@CommandLine.Option(
names = {"--rpc-http-authentication-enabled"},
description =
"Require authentication for the JSON-RPC HTTP service (default: ${DEFAULT-VALUE})")
private final Boolean isRpcHttpAuthenticationEnabled = false;
@SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
@CommandLine.Option(
names = {"--rpc-http-authentication-credentials-file"},
paramLabel = DefaultCommandValues.MANDATORY_FILE_FORMAT_HELP,
description =
"Storage file for JSON-RPC HTTP authentication credentials (default: ${DEFAULT-VALUE})",
arity = "1")
private String rpcHttpAuthenticationCredentialsFile = null;
@CommandLine.Option(
names = {"--rpc-http-authentication-jwt-public-key-file"},
paramLabel = DefaultCommandValues.MANDATORY_FILE_FORMAT_HELP,
description = "JWT public key file for JSON-RPC HTTP authentication",
arity = "1")
private final File rpcHttpAuthenticationPublicKeyFile = null;
@CommandLine.Option(
names = {"--rpc-http-authentication-jwt-algorithm"},
description =
"Encryption algorithm used for HTTP JWT public key. Possible values are ${COMPLETION-CANDIDATES}"
+ " (default: ${DEFAULT-VALUE})",
arity = "1")
private final JwtAlgorithm rpcHttpAuthenticationAlgorithm =
DefaultCommandValues.DEFAULT_JWT_ALGORITHM;
@CommandLine.Option(
names = {"--rpc-http-tls-enabled"},
description = "Enable TLS for the JSON-RPC HTTP service (default: ${DEFAULT-VALUE})")
private final Boolean isRpcHttpTlsEnabled = false;
@CommandLine.Option(
names = {"--rpc-http-tls-keystore-file"},
paramLabel = DefaultCommandValues.MANDATORY_FILE_FORMAT_HELP,
description =
"Keystore (PKCS#12) containing key/certificate for the JSON-RPC HTTP service. Required if TLS is enabled.")
private final Path rpcHttpTlsKeyStoreFile = null;
@CommandLine.Option(
names = {"--rpc-http-tls-keystore-password-file"},
paramLabel = DefaultCommandValues.MANDATORY_FILE_FORMAT_HELP,
description =
"File containing password to unlock keystore for the JSON-RPC HTTP service. Required if TLS is enabled.")
private final Path rpcHttpTlsKeyStorePasswordFile = null;
@CommandLine.Option(
names = {"--rpc-http-tls-client-auth-enabled"},
description =
"Enable TLS client authentication for the JSON-RPC HTTP service (default: ${DEFAULT-VALUE})")
private final Boolean isRpcHttpTlsClientAuthEnabled = false;
@CommandLine.Option(
names = {"--rpc-http-tls-known-clients-file"},
paramLabel = DefaultCommandValues.MANDATORY_FILE_FORMAT_HELP,
description =
"Path to file containing clients certificate common name and fingerprint for client authentication")
private final Path rpcHttpTlsKnownClientsFile = null;
@CommandLine.Option(
names = {"--rpc-http-tls-ca-clients-enabled"},
description =
"Enable to accept clients certificate signed by a valid CA for client authentication (default: ${DEFAULT-VALUE})")
private final Boolean isRpcHttpTlsCAClientsEnabled = false;
@CommandLine.Option(
names = {"--rpc-http-tls-protocol", "--rpc-http-tls-protocols"},
description = "Comma separated list of TLS protocols to support (default: ${DEFAULT-VALUE})",
split = ",",
arity = "1..*")
private final List<String> rpcHttpTlsProtocols =
new ArrayList<>(DefaultCommandValues.DEFAULT_TLS_PROTOCOLS);
@CommandLine.Option(
names = {"--rpc-http-tls-cipher-suite", "--rpc-http-tls-cipher-suites"},
description = "Comma separated list of TLS cipher suites to support",
split = ",",
arity = "1..*")
private final List<String> rpcHttpTlsCipherSuites = new ArrayList<>();
@CommandLine.Option(
names = {"--rpc-http-max-batch-size"},
paramLabel = DefaultCommandValues.MANDATORY_INTEGER_FORMAT_HELP,
description =
"Specifies the maximum number of requests in a single RPC batch request via RPC. -1 specifies no limit (default: ${DEFAULT-VALUE})")
private final Integer rpcHttpMaxBatchSize = DefaultCommandValues.DEFAULT_HTTP_MAX_BATCH_SIZE;
@CommandLine.Option(
names = {"--rpc-http-max-request-content-length"},
paramLabel = DefaultCommandValues.MANDATORY_LONG_FORMAT_HELP,
description = "Specifies the maximum request content length. (default: ${DEFAULT-VALUE})")
private final Long rpcHttpMaxRequestContentLength =
DefaultCommandValues.DEFAULT_MAX_REQUEST_CONTENT_LENGTH;
@CommandLine.Option(
names = {"--json-pretty-print-enabled"},
description = "Enable JSON pretty print format (default: ${DEFAULT-VALUE})")
private final Boolean prettyJsonEnabled = DEFAULT_PRETTY_JSON_ENABLED;
/**
* Validates the Rpc Http options.
*
* @param logger Logger instance
* @param commandLine CommandLine instance
* @param configuredApis Predicate for configured APIs
*/
public void validate(
final Logger logger, final CommandLine commandLine, final Predicate<String> configuredApis) {
if (!rpcHttpApis.stream().allMatch(configuredApis)) {
final List<String> invalidHttpApis = new ArrayList<>(rpcHttpApis);
invalidHttpApis.removeAll(VALID_APIS);
throw new CommandLine.ParameterException(
commandLine,
"Invalid value for option '--rpc-http-api': invalid entries found " + invalidHttpApis);
}
final boolean validHttpApiMethods =
rpcHttpApiMethodsNoAuth.stream().allMatch(RpcMethod::rpcMethodExists);
if (!validHttpApiMethods) {
throw new CommandLine.ParameterException(
commandLine,
"Invalid value for option '--rpc-http-api-methods-no-auth', options must be valid RPC methods");
}
if (isRpcHttpAuthenticationEnabled) {
CommandLineUtils.checkOptionDependencies(
logger,
commandLine,
"--rpc-http-authentication-public-key-file",
rpcHttpAuthenticationPublicKeyFile == null,
List.of("--rpc-http-authentication-jwt-algorithm"));
}
if (isRpcHttpAuthenticationEnabled
&& rpcHttpAuthenticationCredentialsFile(commandLine) == null
&& rpcHttpAuthenticationPublicKeyFile == null) {
throw new CommandLine.ParameterException(
commandLine,
"Unable to authenticate JSON-RPC HTTP endpoint without a supplied credentials file or authentication public key file");
}
checkDependencies(logger, commandLine);
if (isRpcTlsConfigurationRequired()) {
validateTls(commandLine);
}
}
/**
* Creates a JsonRpcConfiguration based on the provided options.
*
* @param hostsAllowlist List of hosts allowed
* @param defaultHostAddress Default host address
* @param timoutSec timeout in seconds
* @return A JsonRpcConfiguration instance
*/
public JsonRpcConfiguration jsonRpcConfiguration(
final List<String> hostsAllowlist, final String defaultHostAddress, final Long timoutSec) {
final JsonRpcConfiguration jsonRpcConfiguration = JsonRpcConfiguration.createDefault();
jsonRpcConfiguration.setEnabled(isRpcHttpEnabled);
jsonRpcConfiguration.setHost(
Strings.isNullOrEmpty(rpcHttpHost) ? defaultHostAddress : rpcHttpHost);
jsonRpcConfiguration.setPort(rpcHttpPort);
jsonRpcConfiguration.setMaxActiveConnections(rpcHttpMaxConnections);
jsonRpcConfiguration.setCorsAllowedDomains(rpcHttpCorsAllowedOrigins);
jsonRpcConfiguration.setRpcApis(rpcHttpApis.stream().distinct().collect(Collectors.toList()));
jsonRpcConfiguration.setNoAuthRpcApis(
rpcHttpApiMethodsNoAuth.stream().distinct().collect(Collectors.toList()));
jsonRpcConfiguration.setHostsAllowlist(hostsAllowlist);
jsonRpcConfiguration.setAuthenticationEnabled(isRpcHttpAuthenticationEnabled);
jsonRpcConfiguration.setAuthenticationCredentialsFile(rpcHttpAuthenticationCredentialsFile);
jsonRpcConfiguration.setAuthenticationPublicKeyFile(rpcHttpAuthenticationPublicKeyFile);
jsonRpcConfiguration.setAuthenticationAlgorithm(rpcHttpAuthenticationAlgorithm);
jsonRpcConfiguration.setTlsConfiguration(rpcHttpTlsConfiguration());
jsonRpcConfiguration.setHttpTimeoutSec(timoutSec);
jsonRpcConfiguration.setMaxBatchSize(rpcHttpMaxBatchSize);
jsonRpcConfiguration.setMaxRequestContentLength(rpcHttpMaxRequestContentLength);
jsonRpcConfiguration.setPrettyJsonEnabled(prettyJsonEnabled);
return jsonRpcConfiguration;
}
/**
* Checks dependencies between options.
*
* @param logger Logger instance
* @param commandLine CommandLine instance
*/
public void checkDependencies(final Logger logger, final CommandLine commandLine) {
checkRpcTlsClientAuthOptionsDependencies(logger, commandLine);
checkRpcTlsOptionsDependencies(logger, commandLine);
checkRpcHttpOptionsDependencies(logger, commandLine);
}
private void checkRpcTlsClientAuthOptionsDependencies(
final Logger logger, final CommandLine commandLine) {
CommandLineUtils.checkOptionDependencies(
logger,
commandLine,
"--rpc-http-tls-client-auth-enabled",
!isRpcHttpTlsClientAuthEnabled,
asList("--rpc-http-tls-known-clients-file", "--rpc-http-tls-ca-clients-enabled"));
}
private void checkRpcTlsOptionsDependencies(final Logger logger, final CommandLine commandLine) {
CommandLineUtils.checkOptionDependencies(
logger,
commandLine,
"--rpc-http-tls-enabled",
!isRpcHttpTlsEnabled,
asList(
"--rpc-http-tls-keystore-file",
"--rpc-http-tls-keystore-password-file",
"--rpc-http-tls-client-auth-enabled",
"--rpc-http-tls-known-clients-file",
"--rpc-http-tls-ca-clients-enabled",
"--rpc-http-tls-protocols",
"--rpc-http-tls-cipher-suite",
"--rpc-http-tls-cipher-suites"));
}
private void checkRpcHttpOptionsDependencies(final Logger logger, final CommandLine commandLine) {
CommandLineUtils.checkOptionDependencies(
logger,
commandLine,
"--rpc-http-enabled",
!isRpcHttpEnabled,
asList(
"--rpc-http-api",
"--rpc-http-apis",
"--rpc-http-api-method-no-auth",
"--rpc-http-api-methods-no-auth",
"--rpc-http-cors-origins",
"--rpc-http-host",
"--rpc-http-port",
"--rpc-http-max-active-connections",
"--rpc-http-authentication-enabled",
"--rpc-http-authentication-credentials-file",
"--rpc-http-authentication-public-key-file",
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
"--rpc-http-tls-keystore-password-file",
"--rpc-http-tls-client-auth-enabled",
"--rpc-http-tls-known-clients-file",
"--rpc-http-tls-ca-clients-enabled",
"--rpc-http-authentication-jwt-algorithm",
"--rpc-http-tls-protocols",
"--rpc-http-tls-cipher-suite",
"--rpc-http-tls-cipher-suites"));
}
private void validateTls(final CommandLine commandLine) {
if (rpcHttpTlsKeyStoreFile == null) {
throw new CommandLine.ParameterException(
commandLine, "Keystore file is required when TLS is enabled for JSON-RPC HTTP endpoint");
}
if (rpcHttpTlsKeyStorePasswordFile == null) {
throw new CommandLine.ParameterException(
commandLine,
"File containing password to unlock keystore is required when TLS is enabled for JSON-RPC HTTP endpoint");
}
if (isRpcHttpTlsClientAuthEnabled
&& !isRpcHttpTlsCAClientsEnabled
&& rpcHttpTlsKnownClientsFile == null) {
throw new CommandLine.ParameterException(
commandLine,
"Known-clients file must be specified or CA clients must be enabled when TLS client authentication is enabled for JSON-RPC HTTP endpoint");
}
rpcHttpTlsProtocols.retainAll(getJDKEnabledProtocols());
if (rpcHttpTlsProtocols.isEmpty()) {
throw new CommandLine.ParameterException(
commandLine,
"No valid TLS protocols specified (the following protocols are enabled: "
+ getJDKEnabledProtocols()
+ ")");
}
for (final String cipherSuite : rpcHttpTlsCipherSuites) {
if (!getJDKEnabledCipherSuites().contains(cipherSuite)) {
throw new CommandLine.ParameterException(
commandLine, "Invalid TLS cipher suite specified " + cipherSuite);
}
}
}
private Optional<TlsConfiguration> rpcHttpTlsConfiguration() {
if (!isRpcTlsConfigurationRequired()) {
return Optional.empty();
}
rpcHttpTlsCipherSuites.retainAll(getJDKEnabledCipherSuites());
return Optional.of(
TlsConfiguration.Builder.aTlsConfiguration()
.withKeyStorePath(rpcHttpTlsKeyStoreFile)
.withKeyStorePasswordSupplier(
new FileBasedPasswordProvider(rpcHttpTlsKeyStorePasswordFile))
.withClientAuthConfiguration(rpcHttpTlsClientAuthConfiguration())
.withSecureTransportProtocols(rpcHttpTlsProtocols)
.withCipherSuites(rpcHttpTlsCipherSuites)
.build());
}
private boolean isRpcTlsConfigurationRequired() {
return isRpcHttpEnabled && isRpcHttpTlsEnabled;
}
private TlsClientAuthConfiguration rpcHttpTlsClientAuthConfiguration() {
if (isRpcHttpTlsClientAuthEnabled) {
return TlsClientAuthConfiguration.Builder.aTlsClientAuthConfiguration()
.withKnownClientsFile(rpcHttpTlsKnownClientsFile)
.withCaClientsEnabled(isRpcHttpTlsCAClientsEnabled)
.build();
}
return null;
}
private static List<String> getJDKEnabledCipherSuites() {
try {
final SSLContext context = SSLContext.getInstance("TLS");
context.init(null, null, null);
final SSLEngine engine = context.createSSLEngine();
return Arrays.asList(engine.getEnabledCipherSuites());
} catch (final KeyManagementException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
private static List<String> getJDKEnabledProtocols() {
try {
final SSLContext context = SSLContext.getInstance("TLS");
context.init(null, null, null);
final SSLEngine engine = context.createSSLEngine();
return Arrays.asList(engine.getEnabledProtocols());
} catch (final KeyManagementException | NoSuchAlgorithmException e) {
throw new RuntimeException(e);
}
}
private String rpcHttpAuthenticationCredentialsFile(final CommandLine commandLine) {
final String filename = rpcHttpAuthenticationCredentialsFile;
if (filename != null) {
RpcAuthFileValidator.validate(commandLine, filename, "HTTP");
}
return filename;
}
/**
* Returns the list of APIs enabled for RPC over HTTP.
*
* @return A list of APIs
*/
public List<String> getRpcHttpApis() {
return rpcHttpApis;
}
/**
* Returns the port for RPC over HTTP.
*
* @return The port number
*/
public Integer getRpcHttpPort() {
return rpcHttpPort;
}
/**
* Checks if RPC over HTTP is enabled.
*
* @return true if enabled, false otherwise
*/
public Boolean isRpcHttpEnabled() {
return isRpcHttpEnabled;
}
}

View File

@@ -0,0 +1,206 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options.stable;
import org.hyperledger.besu.cli.DefaultCommandValues;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis;
import org.hyperledger.besu.ethereum.p2p.peers.EnodeDnsConfiguration;
import org.hyperledger.besu.ethereum.permissioning.LocalPermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfigurationBuilder;
import org.hyperledger.besu.ethereum.permissioning.SmartContractPermissioningConfiguration;
import java.nio.file.Path;
import java.util.Optional;
import org.slf4j.Logger;
import picocli.CommandLine;
/** Handles configuration options for permissions in Besu. */
public class PermissionsOptions {
  @CommandLine.Option(
      names = {"--permissions-nodes-config-file-enabled"},
      description = "Enable node level permissions (default: ${DEFAULT-VALUE})")
  private final Boolean permissionsNodesEnabled = false;

  @SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
  @CommandLine.Option(
      names = {"--permissions-nodes-config-file"},
      description =
          "Node permissioning config TOML file (default: a file named \"permissions_config.toml\" in the Besu data folder)")
  private String nodePermissionsConfigFile = null;

  @CommandLine.Option(
      names = {"--permissions-accounts-config-file-enabled"},
      description = "Enable account level permissions (default: ${DEFAULT-VALUE})")
  private final Boolean permissionsAccountsEnabled = false;

  @SuppressWarnings({"FieldCanBeFinal", "FieldMayBeFinal"}) // PicoCLI requires non-final Strings.
  @CommandLine.Option(
      names = {"--permissions-accounts-config-file"},
      description =
          "Account permissioning config TOML file (default: a file named \"permissions_config.toml\" in the Besu data folder)")
  private String accountPermissionsConfigFile = null;

  @CommandLine.Option(
      names = {"--permissions-nodes-contract-address"},
      description = "Address of the node permissioning smart contract",
      arity = "1")
  private final Address permissionsNodesContractAddress = null;

  @CommandLine.Option(
      names = {"--permissions-nodes-contract-version"},
      description = "Version of the EEA Node Permissioning interface (default: ${DEFAULT-VALUE})")
  private final Integer permissionsNodesContractVersion = 1;

  @CommandLine.Option(
      names = {"--permissions-nodes-contract-enabled"},
      description = "Enable node level permissions via smart contract (default: ${DEFAULT-VALUE})")
  private final Boolean permissionsNodesContractEnabled = false;

  @CommandLine.Option(
      names = {"--permissions-accounts-contract-address"},
      description = "Address of the account permissioning smart contract",
      arity = "1")
  private final Address permissionsAccountsContractAddress = null;

  @CommandLine.Option(
      names = {"--permissions-accounts-contract-enabled"},
      description =
          "Enable account level permissions via smart contract (default: ${DEFAULT-VALUE})")
  private final Boolean permissionsAccountsContractEnabled = false;

  /**
   * Creates a PermissioningConfiguration based on the provided options.
   *
   * @param jsonRpcHttpOptions The JSON-RPC HTTP options
   * @param rpcWebsocketOptions The RPC websocket options
   * @param enodeDnsConfiguration The enode DNS configuration
   * @param dataPath The data path
   * @param logger The logger
   * @param commandLine The command line
   * @return An Optional PermissioningConfiguration instance
   * @throws Exception If an error occurs while creating the configuration
   */
  public Optional<PermissioningConfiguration> permissioningConfiguration(
      final JsonRpcHttpOptions jsonRpcHttpOptions,
      final RpcWebsocketOptions rpcWebsocketOptions,
      final EnodeDnsConfiguration enodeDnsConfiguration,
      final Path dataPath,
      final Logger logger,
      final CommandLine commandLine)
      throws Exception {
    if (!(localPermissionsEnabled() || contractPermissionsEnabled())) {
      // No permissioning of any kind: warn if PERM APIs were requested anyway.
      if (jsonRpcHttpOptions.getRpcHttpApis().contains(RpcApis.PERM.name())
          || rpcWebsocketOptions.getRpcWsApis().contains(RpcApis.PERM.name())) {
        logger.warn(
            "Permissions are disabled. Cannot enable PERM APIs when not using Permissions.");
      }
      return Optional.empty();
    }

    final Optional<LocalPermissioningConfiguration> localPermissioningConfigurationOptional;
    if (localPermissionsEnabled()) {
      final Optional<String> nodePermissioningConfigFile =
          Optional.ofNullable(nodePermissionsConfigFile);
      final Optional<String> accountPermissioningConfigFile =
          Optional.ofNullable(accountPermissionsConfigFile);

      final LocalPermissioningConfiguration localPermissioningConfiguration =
          PermissioningConfigurationBuilder.permissioningConfiguration(
              permissionsNodesEnabled,
              enodeDnsConfiguration,
              nodePermissioningConfigFile.orElse(getDefaultPermissioningFilePath(dataPath)),
              permissionsAccountsEnabled,
              accountPermissioningConfigFile.orElse(getDefaultPermissioningFilePath(dataPath)));

      localPermissioningConfigurationOptional = Optional.of(localPermissioningConfiguration);
    } else {
      // Local permissioning disabled: warn about config files that will be ignored.
      if (nodePermissionsConfigFile != null && !permissionsNodesEnabled) {
        logger.warn(
            "Node permissioning config file set {} but no permissions enabled",
            nodePermissionsConfigFile);
      }

      if (accountPermissionsConfigFile != null && !permissionsAccountsEnabled) {
        logger.warn(
            "Account permissioning config file set {} but no permissions enabled",
            accountPermissionsConfigFile);
      }
      localPermissioningConfigurationOptional = Optional.empty();
    }

    final SmartContractPermissioningConfiguration smartContractPermissioningConfiguration =
        SmartContractPermissioningConfiguration.createDefault();

    if (Boolean.TRUE.equals(permissionsNodesContractEnabled)) {
      if (permissionsNodesContractAddress == null) {
        throw new CommandLine.ParameterException(
            commandLine,
            "No node permissioning contract address specified. Cannot enable smart contract based node permissioning.");
      } else {
        smartContractPermissioningConfiguration.setSmartContractNodeAllowlistEnabled(
            permissionsNodesContractEnabled);
        smartContractPermissioningConfiguration.setNodeSmartContractAddress(
            permissionsNodesContractAddress);
        smartContractPermissioningConfiguration.setNodeSmartContractInterfaceVersion(
            permissionsNodesContractVersion);
      }
    } else if (permissionsNodesContractAddress != null) {
      logger.warn(
          "Node permissioning smart contract address set {} but smart contract node permissioning is disabled.",
          permissionsNodesContractAddress);
    }

    if (Boolean.TRUE.equals(permissionsAccountsContractEnabled)) {
      if (permissionsAccountsContractAddress == null) {
        throw new CommandLine.ParameterException(
            commandLine,
            "No account permissioning contract address specified. Cannot enable smart contract based account permissioning.");
      } else {
        smartContractPermissioningConfiguration.setSmartContractAccountAllowlistEnabled(
            permissionsAccountsContractEnabled);
        smartContractPermissioningConfiguration.setAccountSmartContractAddress(
            permissionsAccountsContractAddress);
      }
    } else if (permissionsAccountsContractAddress != null) {
      logger.warn(
          "Account permissioning smart contract address set {} but smart contract account permissioning is disabled.",
          permissionsAccountsContractAddress);
    }

    final PermissioningConfiguration permissioningConfiguration =
        new PermissioningConfiguration(
            localPermissioningConfigurationOptional,
            Optional.of(smartContractPermissioningConfiguration));
    return Optional.of(permissioningConfiguration);
  }

  /** True when file-based (local) node or account permissioning is enabled. */
  private boolean localPermissionsEnabled() {
    return permissionsAccountsEnabled || permissionsNodesEnabled;
  }

  /** True when smart-contract-based node or account permissioning is enabled. */
  private boolean contractPermissionsEnabled() {
    return permissionsNodesContractEnabled || permissionsAccountsContractEnabled;
  }

  /**
   * Returns the default permissioning config file path inside the data directory.
   *
   * <p>Uses {@link Path#resolve(String)} rather than manual separator concatenation so the result
   * is correct on every platform.
   */
  private String getDefaultPermissioningFilePath(final Path dataPath) {
    return dataPath.resolve(DefaultCommandValues.PERMISSIONING_CONFIG_LOCATION).toString();
  }
}

View File

@@ -47,7 +47,6 @@ public class EvmOptions implements CLIOptions<EvmConfiguration> {
"size in kilobytes to allow the cache "
+ "of valid jump destinations to grow to before evicting the least recently used entry",
fallbackValue = "32000",
defaultValue = "32000",
hidden = true,
arity = "1")
private Long jumpDestCacheWeightKilobytes =
@@ -57,7 +56,6 @@ public class EvmOptions implements CLIOptions<EvmConfiguration> {
names = {WORLDSTATE_UPDATE_MODE},
description = "How to handle worldstate updates within a transaction",
fallbackValue = "STACKED",
defaultValue = "STACKED",
hidden = true,
arity = "1")
private EvmConfiguration.WorldUpdaterMode worldstateUpdateMode =

View File

@@ -30,7 +30,6 @@ public class MetricsCLIOptions implements CLIOptions<MetricsConfiguration.Builde
@CommandLine.Option(
names = TIMERS_ENABLED_FLAG,
hidden = true,
defaultValue = "true",
description = "Whether to enable timer metrics (default: ${DEFAULT-VALUE}).")
private Boolean timersEnabled = MetricsConfiguration.DEFAULT_METRICS_TIMERS_ENABLED;

View File

@@ -42,7 +42,6 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
@CommandLine.Option(
names = INITIATE_CONNECTIONS_FREQUENCY_FLAG,
hidden = true,
defaultValue = "30",
paramLabel = "<INTEGER>",
description =
"The frequency (in seconds) at which to initiate new outgoing connections (default: ${DEFAULT-VALUE})")
@@ -52,7 +51,6 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
@CommandLine.Option(
names = CHECK_MAINTAINED_CONNECTIONS_FREQUENCY_FLAG,
hidden = true,
defaultValue = "60",
paramLabel = "<INTEGER>",
description =
"The frequency (in seconds) at which to check maintained connections (default: ${DEFAULT-VALUE})")
@@ -69,14 +67,12 @@ public class NetworkingOptions implements CLIOptions<NetworkingConfiguration> {
@CommandLine.Option(
names = DISCOVERY_PROTOCOL_V5_ENABLED,
hidden = true,
defaultValue = "false",
description = "Whether to enable P2P Discovery Protocol v5 (default: ${DEFAULT-VALUE})")
private final Boolean isPeerDiscoveryV5Enabled = false;
@CommandLine.Option(
names = FILTER_ON_ENR_FORK_ID,
hidden = true,
defaultValue = "true",
description = "Whether to enable filtering of peers based on the ENR field ForkId)")
private final Boolean filterOnEnrForkId = NetworkingConfiguration.DEFAULT_FILTER_ON_ENR_FORK_ID;

View File

@@ -106,7 +106,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = DOWNLOADER_CHANGE_TARGET_THRESHOLD_BY_HEIGHT_FLAG,
hidden = true,
defaultValue = "200",
paramLabel = "<LONG>",
description =
"Minimum height difference before switching fast sync download peers (default: ${DEFAULT-VALUE})")
@@ -116,7 +115,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = DOWNLOADER_CHANGE_TARGET_THRESHOLD_BY_TD_FLAG,
hidden = true,
defaultValue = "1000000000000000000",
paramLabel = "<UINT256>",
description =
"Minimum total difficulty difference before switching fast sync download peers (default: ${DEFAULT-VALUE})")
@@ -126,7 +124,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = DOWNLOADER_HEADER_REQUEST_SIZE_FLAG,
hidden = true,
defaultValue = "200",
paramLabel = "<INTEGER>",
description = "Number of headers to request per packet (default: ${DEFAULT-VALUE})")
private int downloaderHeaderRequestSize =
@@ -135,7 +132,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = DOWNLOADER_CHECKPOINT_TIMEOUTS_PERMITTED_FLAG,
hidden = true,
defaultValue = "5",
paramLabel = "<INTEGER>",
description =
"Number of tries to attempt to download checkpoints before stopping (default: ${DEFAULT-VALUE})")
@@ -145,7 +141,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = DOWNLOADER_CHAIN_SEGMENT_SIZE_FLAG,
hidden = true,
defaultValue = "200",
paramLabel = "<INTEGER>",
description = "Distance between checkpoint headers (default: ${DEFAULT-VALUE})")
private int downloaderChainSegmentSize =
@@ -154,7 +149,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = DOWNLOADER_PARALLELISM_FLAG,
hidden = true,
defaultValue = "4",
paramLabel = "<INTEGER>",
description = "Number of threads to provide to chain downloader (default: ${DEFAULT-VALUE})")
private int downloaderParallelism = SynchronizerConfiguration.DEFAULT_DOWNLOADER_PARALLELISM;
@@ -162,7 +156,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = TRANSACTIONS_PARALLELISM_FLAG,
hidden = true,
defaultValue = "2",
paramLabel = "<INTEGER>",
description =
"Number of threads to commit to transaction processing (default: ${DEFAULT-VALUE})")
@@ -179,7 +172,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = PIVOT_DISTANCE_FROM_HEAD_FLAG,
hidden = true,
defaultValue = "50",
paramLabel = "<INTEGER>",
description =
"Distance from initial chain head to fast sync target (default: ${DEFAULT-VALUE})")
@@ -188,7 +180,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = FULL_VALIDATION_RATE_FLAG,
hidden = true,
defaultValue = "0.1",
paramLabel = "<FLOAT>",
description = "Fraction of headers fast sync will fully validate (default: ${DEFAULT-VALUE})")
private float fastSyncFullValidationRate = SynchronizerConfiguration.DEFAULT_FULL_VALIDATION_RATE;
@@ -196,7 +187,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = WORLD_STATE_HASH_COUNT_PER_REQUEST_FLAG,
hidden = true,
defaultValue = "384",
paramLabel = "<INTEGER>",
description = "Fast sync world state hashes queried per request (default: ${DEFAULT-VALUE})")
private int worldStateHashCountPerRequest =
@@ -205,7 +195,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = WORLD_STATE_REQUEST_PARALLELISM_FLAG,
hidden = true,
defaultValue = "10",
paramLabel = "<INTEGER>",
description =
"Number of concurrent requests to use when downloading fast sync world state (default: ${DEFAULT-VALUE})")
@@ -215,7 +204,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = WORLD_STATE_MAX_REQUESTS_WITHOUT_PROGRESS_FLAG,
hidden = true,
defaultValue = "1000",
paramLabel = "<INTEGER>",
description =
"Number of world state requests accepted without progress before considering the download stalled (default: ${DEFAULT-VALUE})")
@@ -225,7 +213,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = WORLD_STATE_MIN_MILLIS_BEFORE_STALLING_FLAG,
hidden = true,
defaultValue = "300000",
paramLabel = "<LONG>",
description =
"Minimum time in ms without progress before considering a world state download as stalled (default: ${DEFAULT-VALUE})")
@@ -235,7 +222,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = WORLD_STATE_TASK_CACHE_SIZE_FLAG,
hidden = true,
defaultValue = "1000000",
paramLabel = "<INTEGER>",
description =
"The max number of pending node data requests cached in-memory during fast sync world state download. (default: ${DEFAULT-VALUE})")
@@ -245,7 +231,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = SNAP_PIVOT_BLOCK_WINDOW_VALIDITY_FLAG,
hidden = true,
defaultValue = "126",
paramLabel = "<INTEGER>",
description =
"The size of the pivot block window before having to change it (default: ${DEFAULT-VALUE})")
@@ -255,7 +240,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = SNAP_PIVOT_BLOCK_DISTANCE_BEFORE_CACHING_FLAG,
hidden = true,
defaultValue = "60",
paramLabel = "<INTEGER>",
description =
"The distance from the head before loading a pivot block into the cache to have a ready pivot block when the window is finished (default: ${DEFAULT-VALUE})")
@@ -265,7 +249,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = SNAP_STORAGE_COUNT_PER_REQUEST_FLAG,
hidden = true,
defaultValue = "384",
paramLabel = "<INTEGER>",
description = "Snap sync storage queried per request (default: ${DEFAULT-VALUE})")
private int snapsyncStorageCountPerRequest =
@@ -274,7 +257,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = SNAP_BYTECODE_COUNT_PER_REQUEST_FLAG,
hidden = true,
defaultValue = "84",
paramLabel = "<INTEGER>",
description = "Snap sync bytecode queried per request (default: ${DEFAULT-VALUE})")
private int snapsyncBytecodeCountPerRequest =
@@ -283,7 +265,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = SNAP_TRIENODE_COUNT_PER_REQUEST_FLAG,
hidden = true,
defaultValue = "384",
paramLabel = "<INTEGER>",
description = "Snap sync trie node queried per request (default: ${DEFAULT-VALUE})")
private int snapsyncTrieNodeCountPerRequest =
@@ -292,7 +273,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG,
hidden = true,
defaultValue = "128",
paramLabel = "<INTEGER>",
description =
"Snap sync flat accounts verified and healed per request (default: ${DEFAULT-VALUE})")
@@ -302,7 +282,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG,
hidden = true,
defaultValue = "1024",
paramLabel = "<INTEGER>",
description =
"Snap sync flat slots verified and healed per request (default: ${DEFAULT-VALUE})")
@@ -312,9 +291,9 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
@CommandLine.Option(
names = SNAP_FLAT_DB_HEALING_ENABLED_FLAG,
hidden = true,
defaultValue = "false",
paramLabel = "<Boolean>",
description = "Snap sync flat db healing enabled (default: ${DEFAULT-VALUE})")
description =
"(Deprecated) Always enabled: Snap sync flat db healing enabled (default: ${DEFAULT-VALUE})")
private Boolean snapsyncFlatDbHealingEnabled =
SnapSyncConfiguration.DEFAULT_IS_FLAT_DB_HEALING_ENABLED;
@@ -336,15 +315,6 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
return new SynchronizerOptions();
}
/**
* Flag to know whether the flat db healing feature is enabled or disabled.
*
* @return true is the flat db healing is enabled
*/
public boolean isSnapsyncFlatDbHealingEnabled() {
return snapsyncFlatDbHealingEnabled;
}
/**
* Create synchronizer options from Synchronizer Configuration.
*
@@ -471,15 +441,11 @@ public class SynchronizerOptions implements CLIOptions<SynchronizerConfiguration
SNAP_BYTECODE_COUNT_PER_REQUEST_FLAG,
OptionParser.format(snapsyncBytecodeCountPerRequest),
SNAP_TRIENODE_COUNT_PER_REQUEST_FLAG,
OptionParser.format(snapsyncTrieNodeCountPerRequest));
if (isSnapsyncFlatDbHealingEnabled()) {
value.addAll(
Arrays.asList(
SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG,
OptionParser.format(snapsyncFlatAccountHealedCountPerRequest),
SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG,
OptionParser.format(snapsyncFlatStorageHealedCountPerRequest)));
}
OptionParser.format(snapsyncTrieNodeCountPerRequest),
SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG,
OptionParser.format(snapsyncFlatAccountHealedCountPerRequest),
SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG,
OptionParser.format(snapsyncFlatStorageHealedCountPerRequest));
return value;
}
}

View File

@@ -18,17 +18,53 @@ package org.hyperledger.besu.cli.subcommands.storage;
import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import java.util.function.BiConsumer;
import org.bouncycastle.util.Arrays;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** RocksDB Usage subcommand helper methods for formatting and printing. */
public class RocksDbUsageHelper {
private static final Logger LOG = LoggerFactory.getLogger(RocksDbUsageHelper.class);
/** RocksDB subcommand helper methods. */
public class RocksDbHelper {
private static final Logger LOG = LoggerFactory.getLogger(RocksDbHelper.class);
/**
 * Opens the RocksDB database at {@code dbPath} read-only and invokes {@code task} once for every
 * column family, closing all native handles afterwards.
 *
 * @param dbPath path of the RocksDB database directory
 * @param task callback receiving the open database and the current column family handle
 * @throws RuntimeException wrapping any {@link RocksDBException} raised while listing column
 *     families or opening the database
 */
static void forEachColumnFamily(
final String dbPath, final BiConsumer<RocksDB, ColumnFamilyHandle> task) {
RocksDB.loadLibrary();
// Discover the column families present in the database. Options wraps native memory and is
// AutoCloseable; the original code leaked it, so close it via try-with-resources.
final List<byte[]> cfNames;
try (final Options options = new Options().setCreateIfMissing(true)) {
cfNames = RocksDB.listColumnFamilies(options, dbPath);
} catch (RocksDBException e) {
throw new RuntimeException(e);
}
final List<ColumnFamilyHandle> cfHandles = new ArrayList<>();
final List<ColumnFamilyDescriptor> cfDescriptors = new ArrayList<>();
for (byte[] cfName : cfNames) {
cfDescriptors.add(new ColumnFamilyDescriptor(cfName));
}
// Open read-only with every discovered column family and run the task against each handle.
// Handles are closed explicitly in the finally block since closing the db does not close them.
try (final RocksDB rocksdb = RocksDB.openReadOnly(dbPath, cfDescriptors, cfHandles)) {
for (ColumnFamilyHandle cfHandle : cfHandles) {
task.accept(rocksdb, cfHandle);
}
} catch (RocksDBException e) {
throw new RuntimeException(e);
} finally {
for (ColumnFamilyHandle cfHandle : cfHandles) {
cfHandle.close();
}
}
}
static void printUsageForColumnFamily(
final RocksDB rocksdb, final ColumnFamilyHandle cfHandle, final PrintWriter out)
@@ -62,7 +98,7 @@ public class RocksDbUsageHelper {
}
}
private static String formatOutputSize(final long size) {
static String formatOutputSize(final long size) {
if (size > (1024 * 1024 * 1024)) {
long sizeInGiB = size / (1024 * 1024 * 1024);
return sizeInGiB + " GiB";

View File

@@ -19,13 +19,7 @@ import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH;
import org.hyperledger.besu.cli.util.VersionProvider;
import java.io.PrintWriter;
import java.util.ArrayList;
import java.util.List;
import org.rocksdb.ColumnFamilyDescriptor;
import org.rocksdb.ColumnFamilyHandle;
import org.rocksdb.Options;
import org.rocksdb.RocksDB;
import org.rocksdb.RocksDBException;
import picocli.CommandLine;
import picocli.CommandLine.Command;
@@ -82,34 +76,17 @@ public class RocksDbSubCommand implements Runnable {
.concat("/")
.concat(DATABASE_PATH);
RocksDB.loadLibrary();
Options options = new Options();
options.setCreateIfMissing(true);
RocksDbHelper.printTableHeader(out);
// Open the RocksDB database with multiple column families
List<byte[]> cfNames;
try {
cfNames = RocksDB.listColumnFamilies(options, dbPath);
} catch (RocksDBException e) {
throw new RuntimeException(e);
}
final List<ColumnFamilyHandle> cfHandles = new ArrayList<>();
final List<ColumnFamilyDescriptor> cfDescriptors = new ArrayList<>();
for (byte[] cfName : cfNames) {
cfDescriptors.add(new ColumnFamilyDescriptor(cfName));
}
RocksDbUsageHelper.printTableHeader(out);
try (final RocksDB rocksdb = RocksDB.openReadOnly(dbPath, cfDescriptors, cfHandles)) {
for (ColumnFamilyHandle cfHandle : cfHandles) {
RocksDbUsageHelper.printUsageForColumnFamily(rocksdb, cfHandle, out);
}
} catch (RocksDBException e) {
throw new RuntimeException(e);
} finally {
for (ColumnFamilyHandle cfHandle : cfHandles) {
cfHandle.close();
}
}
RocksDbHelper.forEachColumnFamily(
dbPath,
(rocksdb, cfHandle) -> {
try {
RocksDbHelper.printUsageForColumnFamily(rocksdb, cfHandle, out);
} catch (RocksDBException e) {
throw new RuntimeException(e);
}
});
}
}
}

View File

@@ -60,11 +60,12 @@ public class TrieLogHelper {
private static final int ROCKSDB_MAX_INSERTS_PER_TRANSACTION = 1000;
private static final Logger LOG = LoggerFactory.getLogger(TrieLogHelper.class);
void prune(
boolean prune(
final DataStorageConfiguration config,
final BonsaiWorldStateKeyValueStorage rootWorldStateStorage,
final MutableBlockchain blockchain,
final Path dataDirectoryPath) {
final String batchFileNameBase =
dataDirectoryPath.resolve(DATABASE_PATH).resolve(TRIE_LOG_FILE).toString();
@@ -82,10 +83,14 @@ public class TrieLogHelper {
lastBlockNumberToRetainTrieLogsFor,
rootWorldStateStorage,
layersToRetain)) {
return;
return false;
}
final long numberOfBatches = calculateNumberOfBatches(layersToRetain);
LOG.info(
"Starting pruning: retain {} trie logs, processing in {} batches...",
layersToRetain,
numberOfBatches);
processTrieLogBatches(
rootWorldStateStorage,
@@ -102,7 +107,7 @@ public class TrieLogHelper {
.count();
if (countAfterPrune == layersToRetain) {
if (deleteFiles(batchFileNameBase, numberOfBatches)) {
LOG.info("Prune ran successfully. Enjoy some disk space back! \uD83D\uDE80");
return true;
} else {
throw new IllegalStateException(
"There was an error deleting the trie log backup files. Please ensure besu is working before deleting them manually.");
@@ -110,8 +115,11 @@ public class TrieLogHelper {
} else {
throw new IllegalStateException(
String.format(
"Remaining trie logs (%d) did not match %s (%d). Trie logs backup files have not been deleted, it is safe to rerun the subcommand.",
countAfterPrune, BONSAI_STORAGE_FORMAT_MAX_LAYERS_TO_LOAD, layersToRetain));
"Remaining trie logs (%d) did not match %s (%d). Trie logs backup files (in %s) have not been deleted, it is safe to rerun the subcommand.",
countAfterPrune,
BONSAI_STORAGE_FORMAT_MAX_LAYERS_TO_LOAD,
layersToRetain,
batchFileNameBase));
}
}
@@ -131,7 +139,7 @@ public class TrieLogHelper {
final List<Hash> trieLogKeys =
getTrieLogKeysForBlocks(blockchain, firstBlockOfBatch, lastBlockOfBatch);
LOG.info("Saving trie logs to retain in file (batch {})...", batchNumber);
LOG.info("Saving trie logs to retain in file {} (batch {})...", batchFileName, batchNumber);
saveTrieLogBatches(batchFileName, rootWorldStateStorage, trieLogKeys);
}
@@ -319,7 +327,7 @@ public class TrieLogHelper {
File file = new File(batchFileName);
if (file.exists()) {
LOG.error("File already exists, skipping file creation");
LOG.warn("File already exists {}, skipping file creation", batchFileName);
return;
}
@@ -354,7 +362,7 @@ public class TrieLogHelper {
final String batchFileName) {
File file = new File(batchFileName);
if (file.exists()) {
LOG.error("File already exists, skipping file creation");
LOG.warn("File already exists {}, skipping file creation", batchFileName);
return;
}

View File

@@ -16,6 +16,9 @@ package org.hyperledger.besu.cli.subcommands.storage;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkNotNull;
import static org.hyperledger.besu.cli.subcommands.storage.RocksDbHelper.formatOutputSize;
import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH;
import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.TRIE_LOG_STORAGE;
import org.hyperledger.besu.cli.util.VersionProvider;
import org.hyperledger.besu.controller.BesuController;
@@ -31,10 +34,14 @@ import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.logging.log4j.Level;
import org.apache.logging.log4j.core.config.Configurator;
import org.rocksdb.RocksDBException;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import picocli.CommandLine;
import picocli.CommandLine.Command;
@@ -54,6 +61,8 @@ import picocli.CommandLine.ParentCommand;
})
public class TrieLogSubCommand implements Runnable {
private static final Logger LOG = LoggerFactory.getLogger(TrieLogSubCommand.class);
@SuppressWarnings("UnusedVariable")
@ParentCommand
private static StorageSubCommand parentCommand;
@@ -124,12 +133,67 @@ public class TrieLogSubCommand implements Runnable {
final Path dataDirectoryPath =
Paths.get(
TrieLogSubCommand.parentCommand.parentCommand.dataDir().toAbsolutePath().toString());
LOG.info("Estimating trie logs size before pruning...");
long sizeBefore = estimatedSizeOfTrieLogs();
LOG.info("Estimated trie logs size before pruning: {}", formatOutputSize(sizeBefore));
final TrieLogHelper trieLogHelper = new TrieLogHelper();
trieLogHelper.prune(
context.config(),
context.rootWorldStateStorage(),
context.blockchain(),
dataDirectoryPath);
boolean success =
trieLogHelper.prune(
context.config(),
context.rootWorldStateStorage(),
context.blockchain(),
dataDirectoryPath);
if (success) {
LOG.info("Finished pruning. Re-estimating trie logs size...");
final long sizeAfter = estimatedSizeOfTrieLogs();
LOG.info(
"Estimated trie logs size after pruning: {} (0 B estimate is normal when using default settings)",
formatOutputSize(sizeAfter));
long estimatedSaving = sizeBefore - sizeAfter;
LOG.info(
"Prune ran successfully. We estimate you freed up {}! \uD83D\uDE80",
formatOutputSize(estimatedSaving));
spec.commandLine()
.getOut()
.printf(
"Prune ran successfully. We estimate you freed up %s! \uD83D\uDE80\n",
formatOutputSize(estimatedSaving));
}
}
/**
 * Best-effort estimate of the on-disk size of the trie log column family, using the RocksDB
 * "rocksdb.estimate-live-data-size" property. Returns 0 if anything goes wrong while reading it.
 */
private long estimatedSizeOfTrieLogs() {
final String dbPath =
TrieLogSubCommand.parentCommand.parentCommand.dataDir().toString() + "/" + DATABASE_PATH;
final AtomicLong trieLogSize = new AtomicLong(0L);
try {
RocksDbHelper.forEachColumnFamily(
dbPath,
(rocksdb, cfHandle) -> {
try {
// Only the TRIE_LOG_STORAGE column family is of interest; skip everything else.
if (!Arrays.equals(cfHandle.getName(), TRIE_LOG_STORAGE.getId())) {
return;
}
final String liveDataSize =
rocksdb.getProperty(cfHandle, "rocksdb.estimate-live-data-size");
trieLogSize.set(Long.parseLong(liveDataSize));
} catch (RocksDBException | NumberFormatException e) {
throw new RuntimeException(e);
}
});
} catch (Exception e) {
LOG.warn("Error while estimating trie log size, returning 0 for estimate", e);
return 0L;
}
return trieLogSize.get();
}
}

View File

@@ -0,0 +1,152 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import org.hyperledger.besu.cli.CommandTestAbstract;
import org.hyperledger.besu.ethereum.api.ImmutableApiConfiguration;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
/** CLI tests for the API configuration options (fee limiting, RPC caps and ranges). */
@ExtendWith(MockitoExtension.class)
public class ApiConfigurationOptionsTest extends CommandTestAbstract {

/** Enabling the limiting flag alone must produce a config with only that flag set. */
@Test
public void apiPriorityFeeLimitingEnabledOptionMustBeUsed() {
parseCommand("--api-gas-and-priority-fee-limiting-enabled");

verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture());
verify(mockRunnerBuilder).build();

final var expectedConfig =
ImmutableApiConfiguration.builder().isGasAndPriorityFeeLimitingEnabled(true).build();
assertThat(apiConfigurationCaptor.getValue()).isEqualTo(expectedConfig);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}

/** The lower-bound coefficient flag must be carried into the built configuration. */
@Test
public void apiPriorityFeeLowerBoundCoefficientOptionMustBeUsed() {
final long lowerBoundCoefficient = 150L;
parseCommand(
"--api-gas-and-priority-fee-lower-bound-coefficient",
Long.toString(lowerBoundCoefficient),
"--api-gas-and-priority-fee-limiting-enabled");

verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture());
verify(mockRunnerBuilder).build();

final var expectedConfig =
ImmutableApiConfiguration.builder()
.lowerBoundGasAndPriorityFeeCoefficient(lowerBoundCoefficient)
.isGasAndPriorityFeeLimitingEnabled(true)
.build();
assertThat(apiConfigurationCaptor.getValue()).isEqualTo(expectedConfig);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}

/** A lower bound larger than the upper bound must be rejected before the runner is built. */
@Test
public void
apiPriorityFeeLowerBoundCoefficients_MustNotBeGreaterThan_apiPriorityFeeUpperBoundCoefficient() {
final long lowerBoundCoefficient = 200L;
final long upperBoundCoefficient = 100L;
parseCommand(
"--api-gas-and-priority-fee-limiting-enabled",
"--api-gas-and-priority-fee-lower-bound-coefficient",
Long.toString(lowerBoundCoefficient),
"--api-gas-and-priority-fee-upper-bound-coefficient",
Long.toString(upperBoundCoefficient));

Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains(
"--api-gas-and-priority-fee-lower-bound-coefficient cannot be greater than the value of --api-gas-and-priority-fee-upper-bound-coefficient");
}

/** The upper-bound coefficient flag must be carried into the built configuration. */
@Test
public void apiPriorityFeeUpperBoundCoefficientsOptionMustBeUsed() {
final long upperBoundCoefficient = 200L;
parseCommand(
"--api-gas-and-priority-fee-upper-bound-coefficient",
Long.toString(upperBoundCoefficient),
"--api-gas-and-priority-fee-limiting-enabled");

verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture());
verify(mockRunnerBuilder).build();

final var expectedConfig =
ImmutableApiConfiguration.builder()
.upperBoundGasAndPriorityFeeCoefficient(upperBoundCoefficient)
.isGasAndPriorityFeeLimitingEnabled(true)
.build();
assertThat(apiConfigurationCaptor.getValue()).isEqualTo(expectedConfig);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}

/** --rpc-max-logs-range must set the maximum logs range on the API configuration. */
@Test
public void rpcMaxLogsRangeOptionMustBeUsed() {
final long expectedMaxLogsRange = 150L;
parseCommand("--rpc-max-logs-range", Long.toString(expectedMaxLogsRange));

verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture());
verify(mockRunnerBuilder).build();

assertThat(apiConfigurationCaptor.getValue())
.isEqualTo(ImmutableApiConfiguration.builder().maxLogsRange(expectedMaxLogsRange).build());
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}

/** --rpc-gas-cap must set the gas cap on the API configuration. */
@Test
public void rpcGasCapOptionMustBeUsed() {
final long expectedGasCap = 150L;
parseCommand("--rpc-gas-cap", Long.toString(expectedGasCap));

verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture());
verify(mockRunnerBuilder).build();

assertThat(apiConfigurationCaptor.getValue())
.isEqualTo(ImmutableApiConfiguration.builder().gasCap(expectedGasCap).build());
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}

/** --rpc-max-trace-filter-range must set the trace filter range on the API configuration. */
@Test
public void rpcMaxTraceFilterOptionMustBeUsed() {
final long expectedTraceFilterRange = 150L;
parseCommand("--rpc-max-trace-filter-range", Long.toString(expectedTraceFilterRange));

verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture());
verify(mockRunnerBuilder).build();

assertThat(apiConfigurationCaptor.getValue())
.isEqualTo(
ImmutableApiConfiguration.builder()
.maxTraceFilterRange(expectedTraceFilterRange)
.build());
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
}

View File

@@ -0,0 +1,93 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import org.hyperledger.besu.cli.CommandTestAbstract;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
/** CLI tests for the GraphQL HTTP service options. */
@ExtendWith(MockitoExtension.class)
public class GraphQlOptionsTest extends CommandTestAbstract {

/** --graphql-http-enabled alone must turn the GraphQL HTTP service on. */
@Test
public void graphQLHttpEnabledPropertyMustBeUsed() {
parseCommand("--graphql-http-enabled");

verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();

final var graphQLConfig = graphQLConfigArgumentCaptor.getValue();
assertThat(graphQLConfig.isEnabled()).isTrue();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}

/** Host and port flags must both be reflected in the GraphQL configuration. */
@Test
public void graphQLHttpHostAndPortOptionsMustBeUsed() {
final String expectedHost = "1.2.3.4";
final int expectedPort = 1234;
parseCommand(
"--graphql-http-enabled",
"--graphql-http-host",
expectedHost,
"--graphql-http-port",
String.valueOf(expectedPort));

verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();

final var graphQLConfig = graphQLConfigArgumentCaptor.getValue();
assertThat(graphQLConfig.getHost()).isEqualTo(expectedHost);
assertThat(graphQLConfig.getPort()).isEqualTo(expectedPort);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}

/** "localhost" must be accepted as a GraphQL HTTP host. */
@Test
public void graphQLHttpHostMayBeLocalhost() {
final String expectedHost = "localhost";
parseCommand("--graphql-http-enabled", "--graphql-http-host", expectedHost);

verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();

assertThat(graphQLConfigArgumentCaptor.getValue().getHost()).isEqualTo(expectedHost);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}

/** An IPv6 literal must be accepted as a GraphQL HTTP host. */
@Test
public void graphQLHttpHostMayBeIPv6() {
final String expectedHost = "2600:DB8::8545";
parseCommand("--graphql-http-enabled", "--graphql-http-host", expectedHost);

verify(mockRunnerBuilder).graphQLConfiguration(graphQLConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();

assertThat(graphQLConfigArgumentCaptor.getValue().getHost()).isEqualTo(expectedHost);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
}

View File

@@ -0,0 +1,926 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis.ETH;
import static org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis.NET;
import static org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis.PERM;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.verifyNoInteractions;
import org.hyperledger.besu.cli.CommandTestAbstract;
import org.hyperledger.besu.ethereum.api.jsonrpc.RpcMethod;
import org.hyperledger.besu.ethereum.api.jsonrpc.authentication.JwtAlgorithm;
import org.hyperledger.besu.ethereum.api.tls.TlsConfiguration;
import org.hyperledger.besu.plugin.services.rpc.PluginRpcRequest;
import java.io.IOException;
import java.net.ServerSocket;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Optional;
import java.util.function.Function;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
public class JsonRpcHttpOptionsTest extends CommandTestAbstract {
// Verifies --rpc-http-enabled flips the JSON-RPC HTTP configuration to enabled with no CLI output.
@Test
public void rpcHttpEnabledPropertyMustBeUsed() {
parseCommand("--rpc-http-enabled");
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().isEnabled()).isTrue();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies --rpc-http-api values reach the config; PERM additionally triggers a warning because
// permissions are not enabled in this command setup.
@Test
public void rpcApisPropertyMustBeUsed() {
parseCommand("--rpc-http-api", "ETH,NET,PERM", "--rpc-http-enabled");
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
verify(mockLogger)
.warn("Permissions are disabled. Cannot enable PERM APIs when not using Permissions.");
assertThat(jsonRpcConfigArgumentCaptor.getValue().getRpcApis())
.containsExactlyInAnyOrder(ETH.name(), NET.name(), PERM.name());
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies duplicate API names in --rpc-http-api are deduplicated in the resulting config.
@Test
public void rpcApisPropertyIgnoresDuplicatesAndMustBeUsed() {
parseCommand("--rpc-http-api", "ETH,NET,NET", "--rpc-http-enabled");
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getRpcApis())
.containsExactlyInAnyOrder(ETH.name(), NET.name());
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies duplicate method names in --rpc-http-api-methods-no-auth are deduplicated; note the
// list value here also exercises whitespace around commas being tolerated.
@Test
public void rpcApiNoAuthMethodsIgnoresDuplicatesAndMustBeUsed() {
parseCommand(
"--rpc-http-api-methods-no-auth",
"admin_peers, admin_peers, eth_getWork",
"--rpc-http-enabled");
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getNoAuthRpcApis())
.containsExactlyInAnyOrder(
RpcMethod.ADMIN_PEERS.getMethodName(), RpcMethod.ETH_GET_WORK.getMethodName());
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies an invalid method name for the no-auth option aborts before the runner is built.
// NOTE(review): the flag passed is the singular "--rpc-http-api-method-no-auth" while the expected
// error names "--rpc-http-api-methods-no-auth"; presumably the singular form is an alias or
// picocli abbreviation — confirm against the option declaration.
@Test
public void rpcHttpNoAuthApiMethodsCannotBeInvalid() {
parseCommand("--rpc-http-enabled", "--rpc-http-api-method-no-auth", "invalid");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains(
"Invalid value for option '--rpc-http-api-methods-no-auth', options must be valid RPC methods");
}
// Verifies that passing HTTP RPC options without --rpc-http-enabled logs the dependent-options
// constraint warning (checked via verifyOptionsConstraintLoggerCall) instead of failing.
@Test
public void rpcHttpOptionsRequiresServiceToBeEnabled() {
parseCommand(
"--rpc-http-api",
"ETH,NET",
"--rpc-http-host",
"0.0.0.0",
"--rpc-http-port",
"1234",
"--rpc-http-cors-origins",
"all",
"--rpc-http-max-active-connections",
"88");
verifyOptionsConstraintLoggerCall(
"--rpc-http-enabled",
"--rpc-http-host",
"--rpc-http-port",
"--rpc-http-cors-origins",
"--rpc-http-api",
"--rpc-http-max-active-connections");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Same dependent-options constraint as above, but with the options supplied via a TOML config
// file rather than command-line flags.
@Test
public void rpcHttpOptionsRequiresServiceToBeEnabledToml() throws IOException {
final Path toml =
createTempFile(
"toml",
"rpc-http-api=[\"ETH\",\"NET\"]\n"
+ "rpc-http-host=\"0.0.0.0\"\n"
+ "rpc-http-port=1234\n"
+ "rpc-http-cors-origins=[\"all\"]\n"
+ "rpc-http-max-active-connections=88");
parseCommand("--config-file", toml.toString());
verifyOptionsConstraintLoggerCall(
"--rpc-http-enabled",
"--rpc-http-host",
"--rpc-http-port",
"--rpc-http-cors-origins",
"--rpc-http-api",
"--rpc-http-max-active-connections");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies host and port flags are carried into the JSON-RPC configuration.
@Test
public void rpcHttpHostAndPortOptionsMustBeUsed() {
final String host = "1.2.3.4";
final int port = 1234;
parseCommand(
"--rpc-http-enabled", "--rpc-http-host", host, "--rpc-http-port", String.valueOf(port));
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(jsonRpcConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies "localhost" is accepted as an RPC HTTP host.
@Test
public void rpcHttpHostMayBeLocalhost() {
final String host = "localhost";
parseCommand("--rpc-http-enabled", "--rpc-http-host", host);
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies an IPv6 literal is accepted as an RPC HTTP host.
@Test
public void rpcHttpHostMayBeIPv6() {
final String host = "2600:DB8::8545";
parseCommand("--rpc-http-enabled", "--rpc-http-host", host);
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies --rpc-http-max-active-connections is carried into the JSON-RPC configuration.
// NOTE(review): --rpc-http-enabled is not passed here, so the captured config is for a disabled
// service — presumably the max-connections value is still parsed and stored; confirm intent.
@Test
public void rpcHttpMaxActiveConnectionsPropertyMustBeUsed() {
final int maxConnections = 99;
parseCommand("--rpc-http-max-active-connections", String.valueOf(maxConnections));
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getMaxActiveConnections())
.isEqualTo(maxConnections);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Verifies the TLS flag without --rpc-http-enabled logs the dependent-options constraint warning.
@Test
public void rpcHttpTlsRequiresRpcHttpEnabled() {
parseCommand("--rpc-http-tls-enabled");
verifyOptionsConstraintLoggerCall("--rpc-http-enabled", "--rpc-http-tls-enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Same TLS dependent-options constraint as above, supplied via a TOML config file.
@Test
public void rpcHttpTlsRequiresRpcHttpEnabledToml() throws IOException {
final Path toml = createTempFile("toml", "rpc-http-tls-enabled=true\n");
parseCommand("--config-file", toml.toString());
verifyOptionsConstraintLoggerCall("--rpc-http-enabled", "--rpc-http-tls-enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Enabling TLS without a keystore file must produce an error on stderr.
@Test
public void rpcHttpTlsWithoutKeystoreReportsError() {
parseCommand("--rpc-http-enabled", "--rpc-http-tls-enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Keystore file is required when TLS is enabled for JSON-RPC HTTP endpoint");
}
// A keystore without its password file must produce an error on stderr.
@Test
public void rpcHttpTlsWithoutPasswordfileReportsError() {
parseCommand(
"--rpc-http-enabled",
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
"/tmp/test.p12");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains(
"File containing password to unlock keystore is required when TLS is enabled for JSON-RPC HTTP endpoint");
}
// With keystore + password supplied, the TLS configuration must be built with the keystore
// path and with client-auth absent (no client-auth flags were given).
@Test
public void rpcHttpTlsKeystoreAndPasswordMustBeUsed() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile);
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(jsonRpcConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
final Optional<TlsConfiguration> tlsConfiguration =
jsonRpcConfigArgumentCaptor.getValue().getTlsConfiguration();
assertThat(tlsConfiguration.isPresent()).isTrue();
assertThat(tlsConfiguration.get().getKeyStorePath()).isEqualTo(Path.of(keystoreFile));
assertThat(tlsConfiguration.get().getClientAuthConfiguration().isEmpty()).isTrue();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Client-auth enabled but neither a known-clients file nor CA clients configured
// must produce an error on stderr.
@Test
public void rpcHttpTlsClientAuthWithoutKnownFileReportsError() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile,
"--rpc-http-tls-client-auth-enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains(
"Known-clients file must be specified or CA clients must be enabled when TLS client authentication is enabled for JSON-RPC HTTP endpoint");
}
// Client-auth with a known-clients file: the file path lands in the client-auth config
// and CA clients stay disabled.
@Test
public void rpcHttpTlsClientAuthWithKnownClientFile() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
final String knownClientFile = "/tmp/knownClientFile";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile,
"--rpc-http-tls-client-auth-enabled",
"--rpc-http-tls-known-clients-file",
knownClientFile);
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(jsonRpcConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
final Optional<TlsConfiguration> tlsConfiguration =
jsonRpcConfigArgumentCaptor.getValue().getTlsConfiguration();
assertThat(tlsConfiguration.isPresent()).isTrue();
assertThat(tlsConfiguration.get().getKeyStorePath()).isEqualTo(Path.of(keystoreFile));
assertThat(tlsConfiguration.get().getClientAuthConfiguration().isPresent()).isTrue();
assertThat(
tlsConfiguration.get().getClientAuthConfiguration().get().getKnownClientsFile().get())
.isEqualTo(Path.of(knownClientFile));
assertThat(tlsConfiguration.get().getClientAuthConfiguration().get().isCaClientsEnabled())
.isFalse();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Client-auth with CA clients only: CA clients enabled, no known-clients file present.
@Test
public void rpcHttpTlsClientAuthWithCAClient() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile,
"--rpc-http-tls-client-auth-enabled",
"--rpc-http-tls-ca-clients-enabled");
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(jsonRpcConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
final Optional<TlsConfiguration> tlsConfiguration =
jsonRpcConfigArgumentCaptor.getValue().getTlsConfiguration();
assertThat(tlsConfiguration.isPresent()).isTrue();
assertThat(tlsConfiguration.get().getKeyStorePath()).isEqualTo(Path.of(keystoreFile));
assertThat(tlsConfiguration.get().getClientAuthConfiguration().isPresent()).isTrue();
assertThat(
tlsConfiguration
.get()
.getClientAuthConfiguration()
.get()
.getKnownClientsFile()
.isEmpty())
.isTrue();
assertThat(tlsConfiguration.get().getClientAuthConfiguration().get().isCaClientsEnabled())
.isTrue();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// CA clients and a known-clients file may be combined: both end up in the client-auth config.
@Test
public void rpcHttpTlsClientAuthWithCAClientAndKnownClientFile() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
final String knownClientFile = "/tmp/knownClientFile";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile,
"--rpc-http-tls-client-auth-enabled",
"--rpc-http-tls-ca-clients-enabled",
"--rpc-http-tls-known-clients-file",
knownClientFile);
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(jsonRpcConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
final Optional<TlsConfiguration> tlsConfiguration =
jsonRpcConfigArgumentCaptor.getValue().getTlsConfiguration();
assertThat(tlsConfiguration.isPresent()).isTrue();
assertThat(tlsConfiguration.get().getKeyStorePath()).isEqualTo(Path.of(keystoreFile));
assertThat(tlsConfiguration.get().getClientAuthConfiguration().isPresent()).isTrue();
assertThat(
tlsConfiguration.get().getClientAuthConfiguration().get().getKnownClientsFile().get())
.isEqualTo(Path.of(knownClientFile));
assertThat(tlsConfiguration.get().getClientAuthConfiguration().get().isCaClientsEnabled())
.isTrue();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Without explicit protocol/cipher options the defaults apply: empty cipher-suite list and
// exactly TLSv1.3 + TLSv1.2 as secure transport protocols.
@Test
public void rpcHttpTlsCheckDefaultProtocolsAndCipherSuites() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile);
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(jsonRpcConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
final Optional<TlsConfiguration> tlsConfiguration =
jsonRpcConfigArgumentCaptor.getValue().getTlsConfiguration();
assertThat(tlsConfiguration).isPresent();
assertThat(tlsConfiguration.get().getKeyStorePath()).isEqualTo(Path.of(keystoreFile));
assertThat(tlsConfiguration.get().getClientAuthConfiguration()).isEmpty();
assertThat(tlsConfiguration.get().getCipherSuites().get()).isEmpty();
assertThat(tlsConfiguration.get().getSecureTransportProtocols().get())
.containsExactly("TLSv1.3", "TLSv1.2");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// An unknown TLS protocol name (deliberately misspelled "TLsv1.4") must be rejected.
@Test
public void rpcHttpTlsCheckInvalidProtocols() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
final String protocol = "TLsv1.4";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile,
"--rpc-http-tls-protocols",
protocol);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).contains("No valid TLS protocols specified");
}
// An unknown cipher-suite name must be rejected and echoed back in the error message.
@Test
public void rpcHttpTlsCheckInvalidCipherSuites() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
final String cipherSuites = "Invalid";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile,
"--rpc-http-tls-cipher-suites",
cipherSuites);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Invalid TLS cipher suite specified " + cipherSuites);
}
// Valid comma-separated protocol and cipher-suite lists are split and stored
// (order-insensitive, hence containsExactlyInAnyOrder).
@Test
public void rpcHttpTlsCheckValidProtocolsAndCipherSuites() {
final String host = "1.2.3.4";
final int port = 1234;
final String keystoreFile = "/tmp/test.p12";
final String keystorePasswordFile = "/tmp/test.txt";
final String protocols = "TLSv1.3,TLSv1.2";
final String cipherSuites =
"TLS_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384,TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256";
parseCommand(
"--rpc-http-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-enabled",
"--rpc-http-tls-keystore-file",
keystoreFile,
"--rpc-http-tls-keystore-password-file",
keystorePasswordFile,
"--rpc-http-tls-protocols",
protocols,
"--rpc-http-tls-cipher-suites",
cipherSuites);
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getHost()).isEqualTo(host);
assertThat(jsonRpcConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
final Optional<TlsConfiguration> tlsConfiguration =
jsonRpcConfigArgumentCaptor.getValue().getTlsConfiguration();
assertThat(tlsConfiguration).isPresent();
assertThat(tlsConfiguration.get().getKeyStorePath()).isEqualTo(Path.of(keystoreFile));
assertThat(tlsConfiguration.get().getClientAuthConfiguration()).isEmpty();
assertThat(tlsConfiguration.get().getCipherSuites().get())
.containsExactlyInAnyOrder(
"TLS_AES_256_GCM_SHA384",
"TLS_ECDHE_RSA_WITH_AES_256_GCM_SHA384",
"TLS_ECDHE_RSA_WITH_AES_128_GCM_SHA256");
assertThat(tlsConfiguration.get().getSecureTransportProtocols().get())
.containsExactlyInAnyOrder("TLSv1.2", "TLSv1.3");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// A cipher-suite option given without --rpc-http-tls-enabled should only log a warning
// (once for the full API group and once for the engine group), not fail.
@Test
public void rpcHttpTlsWarnIfCipherSuitesSpecifiedWithoutTls() {
final String host = "1.2.3.4";
final int port = 1234;
final String cipherSuites = "Invalid";
parseCommand(
"--rpc-http-enabled",
"--engine-rpc-enabled",
"--rpc-http-host",
host,
"--rpc-http-port",
String.valueOf(port),
"--rpc-http-tls-cipher-suite",
cipherSuites);
verify(
mockLogger,
times(2)) // this is verified for both the full suite of apis, and the engine group.
.warn(
"{} has been ignored because {} was not defined on the command line.",
"--rpc-http-tls-cipher-suite",
"--rpc-http-tls-enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// A comma-separated CORS origin list is split into its individual domains.
@Test
public void rpcHttpCorsOriginsTwoDomainsMustBuildListWithBothDomains() {
final String[] origins = {"http://domain1.com", "https://domain2.com"};
parseCommand("--rpc-http-enabled", "--rpc-http-cors-origins", String.join(",", origins));
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getCorsAllowedDomains().toArray())
.isEqualTo(origins);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Empty entries produced by a double comma (",,") are filtered out of the CORS list.
@Test
public void rpcHttpCorsOriginsDoubleCommaFilteredOut() {
final String[] origins = {"http://domain1.com", "https://domain2.com"};
parseCommand("--rpc-http-enabled", "--rpc-http-cors-origins", String.join(",,", origins));
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getCorsAllowedDomains().toArray())
.isEqualTo(origins);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// A lone "*" is accepted and stored verbatim as the allow-all CORS entry.
@Test
public void rpcHttpCorsOriginsWithWildcardMustBuildListWithWildcard() {
final String[] origins = {"*"};
parseCommand("--rpc-http-enabled", "--rpc-http-cors-origins", String.join(",", origins));
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getCorsAllowedDomains().toArray())
.isEqualTo(origins);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// The keyword "all" is normalized to the "*" wildcard.
@Test
public void rpcHttpCorsOriginsWithAllMustBuildListWithWildcard() {
parseCommand("--rpc-http-enabled", "--rpc-http-cors-origins", "all");
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getCorsAllowedDomains()).containsExactly("*");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// The sentinel value "none" must result in an empty CORS allow-list.
@Test
public void rpcHttpCorsOriginsWithNoneMustBuildEmptyList() {
final String corsValue = String.join(",", new String[] {"none"});
parseCommand("--rpc-http-enabled", "--rpc-http-cors-origins", corsValue);
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final var allowedDomains = jsonRpcConfigArgumentCaptor.getValue().getCorsAllowedDomains();
assertThat(allowedDomains).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// "none" combined with a real domain is contradictory and must be rejected
// before the runner is ever built.
@Test
public void rpcHttpCorsOriginsNoneWithAnotherDomainMustFail() {
parseCommand("--rpc-http-cors-origins", "http://domain1.com,none");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Value 'none' can't be used with other domains");
}
// Same rejection as above but with "none" listed first — ordering must not matter.
@Test
public void rpcHttpCorsOriginsNoneWithAnotherDomainMustFailNoneFirst() {
parseCommand("--rpc-http-cors-origins", "none,http://domain1.com");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Value 'none' can't be used with other domains");
}
// "all" combined with a concrete domain in a single option value must be rejected.
@Test
public void rpcHttpCorsOriginsAllWithAnotherDomainMustFail() {
parseCommand("--rpc-http-cors-origins=http://domain1.com,all");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Values '*' or 'all' can't be used with other domains");
}
// Same rejection when the conflicting values arrive as two separate repeated flags.
@Test
public void rpcHttpCorsOriginsAllWithAnotherDomainMustFailAsFlags() {
parseCommand("--rpc-http-cors-origins=http://domain1.com", "--rpc-http-cors-origins=all");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Values '*' or 'all' can't be used with other domains");
}
// "*" combined with a concrete domain in a single option value must be rejected.
@Test
public void rpcHttpCorsOriginsWildcardWithAnotherDomainMustFail() {
parseCommand("--rpc-http-cors-origins=http://domain1.com,*");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Values '*' or 'all' can't be used with other domains");
}
// Same wildcard rejection when the conflicting values arrive as repeated flags.
@Test
public void rpcHttpCorsOriginsWildcardWithAnotherDomainMustFailAsFlags() {
parseCommand("--rpc-http-cors-origins=http://domain1.com", "--rpc-http-cors-origins=*");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Values '*' or 'all' can't be used with other domains");
}
// A domain value that compiles into an invalid regex ("**") must be rejected.
@Test
public void rpcHttpCorsOriginsInvalidRegexShouldFail() {
parseCommand("--rpc-http-cors-origins", "**");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Domain values result in invalid regex pattern");
}
// An empty CORS origin value must be rejected with an explicit message.
@Test
public void rpcHttpCorsOriginsEmptyValueFails() {
parseCommand("--rpc-http-cors-origins=");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Domain cannot be empty string or null string.");
}
// An unknown RPC API name (no matching built-in or plugin namespace) must be rejected.
@Test
public void rpcApisPropertyWithInvalidEntryMustDisplayError() {
parseCommand("--rpc-http-api", "BOB");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
// PicoCLI uses longest option name for message when option has multiple names, so here plural.
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Invalid value for option '--rpc-http-api': invalid entries found [BOB]");
}
// Counterpart of the previous test: once a plugin registers the "bob" namespace,
// "BOB" becomes a valid --rpc-http-api value.
@Test
public void rpcApisPropertyWithPluginNamespaceAreValid() {
rpcEndpointServiceImpl.registerRPCEndpoint(
"bob", "method", (Function<PluginRpcRequest, Object>) request -> "nothing");
parseCommand("--rpc-http-api", "BOB");
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getRpcApis())
.containsExactlyInAnyOrder("BOB");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// --rpc-http-max-request-content-length is propagated to the JSON-RPC configuration.
@Test
public void rpcHttpMaxRequestContentLengthOptionMustBeUsed() {
final int rpcHttpMaxRequestContentLength = 1;
parseCommand(
"--rpc-http-max-request-content-length", Long.toString(rpcHttpMaxRequestContentLength));
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getMaxRequestContentLength())
.isEqualTo(rpcHttpMaxRequestContentLength);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// --rpc-http-max-batch-size is propagated to the JSON-RPC configuration.
@Test
public void rpcHttpMaxBatchSizeOptionMustBeUsed() {
final int rpcHttpMaxBatchSize = 1;
parseCommand("--rpc-http-max-batch-size", Integer.toString(rpcHttpMaxBatchSize));
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getMaxBatchSize())
.isEqualTo(rpcHttpMaxBatchSize);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// When the default HTTP RPC port (8545) is already bound, the port check must report a
// "port in use" error on stderr.
@Test
public void portInUseReportsError() throws IOException {
// try-with-resources guarantees the probe socket is released even when an assertion
// fails; the original manual close() leaked the port on any test failure, which could
// then break unrelated tests that bind 8545.
try (ServerSocket serverSocket = new ServerSocket(8545)) {
parseCommandWithPortCheck("--rpc-http-enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains("Port(s) '[8545]' already in use. Check for other processes using the port(s).");
}
}
// Re-using the default WS port (8546) for HTTP must be flagged as a port clash.
@Test
public void assertThatCheckPortClashRejectsAsExpected() throws Exception {
// use WS port for HTTP
final int port = 8546;
parseCommand("--rpc-http-enabled", "--rpc-http-port", String.valueOf(port), "--rpc-ws-enabled");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains(
"Port number '8546' has been specified multiple times. Please review the supplied configuration.");
}
// Using port 8546 for HTTP alone (WS not enabled) is not a clash and must be accepted.
@Test
public void assertThatCheckPortClashAcceptsAsExpected() throws Exception {
// use WS port for HTTP
final int port = 8546;
parseCommand("--rpc-http-enabled", "--rpc-http-port", String.valueOf(port));
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getPort()).isEqualTo(port);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// Enabling HTTP authentication without a credentials file or a public key file must fail.
@Test
public void httpAuthenticationWithoutRequiredConfiguredOptionsMustFail() {
parseCommand("--rpc-http-enabled", "--rpc-http-authentication-enabled");
verifyNoInteractions(mockRunnerBuilder);
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8))
.contains(
"Unable to authenticate JSON-RPC HTTP endpoint without a supplied credentials file or authentication public key file");
}
// --rpc-http-authentication-jwt-algorithm is propagated to the JSON-RPC configuration.
// (Note: method name has a pre-existing typo, "Algorith" — kept to avoid churning
// test reports/filters keyed on the name.)
@Test
public void httpAuthenticationAlgorithIsConfigured() {
parseCommand("--rpc-http-authentication-jwt-algorithm", "ES256");
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getAuthenticationAlgorithm())
.isEqualTo(JwtAlgorithm.ES256);
// Consistency with every sibling success test: a valid option produces no CLI output.
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
// --rpc-http-authentication-jwt-public-key-file is propagated to the JSON-RPC configuration.
@Test
public void httpAuthenticationPublicKeyIsConfigured() throws IOException {
final Path publicKey = Files.createTempFile("public_key", "");
parseCommand("--rpc-http-authentication-jwt-public-key-file", publicKey.toString());
verify(mockRunnerBuilder).jsonRpcConfiguration(jsonRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(jsonRpcConfigArgumentCaptor.getValue().getAuthenticationPublicKeyFile().getPath())
.isEqualTo(publicKey.toString());
// Consistency with every sibling success test: a valid option produces no CLI output.
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
}

View File

@@ -0,0 +1,453 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.cli.options;
import static java.nio.charset.StandardCharsets.UTF_8;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.verify;
import org.hyperledger.besu.cli.CommandTestAbstract;
import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.p2p.peers.EnodeURLImpl;
import org.hyperledger.besu.ethereum.permissioning.LocalPermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.PermissioningConfiguration;
import org.hyperledger.besu.ethereum.permissioning.SmartContractPermissioningConfiguration;
import org.hyperledger.besu.plugin.data.EnodeURL;
import java.io.IOException;
import java.net.URL;
import java.nio.file.Files;
import java.nio.file.Path;
import java.util.Collections;
import java.util.List;
import java.util.stream.Collectors;
import com.google.common.collect.Lists;
import com.google.common.io.Resources;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.mockito.Mockito;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
public class PermissionsOptionsTest extends CommandTestAbstract {
private static final String PERMISSIONING_CONFIG_TOML = "/permissioning_config.toml";
// A static node that is absent from the nodes-allowlist must be reported as an error.
// The allowed node shares the node id but listens on a different port (30304), so the
// static node's enode (default ports) does not match the allowlist entry.
@Test
public void errorIsRaisedIfStaticNodesAreNotAllowed(final @TempDir Path testFolder)
throws IOException {
final Path staticNodesFile = testFolder.resolve("static-nodes.json");
final Path permissioningConfig = testFolder.resolve("permissioning.json");
final EnodeURL staticNodeURI =
EnodeURLImpl.builder()
.nodeId(
"50203c6bfca6874370e71aecc8958529fd723feb05013dc1abca8fc1fff845c5259faba05852e9dfe5ce172a7d6e7c2a3a5eaa8b541c8af15ea5518bbff5f2fa")
.ipAddress("127.0.0.1")
.useDefaultPorts()
.build();
final EnodeURL allowedNode =
EnodeURLImpl.builder()
.nodeId(
"50203c6bfca6874370e71aecc8958529fd723feb05013dc1abca8fc1fff845c5259faba05852e9dfe5ce172a7d6e7c2a3a5eaa8b541c8af15ea5518bbff5f2fa")
.useDefaultPorts()
.ipAddress("127.0.0.1")
.listeningPort(30304)
.build();
Files.write(staticNodesFile, ("[\"" + staticNodeURI.toString() + "\"]").getBytes(UTF_8));
Files.write(
permissioningConfig,
("nodes-allowlist=[\"" + allowedNode.toString() + "\"]").getBytes(UTF_8));
parseCommand(
"--data-path=" + testFolder,
"--bootnodes",
"--permissions-nodes-config-file-enabled=true",
"--permissions-nodes-config-file=" + permissioningConfig);
assertThat(commandErrorOutput.toString(UTF_8))
.contains(staticNodeURI.toString(), "not in nodes-allowlist");
}
// --permissions-nodes-contract-address given without a value must fail at parse time.
@Test
public void nodePermissionsSmartContractWithoutOptionMustError() {
parseCommand("--permissions-nodes-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith("Missing required parameter for option '--permissions-nodes-contract-address'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// Enabling node contract permissioning without specifying the contract address must fail.
@Test
public void nodePermissionsEnabledWithoutContractAddressMustError() {
parseCommand("--permissions-nodes-contract-enabled");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.contains("No node permissioning contract address specified");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// A non-hex contract address must be rejected by option conversion.
@Test
public void nodePermissionsEnabledWithInvalidContractAddressMustError() {
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"invalid-smart-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// A hex address shorter than 20 bytes must be rejected by option conversion.
@Test
public void nodePermissionsEnabledWithTooShortContractAddressMustError() {
parseCommand(
"--permissions-nodes-contract-enabled", "--permissions-nodes-contract-address", "0x1234");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// Valid enable-flag + address: the resulting smart-contract permissioning config must match
// an expected config built with the same address and allowlist enabled (field-by-field via
// recursive comparison).
@Test
public void nodePermissionsSmartContractMustUseOption() {
final String smartContractAddress = "0x0000000000000000000000000000000000001234";
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
smartContractAddress);
final SmartContractPermissioningConfiguration smartContractPermissioningConfiguration =
new SmartContractPermissioningConfiguration();
smartContractPermissioningConfiguration.setNodeSmartContractAddress(
Address.fromHexString(smartContractAddress));
smartContractPermissioningConfiguration.setSmartContractNodeAllowlistEnabled(true);
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(smartContractPermissioningConfiguration);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// When --permissions-nodes-contract-version is omitted, the interface version defaults to 1.
@Test
public void nodePermissionsContractVersionDefaultValue() {
final SmartContractPermissioningConfiguration expectedConfig =
new SmartContractPermissioningConfiguration();
expectedConfig.setNodeSmartContractAddress(
Address.fromHexString("0x0000000000000000000000000000000000001234"));
expectedConfig.setSmartContractNodeAllowlistEnabled(true);
expectedConfig.setNodeSmartContractInterfaceVersion(1);
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"0x0000000000000000000000000000000000001234");
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(expectedConfig);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// --permissions-nodes-contract-version overrides the default interface version.
@Test
public void nodePermissionsContractVersionSetsValue() {
final SmartContractPermissioningConfiguration expectedConfig =
new SmartContractPermissioningConfiguration();
expectedConfig.setNodeSmartContractAddress(
Address.fromHexString("0x0000000000000000000000000000000000001234"));
expectedConfig.setSmartContractNodeAllowlistEnabled(true);
expectedConfig.setNodeSmartContractInterfaceVersion(2);
parseCommand(
"--permissions-nodes-contract-enabled",
"--permissions-nodes-contract-address",
"0x0000000000000000000000000000000000001234",
"--permissions-nodes-contract-version",
"2");
verify(mockRunnerBuilder)
.permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
final PermissioningConfiguration config =
permissioningConfigurationArgumentCaptor.getValue().get();
assertThat(config.getSmartContractConfig().get())
.usingRecursiveComparison()
.isEqualTo(expectedConfig);
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// --permissions-accounts-contract-address given without a value must fail at parse time.
@Test
public void accountPermissionsSmartContractWithoutOptionMustError() {
parseCommand("--permissions-accounts-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.startsWith(
"Missing required parameter for option '--permissions-accounts-contract-address'");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// Enabling account contract permissioning without the contract address must fail.
@Test
public void accountPermissionsEnabledWithoutContractAddressMustError() {
parseCommand("--permissions-accounts-contract-enabled");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8))
.contains("No account permissioning contract address specified");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
// A non-hex account-permissioning contract address must be rejected by option conversion.
@Test
public void accountPermissionsEnabledWithInvalidContractAddressMustError() {
parseCommand(
"--permissions-accounts-contract-enabled",
"--permissions-accounts-contract-address",
"invalid-smart-contract-address");
Mockito.verifyNoInteractions(mockRunnerBuilder);
assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsEnabledWithTooShortContractAddressMustError() {
  // A syntactically hex but too-short address must also be rejected.
  final String truncatedAddress = "0x1234";

  parseCommand(
      "--permissions-accounts-contract-enabled",
      "--permissions-accounts-contract-address",
      truncatedAddress);

  // No runner interaction is expected when validation fails.
  Mockito.verifyNoInteractions(mockRunnerBuilder);

  assertThat(commandErrorOutput.toString(UTF_8)).contains("Invalid value");
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissionsSmartContractMustUseOption() {
  // Verifies that enabling contract-based account permissioning with a valid
  // address produces a permissioning configuration whose smart-contract section
  // is present, enabled, and carries the supplied address.
  final String smartContractAddress = "0x0000000000000000000000000000000000001234";

  parseCommand(
      "--permissions-accounts-contract-enabled",
      "--permissions-accounts-contract-address",
      smartContractAddress);

  // NOTE(review): the original test also constructed an expected
  // SmartContractPermissioningConfiguration instance that was never used in any
  // assertion — dead code removed; field-level assertions below cover the intent.
  verify(mockRunnerBuilder)
      .permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());

  final PermissioningConfiguration permissioningConfiguration =
      permissioningConfigurationArgumentCaptor.getValue().get();
  assertThat(permissioningConfiguration.getSmartContractConfig()).isPresent();

  final SmartContractPermissioningConfiguration effectiveSmartContractConfig =
      permissioningConfiguration.getSmartContractConfig().get();
  assertThat(effectiveSmartContractConfig.isSmartContractAccountAllowlistEnabled()).isTrue();
  assertThat(effectiveSmartContractConfig.getAccountSmartContractAddress())
      .isEqualTo(Address.fromHexString(smartContractAddress));

  // A successful parse must produce no output on either stream.
  assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlPathWithoutOptionMustDisplayUsage() {
  // Supplying the node config-file flag without a path must fail at parse time.
  parseCommand("--permissions-nodes-config-file");

  // Parse errors must not reach the runner builder.
  Mockito.verifyNoInteractions(mockRunnerBuilder);

  final String errorOutput = commandErrorOutput.toString(UTF_8);
  assertThat(errorOutput)
      .startsWith("Missing required parameter for option '--permissions-nodes-config-file'");
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlPathWithoutOptionMustDisplayUsage() {
  // Supplying the account config-file flag without a path must fail at parse time.
  parseCommand("--permissions-accounts-config-file");

  // Parse errors must not reach the runner builder.
  Mockito.verifyNoInteractions(mockRunnerBuilder);

  final String errorOutput = commandErrorOutput.toString(UTF_8);
  assertThat(errorOutput)
      .startsWith("Missing required parameter for option '--permissions-accounts-config-file'");
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningEnabledWithNonexistentConfigFileMustError() {
  // Pointing node permissioning at a missing file must be reported as an error.
  final String missingFile = "file-does-not-exist";

  parseCommand(
      "--permissions-nodes-config-file-enabled",
      "--permissions-nodes-config-file",
      missingFile);

  // No runner should be assembled from an unreadable configuration.
  Mockito.verifyNoInteractions(mockRunnerBuilder);

  assertThat(commandErrorOutput.toString(UTF_8)).contains("Configuration file does not exist");
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningEnabledWithNonexistentConfigFileMustError() {
  // Pointing account permissioning at a missing file must be reported as an error.
  final String missingFile = "file-does-not-exist";

  parseCommand(
      "--permissions-accounts-config-file-enabled",
      "--permissions-accounts-config-file",
      missingFile);

  // No runner should be assembled from an unreadable configuration.
  Mockito.verifyNoInteractions(mockRunnerBuilder);

  assertThat(commandErrorOutput.toString(UTF_8)).contains("Configuration file does not exist");
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlFileWithNoPermissionsEnabledMustNotError() throws IOException {
  // Providing a node permissioning TOML file without the matching "enabled" flag
  // is allowed and must not produce any output.
  final URL permissioningConfigUrl = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
  final Path permissioningToml =
      createTempFile("toml", Resources.toByteArray(permissioningConfigUrl));

  parseCommand("--permissions-nodes-config-file", permissioningToml.toString());

  // The runner is still built: the option alone does not enable permissioning.
  verify(mockRunnerBuilder).build();

  assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlFileWithNoPermissionsEnabledMustNotError()
    throws IOException {
  // Providing an account permissioning TOML file without the matching "enabled"
  // flag is allowed and must not produce any output.
  final URL permissioningConfigUrl = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
  final Path permissioningToml =
      createTempFile("toml", Resources.toByteArray(permissioningConfigUrl));

  parseCommand("--permissions-accounts-config-file", permissioningToml.toString());

  // The runner is still built: the option alone does not enable permissioning.
  verify(mockRunnerBuilder).build();

  assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void defaultPermissionsTomlFileWithNoPermissionsEnabledMustNotError() {
  // Running with no permissioning options at all must not warn about
  // "no permissions enabled" (p2p is disabled only to keep the test hermetic).
  parseCommand("--p2p-enabled", "false");

  verify(mockRunnerBuilder).build();

  final String errorOutput = commandErrorOutput.toString(UTF_8);
  assertThat(errorOutput).doesNotContain("no permissions enabled");
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void nodePermissioningTomlPathMustUseOption() throws IOException {
  // Enabling file-based node permissioning with a TOML file and bootnodes must
  // yield a local permissioning configuration that records both the file path
  // and the allowed node list.
  final List<EnodeURL> permittedNodes =
      Lists.newArrayList(
          EnodeURLImpl.fromString(
              "enode://6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0@192.168.0.9:4567"),
          EnodeURLImpl.fromString(
              "enode://6f8a80d14311c39f35f516fa664deaaaa13e85b2f7493f37f6144d86991ec012937307647bd3b9a82abe2974e1407241d54947bbb39763a4cac9f77166ad92a0@192.169.0.9:4568"));

  final URL permissioningConfigUrl = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
  final Path permissioningToml =
      createTempFile("toml", Resources.toByteArray(permissioningConfigUrl));

  // The bootnodes option takes a single comma-separated list of enode URLs.
  final String bootnodesValue =
      permittedNodes.stream().map(Object::toString).collect(Collectors.joining(","));

  parseCommand(
      "--permissions-nodes-config-file-enabled",
      "--permissions-nodes-config-file",
      permissioningToml.toString(),
      "--bootnodes",
      bootnodesValue);

  // Expected configuration: defaults plus the file path and node allowlist above.
  final LocalPermissioningConfiguration expectedLocalConfig =
      LocalPermissioningConfiguration.createDefault();
  expectedLocalConfig.setNodePermissioningConfigFilePath(permissioningToml.toString());
  expectedLocalConfig.setNodeAllowlist(permittedNodes);

  verify(mockRunnerBuilder)
      .permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());
  verify(mockRunnerBuilder).build();

  final PermissioningConfiguration capturedConfig =
      permissioningConfigurationArgumentCaptor.getValue().get();
  assertThat(capturedConfig.getLocalConfig().get())
      .usingRecursiveComparison()
      .isEqualTo(expectedLocalConfig);

  assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
@Test
public void accountPermissioningTomlPathMustUseOption() throws IOException {
  // Verifies that enabling file-based account permissioning with a TOML file
  // produces a local permissioning configuration whose account allowlist is
  // enabled and whose config-file path matches the supplied file.
  final URL configFile = this.getClass().getResource(PERMISSIONING_CONFIG_TOML);
  final Path permToml = createTempFile("toml", Resources.toByteArray(configFile));

  parseCommand(
      "--permissions-accounts-config-file-enabled",
      "--permissions-accounts-config-file",
      permToml.toString());

  // NOTE(review): the original test also built an expected
  // LocalPermissioningConfiguration (createDefault + file path + allowlist) that
  // was never compared against anything — dead code removed; the field-level
  // assertions below cover the intent.
  verify(mockRunnerBuilder)
      .permissioningConfiguration(permissioningConfigurationArgumentCaptor.capture());

  final PermissioningConfiguration permissioningConfiguration =
      permissioningConfigurationArgumentCaptor.getValue().get();
  assertThat(permissioningConfiguration.getLocalConfig()).isPresent();

  final LocalPermissioningConfiguration effectiveLocalPermissioningConfig =
      permissioningConfiguration.getLocalConfig().get();
  assertThat(effectiveLocalPermissioningConfig.isAccountAllowlistEnabled()).isTrue();
  assertThat(effectiveLocalPermissioningConfig.getAccountPermissioningConfigFilePath())
      .isEqualTo(permToml.toString());

  // A successful parse must produce no output on either stream.
  assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
  assertThat(commandOutput.toString(UTF_8)).isEmpty();
}
}

View File

@@ -246,4 +246,17 @@ public class RpcWebsocketOptionsTest extends CommandTestAbstract {
.contains(
"Unable to authenticate JSON-RPC WebSocket endpoint without a supplied credentials file or authentication public key file");
}
@Test
public void rpcWsRpcEnabledPropertyDefaultIsFalse() {
parseCommand();
verify(mockRunnerBuilder).webSocketConfiguration(wsRpcConfigArgumentCaptor.capture());
verify(mockRunnerBuilder).build();
assertThat(wsRpcConfigArgumentCaptor.getValue().isEnabled()).isFalse();
assertThat(commandOutput.toString(UTF_8)).isEmpty();
assertThat(commandErrorOutput.toString(UTF_8)).isEmpty();
}
}

View File

@@ -291,8 +291,8 @@ class TrieLogHelperTest {
nonValidatingTrieLogHelper.prune(
dataStorageConfiguration, inMemoryWorldStateSpy, blockchain, dataDir))
.isInstanceOf(RuntimeException.class)
.hasMessage(
"Remaining trie logs (0) did not match --bonsai-historical-block-limit (3). Trie logs backup files have not been deleted, it is safe to rerun the subcommand.");
.hasMessageContaining(
"Remaining trie logs (0) did not match --bonsai-historical-block-limit (3)");
}
@Test

View File

@@ -624,6 +624,8 @@ task autocomplete(type: JavaExec) {
}
}
def archiveBuildVersion = project.hasProperty('release.releaseVersion') ? project.property('release.releaseVersion') : "${rootProject.version}"
installDist { dependsOn checkLicense, untunedStartScripts, evmToolStartScripts }
distTar {
@@ -632,6 +634,7 @@ distTar {
delete fileTree(dir: 'build/distributions', include: '*.tar.gz')
}
compression = Compression.GZIP
setVersion(archiveBuildVersion)
archiveExtension = 'tar.gz'
archiveBaseName = 'linea-besu'
}
@@ -642,6 +645,7 @@ distZip {
delete fileTree(dir: 'build/distributions', include: '*.zip')
}
archiveBaseName = 'linea-besu'
setVersion(archiveBuildVersion)
}
publishing {
@@ -938,6 +942,12 @@ task checkSpdxHeader(type: CheckSpdxHeader) {
].join("|")
}
jacocoTestReport {
reports {
xml.enabled true
}
}
task jacocoRootReport(type: org.gradle.testing.jacoco.tasks.JacocoReport) {
additionalSourceDirs.from files(subprojects.sourceSets.main.allSource.srcDirs)
sourceDirectories.from files(subprojects.sourceSets.main.allSource.srcDirs)

View File

@@ -253,6 +253,13 @@ public interface GenesisConfigOptions {
*/
OptionalLong getCancunTime();
/**
* Gets prague time.
*
* @return the prague time
*/
OptionalLong getPragueTime();
/**
* Gets future eips time.
*

View File

@@ -28,7 +28,6 @@ import java.util.Optional;
import java.util.OptionalInt;
import java.util.OptionalLong;
import java.util.TreeMap;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import com.fasterxml.jackson.databind.node.ObjectNode;
@@ -302,6 +301,11 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
return getOptionalLong("cancuntime");
}
@Override
public OptionalLong getPragueTime() {
return getOptionalLong("praguetime");
}
@Override
public OptionalLong getFutureEipsTime() {
return getOptionalLong("futureeipstime");
@@ -314,10 +318,7 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
@Override
public Optional<Wei> getBaseFeePerGas() {
return Optional.ofNullable(configOverrides.get("baseFeePerGas"))
.map(Wei::fromHexString)
.map(Optional::of)
.orElse(Optional.empty());
return Optional.ofNullable(configOverrides.get("baseFeePerGas")).map(Wei::fromHexString);
}
@Override
@@ -458,6 +459,7 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
getMergeNetSplitBlockNumber().ifPresent(l -> builder.put("mergeNetSplitBlock", l));
getShanghaiTime().ifPresent(l -> builder.put("shanghaiTime", l));
getCancunTime().ifPresent(l -> builder.put("cancunTime", l));
getPragueTime().ifPresent(l -> builder.put("pragueTime", l));
getTerminalBlockNumber().ifPresent(l -> builder.put("terminalBlockNumber", l));
getTerminalBlockHash().ifPresent(h -> builder.put("terminalBlockHash", h.toHexString()));
getFutureEipsTime().ifPresent(l -> builder.put("futureEipsTime", l));
@@ -597,14 +599,18 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
.map(OptionalLong::getAsLong)
.distinct()
.sorted()
.collect(Collectors.toList());
.toList();
}
@Override
public List<Long> getForkBlockTimestamps() {
Stream<OptionalLong> forkBlockTimestamps =
Stream.of(
getShanghaiTime(), getCancunTime(), getFutureEipsTime(), getExperimentalEipsTime());
getShanghaiTime(),
getCancunTime(),
getPragueTime(),
getFutureEipsTime(),
getExperimentalEipsTime());
// when adding forks add an entry to ${REPO_ROOT}/config/src/test/resources/all_forks.json
return forkBlockTimestamps
@@ -612,6 +618,6 @@ public class JsonGenesisConfigOptions implements GenesisConfigOptions {
.map(OptionalLong::getAsLong)
.distinct()
.sorted()
.collect(Collectors.toList());
.toList();
}
}

View File

@@ -51,6 +51,7 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
private OptionalLong shanghaiTime = OptionalLong.empty();
private OptionalLong cancunTime = OptionalLong.empty();
private OptionalLong pragueTime = OptionalLong.empty();
private OptionalLong futureEipsTime = OptionalLong.empty();
private OptionalLong experimentalEipsTime = OptionalLong.empty();
private OptionalLong terminalBlockNumber = OptionalLong.empty();
@@ -245,6 +246,11 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
return cancunTime;
}
@Override
public OptionalLong getPragueTime() {
return pragueTime;
}
@Override
public OptionalLong getFutureEipsTime() {
return futureEipsTime;
@@ -377,6 +383,7 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
getMergeNetSplitBlockNumber().ifPresent(l -> builder.put("mergeNetSplitBlock", l));
getShanghaiTime().ifPresent(l -> builder.put("shanghaiTime", l));
getCancunTime().ifPresent(l -> builder.put("cancunTime", l));
getPragueTime().ifPresent(l -> builder.put("pragueTime", l));
getFutureEipsTime().ifPresent(l -> builder.put("futureEipsTime", l));
getExperimentalEipsTime().ifPresent(l -> builder.put("experimentalEipsTime", l));
getTerminalBlockNumber().ifPresent(l -> builder.put("terminalBlockNumber", l));
@@ -646,6 +653,17 @@ public class StubGenesisConfigOptions implements GenesisConfigOptions, Cloneable
return this;
}
/**
* Prague time.
*
* @param timestamp the timestamp
* @return the stub genesis config options
*/
public StubGenesisConfigOptions pragueTime(final long timestamp) {
pragueTime = OptionalLong.of(timestamp);
return this;
}
/**
* Future EIPs Time block.
*

View File

@@ -0,0 +1,4 @@
sync-mode="X_CHECKPOINT"
data-storage-format="BONSAI"
bonsai-historical-block-limit=128
max-peers=25

View File

@@ -38,14 +38,14 @@ import org.apache.tuweni.units.bigints.UInt256;
import org.assertj.core.api.ThrowableAssert.ThrowingCallable;
import org.junit.jupiter.api.Test;
public class GenesisConfigFileTest {
class GenesisConfigFileTest {
private static final BigInteger MAINNET_CHAIN_ID = BigInteger.ONE;
private static final BigInteger DEVELOPMENT_CHAIN_ID = BigInteger.valueOf(1337);
private static final GenesisConfigFile EMPTY_CONFIG = fromConfig("{}");
@Test
public void shouldLoadMainnetConfigFile() {
void shouldLoadMainnetConfigFile() {
final GenesisConfigFile config = GenesisConfigFile.mainnet();
// Sanity check some basic properties to confirm this is the mainnet file.
assertThat(config.getConfigOptions().isEthHash()).isTrue();
@@ -58,7 +58,7 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldLoadDevelopmentConfigFile() {
void shouldLoadDevelopmentConfigFile() {
final GenesisConfigFile config = GenesisConfigFile.development();
// Sanity check some basic properties to confirm this is the dev file.
assertThat(config.getConfigOptions().isEthHash()).isTrue();
@@ -71,82 +71,82 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldGetParentHash() {
void shouldGetParentHash() {
assertThat(configWithProperty("parentHash", "844633").getParentHash()).isEqualTo("844633");
}
@Test
public void shouldDefaultParentHashToEmptyString() {
void shouldDefaultParentHashToEmptyString() {
assertThat(EMPTY_CONFIG.getParentHash()).isEmpty();
}
@Test
public void shouldGetDifficulty() {
void shouldGetDifficulty() {
assertThat(configWithProperty("difficulty", "1234").getDifficulty()).isEqualTo("1234");
}
@Test
public void shouldRequireDifficulty() {
void shouldRequireDifficulty() {
assertInvalidConfiguration(EMPTY_CONFIG::getDifficulty);
}
@Test
public void shouldGetExtraData() {
void shouldGetExtraData() {
assertThat(configWithProperty("extraData", "yay").getExtraData()).isEqualTo("yay");
}
@Test
public void shouldDefaultExtraDataToEmptyString() {
void shouldDefaultExtraDataToEmptyString() {
assertThat(EMPTY_CONFIG.getExtraData()).isEmpty();
}
@Test
public void shouldGetGasLimit() {
void shouldGetGasLimit() {
assertThat(configWithProperty("gasLimit", "1000").getGasLimit()).isEqualTo(1000);
}
@Test
public void shouldRequireGasLimit() {
void shouldRequireGasLimit() {
assertInvalidConfiguration(EMPTY_CONFIG::getGasLimit);
}
@Test
public void shouldGetMixHash() {
void shouldGetMixHash() {
assertThat(configWithProperty("mixHash", "asdf").getMixHash()).isEqualTo("asdf");
}
@Test
public void shouldDefaultMixHashToEmptyString() {
void shouldDefaultMixHashToEmptyString() {
assertThat(EMPTY_CONFIG.getMixHash()).isEmpty();
}
@Test
public void shouldGetNonce() {
void shouldGetNonce() {
assertThat(configWithProperty("nonce", "0x10").getNonce()).isEqualTo("0x10");
}
@Test
public void shouldDefaultNonceToZero() {
void shouldDefaultNonceToZero() {
assertThat(EMPTY_CONFIG.getNonce()).isEqualTo("0x0");
}
@Test
public void shouldGetCoinbase() {
void shouldGetCoinbase() {
assertThat(configWithProperty("coinbase", "abcd").getCoinbase()).contains("abcd");
}
@Test
public void shouldReturnEmptyWhenCoinbaseNotSpecified() {
void shouldReturnEmptyWhenCoinbaseNotSpecified() {
assertThat(EMPTY_CONFIG.getCoinbase()).isEmpty();
}
@Test
public void shouldGetTimestamp() {
void shouldGetTimestamp() {
assertThat(configWithProperty("timestamp", "0x10").getTimestamp()).isEqualTo(16L);
}
@Test
public void shouldGetBaseFeeAtGenesis() {
void shouldGetBaseFeeAtGenesis() {
GenesisConfigFile withBaseFeeAtGenesis =
GenesisConfigFile.fromConfig("{\"config\":{\"londonBlock\":0},\"baseFeePerGas\":\"0xa\"}");
assertThat(withBaseFeeAtGenesis.getBaseFeePerGas()).isPresent();
@@ -154,7 +154,7 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldGetDefaultBaseFeeAtGenesis() {
void shouldGetDefaultBaseFeeAtGenesis() {
GenesisConfigFile withBaseFeeAtGenesis =
GenesisConfigFile.fromConfig("{\"config\":{\"londonBlock\":0}}");
// no specified baseFeePerGas:
@@ -165,7 +165,7 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldGetBaseFeeExplicitlyAtGenesis() {
void shouldGetBaseFeeExplicitlyAtGenesis() {
GenesisConfigFile withBaseFeeNotAtGenesis =
GenesisConfigFile.fromConfig("{\"config\":{\"londonBlock\":10},\"baseFeePerGas\":\"0xa\"}");
// specified baseFeePerGas:
@@ -176,14 +176,14 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldOverrideConfigOptionsBaseFeeWhenSpecified() {
void shouldOverrideConfigOptionsBaseFeeWhenSpecified() {
GenesisConfigOptions withOverrides =
EMPTY_CONFIG.getConfigOptions(Map.of("baseFeePerGas", Wei.of(8).toString()));
assertThat(withOverrides.getBaseFeePerGas()).contains(Wei.of(8L));
}
@Test
public void shouldGetTerminalTotalDifficultyAtGenesis() {
void shouldGetTerminalTotalDifficultyAtGenesis() {
GenesisConfigFile withTerminalTotalDifficultyAtGenesis =
fromConfig("{\"config\":{\"terminalTotalDifficulty\":1000}}");
assertThat(withTerminalTotalDifficultyAtGenesis.getConfigOptions().getTerminalTotalDifficulty())
@@ -191,12 +191,12 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldGetEmptyTerminalTotalDifficultyAtGenesis() {
void shouldGetEmptyTerminalTotalDifficultyAtGenesis() {
assertThat(EMPTY_CONFIG.getConfigOptions().getTerminalTotalDifficulty()).isNotPresent();
}
@Test
public void assertSepoliaTerminalTotalDifficulty() {
void assertSepoliaTerminalTotalDifficulty() {
GenesisConfigOptions sepoliaOptions =
GenesisConfigFile.genesisFileFromResources("/sepolia.json").getConfigOptions();
@@ -206,7 +206,7 @@ public class GenesisConfigFileTest {
}
@Test
public void assertGoerliTerminalTotalDifficulty() {
void assertGoerliTerminalTotalDifficulty() {
GenesisConfigOptions goerliOptions =
GenesisConfigFile.genesisFileFromResources("/goerli.json").getConfigOptions();
@@ -216,7 +216,7 @@ public class GenesisConfigFileTest {
}
@Test
public void assertMainnetTerminalTotalDifficulty() {
void assertMainnetTerminalTotalDifficulty() {
GenesisConfigOptions mainnetOptions =
GenesisConfigFile.genesisFileFromResources("/mainnet.json").getConfigOptions();
@@ -227,7 +227,7 @@ public class GenesisConfigFileTest {
}
@Test
public void assertTerminalTotalDifficultyOverride() {
void assertTerminalTotalDifficultyOverride() {
GenesisConfigOptions sepoliaOverrideOptions =
GenesisConfigFile.genesisFileFromResources("/sepolia.json")
.getConfigOptions(Map.of("terminalTotalDifficulty", String.valueOf(Long.MAX_VALUE)));
@@ -238,7 +238,7 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldFindMergeNetSplitForkAndAlias() {
void shouldFindMergeNetSplitForkAndAlias() {
GenesisConfigFile mergeNetSplitGenesis =
GenesisConfigFile.fromConfig(
"{\"config\":{\"mergeNetsplitBlock\":11},\"baseFeePerGas\":\"0xa\"}");
@@ -255,12 +255,12 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldDefaultTimestampToZero() {
void shouldDefaultTimestampToZero() {
assertThat(EMPTY_CONFIG.getTimestamp()).isZero();
}
@Test
public void shouldGetAllocations() {
void shouldGetAllocations() {
final GenesisConfigFile config =
fromConfig(
"{"
@@ -309,13 +309,13 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldGetEmptyAllocationsWhenAllocNotPresent() {
void shouldGetEmptyAllocationsWhenAllocNotPresent() {
final GenesisConfigFile config = fromConfig("{}");
assertThat(config.streamAllocations()).isEmpty();
}
@Test
public void shouldGetLargeChainId() {
void shouldGetLargeChainId() {
final GenesisConfigFile config =
fromConfig(
"{\"config\": { \"chainId\": 31415926535897932384626433832795028841971693993751058209749445923078164062862089986280348253421170679821480865132823066470938446095 }}");
@@ -326,7 +326,7 @@ public class GenesisConfigFileTest {
}
@Test
public void mustNotAcceptComments() {
void mustNotAcceptComments() {
assertThatThrownBy(
() ->
fromConfig(
@@ -336,7 +336,7 @@ public class GenesisConfigFileTest {
}
@Test
public void testOverridePresent() {
void testOverridePresent() {
final GenesisConfigFile config = GenesisConfigFile.development();
final int bigBlock = 999_999_999;
final String bigBlockString = Integer.toString(bigBlock);
@@ -353,7 +353,7 @@ public class GenesisConfigFileTest {
}
@Test
public void testOverrideNull() {
void testOverrideNull() {
final GenesisConfigFile config = GenesisConfigFile.development();
final Map<String, String> override = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
override.put("istanbulBlock", null);
@@ -367,7 +367,7 @@ public class GenesisConfigFileTest {
}
@Test
public void testOverrideCaseInsensitivity() {
void testOverrideCaseInsensitivity() {
final GenesisConfigFile config = GenesisConfigFile.development();
final int bigBlock = 999_999_999;
final String bigBlockString = Integer.toString(bigBlock);
@@ -386,7 +386,7 @@ public class GenesisConfigFileTest {
}
@Test
public void testOverrideEmptyString() {
void testOverrideEmptyString() {
final GenesisConfigFile config = GenesisConfigFile.development();
final Map<String, String> override = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
override.put("istanbulBlock", "");
@@ -399,7 +399,7 @@ public class GenesisConfigFileTest {
}
@Test
public void testNoOverride() {
void testNoOverride() {
final GenesisConfigFile config = GenesisConfigFile.development();
assertThat(config.getConfigOptions().getLondonBlockNumber()).hasValue(0);
@@ -411,7 +411,7 @@ public class GenesisConfigFileTest {
}
@Test
public void testConstantinopleFixShouldNotBeSupportedAlongPetersburg() {
void testConstantinopleFixShouldNotBeSupportedAlongPetersburg() {
// petersburg node
final GenesisConfigFile config = GenesisConfigFile.genesisFileFromResources("/all_forks.json");
@@ -428,7 +428,7 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldLoadForksInSortedOrder() throws IOException {
void shouldLoadForksInSortedOrder() throws IOException {
final ObjectNode configNode =
new ObjectMapper()
.createObjectNode()
@@ -449,7 +449,7 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldLoadForksIgnoreClassicForkBlock() throws IOException {
void shouldLoadForksIgnoreClassicForkBlock() throws IOException {
final ObjectNode configNode =
new ObjectMapper()
.createObjectNode()
@@ -469,7 +469,7 @@ public class GenesisConfigFileTest {
}
@Test
public void shouldLoadForksIgnoreUnexpectedValues() throws IOException {
void shouldLoadForksIgnoreUnexpectedValues() throws IOException {
final ObjectNode configNoUnexpectedForks =
new ObjectMapper()
.createObjectNode()
@@ -533,7 +533,7 @@ public class GenesisConfigFileTest {
* been case agnostic.
*/
@Test
public void roundTripForkIdBlocks() throws IOException {
void roundTripForkIdBlocks() throws IOException {
final String configText =
Resources.toString(Resources.getResource("all_forks.json"), StandardCharsets.UTF_8);
final ObjectNode genesisNode = JsonUtil.objectNodeFromString(configText);

View File

@@ -29,30 +29,30 @@ import com.fasterxml.jackson.databind.node.ObjectNode;
import org.apache.tuweni.units.bigints.UInt256;
import org.junit.jupiter.api.Test;
public class GenesisConfigOptionsTest {
class GenesisConfigOptionsTest {
@Test
public void shouldUseEthHashWhenEthHashInConfig() {
void shouldUseEthHashWhenEthHashInConfig() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("ethash", emptyMap()));
assertThat(config.isEthHash()).isTrue();
assertThat(config.getConsensusEngine()).isEqualTo("ethash");
}
@Test
public void shouldNotUseEthHashIfEthHashNotPresent() {
void shouldNotUseEthHashIfEthHashNotPresent() {
final GenesisConfigOptions config = fromConfigOptions(emptyMap());
assertThat(config.isEthHash()).isFalse();
}
@Test
public void shouldUseIbft2WhenIbft2InConfig() {
void shouldUseIbft2WhenIbft2InConfig() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("ibft2", emptyMap()));
assertThat(config.isIbft2()).isTrue();
assertThat(config.isPoa()).isTrue();
assertThat(config.getConsensusEngine()).isEqualTo("ibft2");
}
public void shouldUseQbftWhenQbftInConfig() {
void shouldUseQbftWhenQbftInConfig() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("qbft", emptyMap()));
assertThat(config.isQbft()).isTrue();
assertThat(config.isPoa()).isTrue();
@@ -60,7 +60,7 @@ public class GenesisConfigOptionsTest {
}
@Test
public void shouldUseCliqueWhenCliqueInConfig() {
void shouldUseCliqueWhenCliqueInConfig() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("clique", emptyMap()));
assertThat(config.isClique()).isTrue();
assertThat(config.isPoa()).isTrue();
@@ -69,7 +69,7 @@ public class GenesisConfigOptionsTest {
}
@Test
public void shouldNotUseCliqueIfCliqueNotPresent() {
void shouldNotUseCliqueIfCliqueNotPresent() {
final GenesisConfigOptions config = fromConfigOptions(emptyMap());
assertThat(config.isClique()).isFalse();
assertThat(config.isPoa()).isFalse();
@@ -77,63 +77,63 @@ public class GenesisConfigOptionsTest {
}
@Test
public void shouldGetHomesteadBlockNumber() {
void shouldGetHomesteadBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("homesteadBlock", 1000));
assertThat(config.getHomesteadBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetDaoForkBlockNumber() {
void shouldGetDaoForkBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("daoForkBlock", 1000));
assertThat(config.getDaoForkBlock()).hasValue(1000);
}
@Test
public void shouldNotHaveDaoForkBlockWhenSetToZero() {
void shouldNotHaveDaoForkBlockWhenSetToZero() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("daoForkBlock", 0));
assertThat(config.getDaoForkBlock()).isEmpty();
}
@Test
public void shouldGetTangerineWhistleBlockNumber() {
void shouldGetTangerineWhistleBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("eip150Block", 1000));
assertThat(config.getTangerineWhistleBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetSpuriousDragonBlockNumber() {
void shouldGetSpuriousDragonBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("eip158Block", 1000));
assertThat(config.getSpuriousDragonBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetByzantiumBlockNumber() {
void shouldGetByzantiumBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("byzantiumBlock", 1000));
assertThat(config.getByzantiumBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetConstantinopleBlockNumber() {
void shouldGetConstantinopleBlockNumber() {
final GenesisConfigOptions config =
fromConfigOptions(singletonMap("constantinopleBlock", 1000));
assertThat(config.getConstantinopleBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetConstantinopleFixBlockNumber() {
void shouldGetConstantinopleFixBlockNumber() {
final GenesisConfigOptions config =
fromConfigOptions(singletonMap("constantinopleFixBlock", 1000));
assertThat(config.getPetersburgBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetPetersburgBlockNumber() {
void shouldGetPetersburgBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("petersburgBlock", 1000));
assertThat(config.getPetersburgBlockNumber()).hasValue(1000);
}
@Test
public void shouldFailWithBothPetersburgAndConstantinopleFixBlockNumber() {
void shouldFailWithBothPetersburgAndConstantinopleFixBlockNumber() {
Map<String, Object> configMap = new HashMap<>();
configMap.put("constantinopleFixBlock", 1000);
configMap.put("petersburgBlock", 1000);
@@ -145,68 +145,74 @@ public class GenesisConfigOptionsTest {
}
@Test
public void shouldGetIstanbulBlockNumber() {
void shouldGetIstanbulBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("istanbulBlock", 1000));
assertThat(config.getIstanbulBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetMuirGlacierBlockNumber() {
void shouldGetMuirGlacierBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("muirGlacierBlock", 1000));
assertThat(config.getMuirGlacierBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetBerlinBlockNumber() {
void shouldGetBerlinBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("berlinBlock", 1000));
assertThat(config.getBerlinBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetLondonBlockNumber() {
void shouldGetLondonBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("londonblock", 1000));
assertThat(config.getLondonBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetArrowGlacierBlockNumber() {
void shouldGetArrowGlacierBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("arrowGlacierBlock", 1000));
assertThat(config.getArrowGlacierBlockNumber()).hasValue(1000);
}
@Test
public void shouldGetGrayGlacierBlockNumber() {
void shouldGetGrayGlacierBlockNumber() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("grayGlacierBlock", 4242));
assertThat(config.getGrayGlacierBlockNumber()).hasValue(4242);
}
@Test
public void shouldGetShanghaiTime() {
void shouldGetShanghaiTime() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("shanghaiTime", 1670470141));
assertThat(config.getShanghaiTime()).hasValue(1670470141);
}
@Test
public void shouldGetCancunTime() {
void shouldGetCancunTime() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("cancunTime", 1670470142));
assertThat(config.getCancunTime()).hasValue(1670470142);
}
@Test
public void shouldGetFutureEipsTime() {
void shouldGetPragueTime() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("pragueTime", 1670470143));
assertThat(config.getPragueTime()).hasValue(1670470143);
}
@Test
void shouldGetFutureEipsTime() {
final GenesisConfigOptions config = fromConfigOptions(singletonMap("futureEipsTime", 1337));
assertThat(config.getFutureEipsTime()).hasValue(1337);
}
@Test
public void shouldGetExperimentalEipsTime() {
void shouldGetExperimentalEipsTime() {
final GenesisConfigOptions config =
fromConfigOptions(singletonMap("experimentalEipsTime", 1337));
assertThat(config.getExperimentalEipsTime()).hasValue(1337);
}
@Test
public void shouldNotReturnEmptyOptionalWhenBlockNumberNotSpecified() {
void shouldNotReturnEmptyOptionalWhenBlockNumberNotSpecified() {
final GenesisConfigOptions config = fromConfigOptions(emptyMap());
assertThat(config.getHomesteadBlockNumber()).isEmpty();
assertThat(config.getDaoForkBlock()).isEmpty();
@@ -224,19 +230,20 @@ public class GenesisConfigOptionsTest {
assertThat(config.getMergeNetSplitBlockNumber()).isEmpty();
assertThat(config.getShanghaiTime()).isEmpty();
assertThat(config.getCancunTime()).isEmpty();
assertThat(config.getPragueTime()).isEmpty();
assertThat(config.getFutureEipsTime()).isEmpty();
assertThat(config.getExperimentalEipsTime()).isEmpty();
}
@Test
public void shouldGetChainIdWhenSpecified() {
void shouldGetChainIdWhenSpecified() {
final GenesisConfigOptions config =
fromConfigOptions(singletonMap("chainId", BigInteger.valueOf(32)));
assertThat(config.getChainId()).hasValue(BigInteger.valueOf(32));
}
@Test
public void shouldSupportEmptyGenesisConfig() {
void shouldSupportEmptyGenesisConfig() {
final GenesisConfigOptions config = GenesisConfigFile.fromConfig("{}").getConfigOptions();
assertThat(config.isEthHash()).isFalse();
assertThat(config.isClique()).isFalse();
@@ -245,7 +252,7 @@ public class GenesisConfigOptionsTest {
}
@Test
public void shouldGetTerminalTotalDifficultyWhenSpecified() {
void shouldGetTerminalTotalDifficultyWhenSpecified() {
final GenesisConfigOptions config =
fromConfigOptions(singletonMap("terminalTotalDifficulty", BigInteger.valueOf(1000)));
assertThat(config.getTerminalTotalDifficulty()).isPresent();
@@ -259,7 +266,7 @@ public class GenesisConfigOptionsTest {
}
@Test
public void shouldNotReturnTerminalTotalDifficultyWhenNotSpecified() {
void shouldNotReturnTerminalTotalDifficultyWhenNotSpecified() {
final GenesisConfigOptions config = fromConfigOptions(emptyMap());
assertThat(config.getTerminalTotalDifficulty()).isNotPresent();
// stubJsonGenesis
@@ -267,28 +274,28 @@ public class GenesisConfigOptionsTest {
}
@Test
public void isZeroBaseFeeShouldDefaultToFalse() {
void isZeroBaseFeeShouldDefaultToFalse() {
final GenesisConfigOptions config = GenesisConfigFile.fromConfig("{}").getConfigOptions();
assertThat(config.isZeroBaseFee()).isFalse();
}
@Test
public void isZeroBaseFeeParsedCorrectly() {
void isZeroBaseFeeParsedCorrectly() {
final GenesisConfigOptions config = fromConfigOptions(Map.of("zerobasefee", true));
assertThat(config.isZeroBaseFee()).isTrue();
}
@Test
public void asMapIncludesZeroBaseFee() {
void asMapIncludesZeroBaseFee() {
final GenesisConfigOptions config = fromConfigOptions(Map.of("zerobasefee", true));
assertThat(config.asMap()).containsOnlyKeys("zeroBaseFee").containsValue(true);
}
@Test
public void shouldGetDepositContractAddress() {
void shouldGetDepositContractAddress() {
final GenesisConfigOptions config =
fromConfigOptions(
singletonMap("depositContractAddress", "0x00000000219ab540356cbb839cbe05303d7705fa"));
@@ -297,13 +304,13 @@ public class GenesisConfigOptionsTest {
}
@Test
public void shouldNotHaveDepositContractAddressWhenEmpty() {
void shouldNotHaveDepositContractAddressWhenEmpty() {
final GenesisConfigOptions config = fromConfigOptions(emptyMap());
assertThat(config.getDepositContractAddress()).isEmpty();
}
@Test
public void asMapIncludesDepositContractAddress() {
void asMapIncludesDepositContractAddress() {
final GenesisConfigOptions config = fromConfigOptions(Map.of("depositContractAddress", "0x0"));
assertThat(config.asMap())

View File

@@ -16,6 +16,7 @@
"mergeNetSplitBlock": 14,
"shanghaiTime": 15,
"cancunTime": 16,
"pragueTime": 17,
"futureEipsTime": 98,
"experimentalEipsTime": 99,
"ecip1015Block": 102,

View File

@@ -28,8 +28,14 @@ import org.hyperledger.besu.datatypes.Address;
import java.time.Clock;
import java.util.Collection;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** This is the full data set, or context, required for many of the aspects of BFT workflows. */
public class BftFinalState {
private static final Logger LOG = LoggerFactory.getLogger(BftFinalState.class);
private final ValidatorProvider validatorProvider;
private final NodeKey nodeKey;
private final Address localAddress;
@@ -126,7 +132,9 @@ public class BftFinalState {
* @return the boolean
*/
public boolean isLocalNodeValidator() {
return getValidators().contains(localAddress);
final boolean isValidator = getValidators().contains(localAddress);
LOG.debug(isValidator ? "Local node is a validator" : "Local node is a non-validator");
return isValidator;
}
/**

View File

@@ -53,6 +53,15 @@ public class QbftGetValidatorsByBlockNumber extends AbstractBlockParameterMethod
return request.getRequiredParameter(0, BlockParameter.class);
}
@Override
protected Object pendingResult(final JsonRpcRequestContext request) {
final BlockHeader blockHeader = getBlockchainQueries().headBlockHeader();
LOG.trace("Received RPC rpcName={} block={}", getName(), blockHeader.getNumber());
return validatorProvider.getValidatorsAfterBlock(blockHeader).stream()
.map(Address::toString)
.collect(Collectors.toList());
}
@Override
protected Object resultByBlockNumber(
final JsonRpcRequestContext request, final long blockNumber) {

View File

@@ -23,6 +23,7 @@ import org.hyperledger.besu.datatypes.Address;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequest;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.JsonRpcRequestContext;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.parameters.BlockParameter;
import org.hyperledger.besu.ethereum.api.jsonrpc.internal.response.JsonRpcSuccessResponse;
import org.hyperledger.besu.ethereum.api.query.BlockchainQueries;
import org.hyperledger.besu.ethereum.core.BlockHeader;
@@ -73,4 +74,33 @@ public class QbftGetValidatorsByBlockNumberTest {
Object result = method.resultByBlockNumber(request, 12);
assertThat(result).isEqualTo(expectedOutput);
}
@Test
public void shouldReturnListOfValidatorsFromLatestBlock() {
request =
new JsonRpcRequestContext(
new JsonRpcRequest("2.0", "qbft_getValidatorsByBlockNumber", new String[] {"latest"}));
when(blockchainQueries.headBlockNumber()).thenReturn(12L);
when(blockchainQueries.getBlockHeaderByNumber(12)).thenReturn(Optional.of(blockHeader));
final List<Address> addresses = Collections.singletonList(Address.ID);
final List<String> expectedOutput = Collections.singletonList(Address.ID.toString());
when(validatorProvider.getValidatorsForBlock(any())).thenReturn(addresses);
Object result = method.response(request);
assertThat(result).isInstanceOf(JsonRpcSuccessResponse.class);
assertThat(((JsonRpcSuccessResponse) result).getResult()).isEqualTo(expectedOutput);
}
@Test
public void shouldReturnListOfValidatorsFromPendingBlock() {
request =
new JsonRpcRequestContext(
new JsonRpcRequest("2.0", "qbft_getValidatorsByBlockNumber", new String[] {"pending"}));
when(blockchainQueries.headBlockHeader()).thenReturn(blockHeader);
final List<Address> addresses = Collections.singletonList(Address.ID);
final List<String> expectedOutput = Collections.singletonList(Address.ID.toString());
when(validatorProvider.getValidatorsAfterBlock(any())).thenReturn(addresses);
Object result = method.response(request);
assertThat(result).isInstanceOf(JsonRpcSuccessResponse.class);
assertThat(((JsonRpcSuccessResponse) result).getResult()).isEqualTo(expectedOutput);
}
}

View File

@@ -168,3 +168,29 @@ tasks.register('generateTestBlockchain') {
}
}
test.dependsOn(generateTestBlockchain)
/*
Utility tasks used to separate out long running suites of tests so they can be parallelized in CI
*/
tasks.register("testBonsai", Test) {
useJUnitPlatform()
filter {
includeTestsMatching("org.hyperledger.besu.ethereum.api.jsonrpc.bonsai.*")
}
dependsOn(generateTestBlockchain)
}
tasks.register("testForest", Test) {
useJUnitPlatform()
filter {
includeTestsMatching("org.hyperledger.besu.ethereum.api.jsonrpc.forest.*")
}
dependsOn(generateTestBlockchain)
}
tasks.register("testRemainder", Test) {
useJUnitPlatform()
filter {
excludeTestsMatching("org.hyperledger.besu.ethereum.api.jsonrpc.bonsai.*")
excludeTestsMatching("org.hyperledger.besu.ethereum.api.jsonrpc.forest.*")
}
}

View File

@@ -234,9 +234,12 @@ public abstract class AbstractEngineNewPayload extends ExecutionEngineJsonRpcMet
return respondWithInvalid(reqId, blockParam, null, getInvalidBlockHashStatus(), errorMessage);
}
final var blobTransactions =
transactions.stream().filter(transaction -> transaction.getType().supportsBlob()).toList();
ValidationResult<RpcErrorType> blobValidationResult =
validateBlobs(
transactions,
blobTransactions,
newBlockHeader,
maybeParentHeader,
maybeVersionedHashes,
@@ -302,7 +305,8 @@ public abstract class AbstractEngineNewPayload extends ExecutionEngineJsonRpcMet
final BlockProcessingResult executionResult = mergeCoordinator.rememberBlock(block);
if (executionResult.isSuccessful()) {
logImportedBlockInfo(block, (System.currentTimeMillis() - startTimeMs) / 1000.0);
logImportedBlockInfo(
block, blobTransactions.size(), (System.currentTimeMillis() - startTimeMs) / 1000.0);
return respondWith(reqId, blockParam, newBlockHeader.getHash(), VALID);
} else {
if (executionResult.causedBy().isPresent()) {
@@ -380,10 +384,6 @@ public abstract class AbstractEngineNewPayload extends ExecutionEngineJsonRpcMet
invalidStatus, latestValidHash, Optional.of(validationError)));
}
protected boolean requireTerminalPoWBlockValidation() {
return false;
}
protected EngineStatus getInvalidBlockHashStatus() {
return INVALID;
}
@@ -396,15 +396,12 @@ public abstract class AbstractEngineNewPayload extends ExecutionEngineJsonRpcMet
}
protected ValidationResult<RpcErrorType> validateBlobs(
final List<Transaction> transactions,
final List<Transaction> blobTransactions,
final BlockHeader header,
final Optional<BlockHeader> maybeParentHeader,
final Optional<List<VersionedHash>> maybeVersionedHashes,
final ProtocolSpec protocolSpec) {
var blobTransactions =
transactions.stream().filter(transaction -> transaction.getType().supportsBlob()).toList();
final List<VersionedHash> transactionVersionedHashes = new ArrayList<>();
for (Transaction transaction : blobTransactions) {
var versionedHashes = transaction.getVersionedHashes();
@@ -489,7 +486,7 @@ public abstract class AbstractEngineNewPayload extends ExecutionEngineJsonRpcMet
.collect(Collectors.toList()));
}
private void logImportedBlockInfo(final Block block, final double timeInS) {
private void logImportedBlockInfo(final Block block, final int blobCount, final double timeInS) {
final StringBuilder message = new StringBuilder();
message.append("Imported #%,d / %d tx");
final List<Object> messageArgs =
@@ -503,9 +500,10 @@ public abstract class AbstractEngineNewPayload extends ExecutionEngineJsonRpcMet
message.append(" / %d ds");
messageArgs.add(block.getBody().getDeposits().get().size());
}
message.append(" / base fee %s / %,d (%01.1f%%) gas / (%s) in %01.3fs. Peers: %d");
message.append(" / %d blobs / base fee %s / %,d (%01.1f%%) gas / (%s) in %01.3fs. Peers: %d");
messageArgs.addAll(
List.of(
blobCount,
block.getHeader().getBaseFee().map(Wei::toHumanReadableString).orElse("N/A"),
block.getHeader().getGasUsed(),
(block.getHeader().getGasUsed() * 100.0) / block.getHeader().getGasLimit(),

View File

@@ -50,11 +50,6 @@ public class EngineNewPayloadV1 extends AbstractEngineNewPayload {
return RpcMethod.ENGINE_NEW_PAYLOAD_V1.getMethodName();
}
@Override
protected boolean requireTerminalPoWBlockValidation() {
return true;
}
@Override
protected EngineStatus getInvalidBlockHashStatus() {
return INVALID_BLOCK_HASH;

View File

@@ -281,7 +281,7 @@ public final class GenesisState {
if (shanghaiTimestamp.isPresent()) {
return genesis.getTimestamp() >= shanghaiTimestamp.getAsLong();
}
return false;
return isCancunAtGenesis(genesis);
}
private static boolean isCancunAtGenesis(final GenesisConfigFile genesis) {
@@ -289,7 +289,23 @@ public final class GenesisState {
if (cancunTimestamp.isPresent()) {
return genesis.getTimestamp() >= cancunTimestamp.getAsLong();
}
return false;
return isPragueAtGenesis(genesis);
}
private static boolean isPragueAtGenesis(final GenesisConfigFile genesis) {
final OptionalLong pragueTimestamp = genesis.getConfigOptions().getPragueTime();
if (pragueTimestamp.isPresent()) {
return genesis.getTimestamp() >= pragueTimestamp.getAsLong();
}
return isFutureEipsTimeAtGenesis(genesis);
}
private static boolean isFutureEipsTimeAtGenesis(final GenesisConfigFile genesis) {
final OptionalLong futureEipsTime = genesis.getConfigOptions().getFutureEipsTime();
if (futureEipsTime.isPresent()) {
return genesis.getTimestamp() >= futureEipsTime.getAsLong();
}
return isExperimentalEipsTimeAtGenesis(genesis);
}
private static boolean isExperimentalEipsTimeAtGenesis(final GenesisConfigFile genesis) {

View File

@@ -22,6 +22,7 @@ import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.po
import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.populateForFrontier;
import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.populateForFutureEIPs;
import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.populateForIstanbul;
import static org.hyperledger.besu.evm.precompile.MainnetPrecompiledContracts.populateForPrague;
import org.hyperledger.besu.ethereum.mainnet.precompiles.privacy.FlexiblePrivacyPrecompiledContract;
import org.hyperledger.besu.ethereum.mainnet.precompiles.privacy.PrivacyPluginPrecompiledContract;
@@ -59,6 +60,13 @@ public interface MainnetPrecompiledContractRegistries {
return registry;
}
static PrecompileContractRegistry prague(
final PrecompiledContractConfiguration precompiledContractConfiguration) {
final PrecompileContractRegistry registry = new PrecompileContractRegistry();
populateForPrague(registry, precompiledContractConfiguration.getGasCalculator());
return registry;
}
static PrecompileContractRegistry futureEips(
final PrecompiledContractConfiguration precompiledContractConfiguration) {
final PrecompileContractRegistry registry = new PrecompileContractRegistry();

View File

@@ -168,6 +168,16 @@ public class MainnetProtocolSpecFactory {
evmConfiguration);
}
public ProtocolSpecBuilder pragueDefinition(final GenesisConfigOptions genesisConfigOptions) {
return MainnetProtocolSpecs.pragueDefinition(
chainId,
contractSizeLimit,
evmStackSize,
isRevertReasonEnabled,
genesisConfigOptions,
evmConfiguration);
}
/**
* The "future" fork consists of EIPs that have been approved for Ethereum Mainnet but not
* scheduled for a fork. This is also known as "Eligible For Inclusion" (EFI) or "Considered for

View File

@@ -51,6 +51,7 @@ import org.hyperledger.besu.evm.gascalculator.HomesteadGasCalculator;
import org.hyperledger.besu.evm.gascalculator.IstanbulGasCalculator;
import org.hyperledger.besu.evm.gascalculator.LondonGasCalculator;
import org.hyperledger.besu.evm.gascalculator.PetersburgGasCalculator;
import org.hyperledger.besu.evm.gascalculator.PragueGasCalculator;
import org.hyperledger.besu.evm.gascalculator.ShanghaiGasCalculator;
import org.hyperledger.besu.evm.gascalculator.SpuriousDragonGasCalculator;
import org.hyperledger.besu.evm.gascalculator.TangerineWhistleGasCalculator;
@@ -661,7 +662,7 @@ public abstract class MainnetProtocolSpecs {
feeMarket ->
new CancunTargetingGasLimitCalculator(
londonForkBlockNumber, (BaseFeeMarket) feeMarket))
// EVM changes to support EOF EIPs (3670, 4200, 4750, 5450)
// EVM changes to support EIP-1153: TSTORE and EIP-5656: MCOPY
.evmBuilder(
(gasCalculator, jdCacheConfig) ->
MainnetEVMs.cancun(
@@ -703,6 +704,45 @@ public abstract class MainnetProtocolSpecs {
.name("Cancun");
}
static ProtocolSpecBuilder pragueDefinition(
final Optional<BigInteger> chainId,
final OptionalInt configContractSizeLimit,
final OptionalInt configStackSizeLimit,
final boolean enableRevertReason,
final GenesisConfigOptions genesisConfigOptions,
final EvmConfiguration evmConfiguration) {
final int contractSizeLimit =
configContractSizeLimit.orElse(SPURIOUS_DRAGON_CONTRACT_SIZE_LIMIT);
return cancunDefinition(
chainId,
configContractSizeLimit,
configStackSizeLimit,
enableRevertReason,
genesisConfigOptions,
evmConfiguration)
// EVM changes to support EOF EIPs (3670, 4200, 4750, 5450)
.gasCalculator(PragueGasCalculator::new)
.evmBuilder(
(gasCalculator, jdCacheConfig) ->
MainnetEVMs.prague(
gasCalculator, chainId.orElse(BigInteger.ZERO), evmConfiguration))
// change contract call creator to accept EOF code
.contractCreationProcessorBuilder(
(gasCalculator, evm) ->
new ContractCreationProcessor(
gasCalculator,
evm,
true,
List.of(
MaxCodeSizeRule.of(contractSizeLimit), EOFValidationCodeRule.of(1, false)),
1,
SPURIOUS_DRAGON_FORCE_DELETE_WHEN_EMPTY_ADDRESSES))
// use prague precompiled contracts
.precompileContractRegistryBuilder(MainnetPrecompiledContractRegistries::prague)
.name("Prague");
}
static ProtocolSpecBuilder futureEipsDefinition(
final Optional<BigInteger> chainId,
final OptionalInt configContractSizeLimit,
@@ -712,7 +752,7 @@ public abstract class MainnetProtocolSpecs {
final EvmConfiguration evmConfiguration) {
final int contractSizeLimit =
configContractSizeLimit.orElse(SPURIOUS_DRAGON_CONTRACT_SIZE_LIMIT);
return cancunDefinition(
return pragueDefinition(
chainId,
configContractSizeLimit,
configStackSizeLimit,

View File

@@ -239,6 +239,7 @@ public class ProtocolScheduleBuilder {
// Begin timestamp forks
lastForkBlock = validateForkOrder("Shanghai", config.getShanghaiTime(), lastForkBlock);
lastForkBlock = validateForkOrder("Cancun", config.getCancunTime(), lastForkBlock);
lastForkBlock = validateForkOrder("Prague", config.getPragueTime(), lastForkBlock);
lastForkBlock = validateForkOrder("FutureEips", config.getFutureEipsTime(), lastForkBlock);
lastForkBlock =
validateForkOrder("ExperimentalEips", config.getExperimentalEipsTime(), lastForkBlock);
@@ -343,6 +344,7 @@ public class ProtocolScheduleBuilder {
// Timestamp Forks
timestampMilestone(config.getShanghaiTime(), specFactory.shanghaiDefinition(config)),
timestampMilestone(config.getCancunTime(), specFactory.cancunDefinition(config)),
timestampMilestone(config.getPragueTime(), specFactory.pragueDefinition(config)),
timestampMilestone(config.getFutureEipsTime(), specFactory.futureEipsDefinition(config)),
timestampMilestone(
config.getExperimentalEipsTime(), specFactory.experimentalEipsDefinition(config)),

View File

@@ -23,15 +23,15 @@ import org.bouncycastle.util.Arrays;
public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
DEFAULT("default".getBytes(StandardCharsets.UTF_8)),
BLOCKCHAIN(new byte[] {1}, true, true),
WORLD_STATE(new byte[] {2}, new int[] {0, 1}, false, true),
WORLD_STATE(new byte[] {2}, new int[] {0, 1}, false, true, false),
PRIVATE_TRANSACTIONS(new byte[] {3}),
PRIVATE_STATE(new byte[] {4}),
PRUNING_STATE(new byte[] {5}, new int[] {0, 1}),
ACCOUNT_INFO_STATE(new byte[] {6}, new int[] {2}, false, true),
ACCOUNT_INFO_STATE(new byte[] {6}, new int[] {2}, false, true, false),
CODE_STORAGE(new byte[] {7}, new int[] {2}),
ACCOUNT_STORAGE_STORAGE(new byte[] {8}, new int[] {2}, false, true),
TRIE_BRANCH_STORAGE(new byte[] {9}, new int[] {2}, false, true),
TRIE_LOG_STORAGE(new byte[] {10}, new int[] {2}),
ACCOUNT_STORAGE_STORAGE(new byte[] {8}, new int[] {2}, false, true, false),
TRIE_BRANCH_STORAGE(new byte[] {9}, new int[] {2}, false, true, false),
TRIE_LOG_STORAGE(new byte[] {10}, new int[] {2}, true, false, true),
VARIABLES(new byte[] {11}), // formerly GOQUORUM_PRIVATE_WORLD_STATE
// previously supported GoQuorum private states
@@ -49,6 +49,7 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
private final int[] versionList;
private final boolean containsStaticData;
private final boolean eligibleToHighSpecFlag;
private final boolean staticDataGarbageCollectionEnabled;
KeyValueSegmentIdentifier(final byte[] id) {
this(id, new int[] {0, 1, 2});
@@ -56,22 +57,24 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
KeyValueSegmentIdentifier(
final byte[] id, final boolean containsStaticData, final boolean eligibleToHighSpecFlag) {
this(id, new int[] {0, 1, 2}, containsStaticData, eligibleToHighSpecFlag);
this(id, new int[] {0, 1, 2}, containsStaticData, eligibleToHighSpecFlag, false);
}
KeyValueSegmentIdentifier(final byte[] id, final int[] versionList) {
this(id, versionList, false, false);
this(id, versionList, false, false, false);
}
KeyValueSegmentIdentifier(
final byte[] id,
final int[] versionList,
final boolean containsStaticData,
final boolean eligibleToHighSpecFlag) {
final boolean eligibleToHighSpecFlag,
final boolean staticDataGarbageCollectionEnabled) {
this.id = id;
this.versionList = versionList;
this.containsStaticData = containsStaticData;
this.eligibleToHighSpecFlag = eligibleToHighSpecFlag;
this.staticDataGarbageCollectionEnabled = staticDataGarbageCollectionEnabled;
}
@Override
@@ -94,6 +97,11 @@ public enum KeyValueSegmentIdentifier implements SegmentIdentifier {
return eligibleToHighSpecFlag;
}
@Override
public boolean isStaticDataGarbageCollectionEnabled() {
return staticDataGarbageCollectionEnabled;
}
@Override
public boolean includeInDatabaseVersion(final int version) {
return Arrays.contains(versionList, version);

View File

@@ -74,6 +74,7 @@ public class BonsaiWorldStateUpdateAccumulator
private final Map<Address, StorageConsumingMap<StorageSlotKey, BonsaiValue<UInt256>>>
storageToUpdate = new ConcurrentHashMap<>();
private final Map<UInt256, Hash> storageKeyHashLookup = new ConcurrentHashMap<>();
protected boolean isAccumulatorStateChanged;
public BonsaiWorldStateUpdateAccumulator(
@@ -142,7 +143,7 @@ public class BonsaiWorldStateUpdateAccumulator
new BonsaiAccount(
this,
address,
hashAndSavePreImage(address),
hashAndSaveAccountPreImage(address),
nonce,
balance,
Hash.EMPTY_TRIE_HASH,
@@ -364,11 +365,11 @@ public class BonsaiWorldStateUpdateAccumulator
entries.forEach(
storageUpdate -> {
final UInt256 keyUInt = storageUpdate.getKey();
final Hash slotHash = hashAndSavePreImage(keyUInt);
final StorageSlotKey slotKey =
new StorageSlotKey(slotHash, Optional.of(keyUInt));
new StorageSlotKey(hashAndSaveSlotPreImage(keyUInt), Optional.of(keyUInt));
final UInt256 value = storageUpdate.getValue();
final BonsaiValue<UInt256> pendingValue = pendingStorageUpdates.get(slotKey);
if (pendingValue == null) {
pendingStorageUpdates.put(
slotKey,
@@ -409,7 +410,7 @@ public class BonsaiWorldStateUpdateAccumulator
@Override
public UInt256 getStorageValue(final Address address, final UInt256 slotKey) {
StorageSlotKey storageSlotKey =
new StorageSlotKey(hashAndSavePreImage(slotKey), Optional.of(slotKey));
new StorageSlotKey(hashAndSaveSlotPreImage(slotKey), Optional.of(slotKey));
return getStorageValueByStorageSlotKey(address, storageSlotKey).orElse(UInt256.ZERO);
}
@@ -453,7 +454,7 @@ public class BonsaiWorldStateUpdateAccumulator
public UInt256 getPriorStorageValue(final Address address, final UInt256 storageKey) {
// TODO maybe log the read into the trie layer?
StorageSlotKey storageSlotKey =
new StorageSlotKey(hashAndSavePreImage(storageKey), Optional.of(storageKey));
new StorageSlotKey(hashAndSaveSlotPreImage(storageKey), Optional.of(storageKey));
final Map<StorageSlotKey, BonsaiValue<UInt256>> localAccountStorage =
storageToUpdate.get(address);
if (localAccountStorage != null) {
@@ -765,6 +766,7 @@ public class BonsaiWorldStateUpdateAccumulator
resetAccumulatorStateChanged();
updatedAccounts.clear();
deletedAccounts.clear();
storageKeyHashLookup.clear();
}
public static class AccountConsumingMap<T> extends ForwardingMap<Address, T> {
@@ -828,8 +830,17 @@ public class BonsaiWorldStateUpdateAccumulator
void process(final Address address, T value);
}
protected Hash hashAndSavePreImage(final Bytes bytes) {
// by default do not save hash preImages
return Hash.hash(bytes);
protected Hash hashAndSaveAccountPreImage(final Address address) {
// no need to save account preimage by default
return Hash.hash(address);
}
protected Hash hashAndSaveSlotPreImage(final UInt256 slotKey) {
Hash hash = storageKeyHashLookup.get(slotKey);
if (hash == null) {
hash = Hash.hash(slotKey);
storageKeyHashLookup.put(slotKey, hash);
}
return hash;
}
}

View File

@@ -1,39 +0,0 @@
/*
* Copyright Hyperledger Besu Contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
*
* SPDX-License-Identifier: Apache-2.0
*/
package org.hyperledger.besu.ethereum.util;
import java.util.concurrent.Executors;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Consumer;
public class LogUtil {
static ScheduledExecutorService executor = Executors.newSingleThreadScheduledExecutor();
public static void throttledLog(
final Consumer<String> logger,
final String logMessage,
final AtomicBoolean shouldLog,
final int logRepeatDelay) {
if (shouldLog.compareAndSet(true, false)) {
logger.accept(logMessage);
final Runnable runnable = () -> shouldLog.set(true);
executor.schedule(runnable, logRepeatDelay, TimeUnit.SECONDS);
}
}
}

View File

@@ -40,7 +40,7 @@ import org.junit.jupiter.params.provider.Arguments;
import org.junit.jupiter.params.provider.ArgumentsProvider;
import org.junit.jupiter.params.provider.ArgumentsSource;
public final class GenesisStateTest {
final class GenesisStateTest {
/** Known RLP encoded bytes of the Olympic Genesis Block. */
private static final String OLYMPIC_RLP =
@@ -63,7 +63,7 @@ public final class GenesisStateTest {
@ParameterizedTest
@ArgumentsSource(GenesisStateTestArguments.class)
public void createFromJsonWithAllocs(final DataStorageFormat dataStorageFormat) throws Exception {
void createFromJsonWithAllocs(final DataStorageFormat dataStorageFormat) throws Exception {
final GenesisState genesisState =
GenesisState.fromJson(
dataStorageFormat,
@@ -93,7 +93,7 @@ public final class GenesisStateTest {
@ParameterizedTest
@ArgumentsSource(GenesisStateTestArguments.class)
public void createFromJsonNoAllocs(final DataStorageFormat dataStorageFormat) throws Exception {
void createFromJsonNoAllocs(final DataStorageFormat dataStorageFormat) throws Exception {
final GenesisState genesisState =
GenesisState.fromJson(
dataStorageFormat,
@@ -136,8 +136,7 @@ public final class GenesisStateTest {
@ParameterizedTest
@ArgumentsSource(GenesisStateTestArguments.class)
public void createFromJsonWithContract(final DataStorageFormat dataStorageFormat)
throws Exception {
void createFromJsonWithContract(final DataStorageFormat dataStorageFormat) throws Exception {
assertContractInvariants(
dataStorageFormat,
"genesis3.json",
@@ -146,7 +145,7 @@ public final class GenesisStateTest {
@ParameterizedTest
@ArgumentsSource(GenesisStateTestArguments.class)
public void createFromJsonWithNonce(final DataStorageFormat dataStorageFormat) throws Exception {
void createFromJsonWithNonce(final DataStorageFormat dataStorageFormat) throws Exception {
final GenesisState genesisState =
GenesisState.fromJson(
dataStorageFormat,
@@ -162,7 +161,7 @@ public final class GenesisStateTest {
@ParameterizedTest
@ArgumentsSource(GenesisStateTestArguments.class)
public void encodeOlympicBlock(final DataStorageFormat dataStorageFormat) throws Exception {
void encodeOlympicBlock(final DataStorageFormat dataStorageFormat) throws Exception {
final GenesisState genesisState =
GenesisState.fromJson(
dataStorageFormat,
@@ -183,7 +182,7 @@ public final class GenesisStateTest {
@ParameterizedTest
@ArgumentsSource(GenesisStateTestArguments.class)
public void genesisFromShanghai(final DataStorageFormat dataStorageFormat) throws Exception {
void genesisFromShanghai(final DataStorageFormat dataStorageFormat) throws Exception {
final GenesisState genesisState =
GenesisState.fromJson(
dataStorageFormat,
@@ -196,8 +195,8 @@ public final class GenesisStateTest {
Hash.fromHexString(
"0xfdc41f92053811b877be43e61cab6b0d9ee55501ae2443df0970c753747f12d8"));
assertThat(header.getGasLimit()).isEqualTo(0x2fefd8);
assertThat(header.getGasUsed()).isEqualTo(0);
assertThat(header.getNumber()).isEqualTo(0);
assertThat(header.getGasUsed()).isZero();
assertThat(header.getNumber()).isZero();
assertThat(header.getReceiptsRoot())
.isEqualTo(
Hash.fromHexString(
@@ -223,7 +222,7 @@ public final class GenesisStateTest {
final Account last =
worldState.get(Address.fromHexString("fb289e2b2b65fb63299a682d000744671c50417b"));
assertThat(first).isNotNull();
assertThat(first.getBalance().toLong()).isEqualTo(0);
assertThat(first.getBalance().toLong()).isZero();
assertThat(first.getCode())
.isEqualTo(Bytes.fromHexString("0x5f804955600180495560028049556003804955"));
assertThat(last).isNotNull();
@@ -233,7 +232,7 @@ public final class GenesisStateTest {
@ParameterizedTest
@ArgumentsSource(GenesisStateTestArguments.class)
public void genesisFromCancun(final DataStorageFormat dataStorageFormat) throws Exception {
void genesisFromCancun(final DataStorageFormat dataStorageFormat) throws Exception {
final GenesisState genesisState =
GenesisState.fromJson(
dataStorageFormat,
@@ -246,8 +245,8 @@ public final class GenesisStateTest {
Hash.fromHexString(
"0x87846b86c1026fa7d7be2da045716274231de1871065a320659c9b111287c688"));
assertThat(header.getGasLimit()).isEqualTo(0x2fefd8);
assertThat(header.getGasUsed()).isEqualTo(0);
assertThat(header.getNumber()).isEqualTo(0);
assertThat(header.getGasUsed()).isZero();
assertThat(header.getNumber()).isZero();
assertThat(header.getReceiptsRoot())
.isEqualTo(
Hash.fromHexString(
@@ -273,7 +272,57 @@ public final class GenesisStateTest {
final Account last =
worldState.get(Address.fromHexString("fb289e2b2b65fb63299a682d000744671c50417b"));
assertThat(first).isNotNull();
assertThat(first.getBalance().toLong()).isEqualTo(0);
assertThat(first.getBalance().toLong()).isZero();
assertThat(first.getCode())
.isEqualTo(Bytes.fromHexString("0x5f804955600180495560028049556003804955"));
assertThat(last).isNotNull();
Wei lastBalance = last.getBalance();
assertThat(lastBalance).isEqualTo(Wei.fromHexString("0x123450000000000000000"));
}
@ParameterizedTest
@ArgumentsSource(GenesisStateTestArguments.class)
void genesisFromPrague(final DataStorageFormat dataStorageFormat) throws Exception {
final GenesisState genesisState =
GenesisState.fromJson(
dataStorageFormat,
Resources.toString(
GenesisStateTest.class.getResource("genesis_prague.json"), Charsets.UTF_8),
ProtocolScheduleFixture.MAINNET);
final BlockHeader header = genesisState.getBlock().getHeader();
assertThat(header.getHash())
.isEqualTo(
Hash.fromHexString(
"0x87846b86c1026fa7d7be2da045716274231de1871065a320659c9b111287c688"));
assertThat(header.getGasLimit()).isEqualTo(0x2fefd8);
assertThat(header.getGasUsed()).isZero();
assertThat(header.getNumber()).isZero();
assertThat(header.getReceiptsRoot())
.isEqualTo(
Hash.fromHexString(
"0x56e81f171bcc55a6ff8345e692c0f86e5b48e01b996cadc001622fb5e363b421"));
assertThat(header.getTransactionsRoot()).isEqualTo(Hash.EMPTY_TRIE_HASH);
assertThat(header.getOmmersHash())
.isEqualTo(
Hash.fromHexString(
"0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d49347"));
assertThat(header.getExtraData()).isEqualTo(Bytes.EMPTY);
assertThat(header.getParentHash()).isEqualTo(Hash.ZERO);
final MutableWorldState worldState = InMemoryKeyValueStorageProvider.createInMemoryWorldState();
genesisState.writeStateTo(worldState);
Hash computedStateRoot = worldState.rootHash();
assertThat(computedStateRoot).isEqualTo(header.getStateRoot());
assertThat(header.getStateRoot())
.isEqualTo(
Hash.fromHexString(
"0x7f5cfe1375a61009a22d24512d18035bc8f855129452fa9c6a6be2ef4e9da7db"));
final Account first =
worldState.get(Address.fromHexString("0000000000000000000000000000000000000100"));
final Account last =
worldState.get(Address.fromHexString("fb289e2b2b65fb63299a682d000744671c50417b"));
assertThat(first).isNotNull();
assertThat(first.getBalance().toLong()).isZero();
assertThat(first.getCode())
.isEqualTo(Bytes.fromHexString("0x5f804955600180495560028049556003804955"));
assertThat(last).isNotNull();

View File

@@ -41,7 +41,7 @@ import org.mockito.junit.jupiter.MockitoExtension;
import org.mockito.stubbing.Answer;
@ExtendWith(MockitoExtension.class)
public class ProtocolScheduleBuilderTest {
class ProtocolScheduleBuilderTest {
private final long PRE_SHANGHAI_TIMESTAMP = 1680488620L; // Mon, 03 Apr 2023 02:23:40 UTC
@Mock GenesisConfigOptions configOptions;
@Mock private Function<ProtocolSpecBuilder, ProtocolSpecBuilder> modifier;
@@ -61,13 +61,14 @@ public class ProtocolScheduleBuilderTest {
}
@Test
public void createProtocolScheduleInOrder() {
void createProtocolScheduleInOrder() {
when(configOptions.getHomesteadBlockNumber()).thenReturn(OptionalLong.of(1L));
when(configOptions.getDaoForkBlock()).thenReturn(OptionalLong.of(2L));
when(configOptions.getByzantiumBlockNumber()).thenReturn(OptionalLong.of(13L));
when(configOptions.getMergeNetSplitBlockNumber()).thenReturn(OptionalLong.of(15L));
when(configOptions.getShanghaiTime()).thenReturn(OptionalLong.of(PRE_SHANGHAI_TIMESTAMP + 1));
when(configOptions.getCancunTime()).thenReturn(OptionalLong.of(PRE_SHANGHAI_TIMESTAMP + 3));
when(configOptions.getPragueTime()).thenReturn(OptionalLong.of(PRE_SHANGHAI_TIMESTAMP + 5));
final ProtocolSchedule protocolSchedule = builder.createProtocolSchedule();
assertThat(protocolSchedule.getChainId()).contains(CHAIN_ID);
@@ -102,10 +103,20 @@ public class ProtocolScheduleBuilderTest {
.getByBlockHeader(blockHeader(54, PRE_SHANGHAI_TIMESTAMP + 4))
.getName())
.isEqualTo("Cancun");
assertThat(
protocolSchedule
.getByBlockHeader(blockHeader(55, PRE_SHANGHAI_TIMESTAMP + 5))
.getName())
.isEqualTo("Prague");
assertThat(
protocolSchedule
.getByBlockHeader(blockHeader(56, PRE_SHANGHAI_TIMESTAMP + 6))
.getName())
.isEqualTo("Prague");
}
@Test
public void createProtocolScheduleOverlappingUsesLatestFork() {
void createProtocolScheduleOverlappingUsesLatestFork() {
when(configOptions.getHomesteadBlockNumber()).thenReturn(OptionalLong.of(0L));
when(configOptions.getByzantiumBlockNumber()).thenReturn(OptionalLong.of(0L));
final ProtocolSchedule protocolSchedule = builder.createProtocolSchedule();
@@ -116,7 +127,7 @@ public class ProtocolScheduleBuilderTest {
}
@Test
public void createProtocolScheduleOutOfOrderThrows() {
void createProtocolScheduleOutOfOrderThrows() {
when(configOptions.getDaoForkBlock()).thenReturn(OptionalLong.of(0L));
when(configOptions.getArrowGlacierBlockNumber()).thenReturn(OptionalLong.of(12L));
when(configOptions.getGrayGlacierBlockNumber()).thenReturn(OptionalLong.of(11L));
@@ -127,7 +138,7 @@ public class ProtocolScheduleBuilderTest {
}
@Test
public void createProtocolScheduleWithTimestampsOutOfOrderThrows() {
void createProtocolScheduleWithTimestampsOutOfOrderThrows() {
when(configOptions.getDaoForkBlock()).thenReturn(OptionalLong.of(0L));
when(configOptions.getShanghaiTime()).thenReturn(OptionalLong.of(3L));
when(configOptions.getCancunTime()).thenReturn(OptionalLong.of(2L));
@@ -138,7 +149,7 @@ public class ProtocolScheduleBuilderTest {
}
@Test
public void modifierInsertedBetweenBlocksIsAppliedToLaterAndCreatesInterimMilestone() {
void modifierInsertedBetweenBlocksIsAppliedToLaterAndCreatesInterimMilestone() {
when(configOptions.getHomesteadBlockNumber()).thenReturn(OptionalLong.of(5L));
when(modifier.apply(any()))
@@ -158,7 +169,7 @@ public class ProtocolScheduleBuilderTest {
}
@Test
public void modifierPastEndOfDefinedMilestonesGetsItsOwnMilestoneCreated() {
void modifierPastEndOfDefinedMilestonesGetsItsOwnMilestoneCreated() {
when(modifier.apply(any()))
.thenAnswer((Answer<ProtocolSpecBuilder>) invocation -> invocation.getArgument(0));
@@ -175,7 +186,7 @@ public class ProtocolScheduleBuilderTest {
}
@Test
public void modifierOnDefinedMilestoneIsAppliedButDoesNotGetAnExtraMilestoneCreated() {
void modifierOnDefinedMilestoneIsAppliedButDoesNotGetAnExtraMilestoneCreated() {
when(configOptions.getHomesteadBlockNumber()).thenReturn(OptionalLong.of(5L));
when(modifier.apply(any()))
.thenAnswer((Answer<ProtocolSpecBuilder>) invocation -> invocation.getArgument(0));

View File

@@ -91,8 +91,6 @@ public class ForkchoiceEvent {
+ safeBlockHash
+ ", finalizedBlockHash="
+ finalizedBlockHash
+ ", safeBlockHash="
+ safeBlockHash
+ '}';
}
}

View File

@@ -212,7 +212,7 @@ public class EthPeer implements Comparable<EthPeer> {
public void recordRequestTimeout(final int requestCode) {
LOG.atDebug()
.setMessage("Timed out while waiting for response from peer {}...")
.addArgument(this::getShortNodeId)
.addArgument(this::getLoggableId)
.log();
LOG.trace("Timed out while waiting for response from peer {}", this);
reputation.recordRequestTimeout(requestCode).ifPresent(this::disconnect);
@@ -222,7 +222,7 @@ public class EthPeer implements Comparable<EthPeer> {
LOG.atTrace()
.setMessage("Received useless response for request type {} from peer {}...")
.addArgument(requestType)
.addArgument(this::getShortNodeId)
.addArgument(this::getLoggableId)
.log();
reputation.recordUselessResponse(System.currentTimeMillis()).ifPresent(this::disconnect);
}
@@ -264,7 +264,7 @@ public class EthPeer implements Comparable<EthPeer> {
LOG.atDebug()
.setMessage("Protocol {} unavailable for this peer {}...")
.addArgument(protocolName)
.addArgument(this.getShortNodeId())
.addArgument(this.getLoggableId())
.log();
return null;
}
@@ -274,7 +274,7 @@ public class EthPeer implements Comparable<EthPeer> {
LOG.info(
"Permissioning blocked sending of message code {} to {}...",
messageData.getCode(),
this.getShortNodeId());
this.getLoggableId());
if (LOG.isDebugEnabled()) {
LOG.debug(
"Permissioning blocked by providers {}",
@@ -608,7 +608,7 @@ public class EthPeer implements Comparable<EthPeer> {
public String toString() {
return String.format(
"PeerId: %s... %s, validated? %s, disconnected? %s, client: %s, %s, %s",
getShortNodeId(),
getLoggableId(),
reputation,
isFullyValidated(),
isDisconnected(),
@@ -618,8 +618,9 @@ public class EthPeer implements Comparable<EthPeer> {
}
@Nonnull
public String getShortNodeId() {
return nodeId().toString().substring(0, 20);
public String getLoggableId() {
// 8 bytes plus the 0x prefix is 18 characters
return nodeId().toString().substring(0, 18) + "...";
}
@Override

View File

@@ -54,10 +54,14 @@ import org.slf4j.LoggerFactory;
public class EthPeers {
private static final Logger LOG = LoggerFactory.getLogger(EthPeers.class);
public static final Comparator<EthPeer> TOTAL_DIFFICULTY =
Comparator.comparing(((final EthPeer p) -> p.chainState().getEstimatedTotalDifficulty()));
Comparator.comparing((final EthPeer p) -> p.chainState().getEstimatedTotalDifficulty());
public static final Comparator<EthPeer> CHAIN_HEIGHT =
Comparator.comparing(((final EthPeer p) -> p.chainState().getEstimatedHeight()));
Comparator.comparing((final EthPeer p) -> p.chainState().getEstimatedHeight());
public static final Comparator<EthPeer> MOST_USEFUL_PEER =
Comparator.comparing((final EthPeer p) -> p.getReputation().getScore())
.thenComparing(CHAIN_HEIGHT);
public static final Comparator<EthPeer> HEAVIEST_CHAIN =
TOTAL_DIFFICULTY.thenComparing(CHAIN_HEIGHT);
@@ -200,7 +204,7 @@ public class EthPeers {
if (peer.getReputation().getScore() > USEFULL_PEER_SCORE_THRESHOLD) {
LOG.debug("Disconnected USEFULL peer {}", peer);
} else {
LOG.debug("Disconnected EthPeer {}", peer.getShortNodeId());
LOG.debug("Disconnected EthPeer {}", peer.getLoggableId());
}
}
}
@@ -389,7 +393,7 @@ public class EthPeers {
LOG.atDebug()
.setMessage(
"disconnecting peer {}. Waiting for better peers. Current {} of max {}")
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.addArgument(this::peerCount)
.addArgument(this::getMaxPeers)
.log();

View File

@@ -400,13 +400,16 @@ public class EthProtocolManager implements ProtocolManager, MinedBlockObserver {
if (peer.getForkId().map(forkIdManager::peerCheck).orElse(true)) {
LOG.atDebug()
.setMessage("ForkId OK or not available for peer {}")
.addArgument(peer::getId)
.addArgument(peer::getLoggableId)
.log();
if (ethPeers.shouldConnect(peer, incoming)) {
return true;
}
}
LOG.atDebug().setMessage("ForkId check failed for peer {}").addArgument(peer::getId).log();
LOG.atDebug()
.setMessage("ForkId check failed for peer {}")
.addArgument(peer::getLoggableId)
.log();
return false;
}
@@ -417,10 +420,10 @@ public class EthProtocolManager implements ProtocolManager, MinedBlockObserver {
final boolean initiatedByPeer) {
if (ethPeers.registerDisconnect(connection)) {
LOG.atDebug()
.setMessage("Disconnect - {} - {} - {}... - {} peers left")
.setMessage("Disconnect - {} - {} - {} - {} peers left")
.addArgument(initiatedByPeer ? "Inbound" : "Outbound")
.addArgument(reason::toString)
.addArgument(() -> connection.getPeer().getId().slice(0, 8))
.addArgument(() -> connection.getPeer().getLoggableId())
.addArgument(ethPeers::peerCount)
.log();
LOG.atTrace().setMessage("{}").addArgument(ethPeers::toString).log();
@@ -478,7 +481,7 @@ public class EthProtocolManager implements ProtocolManager, MinedBlockObserver {
} catch (final RLPException e) {
LOG.atDebug()
.setMessage("Unable to parse status message from peer {}... {}")
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.addArgument(e)
.log();
// Parsing errors can happen when clients broadcast network ids outside the int range,
@@ -488,7 +491,7 @@ public class EthProtocolManager implements ProtocolManager, MinedBlockObserver {
}
private Object getPeerOrPeerId(final EthPeer peer) {
return LOG.isTraceEnabled() ? peer : peer.getShortNodeId();
return LOG.isTraceEnabled() ? peer : peer.getLoggableId();
}
@Override

View File

@@ -74,19 +74,19 @@ public abstract class AbstractGetHeadersFromPeerTask
final List<BlockHeader> headers = headersMessage.getHeaders(protocolSchedule);
if (headers.isEmpty()) {
// Message contains no data - nothing to do
LOG.debug("headers.isEmpty. Peer: {}", peer.getShortNodeId());
LOG.debug("headers.isEmpty. Peer: {}", peer.getLoggableId());
return Optional.empty();
}
if (headers.size() > count) {
// Too many headers - this isn't our response
LOG.debug("headers.size()>count. Peer: {}", peer.getShortNodeId());
LOG.debug("headers.size()>count. Peer: {}", peer.getLoggableId());
return Optional.empty();
}
final BlockHeader firstHeader = headers.get(0);
if (!matchesFirstHeader(firstHeader)) {
// This isn't our message - nothing to do
LOG.debug("!matchesFirstHeader. Peer: {}", peer.getShortNodeId());
LOG.debug("!matchesFirstHeader. Peer: {}", peer.getLoggableId());
return Optional.empty();
}
@@ -100,7 +100,7 @@ public abstract class AbstractGetHeadersFromPeerTask
header = headers.get(i);
if (header.getNumber() != prevBlockHeader.getNumber() + expectedDelta) {
// Skip doesn't match, this isn't our data
LOG.debug("header not matching the expected number. Peer: {}", peer.getShortNodeId());
LOG.debug("header not matching the expected number. Peer: {}", peer.getLoggableId());
return Optional.empty();
}
// if headers are supposed to be sequential check if a chain is formed
@@ -110,7 +110,7 @@ public abstract class AbstractGetHeadersFromPeerTask
if (!parent.getHash().equals(child.getParentHash())) {
LOG.debug(
"Sequential headers must form a chain through hashes (BREACH_OF_PROTOCOL), disconnecting peer: {}",
peer.getShortNodeId());
peer.getLoggableId());
peer.disconnect(DisconnectMessage.DisconnectReason.BREACH_OF_PROTOCOL);
return Optional.empty();
}
@@ -129,7 +129,7 @@ public abstract class AbstractGetHeadersFromPeerTask
.setMessage("Received {} of {} headers requested from peer {}...")
.addArgument(headersList::size)
.addArgument(count)
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.log();
return Optional.of(headersList);
}
@@ -138,7 +138,7 @@ public abstract class AbstractGetHeadersFromPeerTask
if (blockHeader.getNumber() > peer.chainState().getEstimatedHeight()) {
LOG.atTrace()
.setMessage("Updating chain state for peer {}... to block header {}")
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.addArgument(blockHeader::toLogString)
.log();
peer.chainState().update(blockHeader);

View File

@@ -110,7 +110,7 @@ public abstract class AbstractPeerRequestTask<R> extends AbstractPeerTask<R> {
// Peer sent us malformed data - disconnect
LOG.debug(
"Disconnecting with BREACH_OF_PROTOCOL due to malformed message: {}",
peer.getShortNodeId(),
peer.getLoggableId(),
e);
LOG.trace("Peer {} Malformed message data: {}", peer, message.getData());
peer.disconnect(DisconnectReason.BREACH_OF_PROTOCOL);

View File

@@ -137,16 +137,15 @@ public abstract class AbstractRetryingSwitchingPeerTask<T> extends AbstractRetry
// or the least useful
if (peers.peerCount() >= peers.getMaxPeers()) {
failedPeers.stream()
.filter(peer -> !peer.isDisconnected())
.findAny()
.or(() -> peers.streamAvailablePeers().min(peers.getBestChainComparator()))
failedPeers.stream().filter(peer -> !peer.isDisconnected()).findAny().stream()
.min(EthPeers.MOST_USEFUL_PEER)
.or(() -> peers.streamAvailablePeers().min(EthPeers.MOST_USEFUL_PEER))
.ifPresent(
peer -> {
LOG.atDebug()
.setMessage(
"Refresh peers disconnecting peer {}... Waiting for better peers. Current {} of max {}")
.addArgument(peer::getShortNodeId)
"Refresh peers disconnecting peer {} Waiting for better peers. Current {} of max {}")
.addArgument(peer::getLoggableId)
.addArgument(peers::peerCount)
.addArgument(peers::getMaxPeers)
.log();

View File

@@ -92,7 +92,7 @@ public class BufferedGetPooledTransactionsFromPeerFetcher {
.setMessage("Got {} transactions of {} hashes requested from peer {}...")
.addArgument(retrievedTransactions::size)
.addArgument(task.getTransactionHashes()::size)
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.log();
transactionPool.addRemoteTransactions(retrievedTransactions);
@@ -121,7 +121,7 @@ public class BufferedGetPooledTransactionsFromPeerFetcher {
LOG.atTrace()
.setMessage(
"Transaction hashes to request from peer {}... fresh count {}, already seen count {}")
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.addArgument(toRetrieve::size)
.addArgument(alreadySeenCount)
.log();

View File

@@ -123,7 +123,7 @@ public class GetHeadersFromPeerByHashTask extends AbstractGetHeadersFromPeerTask
.setMessage("Requesting {} headers (hash {}...) from peer {}...")
.addArgument(count)
.addArgument(referenceHash.slice(0, 6))
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.log();
return peer.getHeadersByHash(referenceHash, count, skip, reverse);
},

View File

@@ -81,7 +81,7 @@ public class GetHeadersFromPeerByNumberTask extends AbstractGetHeadersFromPeerTa
.setMessage("Requesting {} headers (blockNumber {}) from peer {}.")
.addArgument(count)
.addArgument(blockNumber)
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.log();
return peer.getHeadersByNumber(blockNumber, count, skip, reverse);
},

View File

@@ -69,7 +69,7 @@ public class GetNodeDataFromPeerTask extends AbstractPeerRequestTask<Map<Hash, B
LOG.atTrace()
.setMessage("Requesting {} node data entries from peer {}...")
.addArgument(hashes::size)
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.log();
return peer.getNodeData(hashes);
},

View File

@@ -64,7 +64,7 @@ public class GetPooledTransactionsFromPeerTask extends AbstractPeerRequestTask<L
LOG.atTrace()
.setMessage("Requesting {} transaction pool entries from peer {}...")
.addArgument(hashes::size)
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.log();
return peer.getPooledTransactions(new ArrayList<>(hashes));
},

View File

@@ -86,7 +86,7 @@ public class GetReceiptsFromPeerTask
LOG.atTrace()
.setMessage("Requesting {} receipts from peer {}...")
.addArgument(blockHeaders::size)
.addArgument(peer::getShortNodeId)
.addArgument(peer::getLoggableId)
.log();
return peer.getReceipts(blockHashes);
},

Some files were not shown because too many files have changed in this diff Show More