Compare commits

..

12 Commits

Author        SHA1        Message                                                         Date
georgehao     582038be0c  fix                                                             2024-08-22 20:24:03 +08:00
colinlyguo    dde4f4064a  use viper                                                       2024-08-22 15:06:17 +08:00
colinlyguo    48aad344bd  move same implementations to util package                       2024-08-22 02:46:54 +08:00
colinlyguo    fd5d662178  fix CI                                                          2024-08-22 02:39:54 +08:00
colinlyguo    5d95d6e4db  Revert "use viper" (reverts commit 7dc8b8327d)                  2024-08-22 02:35:54 +08:00
colinlyguo    7dc8b8327d  use viper                                                       2024-08-22 02:35:12 +08:00
Daniel Helm   1032fcb4ee  add envvar loading for bridge-history-api and coordinator      2024-08-21 10:59:55 -05:00
Daniel Helm   712068a4b3  Merge remote-tracking branch 'refs/remotes/origin/feat-envvar-for-rollup' into feat-envvar-for-rollup  2024-08-21 10:57:46 -05:00
colinlyguo    7598c5bc29  chore: auto version bump [bot]                                  2024-08-21 13:11:50 +00:00
colinlyguo    8ec8b6f561  fix CI                                                          2024-08-21 21:01:05 +08:00
Daniel Helm   fdde6eb0a7  change to SCROLL_ROLLUP envvar prefix                           2024-08-21 07:45:49 -05:00
Daniel Helm   b8fcf5e933  modify config loader to check envvars with SDKROLLUP_ prefix    2024-08-20 13:44:59 -05:00
105 changed files with 1301 additions and 3099 deletions

View File

@@ -1,15 +0,0 @@
# yaml-language-server: $schema=https://coderabbit.ai/integrations/schema.v2.json
language: "en-US"
early_access: false
reviews:
profile: "chill"
request_changes_workflow: false
high_level_summary: true
poem: true
review_status: true
collapse_walkthrough: false
auto_review:
enabled: true
drafts: false
chat:
auto_reply: true

View File

@@ -49,8 +49,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -94,8 +94,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -139,8 +139,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -184,8 +184,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -229,8 +229,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -274,8 +274,8 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -318,8 +318,8 @@ jobs:
file: ./build/dockerfiles/coordinator-api.Dockerfile
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest
@@ -363,7 +363,7 @@ jobs:
platforms: linux/amd64,linux/arm64
push: true
tags: |
scrolltech/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
scrolltech/${{ env.REPOSITORY }}:latest
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ secrets.DOCKERHUB_USERNAME }}/${{ env.REPOSITORY }}:latest
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:${{ env.IMAGE_TAG }}
${{ env.ECR_REGISTRY }}/${{ env.REPOSITORY }}:latest

View File

@@ -1,6 +1,6 @@
.PHONY: fmt dev_docker build_test_docker run_test_docker clean update
L2GETH_TAG=scroll-v5.6.3
L2GETH_TAG=scroll-v5.5.1
help: ## Display this help message
@grep -h \
@@ -40,8 +40,8 @@ fmt: ## Format the code
dev_docker: ## Build docker images for development/testing usages
docker pull postgres
docker build -t scroll_l1geth --platform linux/amd64 ./common/testcontainers/docker/l1geth/
docker build -t scroll_l2geth --platform linux/amd64 ./common/testcontainers/docker/l2geth/
docker build -t scroll_l1geth ./common/testcontainers/docker/l1geth/
docker build -t scroll_l2geth ./common/testcontainers/docker/l2geth/
clean: ## Empty out the bin folder
@rm -rf build/bin

View File

@@ -17,7 +17,7 @@ RUN --mount=target=. \
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install vim netcat-openbsd net-tools curl -y
COPY --from=builder /bin/bridgehistoryapi-api /bin/
WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-api"]

View File

@@ -17,8 +17,7 @@ RUN --mount=target=. \
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install ca-certificates vim netcat-openbsd net-tools curl -y
RUN update-ca-certificates
COPY --from=builder /bin/bridgehistoryapi-fetcher /bin/
WORKDIR /app
ENTRYPOINT ["bridgehistoryapi-fetcher"]

View File

@@ -40,7 +40,6 @@ FROM ubuntu:20.04
ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
# ENV CHAIN_ID=534353
RUN apt update && apt install vim netcat-openbsd net-tools curl jq -y
RUN mkdir -p /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/coordinator_api /bin/

View File

@@ -19,8 +19,9 @@ RUN --mount=target=. \
# Pull coordinator into a second stage deploy ubuntu container
FROM ubuntu:20.04
ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
RUN apt update && apt install vim netcat-openbsd net-tools curl -y
COPY --from=builder /bin/coordinator_cron /bin/
WORKDIR /app
ENTRYPOINT ["coordinator_cron"]

View File

@@ -21,7 +21,7 @@ RUN --mount=target=. \
# Pull gas_oracle into a second stage deploy ubuntu container
FROM ubuntu:20.04
RUN apt update && apt install vim netcat-openbsd net-tools curl ca-certificates -y
RUN apt update && apt install ca-certificates -y
ENV CGO_LDFLAGS="-ldl"

View File

@@ -21,7 +21,7 @@ RUN --mount=target=. \
# Pull rollup_relayer into a second stage deploy ubuntu container
FROM ubuntu:20.04
RUN apt update && apt install vim netcat-openbsd net-tools curl ca-certificates -y
RUN apt update && apt install ca-certificates -y
ENV CGO_LDFLAGS="-ldl"

View File

@@ -16,10 +16,8 @@ func GetHardforkName(config *params.ChainConfig, blockHeight, blockTimestamp uin
return "bernoulli"
} else if !config.IsDarwin(blockTimestamp) {
return "curie"
} else if !config.IsDarwinV2(blockTimestamp) {
return "darwin"
} else {
return "darwinV2"
return "darwin"
}
}
@@ -32,10 +30,8 @@ func GetCodecVersion(config *params.ChainConfig, blockHeight, blockTimestamp uin
return encoding.CodecV1
} else if !config.IsDarwin(blockTimestamp) {
return encoding.CodecV2
} else if !config.IsDarwinV2(blockTimestamp) {
return encoding.CodecV3
} else {
return encoding.CodecV4
return encoding.CodecV3
}
}
@@ -48,8 +44,6 @@ func GetMaxChunksPerBatch(config *params.ChainConfig, blockHeight, blockTimestam
return 15
} else if !config.IsDarwin(blockTimestamp) {
return 45
} else if !config.IsDarwinV2(blockTimestamp) {
return 45
} else {
return 45
}
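
The hunks above touch the fork-selection helpers (GetHardforkName, GetCodecVersion, GetMaxChunksPerBatch): one side of the compare includes a darwinV2 branch returning CodecV4, the other collapses it into the darwin case. The sketch below is illustrative only and not part of the diff; it rewrites the same cascade as a table-driven lookup, with made-up activation timestamps standing in for the chain-config checks (config.IsDarwin, config.IsDarwinV2, and friends on *params.ChainConfig).

```go
package main

import (
	"fmt"

	"github.com/scroll-tech/da-codec/encoding"
)

// forkRule pairs an activation check with the values returned once this fork is
// the newest one active at the given timestamp.
type forkRule struct {
	name   string
	codec  encoding.CodecVersion
	active func(blockTimestamp uint64) bool
}

// resolveFork walks the rules from newest to oldest and returns the first active fork.
func resolveFork(rules []forkRule, blockTimestamp uint64) (string, encoding.CodecVersion) {
	for _, r := range rules {
		if r.active(blockTimestamp) {
			return r.name, r.codec
		}
	}
	oldest := rules[len(rules)-1]
	return oldest.name, oldest.codec
}

func main() {
	// Placeholder activation timestamps; the real checks are the chain-config
	// methods referenced in the hunk above.
	rules := []forkRule{
		{"darwinV2", encoding.CodecV4, func(ts uint64) bool { return ts >= 2000 }},
		{"darwin", encoding.CodecV3, func(ts uint64) bool { return ts >= 1000 }},
		{"curie", encoding.CodecV2, func(ts uint64) bool { return true }},
	}
	name, codec := resolveFork(rules, 1500)
	fmt.Println(name, codec) // prints the darwin entry for a timestamp between the two activations
}
```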

View File

@@ -6,8 +6,6 @@ export RUST_BACKTRACE=full
export RUST_LOG=debug
export RUST_MIN_STACK=100000000
export PROVER_OUTPUT_DIR=test_zkp_test
export SCROLL_PROVER_ASSETS_DIR=/assets/test_assets
export DARWIN_V2_TEST_DIR=/assets
#export LD_LIBRARY_PATH=/:/usr/local/cuda/lib64
mkdir -p $PROVER_OUTPUT_DIR
@@ -15,16 +13,32 @@ mkdir -p $PROVER_OUTPUT_DIR
REPO=$(realpath ../..)
function build_test_bins() {
cd impl
cargo build --release
ln -f -s $(realpath target/release/libzkp.so) $REPO/prover/core/lib
ln -f -s $(realpath target/release/libzkp.so) $REPO/coordinator/internal/logic/verifier/lib
cd $REPO/prover
make tests_binary
go test -tags="gpu ffi" -timeout 0 -c core/prover_test.go
cd $REPO/coordinator
make libzkp
go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier
cd $REPO/common/libzkp
}
function build_test_bins_old() {
cd $REPO
cd prover
make libzkp
go test -tags="gpu ffi" -timeout 0 -c core/prover_test.go
cd ..
cd coordinator
make libzkp
go test -tags="gpu ffi" -timeout 0 -c ./internal/logic/verifier
cd ..
cd common/libzkp
}
build_test_bins
rm -rf $PROVER_OUTPUT_DIR/*
#rm -rf test_zkp_test/*
#rm -rf prover.log verifier.log
$REPO/prover/prover.test --exact zk_circuits_handler::darwin_v2::tests::test_circuits 2>&1 | tee prover.log
#$REPO/prover/core.test -test.v 2>&1 | tee prover.log
$REPO/coordinator/verifier.test -test.v 2>&1 | tee verifier.log

View File

@@ -28,10 +28,44 @@ dependencies = [
"cpufeatures",
]
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor",
"encoder",
"env_logger 0.10.0",
"eth-types 0.11.0",
"ethers-core",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.11.0",
]
[[package]]
name = "aggregator"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -62,40 +96,6 @@ dependencies = [
"zkevm-circuits 0.12.0",
]
[[package]]
name = "aggregator"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor",
"encoder",
"env_logger 0.10.0",
"eth-types 0.13.0",
"ethers-core",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.13.0",
]
[[package]]
name = "ahash"
version = "0.8.3"
@@ -171,9 +171,9 @@ dependencies = [
[[package]]
name = "anyhow"
version = "1.0.86"
version = "1.0.72"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b3d1d046238990b9cf5bcde22a3fb3584ee5cf65fb2765f454ed428c7a0063da"
checksum = "3b13c32d80ecc7ab747b80c3784bce54ee8a7a0cc4fbda9bf4cda2cf6fe90854"
[[package]]
name = "arc-swap"
@@ -568,10 +568,37 @@ version = "3.13.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"ethers-core",
"ethers-providers",
"ethers-signers",
"external-tracer 0.11.0",
"gadgets 0.11.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"poseidon-circuit",
"rand",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.12.0",
"ethers-core",
@@ -593,31 +620,6 @@ dependencies = [
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"ethers-core",
"ethers-providers",
"ethers-signers",
"gadgets 0.13.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"poseidon-circuit",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "byte-slice-cast"
version = "1.2.2"
@@ -1182,8 +1184,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"base64 0.13.1",
"ethers-core",
@@ -1200,6 +1202,7 @@ dependencies = [
"revm-primitives",
"serde",
"serde_json",
"serde_stacker",
"serde_with",
"sha3 0.10.8",
"strum 0.25.0",
@@ -1210,8 +1213,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"base64 0.13.1",
"ethers-core",
@@ -1366,11 +1369,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"geth-utils 0.12.0",
"eth-types 0.11.0",
"geth-utils 0.11.0",
"log",
"serde",
"serde_json",
@@ -1379,11 +1382,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.13.0",
"geth-utils 0.13.0",
"eth-types 0.12.0",
"geth-utils 0.12.0",
"log",
"serde",
"serde_json",
@@ -1561,10 +1564,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.12.0",
"eth-types 0.11.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1573,10 +1576,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.13.0",
"eth-types 0.12.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1596,8 +1599,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"env_logger 0.10.0",
"gobuild",
@@ -1606,8 +1609,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"env_logger 0.10.0",
"gobuild",
@@ -2353,10 +2356,25 @@ dependencies = [
"subtle",
]
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.11.0",
"ethers-core",
"ethers-signers",
"external-tracer 0.11.0",
"itertools 0.11.0",
"log",
"rand",
"rand_chacha",
]
[[package]]
name = "mock"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.12.0",
"ethers-core",
@@ -2369,24 +2387,23 @@ dependencies = [
]
[[package]]
name = "mock"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
name = "mpt-zktrie"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"eth-types 0.13.0",
"ethers-core",
"ethers-signers",
"external-tracer 0.13.0",
"itertools 0.11.0",
"eth-types 0.11.0",
"halo2curves",
"hex",
"log",
"rand",
"rand_chacha",
"num-bigint",
"poseidon-base",
"zktrie",
]
[[package]]
name = "mpt-zktrie"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.12.0",
"halo2curves",
@@ -2394,21 +2411,7 @@ dependencies = [
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "mpt-zktrie"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie",
]
[[package]]
@@ -2869,10 +2872,44 @@ dependencies = [
"unarray",
]
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"aggregator 0.11.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.11.0",
"chrono",
"dotenvy",
"eth-types 0.11.0",
"ethers-core",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.11.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.11.0",
]
[[package]]
name = "prover"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"aggregator 0.12.0",
"anyhow",
@@ -2903,40 +2940,6 @@ dependencies = [
"zkevm-circuits 0.12.0",
]
[[package]]
name = "prover"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"aggregator 0.13.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.13.0",
"chrono",
"dotenvy",
"eth-types 0.13.0",
"ethers-core",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.13.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.13.0",
]
[[package]]
name = "psm"
version = "0.1.21"
@@ -4540,10 +4543,52 @@ dependencies = [
"syn 2.0.27",
]
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
dependencies = [
"array-init",
"bus-mapping 0.11.0",
"either",
"env_logger 0.10.0",
"eth-types 0.11.0",
"ethers-core",
"ethers-signers",
"ff",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"array-init",
"bus-mapping 0.12.0",
@@ -4582,61 +4627,18 @@ dependencies = [
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"array-init",
"bus-mapping 0.13.0",
"either",
"env_logger 0.10.0",
"eth-types 0.13.0",
"ethers-core",
"ethers-signers",
"ff",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zkp"
version = "0.1.0"
dependencies = [
"anyhow",
"base64 0.13.1",
"env_logger 0.9.3",
"halo2_proofs",
"libc",
"log",
"once_cell",
"prover 0.11.0",
"prover 0.12.0",
"prover 0.13.0",
"serde",
"serde_derive",
"serde_json",
@@ -4649,16 +4651,7 @@ version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie_rust",
]
[[package]]
@@ -4675,20 +4668,6 @@ dependencies = [
"strum_macros 0.24.3",
]
[[package]]
name = "zktrie_rust"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"hex",
"lazy_static",
"num",
"num-derive",
"num-traits",
"strum 0.24.1",
"strum_macros 0.24.3",
]
[[package]]
name = "zstd"
version = "0.13.0"

View File

@@ -24,10 +24,10 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
# curie
prover_v3 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin
prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
# darwin_v2
prover_v5 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_v4 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.0", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.0"
env_logger = "0.9.0"
@@ -37,7 +37,6 @@ once_cell = "1.19"
serde = "1.0"
serde_derive = "1.0"
serde_json = "1.0.66"
anyhow = "1.0.86"
[profile.test]
opt-level = 3

View File

@@ -0,0 +1,69 @@
use crate::utils::{c_char_to_str, c_char_to_vec, panic_catch};
use libc::c_char;
use prover_v3::BatchProof as BatchProofLoVersion;
use prover_v4::{
aggregator::Verifier as VerifierHiVersion, utils::init_env_and_log,
BatchProof as BatchProofHiVersion, BundleProof,
};
use snark_verifier_sdk::verify_evm_calldata;
use std::{cell::OnceCell, env};
static mut VERIFIER: OnceCell<VerifierHiVersion> = OnceCell::new();
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_batch_verifier(params_dir: *const c_char, assets_dir: *const c_char) {
init_env_and_log("ffi_batch_verify");
let params_dir = c_char_to_str(params_dir);
let assets_dir = c_char_to_str(assets_dir);
// TODO: add a settings in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier_hi = VerifierHiVersion::from_dirs(params_dir, assets_dir);
VERIFIER.set(verifier_hi).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"curie" => 3,
"darwin" => 4,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
4
}
};
let verified = panic_catch(|| {
if fork_id == 3 {
// As of upgrade #3 (Curie), we verify batch proofs on-chain (EVM).
let proof = serde_json::from_slice::<BatchProofLoVersion>(proof.as_slice()).unwrap();
verify_evm_calldata(
include_bytes!("plonk_verifier_0.11.4.bin").to_vec(),
proof.calldata(),
)
} else {
// Post upgrade #4 (Darwin), batch proofs are not EVM-verifiable. Instead they are
// halo2 proofs meant to be bundled recursively.
let proof = serde_json::from_slice::<BatchProofHiVersion>(proof.as_slice()).unwrap();
VERIFIER.get().unwrap().verify_batch_proof(&proof)
}
});
verified.unwrap_or(false) as c_char
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(proof: *const c_char) -> c_char {
let proof = c_char_to_vec(proof);
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
let verified = panic_catch(|| VERIFIER.get().unwrap().verify_bundle_proof(proof));
verified.unwrap_or(false) as c_char
}

View File

@@ -0,0 +1,63 @@
use crate::utils::{c_char_to_str, c_char_to_vec, panic_catch};
use libc::c_char;
use prover_v3::{zkevm::Verifier as VerifierLoVersion, ChunkProof as ChunkProofLoVersion};
use prover_v4::{
utils::init_env_and_log, zkevm::Verifier as VerifierHiVersion,
ChunkProof as ChunkProofHiVersion,
};
use std::{cell::OnceCell, env};
static mut VERIFIER_LO_VERSION: OnceCell<VerifierLoVersion> = OnceCell::new();
static mut VERIFIER_HI_VERSION: OnceCell<VerifierHiVersion> = OnceCell::new();
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init_chunk_verifier(
params_dir: *const c_char,
v3_assets_dir: *const c_char,
v4_assets_dir: *const c_char,
) {
init_env_and_log("ffi_chunk_verify");
let params_dir = c_char_to_str(params_dir);
let v3_assets_dir = c_char_to_str(v3_assets_dir);
let v4_assets_dir = c_char_to_str(v4_assets_dir);
// TODO: add a settings in scroll-prover.
env::set_var("SCROLL_PROVER_ASSETS_DIR", v3_assets_dir);
let verifier_lo = VerifierLoVersion::from_dirs(params_dir, v3_assets_dir);
env::set_var("SCROLL_PROVER_ASSETS_DIR", v4_assets_dir);
let verifier_hi = VerifierHiVersion::from_dirs(params_dir, v4_assets_dir);
VERIFIER_LO_VERSION.set(verifier_lo).unwrap();
VERIFIER_HI_VERSION.set(verifier_hi).unwrap();
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
let proof = c_char_to_vec(proof);
let fork_name_str = c_char_to_str(fork_name);
let fork_id = match fork_name_str {
"curie" => 3,
"darwin" => 4,
_ => {
log::warn!("unexpected fork_name {fork_name_str}, treated as darwin");
4
}
};
let verified = panic_catch(|| {
if fork_id == 3 {
let proof = serde_json::from_slice::<ChunkProofLoVersion>(proof.as_slice()).unwrap();
VERIFIER_LO_VERSION.get().unwrap().verify_chunk_proof(proof)
} else {
let proof = serde_json::from_slice::<ChunkProofHiVersion>(proof.as_slice()).unwrap();
VERIFIER_HI_VERSION.get().unwrap().verify_chunk_proof(proof)
}
});
verified.unwrap_or(false) as c_char
}

View File

@@ -1,63 +1,4 @@
mod batch;
mod chunk;
mod types;
mod utils;
mod verifier;
use crate::utils::{c_char_to_str, c_char_to_vec};
use libc::c_char;
use prover_v5::utils::init_env_and_log;
use verifier::{TaskType, VerifierConfig};
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn init(config: *const c_char) {
init_env_and_log("ffi_init");
let config_str = c_char_to_str(config);
let verifier_config = serde_json::from_str::<VerifierConfig>(config_str).unwrap();
verifier::init(verifier_config);
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_chunk_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Chunk)
}
fn verify_proof(proof: *const c_char, fork_name: *const c_char, task_type: TaskType) -> c_char {
let proof = c_char_to_vec(proof);
let fork_name_str = c_char_to_str(fork_name);
let verifier = verifier::get_verifier(fork_name_str);
if let Err(e) = verifier {
log::warn!("failed to get verifier, error: {:#}", e);
return 0 as c_char;
}
match verifier.unwrap().verify(task_type, proof) {
Err(e) => {
log::error!("{:?} verify failed, error: {:#}", task_type, e);
false as c_char
}
Ok(result) => result as c_char,
}
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_batch_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Batch)
}
/// # Safety
#[no_mangle]
pub unsafe extern "C" fn verify_bundle_proof(
proof: *const c_char,
fork_name: *const c_char,
) -> c_char {
verify_proof(proof, fork_name, TaskType::Bundle)
}

View File

@@ -0,0 +1,22 @@
use serde::{Deserialize, Serialize};
// Represents the result of a chunk proof checking operation.
// `ok` indicates whether the proof checking was successful.
// `error` provides additional details in case the check failed.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct CheckChunkProofsResponse {
pub ok: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
}
// Encapsulates the result from generating a proof.
// `message` holds the generated proof in byte slice format.
// `error` provides additional details in case the proof generation failed.
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct ProofResult {
#[serde(skip_serializing_if = "Option::is_none")]
pub message: Option<Vec<u8>>,
#[serde(skip_serializing_if = "Option::is_none")]
pub error: Option<String>,
}

View File

@@ -1,110 +0,0 @@
mod darwin;
mod darwin_v2;
use anyhow::{bail, Result};
use darwin::DarwinVerifier;
use darwin_v2::DarwinV2Verifier;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use prover_v4::utils::load_params;
use serde::{Deserialize, Serialize};
use std::{cell::OnceCell, collections::BTreeMap, rc::Rc};
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TaskType {
Chunk,
Batch,
Bundle,
}
pub trait ProofVerifier {
fn verify(&self, task_type: TaskType, proof: Vec<u8>) -> Result<bool>;
}
#[derive(Debug, Serialize, Deserialize)]
pub struct CircuitConfig {
pub fork_name: String,
pub params_path: String,
pub assets_path: String,
}
#[derive(Debug, Serialize, Deserialize)]
pub struct VerifierConfig {
pub low_version_circuit: CircuitConfig,
pub high_version_circuit: CircuitConfig,
}
type HardForkName = String;
struct VerifierPair(HardForkName, Rc<Box<dyn ProofVerifier>>);
static mut VERIFIER_HIGH: OnceCell<VerifierPair> = OnceCell::new();
static mut VERIFIER_LOW: OnceCell<VerifierPair> = OnceCell::new();
static mut PARAMS_MAP: OnceCell<BTreeMap<u32, ParamsKZG<Bn256>>> = OnceCell::new();
pub fn init(config: VerifierConfig) {
let low_conf = config.low_version_circuit;
std::env::set_var("SCROLL_PROVER_ASSETS_DIR", &low_conf.assets_path);
let params_degrees = [
*prover_v4::config::LAYER2_DEGREE,
*prover_v4::config::LAYER4_DEGREE,
];
// params should be shared between low and high
let mut params_map = BTreeMap::new();
for degree in params_degrees {
if let std::collections::btree_map::Entry::Vacant(e) = params_map.entry(degree) {
match load_params(&low_conf.params_path, degree, None) {
Ok(params) => {
e.insert(params);
}
Err(e) => panic!(
"failed to load params, degree {}, dir {}, err {}",
degree, low_conf.params_path, e
),
}
}
}
unsafe {
PARAMS_MAP.set(params_map).unwrap_unchecked();
}
let verifier = DarwinVerifier::new(unsafe { PARAMS_MAP.get().unwrap() }, &low_conf.assets_path);
unsafe {
VERIFIER_LOW
.set(VerifierPair(
low_conf.fork_name,
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();
}
let high_conf = config.high_version_circuit;
let verifier =
DarwinV2Verifier::new(unsafe { PARAMS_MAP.get().unwrap() }, &high_conf.assets_path);
unsafe {
VERIFIER_HIGH
.set(VerifierPair(
high_conf.fork_name,
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();
}
}
pub fn get_verifier(fork_name: &str) -> Result<Rc<Box<dyn ProofVerifier>>> {
unsafe {
if let Some(verifier) = VERIFIER_LOW.get() {
if verifier.0 == fork_name {
return Ok(verifier.1.clone());
}
}
if let Some(verifier) = VERIFIER_HIGH.get() {
if verifier.0 == fork_name {
return Ok(verifier.1.clone());
}
}
}
bail!("failed to get verifier, key not found, {}", fork_name)
}

View File

@@ -1,48 +0,0 @@
use super::{ProofVerifier, TaskType};
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use crate::utils::panic_catch;
use prover_v4::{
aggregator::Verifier as AggVerifier, zkevm::Verifier, BatchProof, BundleProof, ChunkProof,
};
use std::{collections::BTreeMap, env};
pub struct DarwinVerifier<'params> {
verifier: Verifier<'params>,
agg_verifier: AggVerifier<'params>,
}
impl<'params> DarwinVerifier<'params> {
pub fn new(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, assets_dir: &str) -> Self {
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_params_and_assets(params_map, assets_dir);
let agg_verifier = AggVerifier::from_params_and_assets(params_map, assets_dir);
Self {
verifier,
agg_verifier,
}
}
}
impl<'params> ProofVerifier for DarwinVerifier<'params> {
fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
let result = panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
self.verifier.verify_chunk_proof(proof)
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_batch_proof(&proof)
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_bundle_proof(proof)
}
});
result.map_err(|e| anyhow::anyhow!(e))
}
}

View File

@@ -1,48 +0,0 @@
use super::{ProofVerifier, TaskType};
use anyhow::Result;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
use crate::utils::panic_catch;
use prover_v5::{
aggregator::Verifier as AggVerifier, zkevm::Verifier, BatchProof, BundleProof, ChunkProof,
};
use std::{collections::BTreeMap, env};
pub struct DarwinV2Verifier<'params> {
verifier: Verifier<'params>,
agg_verifier: AggVerifier<'params>,
}
impl<'params> DarwinV2Verifier<'params> {
pub fn new(params_map: &'params BTreeMap<u32, ParamsKZG<Bn256>>, assets_dir: &str) -> Self {
env::set_var("SCROLL_PROVER_ASSETS_DIR", assets_dir);
let verifier = Verifier::from_params_and_assets(params_map, assets_dir);
let agg_verifier = AggVerifier::from_params_and_assets(params_map, assets_dir);
Self {
verifier,
agg_verifier,
}
}
}
impl<'params> ProofVerifier for DarwinV2Verifier<'params> {
fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
let result = panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
self.verifier.verify_chunk_proof(proof)
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_batch_proof(&proof)
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
self.agg_verifier.verify_bundle_proof(proof)
}
});
result.map_err(|e| anyhow::anyhow!(e))
}
}

View File

@@ -1,10 +1,11 @@
// BatchVerifier is used to:
// - Verify a batch proof
// - Verify a bundle proof
void init(char* config);
void init_batch_verifier(char* params_dir, char* assets_dir);
char verify_batch_proof(char* proof, char* fork_name);
char verify_bundle_proof(char* proof, char* fork_name);
char verify_bundle_proof(char* proof);
void init_chunk_verifier(char* params_dir, char* v3_assets_dir, char* v4_assets_dir);
char verify_chunk_proof(char* proof, char* fork_name);
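
These C declarations are the FFI surface the Go coordinator links against. As a hedged illustration (not the repository's actual wrapper), a cgo call into verify_batch_proof could look like the sketch below; the #cgo flags, header location, and wrapper name are assumptions, and only the C signature comes from the header above.

```go
package verifier

/*
#cgo LDFLAGS: -lzkp -ldl
#include <stdlib.h>
#include "libzkp.h"
*/
import "C"

import "unsafe"

// VerifyBatchProof hands a JSON-encoded proof and a fork name across the FFI
// boundary and interprets the returned char as a boolean.
func VerifyBatchProof(proofJSON []byte, forkName string) bool {
	cProof := C.CString(string(proofJSON))
	cFork := C.CString(forkName)
	defer C.free(unsafe.Pointer(cProof))
	defer C.free(unsafe.Pointer(cFork))
	return C.verify_batch_proof(cProof, cFork) != 0
}
```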

View File

@@ -21,10 +21,9 @@ import (
// TestcontainerApps testcontainers struct
type TestcontainerApps struct {
postgresContainer *postgres.PostgresContainer
l2GethContainer *testcontainers.DockerContainer
poSL1Container compose.ComposeStack
web3SignerContainer *testcontainers.DockerContainer
postgresContainer *postgres.PostgresContainer
l2GethContainer *testcontainers.DockerContainer
poSL1Container compose.ComposeStack
// common time stamp in nanoseconds.
Timestamp int
@@ -113,47 +112,6 @@ func (t *TestcontainerApps) StartPoSL1Container() error {
return nil
}
func (t *TestcontainerApps) StartWeb3SignerContainer(chainId int) error {
if t.web3SignerContainer != nil && t.web3SignerContainer.IsRunning() {
return nil
}
var (
err error
rootDir string
)
if rootDir, err = findProjectRootDir(); err != nil {
return fmt.Errorf("failed to find project root directory: %v", err)
}
// web3signerconf/keyconf.yaml may contain multiple keys configured and web3signer then choses one corresponding to from field of tx
web3SignerConfDir := filepath.Join(rootDir, "common", "testcontainers", "web3signerconf")
req := testcontainers.ContainerRequest{
Image: "consensys/web3signer:develop",
ExposedPorts: []string{"9000/tcp"},
Cmd: []string{"--key-config-path", "/web3signerconf/", "eth1", "--chain-id", fmt.Sprintf("%d", chainId)},
Files: []testcontainers.ContainerFile{
{
HostFilePath: web3SignerConfDir,
ContainerFilePath: "/",
FileMode: 0o777,
},
},
WaitingFor: wait.ForLog("ready to handle signing requests"),
}
genericContainerReq := testcontainers.GenericContainerRequest{
ContainerRequest: req,
Started: true,
}
container, err := testcontainers.GenericContainer(context.Background(), genericContainerReq)
if err != nil {
log.Printf("failed to start web3signer container: %s", err)
return err
}
t.web3SignerContainer, _ = container.(*testcontainers.DockerContainer)
return nil
}
// GetPoSL1EndPoint returns the endpoint of the running PoS L1 endpoint
func (t *TestcontainerApps) GetPoSL1EndPoint() (string, error) {
if t.poSL1Container == nil {
@@ -195,14 +153,6 @@ func (t *TestcontainerApps) GetL2GethEndPoint() (string, error) {
return endpoint, nil
}
// GetL2GethEndPoint returns the endpoint of the running L2Geth container
func (t *TestcontainerApps) GetWeb3SignerEndpoint() (string, error) {
if t.web3SignerContainer == nil || !t.web3SignerContainer.IsRunning() {
return "", errors.New("web3signer is not running")
}
return t.web3SignerContainer.PortEndpoint(context.Background(), "9000/tcp", "http")
}
// GetGormDBClient returns a gorm.DB by connecting to the running postgres container
func (t *TestcontainerApps) GetGormDBClient() (*gorm.DB, error) {
endpoint, err := t.GetDBEndPoint()
@@ -251,11 +201,6 @@ func (t *TestcontainerApps) Free() {
t.poSL1Container = nil
}
}
if t.web3SignerContainer != nil && t.web3SignerContainer.IsRunning() {
if err := t.web3SignerContainer.Terminate(ctx); err != nil {
log.Printf("failed to stop web3signer container: %s", err)
}
}
}
// findProjectRootDir find project root directory
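
The web3signer helpers in this hunk are exercised by the test in the next file. For reference, a minimal usage sketch follows; the import path and the NewTestcontainerApps constructor name are assumptions, while StartWeb3SignerContainer, GetWeb3SignerEndpoint, and Free follow the code above.

```go
package main

import (
	"fmt"
	"log"

	"scroll-tech/common/testcontainers"
)

func main() {
	apps := testcontainers.NewTestcontainerApps()
	defer apps.Free()

	// Start web3signer for chain id 1 and fetch its HTTP endpoint.
	if err := apps.StartWeb3SignerContainer(1); err != nil {
		log.Fatalf("start web3signer: %v", err)
	}
	endpoint, err := apps.GetWeb3SignerEndpoint()
	if err != nil {
		log.Fatalf("get web3signer endpoint: %v", err)
	}
	fmt.Println("web3signer endpoint:", endpoint)
}
```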

View File

@@ -44,11 +44,6 @@ func TestNewTestcontainerApps(t *testing.T) {
assert.NoError(t, err)
assert.NotNil(t, ethclient)
assert.NoError(t, testApps.StartWeb3SignerContainer(1))
endpoint, err = testApps.GetWeb3SignerEndpoint()
assert.NoError(t, err)
assert.NotEmpty(t, endpoint)
// test free testcontainers
testApps.Free()
endpoint, err = testApps.GetDBEndPoint()
@@ -62,8 +57,4 @@ func TestNewTestcontainerApps(t *testing.T) {
endpoint, err = testApps.GetPoSL1EndPoint()
assert.EqualError(t, err, "PoS L1 container is not running")
assert.Empty(t, endpoint)
endpoint, err = testApps.GetWeb3SignerEndpoint()
assert.EqualError(t, err, "web3signer is not running")
assert.Empty(t, endpoint)
}

View File

@@ -1,7 +0,0 @@
type: "file-raw"
keyType: "SECP256K1"
privateKey: "0x1313131313131313131313131313131313131313131313131313131313131313"
---
type: "file-raw"
keyType: "SECP256K1"
privateKey: "0x1212121212121212121212121212121212121212121212121212121212121212"

View File

@@ -4,6 +4,7 @@ import (
"errors"
"fmt"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/go-ethereum/common"
)
@@ -51,10 +52,9 @@ type ChunkTaskDetail struct {
// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader interface{} `json:"batch_header"`
BlobBytes []byte `json:"blob_bytes"`
ChunkInfos []*ChunkInfo `json:"chunk_infos"`
ChunkProofs []*ChunkProof `json:"chunk_proofs"`
BatchHeader *codecv3.DABatch `json:"batch_header"`
}
// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.

View File

@@ -16,7 +16,6 @@ import (
"github.com/modern-go/reflect2"
"github.com/scroll-tech/go-ethereum/core"
"github.com/scroll-tech/go-ethereum/log"
)
// TryTimes try run several times until the function return true.
@@ -122,7 +121,6 @@ func OverrideConfigWithEnv(cfg interface{}, prefix string) error {
}
default:
if envValue, exists := os.LookupEnv(envKey); exists {
log.Info("Overriding config with env var", "key", envKey)
err := setField(fieldValue, envValue)
if err != nil {
return err
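
The hunk above touches OverrideConfigWithEnv, the helper behind the env-var commits in this compare ("add envvar loading", "change to SCROLL_ROLLUP envvar prefix"). A rough sketch of the idea, with a hypothetical helper name: an env key is derived from a prefix plus a field name, and its value overrides the JSON-loaded config value when the variable is set.

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// overrideFromEnv returns the value of "<PREFIX>_<NAME>" if that environment
// variable is set, otherwise the current (JSON-loaded) value.
func overrideFromEnv(prefix, name, current string) string {
	envKey := strings.ToUpper(prefix + "_" + name)
	if v, ok := os.LookupEnv(envKey); ok {
		return v
	}
	return current
}

func main() {
	os.Setenv("SCROLL_ROLLUP_DB_DSN", "postgres://override")
	dsn := overrideFromEnv("SCROLL_ROLLUP", "DB_DSN", "postgres://from-json")
	fmt.Println(dsn) // prints the env override
}
```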

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.4.66"
var tag = "v4.4.46"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -88,25 +88,13 @@ func (c *CoordinatorApp) MockConfig(store bool) error {
}
// Reset prover manager config for manager test cases.
cfg.ProverManager = &coordinatorConfig.ProverManager{
ProversPerSession: 1,
Verifier: &coordinatorConfig.VerifierConfig{
MockMode: true,
LowVersionCircuit: &coordinatorConfig.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "darwin",
MinProverVersion: "v4.2.0",
},
HighVersionCircuit: &coordinatorConfig.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "darwinV2",
MinProverVersion: "v4.3.0",
},
},
ProversPerSession: 1,
Verifier: &coordinatorConfig.VerifierConfig{MockMode: true},
BatchCollectionTimeSec: 60,
ChunkCollectionTimeSec: 60,
SessionAttempts: 10,
MaxVerifierWorkers: 4,
MinProverVersion: "v1.0.0",
}
endpoint, err := c.testApps.GetDBEndPoint()
if err != nil {

View File

@@ -6,20 +6,14 @@
"batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180,
"verifier": {
"fork_name": "bernoulli",
"mock_mode": true,
"low_version_circuit": {
"params_path": "params",
"assets_path": "assets",
"fork_name": "darwin",
"min_prover_version": "v4.4.43"
},
"high_version_circuit": {
"params_path": "params",
"assets_path": "assets",
"fork_name": "darwinV2",
"min_prover_version": "v4.4.45"
}
}
"params_path": "",
"assets_path_lo": "",
"assets_path_hi": ""
},
"max_verifier_workers": 4,
"min_prover_version": "v1.0.0"
},
"db": {
"driver_name": "postgres",

View File

@@ -6,10 +6,7 @@ require (
github.com/appleboy/gin-jwt/v2 v2.9.1
github.com/gin-gonic/gin v1.9.1
github.com/go-resty/resty/v2 v2.7.0
github.com/google/uuid v1.6.0
github.com/mitchellh/mapstructure v1.5.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/shopspring/decimal v1.3.1
github.com/stretchr/testify v1.9.0
@@ -45,6 +42,12 @@ require (
google.golang.org/protobuf v1.33.0 // indirect
)
require (
github.com/google/uuid v1.6.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb
)
require (
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect

View File

@@ -173,8 +173,8 @@ github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjR
github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068 h1:oVGwhg4cCq35B04eG/S4OBXDwXiFH7+LezuH2ZTRBPs=
github.com/scroll-tech/da-codec v0.0.0-20240819100936-c6af3bbe7068/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -24,6 +24,10 @@ type ProverManager struct {
ChunkCollectionTimeSec int `json:"chunk_collection_time_sec"`
// BundleCollectionTimeSec bundle Proof collection time (in seconds).
BundleCollectionTimeSec int `json:"bundle_collection_time_sec"`
// Max number of workers in verifier worker pool
MaxVerifierWorkers int `json:"max_verifier_workers"`
// MinProverVersion is the minimum version of the prover that is required.
MinProverVersion string `json:"min_prover_version"`
}
// L2 loads l2geth configuration items.
@@ -47,19 +51,13 @@ type Config struct {
Auth *Auth `json:"auth"`
}
// CircuitConfig circuit items.
type CircuitConfig struct {
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
ForkName string `json:"fork_name"`
MinProverVersion string `json:"min_prover_version"`
}
// VerifierConfig load zk verifier config.
type VerifierConfig struct {
MockMode bool `json:"mock_mode"`
LowVersionCircuit *CircuitConfig `json:"low_version_circuit"`
HighVersionCircuit *CircuitConfig `json:"high_version_circuit"`
ForkName string `json:"fork_name"`
MockMode bool `json:"mock_mode"`
ParamsPath string `json:"params_path"`
AssetsPathLo string `json:"assets_path_lo"` // lower version Verifier
AssetsPathHi string `json:"assets_path_hi"` // higher version Verifier
}
// NewConfig returns a new instance of Config.
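
The two sides of this hunk disagree on the verifier config shape: a flat VerifierConfig with params_path / assets_path_lo / assets_path_hi versus nested per-circuit CircuitConfig blocks (low_version_circuit / high_version_circuit). The sketch below is illustrative only; the struct definitions are copied from the hunk above and the sample values from the coordinator config file earlier in this diff.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// CircuitConfig circuit items (as in the hunk above).
type CircuitConfig struct {
	ParamsPath       string `json:"params_path"`
	AssetsPath       string `json:"assets_path"`
	ForkName         string `json:"fork_name"`
	MinProverVersion string `json:"min_prover_version"`
}

// VerifierConfig load zk verifier config (nested form).
type VerifierConfig struct {
	MockMode           bool           `json:"mock_mode"`
	LowVersionCircuit  *CircuitConfig `json:"low_version_circuit"`
	HighVersionCircuit *CircuitConfig `json:"high_version_circuit"`
}

func main() {
	raw := []byte(`{
		"mock_mode": true,
		"low_version_circuit":  {"fork_name": "darwin",   "min_prover_version": "v4.4.43"},
		"high_version_circuit": {"fork_name": "darwinV2", "min_prover_version": "v4.4.45"}
	}`)
	var cfg VerifierConfig
	if err := json.Unmarshal(raw, &cfg); err != nil {
		panic(err)
	}
	fmt.Println(cfg.LowVersionCircuit.ForkName, cfg.HighVersionCircuit.ForkName)
}
```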

View File

@@ -6,6 +6,7 @@ import (
jwt "github.com/appleboy/gin-jwt/v2"
"github.com/gin-gonic/gin"
"github.com/scroll-tech/go-ethereum/log"
"gorm.io/gorm"
"scroll-tech/coordinator/internal/config"
@@ -44,34 +45,33 @@ func (a *AuthController) Login(c *gin.Context) (interface{}, error) {
return "", fmt.Errorf("check the login parameter failure: %w", err)
}
hardForkNames, err := a.loginLogic.ProverHardForkName(&login)
if err != nil {
return "", fmt.Errorf("prover hard name failure:%w", err)
}
// check the challenge is used, if used, return failure
if err := a.loginLogic.InsertChallengeString(c, login.Message.Challenge); err != nil {
return "", fmt.Errorf("login insert challenge string failure:%w", err)
}
returnData := types.LoginParameterWithHardForkName{
HardForkName: hardForkNames,
LoginParameter: login,
}
return returnData, nil
return login, nil
}
// PayloadFunc returns jwt.MapClaims with {public key, prover name}.
func (a *AuthController) PayloadFunc(data interface{}) jwt.MapClaims {
v, ok := data.(types.LoginParameterWithHardForkName)
v, ok := data.(types.LoginParameter)
if !ok {
return jwt.MapClaims{}
}
publicKey := v.PublicKey
if publicKey == "" {
var err error
publicKey, err = v.RecoverPublicKeyFromSignature()
if err != nil {
// do not handle error here since already called v.Verify() beforehands so there should be no error
// add log just in case some error happens
log.Error("impossible path: failed to recover public key from signature", "error", err.Error())
}
}
return jwt.MapClaims{
types.HardForkName: v.HardForkName,
types.PublicKey: v.PublicKey,
types.PublicKey: publicKey,
types.ProverName: v.Message.ProverName,
types.ProverVersion: v.Message.ProverVersion,
}
@@ -91,10 +91,5 @@ func (a *AuthController) IdentityHandler(c *gin.Context) interface{} {
if proverVersion, ok := claims[types.ProverVersion]; ok {
c.Set(types.ProverVersion, proverVersion)
}
if hardForkName, ok := claims[types.HardForkName]; ok {
c.Set(types.HardForkName, hardForkName)
}
return nil
}

View File

@@ -26,7 +26,7 @@ func InitController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.D
panic("proof receiver new verifier failure")
}
log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap, "bundleVerifier", vf.BundleVkMap)
log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap)
Auth = NewAuthController(db, cfg, vf)
GetTask = NewGetTaskController(cfg, chainCfg, db, reg)

View File

@@ -108,6 +108,10 @@ func (ptc *GetTaskController) GetTasks(ctx *gin.Context) {
func (ptc *GetTaskController) proofType(para *coordinatorType.GetTaskParameter) message.ProofType {
var proofTypes []message.ProofType
if para.TaskType != 0 {
proofTypes = append(proofTypes, message.ProofType(para.TaskType))
}
for _, proofType := range para.TaskTypes {
proofTypes = append(proofTypes, message.ProofType(proofType))
}

View File

@@ -3,7 +3,6 @@ package auth
import (
"errors"
"fmt"
"strings"
"github.com/gin-gonic/gin"
"github.com/scroll-tech/go-ethereum/log"
@@ -24,35 +23,31 @@ type LoginLogic struct {
chunkVks map[string]struct{}
batchVKs map[string]struct{}
bundleVks map[string]struct{}
proverVersionHardForkMap map[string][]string
}
// NewLoginLogic new a LoginLogic
func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic {
proverVersionHardForkMap := make(map[string][]string)
if version.CheckScrollRepoVersion(cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion) {
log.Error("config file error, low verifier min_prover_version should not more than high verifier min_prover_version",
"low verifier min_prover_version", cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion,
"high verifier min_prover_version", cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion)
panic("verifier config file error")
l := &LoginLogic{
cfg: cfg,
chunkVks: make(map[string]struct{}),
batchVKs: make(map[string]struct{}),
bundleVks: make(map[string]struct{}),
challengeOrm: orm.NewChallenge(db),
}
var highHardForks []string
highHardForks = append(highHardForks, cfg.ProverManager.Verifier.HighVersionCircuit.ForkName)
highHardForks = append(highHardForks, cfg.ProverManager.Verifier.LowVersionCircuit.ForkName)
proverVersionHardForkMap[cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion] = highHardForks
proverVersionHardForkMap[cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion] = []string{cfg.ProverManager.Verifier.LowVersionCircuit.ForkName}
return &LoginLogic{
cfg: cfg,
chunkVks: vf.ChunkVKMap,
batchVKs: vf.BatchVKMap,
bundleVks: vf.BundleVkMap,
challengeOrm: orm.NewChallenge(db),
proverVersionHardForkMap: proverVersionHardForkMap,
for _, vk := range vf.ChunkVKMap {
l.chunkVks[vk] = struct{}{}
}
for _, vk := range vf.BatchVKMap {
l.batchVKs[vk] = struct{}{}
}
for _, vk := range vf.BundleVkMap {
l.bundleVks[vk] = struct{}{}
}
return l
}
// InsertChallengeString insert and check the challenge string is existed
@@ -61,16 +56,18 @@ func (l *LoginLogic) InsertChallengeString(ctx *gin.Context, challenge string) e
}
func (l *LoginLogic) Check(login *types.LoginParameter) error {
verify, err := login.Verify()
if err != nil || !verify {
log.Error("auth message verify failure", "prover_name", login.Message.ProverName,
"prover_version", login.Message.ProverVersion, "message", login.Message)
return errors.New("auth message verify failure")
if login.PublicKey != "" {
verify, err := login.Verify()
if err != nil || !verify {
log.Error("auth message verify failure", "prover_name", login.Message.ProverName,
"prover_version", login.Message.ProverVersion, "message", login.Message)
return errors.New("auth message verify failure")
}
}
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion) {
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.MinProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, login.Message.ProverVersion)
l.cfg.ProverManager.MinProverVersion, login.Message.ProverVersion)
}
if len(login.Message.ProverTypes) > 0 {
@@ -108,18 +105,3 @@ func (l *LoginLogic) Check(login *types.LoginParameter) error {
}
return nil
}
// ProverHardForkName retrieves hard fork name which prover belongs to
func (l *LoginLogic) ProverHardForkName(login *types.LoginParameter) (string, error) {
proverVersionSplits := strings.Split(login.Message.ProverVersion, "-")
if len(proverVersionSplits) == 0 {
return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
}
proverVersion := proverVersionSplits[0]
if hardForkNames, ok := l.proverVersionHardForkMap[proverVersion]; ok {
return strings.Join(hardForkNames, ","), nil
}
return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
}
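
ProverHardForkName above resolves which hard forks a prover may work on from the version prefix of its prover_version string. A small, hedged sketch of that lookup follows; the map contents and the "version-commit" string format are assumptions based on the config values seen elsewhere in this diff.

```go
package main

import (
	"fmt"
	"strings"
)

// hardForkNamesFor takes the semantic part before the first "-" in a prover
// version string and looks up the hard fork names registered for it.
func hardForkNamesFor(proverVersion string, versionForkMap map[string][]string) (string, error) {
	parts := strings.Split(proverVersion, "-")
	if len(parts) == 0 || parts[0] == "" {
		return "", fmt.Errorf("invalid prover_version: %s", proverVersion)
	}
	if forks, ok := versionForkMap[parts[0]]; ok {
		return strings.Join(forks, ","), nil
	}
	return "", fmt.Errorf("invalid prover_version: %s", proverVersion)
}

func main() {
	m := map[string][]string{
		"v4.4.45": {"darwinV2", "darwin"}, // higher-version prover accepts both forks
		"v4.4.43": {"darwin"},
	}
	names, err := hardForkNamesFor("v4.4.45-abcdef", m)
	fmt.Println(names, err)
}
```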

View File

@@ -9,9 +9,7 @@ import (
"github.com/gin-gonic/gin"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/da-codec/encoding/codecv4"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
@@ -31,8 +29,9 @@ import (
type BatchProverTask struct {
BaseProverTask
batchTaskGetTaskTotal *prometheus.CounterVec
batchTaskGetTaskProver *prometheus.CounterVec
batchAttemptsExceedTotal prometheus.Counter
batchTaskGetTaskTotal *prometheus.CounterVec
batchTaskGetTaskProver *prometheus.CounterVec
}
// NewBatchProverTask new a batch collector
@@ -48,6 +47,10 @@ func NewBatchProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *go
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
batchAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_batch_attempts_exceed_total",
Help: "Total number of batch attempts exceed.",
}),
batchTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_batch_get_task_total",
Help: "Total number of batch get task.",
@@ -120,15 +123,6 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// bp.recoverActiveAttempts(ctx, batchTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", batchTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{
TaskID: batchTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
@@ -214,9 +208,17 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
chunkInfos = append(chunkInfos, &chunkInfo)
}
taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs)
if err != nil {
return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", task.TaskID, err)
taskDetail := message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}
if hardForkName == "darwin" {
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(batch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header, taskID:%s err:%w", task.TaskID, decodeErr)
}
taskDetail.BatchHeader = batchHeader
}
chunkProofsBytes, err := json.Marshal(taskDetail)
@@ -239,34 +241,3 @@ func (bp *BatchProverTask) recoverActiveAttempts(ctx *gin.Context, batchTask *or
log.Error("failed to recover batch active attempts", "hash", batchTask.Hash, "error", err)
}
}
func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []*message.ChunkProof) (*message.BatchTaskDetail, error) {
taskDetail := &message.BatchTaskDetail{
ChunkInfos: chunkInfos,
ChunkProofs: chunkProofs,
}
if encoding.CodecVersion(dbBatch.CodecVersion) != encoding.CodecV3 && encoding.CodecVersion(dbBatch.CodecVersion) != encoding.CodecV4 {
return taskDetail, nil
}
if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV3 {
batchHeader, decodeErr := codecv3.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v3) for batch %d: %w", dbBatch.Index, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail.BlobBytes = dbBatch.BlobBytes
} else {
batchHeader, decodeErr := codecv4.NewDABatchFromBytes(dbBatch.BatchHeader)
if decodeErr != nil {
return nil, fmt.Errorf("failed to decode batch header (v4) for batch %d: %w", dbBatch.Index, decodeErr)
}
taskDetail.BatchHeader = batchHeader
taskDetail.BlobBytes = dbBatch.BlobBytes
}
return taskDetail, nil
}
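The new batchAttemptsExceedTotal counter (and its bundle/chunk counterparts) is registered through promauto against the coordinator's registry. A self-contained sketch of the same pattern, assuming only the standard client_golang API; the metric name and port below are illustrative:

package main

import (
    "net/http"

    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promauto"
    "github.com/prometheus/client_golang/prometheus/promhttp"
)

func main() {
    reg := prometheus.NewRegistry()

    // Registered the same way as coordinator_batch_attempts_exceed_total above.
    attemptsExceed := promauto.With(reg).NewCounter(prometheus.CounterOpts{
        Name: "example_batch_attempts_exceed_total",
        Help: "Total number of batch tasks that exceeded the attempt limit.",
    })

    // Incremented wherever the attempt limit is hit.
    attemptsExceed.Inc()

    // Expose the registry for scraping.
    http.Handle("/metrics", promhttp.HandlerFor(reg, promhttp.HandlerOpts{}))
    _ = http.ListenAndServe(":9090", nil)
}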

View File

@@ -27,8 +27,9 @@ import (
type BundleProverTask struct {
BaseProverTask
bundleTaskGetTaskTotal *prometheus.CounterVec
bundleTaskGetTaskProver *prometheus.CounterVec
bundleAttemptsExceedTotal prometheus.Counter
bundleTaskGetTaskTotal *prometheus.CounterVec
bundleTaskGetTaskProver *prometheus.CounterVec
}
// NewBundleProverTask creates a new bundle collector
@@ -45,6 +46,10 @@ func NewBundleProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *g
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
bundleAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_bundle_attempts_exceed_total",
Help: "Total number of bundle attempts exceed.",
}),
bundleTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_bundle_get_task_total",
Help: "Total number of bundle get task.",
@@ -117,15 +122,6 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
return nil, ErrCoordinatorInternalFailure
}
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// bp.recoverActiveAttempts(ctx, bundleTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", bundleTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{
TaskID: bundleTask.Hash,
ProverPublicKey: taskCtx.PublicKey,

View File

@@ -27,8 +27,9 @@ import (
type ChunkProverTask struct {
BaseProverTask
chunkTaskGetTaskTotal *prometheus.CounterVec
chunkTaskGetTaskProver *prometheus.CounterVec
chunkAttemptsExceedTotal prometheus.Counter
chunkTaskGetTaskTotal *prometheus.CounterVec
chunkTaskGetTaskProver *prometheus.CounterVec
}
// NewChunkProverTask creates a new chunk prover task
@@ -43,6 +44,10 @@ func NewChunkProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *go
proverTaskOrm: orm.NewProverTask(db),
proverBlockListOrm: orm.NewProverBlockList(db),
},
chunkAttemptsExceedTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "coordinator_chunk_attempts_exceed_total",
Help: "Total number of chunk attempts exceed.",
}),
chunkTaskGetTaskTotal: promauto.With(reg).NewCounterVec(prometheus.CounterOpts{
Name: "coordinator_chunk_get_task_total",
Help: "Total number of chunk get task.",
@@ -65,7 +70,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
for i := 0; i < 5; i++ {
var getTaskError error
var tmpChunkTask *orm.Chunk
tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get assigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
@@ -74,7 +79,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
// Why fetch again? To support assigning one task to multiple provers, chunks already in `ProvingTaskAssigned`
// status must also be handed out. A single `proving_status in (1, 2)` predicate would not use the postgres index,
// so the SQL is split into two queries (see the sketch after this file's hunk).
if tmpChunkTask == nil {
tmpChunkTask, getTaskError = cp.chunkOrm.GetUnassignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
tmpChunkTask, getTaskError = cp.chunkOrm.GetUnassignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get unassigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
@@ -115,15 +120,6 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, ErrCoordinatorInternalFailure
}
//if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
// cp.recoverActiveAttempts(ctx, chunkTask)
// log.Error("incompatible prover version",
// "requisite hard fork name", hardForkName,
// "prover hard fork name", taskCtx.HardForkNames,
// "task_id", chunkTask.Hash)
// return nil, ErrCoordinatorInternalFailure
//}
proverTask := orm.ProverTask{
TaskID: chunkTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
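As the comment above notes, the chunk lookup is split into two single-status queries instead of one proving_status in (1, 2) query. A rough sketch of the shapes involved, as Go string constants; the placeholder style and exact status values are illustrative, since the real queries are built with fmt.Sprintf in the Chunk ORM:

const (
    // Combined form the comment warns against: the IN (1, 2) predicate may not
    // use the index on proving_status.
    combinedQuery = `SELECT * FROM chunk WHERE proving_status IN (1, 2)
        AND total_attempts < $1 AND active_attempts < $2
        AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;`

    // Split form actually used: the caller tries the assigned-status query first,
    // then falls back to the unassigned-status query, each with an equality
    // predicate that stays on the index.
    assignedQuery = `SELECT * FROM chunk WHERE proving_status = 2
        AND total_attempts < $1 AND active_attempts < $2
        AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;`
    unassignedQuery = `SELECT * FROM chunk WHERE proving_status = 1
        AND total_attempts < $1 AND active_attempts < $2
        AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;`
)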

View File

@@ -3,7 +3,6 @@ package provertask
import (
"errors"
"fmt"
"strings"
"sync"
"github.com/gin-gonic/gin"
@@ -50,13 +49,11 @@ type proverTaskContext struct {
PublicKey string
ProverName string
ProverVersion string
HardForkNames map[string]struct{}
}
// checkParameter checks whether the prover task parameters are legal
func (b *BaseProverTask) checkParameter(ctx *gin.Context) (*proverTaskContext, error) {
var ptc proverTaskContext
ptc.HardForkNames = make(map[string]struct{})
publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
if !publicKeyExist {
@@ -76,15 +73,6 @@ func (b *BaseProverTask) checkParameter(ctx *gin.Context) (*proverTaskContext, e
}
ptc.ProverVersion = proverVersion.(string)
hardForkNamesStr, hardForkNameExist := ctx.Get(coordinatorType.HardForkName)
if !hardForkNameExist {
return nil, errors.New("get hard fork name from context failed")
}
hardForkNames := strings.Split(hardForkNamesStr.(string), ",")
for _, hardForkName := range hardForkNames {
ptc.HardForkNames[hardForkName] = struct{}{}
}
isBlocked, err := b.proverBlockListOrm.IsPublicKeyBlocked(ctx.Copy(), publicKey.(string))
if err != nil {
return nil, fmt.Errorf("failed to check whether the public key %s is blocked before assigning a chunk task, err: %w, proverName: %s, proverVersion: %s", publicKey, err, proverName, proverVersion)

View File

@@ -187,7 +187,7 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof, hardForkName)
success, verifyErr = m.verifier.VerifyBundleProof(&bundleProof)
}
if verifyErr != nil || !success {

View File

@@ -0,0 +1,11 @@
#!/bin/bash
work_dir="$(dirname -- "${BASH_SOURCE[0]}")"
work_dir="$(cd -- "$work_dir" && pwd)"
echo "$work_dir"
rm -f "$work_dir"/*.vkey
version=release-v0.11.4
wget "https://circuit-release.s3.us-west-2.amazonaws.com/${version}/chunk_vk.vkey" -O "$work_dir/chunk_vk.vkey"
wget "https://circuit-release.s3.us-west-2.amazonaws.com/${version}/agg_vk.vkey" -O "$work_dir/agg_vk.vkey"

View File

@@ -10,8 +10,8 @@ import (
// NewVerifier Sets up a mock verifier.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
batchVKMap := map[string]struct{}{"mock_vk": {}}
chunkVKMap := map[string]struct{}{"mock_vk": {}}
batchVKMap := map[string]string{cfg.ForkName: "mock_vk"}
chunkVKMap := map[string]string{cfg.ForkName: "mock_vk"}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap}, nil
}
@@ -32,7 +32,7 @@ func (v *Verifier) VerifyBatchProof(proof *message.BatchProof, forkName string)
}
// VerifyBundleProof returns a mock verification result for a BundleProof.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string) (bool, error) {
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof) (bool, error) {
if string(proof.Proof) == InvalidTestProof {
return false, nil
}

View File

@@ -10,7 +10,7 @@ const InvalidTestProof = "this is a invalid proof"
// Verifier represents a rust ffi to a halo2 verifier.
type Verifier struct {
cfg *config.VerifierConfig
ChunkVKMap map[string]struct{}
BatchVKMap map[string]struct{}
BundleVkMap map[string]struct{}
ChunkVKMap map[string]string
BatchVKMap map[string]string
BundleVkMap map[string]string
}
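With the VK maps now keyed by fork name and valued by the base64-encoded verifying key, checking a prover's reported VKs at login reduces to a lookup plus comparison. One plausible shape of such a check; vkMatchesFork, vkMap and proverVKs are illustrative names, not the coordinator's actual login code:

// vkMatchesFork reports whether any VK reported by the prover equals the
// verifier's VK for the given fork. vkMap is e.g. Verifier.ChunkVKMap.
func vkMatchesFork(vkMap map[string]string, forkName string, proverVKs []string) bool {
    want, ok := vkMap[forkName]
    if !ok {
        return false
    }
    for _, vk := range proverVKs {
        if vk == want {
            return true
        }
    }
    return false
}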

View File

@@ -11,9 +11,11 @@ package verifier
import "C" //nolint:typecheck
import (
"embed"
"encoding/base64"
"encoding/json"
"io"
"io/fs"
"os"
"path"
"unsafe"
@@ -25,87 +27,52 @@ import (
"scroll-tech/coordinator/internal/config"
)
// This struct maps to `CircuitConfig` in common/libzkp/impl/src/verifier.rs.
// Defining a brand-new struct here eliminates side effects in case fields
// in `*config.CircuitConfig` are changed.
type rustCircuitConfig struct {
ForkName string `json:"fork_name"`
ParamsPath string `json:"params_path"`
AssetsPath string `json:"assets_path"`
}
func newRustCircuitConfig(cfg *config.CircuitConfig) *rustCircuitConfig {
return &rustCircuitConfig{
ForkName: cfg.ForkName,
ParamsPath: cfg.ParamsPath,
AssetsPath: cfg.AssetsPath,
}
}
// This struct maps to `VerifierConfig` in common/libzkp/impl/src/verifier.rs.
// Defining a brand-new struct here eliminates side effects in case fields
// in `*config.VerifierConfig` are changed.
type rustVerifierConfig struct {
LowVersionCircuit *rustCircuitConfig `json:"low_version_circuit"`
HighVersionCircuit *rustCircuitConfig `json:"high_version_circuit"`
}
func newRustVerifierConfig(cfg *config.VerifierConfig) *rustVerifierConfig {
return &rustVerifierConfig{
LowVersionCircuit: newRustCircuitConfig(cfg.LowVersionCircuit),
HighVersionCircuit: newRustCircuitConfig(cfg.HighVersionCircuit),
}
}
// NewVerifier Sets up a rust ffi to call verify.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
if cfg.MockMode {
chunkVKMap := map[string]struct{}{"mock_vk": {}}
batchVKMap := map[string]struct{}{"mock_vk": {}}
bundleVKMap := map[string]struct{}{"mock_vk": {}}
chunkVKMap := map[string]string{cfg.ForkName: "mock_vk"}
batchVKMap := map[string]string{cfg.ForkName: "mock_vk"}
bundleVKMap := map[string]string{cfg.ForkName: "mock_vk"}
return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap, BundleVkMap: bundleVKMap}, nil
}
verifierConfig := newRustVerifierConfig(cfg)
configBytes, err := json.Marshal(verifierConfig)
if err != nil {
return nil, err
}
configStr := C.CString(string(configBytes))
paramsPathStr := C.CString(cfg.ParamsPath)
assetsPathLoStr := C.CString(cfg.AssetsPathLo)
assetsPathHiStr := C.CString(cfg.AssetsPathHi)
defer func() {
C.free(unsafe.Pointer(configStr))
C.free(unsafe.Pointer(paramsPathStr))
C.free(unsafe.Pointer(assetsPathLoStr))
C.free(unsafe.Pointer(assetsPathHiStr))
}()
C.init(configStr)
C.init_batch_verifier(paramsPathStr, assetsPathHiStr)
C.init_chunk_verifier(paramsPathStr, assetsPathLoStr, assetsPathHiStr)
v := &Verifier{
cfg: cfg,
ChunkVKMap: make(map[string]struct{}),
BatchVKMap: make(map[string]struct{}),
BundleVkMap: make(map[string]struct{}),
ChunkVKMap: make(map[string]string),
BatchVKMap: make(map[string]string),
BundleVkMap: make(map[string]string),
}
bundleVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_bundle.vkey"))
bundleVK, err := v.readVK(path.Join(cfg.AssetsPathHi, "vk_bundle.vkey"))
if err != nil {
return nil, err
}
batchVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_batch.vkey"))
batchVK, err := v.readVK(path.Join(cfg.AssetsPathHi, "vk_batch.vkey"))
if err != nil {
return nil, err
}
chunkVK, err := v.readVK(path.Join(cfg.HighVersionCircuit.AssetsPath, "vk_chunk.vkey"))
chunkVK, err := v.readVK(path.Join(cfg.AssetsPathHi, "vk_chunk.vkey"))
if err != nil {
return nil, err
}
v.BundleVkMap[bundleVK] = struct{}{}
v.BatchVKMap[batchVK] = struct{}{}
v.ChunkVKMap[chunkVK] = struct{}{}
v.BundleVkMap[cfg.ForkName] = bundleVK
v.BatchVKMap[cfg.ForkName] = batchVK
v.ChunkVKMap[cfg.ForkName] = chunkVK
if err := v.loadLowVersionVKs(cfg); err != nil {
if err := v.loadEmbedVK(); err != nil {
return nil, err
}
v.loadCurieVersionVKs()
return v, nil
}
@@ -164,7 +131,7 @@ func (v *Verifier) VerifyChunkProof(proof *message.ChunkProof, forkName string)
}
// VerifyBundleProof verifies a ZkProof for a bundle of batches by marshaling it and verifying it via the EVM verifier.
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string) (bool, error) {
func (v *Verifier) VerifyBundleProof(proof *message.BundleProof) (bool, error) {
if v.cfg.MockMode {
log.Info("Mock mode, verifier disabled")
if string(proof.Proof) == InvalidTestProof {
@@ -179,14 +146,12 @@ func (v *Verifier) VerifyBundleProof(proof *message.BundleProof, forkName string
}
proofStr := C.CString(string(buf))
forkNameStr := C.CString(forkName)
defer func() {
C.free(unsafe.Pointer(proofStr))
C.free(unsafe.Pointer(forkNameStr))
}()
log.Info("Start to verify bundle proof ...")
verified := C.verify_bundle_proof(proofStr, forkNameStr)
verified := C.verify_bundle_proof(proofStr)
return verified != 0, nil
}
@@ -202,27 +167,23 @@ func (v *Verifier) readVK(filePat string) (string, error) {
return base64.StdEncoding.EncodeToString(byt), nil
}
// loadLowVersionVKs loads the VKs of the low-version circuit (currently darwin).
func (v *Verifier) loadLowVersionVKs(cfg *config.VerifierConfig) error {
bundleVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_bundle.vkey"))
//go:embed legacy_vk/*
var legacyVKFS embed.FS
func (v *Verifier) loadEmbedVK() error {
batchVKBytes, err := fs.ReadFile(legacyVKFS, "legacy_vk/agg_vk.vkey")
if err != nil {
log.Error("load embed batch vk failure", "err", err)
return err
}
batchVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_batch.vkey"))
chunkVkBytes, err := fs.ReadFile(legacyVKFS, "legacy_vk/chunk_vk.vkey")
if err != nil {
log.Error("load embed chunk vk failure", "err", err)
return err
}
chunkVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_chunk.vkey"))
if err != nil {
return err
}
v.BundleVkMap[bundleVK] = struct{}{}
v.BatchVKMap[batchVK] = struct{}{}
v.ChunkVKMap[chunkVK] = struct{}{}
v.BatchVKMap["curie"] = base64.StdEncoding.EncodeToString(batchVKBytes)
v.ChunkVKMap["curie"] = base64.StdEncoding.EncodeToString(chunkVkBytes)
return nil
}
func (v *Verifier) loadCurieVersionVKs() {
v.BatchVKMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA="] = struct{}{}
v.ChunkVKMap["AAAAGQAAAATyWEABRbJ6hQQ5/zLX1gTasr7349minA9rSgMS6gDeHwZKqikRiO3md+pXjjxMHnKQtmXYgMXhJSvlmZ+Ws+cheuly2X1RuNQzcZuRImaKPR9LJsVZYsXfJbuqdKX8p0Gj8G83wMJOmTzNVUyUol0w0lTU+CEiTpHOnxBsTF3EWaW3s1u4ycOgWt1c9M6s7WmaBZLYgAWYCunO5CLCLApNGbCASeck/LuSoedEri5u6HccCKU2khG6zl6W07jvYSbDVLJktbjRiHv+/HQix+K14j8boo8Z/unhpwXCsPxkQA=="] = struct{}{}
}
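Every FFI call in this file follows the same pattern: convert the Go string with C.CString, hand the pointer to the Rust side, and free it afterwards. A small self-contained sketch of that pattern as a reusable helper; withCString is illustrative and not part of the package:

package main

/*
#include <stdlib.h>
*/
import "C"

import "unsafe"

// withCString hands fn a C copy of s and releases it once fn returns,
// mirroring the CString / defer-free pattern around the init and verify calls.
func withCString(s string, fn func(*C.char)) {
    cs := C.CString(s)
    defer C.free(unsafe.Pointer(cs))
    fn(cs)
}

func main() {
    withCString("/assets/test_params", func(p *C.char) {
        // e.g. C.init_batch_verifier(p, ...) in the real verifier
    })
}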

View File

@@ -18,8 +18,8 @@ import (
var (
paramsPath = flag.String("params", "/assets/test_params", "params dir")
assetsPathLo = flag.String("assets_lo", "/assets/test_assets_lo", "assets dir")
assetsPathHi = flag.String("assets", "/assets/test_assets", "assets dir")
assetsPathLo = flag.String("assets", "/assets/test_assets_lo", "assets dir")
assetsPathHi = flag.String("assets", "/assets/test_assets_hi", "assets dir")
batchProofPath = flag.String("batch_proof", "/assets/proof_data/batch_proof", "batch proof file path")
chunkProofPath1 = flag.String("chunk_proof1", "/assets/proof_data/chunk_proof1", "chunk proof file path 1")
chunkProofPath2 = flag.String("chunk_proof2", "/assets/proof_data/chunk_proof2", "chunk proof file path 2")
@@ -29,38 +29,29 @@ func TestFFI(t *testing.T) {
as := assert.New(t)
cfg := &config.VerifierConfig{
MockMode: false,
LowVersionCircuit: &config.CircuitConfig{
ParamsPath: *paramsPath,
AssetsPath: *assetsPathLo,
ForkName: "darwin",
MinProverVersion: "",
},
HighVersionCircuit: &config.CircuitConfig{
ParamsPath: *paramsPath,
AssetsPath: *assetsPathHi,
ForkName: "darwinV2",
MinProverVersion: "",
},
MockMode: false,
ParamsPath: *paramsPath,
AssetsPathLo: *assetsPathLo,
AssetsPathHi: *assetsPathHi,
}
v, err := NewVerifier(cfg)
as.NoError(err)
chunkProof1 := readChunkProof(*chunkProofPath1, as)
chunkOk1, err := v.VerifyChunkProof(chunkProof1, "darwinV2")
chunkOk1, err := v.VerifyChunkProof(chunkProof1)
as.NoError(err)
as.True(chunkOk1)
t.Log("Verified chunk proof 1")
chunkProof2 := readChunkProof(*chunkProofPath2, as)
chunkOk2, err := v.VerifyChunkProof(chunkProof2, "darwinV2")
chunkOk2, err := v.VerifyChunkProof(chunkProof2)
as.NoError(err)
as.True(chunkOk2)
t.Log("Verified chunk proof 2")
batchProof := readBatchProof(*batchProofPath, as)
batchOk, err := v.VerifyBatchProof(batchProof, "darwinV2")
batchOk, err := v.VerifyBatchProof(batchProof, "curie")
as.NoError(err)
as.True(batchOk)
t.Log("Verified batch proof")

View File

@@ -31,9 +31,6 @@ type Batch struct {
WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"`
BlobBytes []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
// proof
ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
@@ -228,19 +225,6 @@ func (o *Batch) GetBatchesByBundleHash(ctx context.Context, bundleHash string) (
return batches, nil
}
// GetBatchByIndex retrieves the batch by the given index.
func (o *Batch) GetBatchByIndex(ctx context.Context, index uint64) (*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("index = ?", index)
var batch Batch
if err := db.First(&batch).Error; err != nil {
return nil, fmt.Errorf("Batch.GetBatchByIndex error: %w, index: %v", err, index)
}
return &batch, nil
}
// InsertBatch inserts a new batch into the database.
func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...*gorm.DB) (*Batch, error) {
if batch == nil {

View File

@@ -74,11 +74,11 @@ func (*Chunk) TableName() string {
// GetUnassignedChunk retrieves an unassigned chunk based on the specified attempt limits.
// The returned chunk is the one with the lowest index.
func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (*Chunk, error) {
func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Chunk, error) {
var chunk Chunk
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, height)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts)
err := db.Raw(sql).Scan(&chunk).Error
if err != nil {
return nil, fmt.Errorf("Chunk.GetUnassignedChunk error: %w", err)
@@ -91,11 +91,11 @@ func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTo
// GetAssignedChunk retrieves an assigned chunk based on the specified attempt limits.
// The returned chunk is the one with the lowest index.
func (o *Chunk) GetAssignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (*Chunk, error) {
func (o *Chunk) GetAssignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Chunk, error) {
var chunk Chunk
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, height)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts)
err := db.Raw(sql).Scan(&chunk).Error
if err != nil {
return nil, fmt.Errorf("Chunk.GetAssignedChunk error: %w", err)

View File

@@ -87,55 +87,6 @@ func (o *L2Block) GetL2BlockByNumber(ctx context.Context, blockNumber uint64) (*
return &l2Block, nil
}
// GetL2BlocksInRange retrieves the L2 blocks within the specified range (inclusive).
// The range is closed, i.e., it includes both start and end block numbers.
// The returned blocks are sorted in ascending order by their block number.
func (o *L2Block) GetL2BlocksInRange(ctx context.Context, startBlockNumber uint64, endBlockNumber uint64) ([]*encoding.Block, error) {
if startBlockNumber > endBlockNumber {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: start block number should be less than or equal to end block number, start block: %v, end block: %v", startBlockNumber, endBlockNumber)
}
db := o.db.WithContext(ctx)
db = db.Model(&L2Block{})
db = db.Select("header, transactions, withdraw_root, row_consumption")
db = db.Where("number >= ? AND number <= ?", startBlockNumber, endBlockNumber)
db = db.Order("number ASC")
var l2Blocks []L2Block
if err := db.Find(&l2Blocks).Error; err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
// sanity check
if uint64(len(l2Blocks)) != endBlockNumber-startBlockNumber+1 {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange: unexpected number of results, expected: %v, got: %v", endBlockNumber-startBlockNumber+1, len(l2Blocks))
}
var blocks []*encoding.Block
for _, v := range l2Blocks {
var block encoding.Block
if err := json.Unmarshal([]byte(v.Transactions), &block.Transactions); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
block.Header = &gethTypes.Header{}
if err := json.Unmarshal([]byte(v.Header), block.Header); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
block.WithdrawRoot = common.HexToHash(v.WithdrawRoot)
if err := json.Unmarshal([]byte(v.RowConsumption), &block.RowConsumption); err != nil {
return nil, fmt.Errorf("L2Block.GetL2BlocksInRange error: %w, start block: %v, end block: %v", err, startBlockNumber, endBlockNumber)
}
blocks = append(blocks, &block)
}
return blocks, nil
}
// InsertL2Blocks inserts l2 blocks into the "l2_block" table.
// for unit test
func (o *L2Block) InsertL2Blocks(ctx context.Context, blocks []*encoding.Block) error {

View File

@@ -116,6 +116,25 @@ func (o *ProverTask) GetProverTasksByHashes(ctx context.Context, taskType messag
return proverTasks, nil
}
// GetAssignedProverTaskByTaskIDAndProver gets the assigned prover task by task ID, prover public key and prover version.
// TODO: deprecate this function once all provers have upgraded.
func (o *ProverTask) GetAssignedProverTaskByTaskIDAndProver(ctx context.Context, taskType message.ProofType, taskID, proverPublicKey, proverVersion string) (*ProverTask, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type", int(taskType))
db = db.Where("task_id", taskID)
db = db.Where("prover_public_key", proverPublicKey)
db = db.Where("prover_version", proverVersion)
db = db.Where("proving_status", types.ProverAssigned)
var proverTask ProverTask
err := db.First(&proverTask).Error
if err != nil {
return nil, fmt.Errorf("ProverTask.GetProverTaskByTaskIDAndProver err:%w, taskID:%s, pubkey:%s, prover_version:%s", err, taskID, proverPublicKey, proverVersion)
}
return &proverTask, nil
}
// GetProverTaskByUUIDAndPublicKey gets the prover task by UUID and public key
func (o *ProverTask) GetProverTaskByUUIDAndPublicKey(ctx context.Context, uuid, publicKey string) (*ProverTask, error) {
db := o.db.WithContext(ctx)

View File

@@ -18,8 +18,6 @@ const (
ProverName = "prover_name"
// ProverVersion the prover version for context
ProverVersion = "prover_version"
// HardForkName the hard fork name for context
HardForkName = "hard_fork_name"
)
// LoginSchema for /login response
@@ -28,6 +26,22 @@ type LoginSchema struct {
Token string `json:"token"`
}
// TODO: only used for the darwin upgrade; delete in the next upgrade
type identity struct {
ProverName string `json:"prover_name"`
ProverVersion string `json:"prover_version"`
Challenge string `json:"challenge"`
}
func (i *identity) Hash() ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}
// Message the login message struct
type Message struct {
Challenge string `form:"challenge" json:"challenge" binding:"required"`
@@ -37,12 +51,6 @@ type Message struct {
VKs []string `form:"vks" json:"vks"`
}
// LoginParameterWithHardForkName constructs new payload for login
type LoginParameterWithHardForkName struct {
LoginParameter
HardForkName string `form:"hard_fork_name" json:"hard_fork_name"`
}
// LoginParameter for /login api
type LoginParameter struct {
Message Message `form:"message" json:"message" binding:"required"`
@@ -85,6 +93,28 @@ func (a *LoginParameter) Verify() (bool, error) {
return isValid, nil
}
// RecoverPublicKeyFromSignature recovers the public key from the signature.
// This method exists for pre-darwin compatibility.
func (a *LoginParameter) RecoverPublicKeyFromSignature() (string, error) {
curieIdentity := identity{
ProverName: a.Message.ProverName,
ProverVersion: a.Message.ProverVersion,
Challenge: a.Message.Challenge,
}
hash, err := curieIdentity.Hash()
if err != nil {
return "", err
}
sig := common.FromHex(a.Signature)
// recover public key
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return "", err
}
return common.Bytes2Hex(crypto.CompressPubkey(pk)), nil
}
// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func (i *Message) Hash() ([]byte, error) {
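The pre-darwin path above recovers the prover's public key from the signature rather than verifying against a supplied one. A minimal round-trip sketch of that flow, assuming go-ethereum's crypto and rlp packages; the local identity mirror below is illustrative and not the coordinator's type:

package main

import (
    "fmt"

    "github.com/scroll-tech/go-ethereum/common"
    "github.com/scroll-tech/go-ethereum/crypto"
    "github.com/scroll-tech/go-ethereum/rlp"
)

// identity mirrors the struct above: RLP-encode, hash with Keccak256,
// then sign and recover over that 32-byte hash.
type identity struct {
    ProverName    string
    ProverVersion string
    Challenge     string
}

func main() {
    id := identity{ProverName: "test", ProverVersion: "v4.4.32-test", Challenge: "challenge-token"}

    byt, err := rlp.EncodeToBytes(&id)
    if err != nil {
        panic(err)
    }
    hash := crypto.Keccak256Hash(byt)

    key, _ := crypto.GenerateKey()
    sig, err := crypto.Sign(hash[:], key) // 65-byte [R || S || V] signature
    if err != nil {
        panic(err)
    }

    // What RecoverPublicKeyFromSignature does internally.
    pk, err := crypto.SigToPub(hash[:], sig)
    if err != nil {
        panic(err)
    }
    fmt.Println(common.Bytes2Hex(crypto.CompressPubkey(pk)) ==
        common.Bytes2Hex(crypto.CompressPubkey(&key.PublicKey))) // true
}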

View File

@@ -60,10 +60,11 @@ func TestGenerateSignature(t *testing.T) {
authMsg := LoginParameter{
Message: Message{
ProverName: "test",
ProverVersion: "v4.4.45-37af5ef5-38a68e2-1c5093c",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjQ4Mzg0ODUsIm9yaWdfaWF0IjoxNzI0ODM0ODg1LCJyYW5kb20iOiJ6QmdNZGstNGc4UzNUNTFrVEFsYk1RTXg2TGJ4SUs4czY3ejM2SlNuSFlJPSJ9.x9PvihhNx2w4_OX5uCrv8QJCNYVQkIi-K2k8XFXYmik",
ProverTypes: []ProverType{ProverTypeChunk},
VKs: []string{"mock_vk"},
ProverVersion: "v4.4.32-37af5ef5-38a68e2-1c5093c",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjEzMjc5MTIsIm9yaWdfaWF0IjoxNzIxMzI0MzEyLCJyYW5kb20iOiJWMVFlT19yNEV5eGRmYUtDalprVExEa0ZIemEyNTdQRG93dTV4SnVxYTdZPSJ9.x-B_TnkTUvs8-hiMfJXejxetAP6rXfeRUmyZ3S0uBiM",
ProverTypes: []ProverType{ProverTypeBatch},
VKs: []string{"AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA=",
"AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD1DEjW4Kell67H07wazT5DdzrSh4+amh+cmosQHp9p9snFypyoBGt3UHtoJGQBZlywZWDS9ht5pnaEoGBdaKcQk+lFb+WxTiId0KOAa0mafTZTQw8yToy57Jple64qzlRu1dux30tZZGuerLN1CKzg5Xl2iOpMK+l87jCINwVp5cUtF/XrvhBbU7onKh3KBiy99iUqVyA3Y6iiIZhGKWBSuSA4bNgDYIoVkqjHpdL35aEShoRO6pNXt7rDzxFoPzH0JuPI54nE4OhVrzZXwtkAEosxVa/fszcE092FH+HhhtxZBYe/KEzwdISU9TOPdId3UF/UMYC0MiYOlqffVTgAg="},
},
PublicKey: publicKeyHex,
}

View File

@@ -3,6 +3,7 @@ package types
// GetTaskParameter for ProverTasks request parameter
type GetTaskParameter struct {
ProverHeight uint64 `form:"prover_height" json:"prover_height"`
TaskType int `form:"task_type" json:"task_type"`
TaskTypes []int `form:"task_types" json:"task_types"`
}

View File

@@ -33,6 +33,12 @@ import (
"scroll-tech/coordinator/internal/route"
)
const (
forkNumberTwo = 2
forkNumberOne = 1
minProverVersion = "v2.0.0"
)
var (
conf *config.Config
@@ -66,7 +72,7 @@ func randomURL() string {
return fmt.Sprintf("localhost:%d", 10000+2000+id.Int64())
}
func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, forks []string) (*cron.Collector, *http.Server) {
func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, nameForkMap map[string]int64) (*cron.Collector, *http.Server) {
var err error
db, err = testApps.GetGormDBClient()
@@ -84,23 +90,13 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
ProversPerSession: proversPerSession,
Verifier: &config.VerifierConfig{
MockMode: true,
LowVersionCircuit: &config.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "homestead",
MinProverVersion: "v4.2.0",
},
HighVersionCircuit: &config.CircuitConfig{
ParamsPath: "",
AssetsPath: "",
ForkName: "bernoulli",
MinProverVersion: "v4.3.0",
},
},
BatchCollectionTimeSec: 10,
ChunkCollectionTimeSec: 10,
BundleCollectionTimeSec: 10,
MaxVerifierWorkers: 10,
SessionAttempts: 5,
MinProverVersion: minProverVersion,
},
Auth: &config.Auth{
ChallengeExpireDurationSec: tokenTimeout,
@@ -109,12 +105,20 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
}
var chainConf params.ChainConfig
for _, forkName := range forks {
for forkName, forkNumber := range nameForkMap {
switch forkName {
case "shanghai":
chainConf.ShanghaiBlock = big.NewInt(forkNumber)
case "bernoulli":
chainConf.BernoulliBlock = big.NewInt(100)
chainConf.BernoulliBlock = big.NewInt(forkNumber)
case "london":
chainConf.LondonBlock = big.NewInt(forkNumber)
case "istanbul":
chainConf.IstanbulBlock = big.NewInt(forkNumber)
case "homestead":
chainConf.HomesteadBlock = big.NewInt(0)
chainConf.HomesteadBlock = big.NewInt(forkNumber)
case "eip155":
chainConf.EIP155Block = big.NewInt(forkNumber)
}
}
@@ -197,7 +201,7 @@ func TestApis(t *testing.T) {
func testHandshake(t *testing.T) {
// Setup coordinator and http server.
coordinatorURL := randomURL()
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
proofCollector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -210,7 +214,7 @@ func testHandshake(t *testing.T) {
func testFailedHandshake(t *testing.T) {
// Setup coordinator and http server.
coordinatorURL := randomURL()
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
proofCollector.Stop()
}()
@@ -228,7 +232,7 @@ func testFailedHandshake(t *testing.T) {
func testGetTaskBlocked(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -272,7 +276,7 @@ func testGetTaskBlocked(t *testing.T) {
func testOutdatedProverVersion(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"homestead": forkNumberOne})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -284,14 +288,12 @@ func testOutdatedProverVersion(t *testing.T) {
batchProver := newMockProver(t, "prover_batch_test", coordinatorURL, message.ProofTypeBatch, "v1.999.999")
assert.True(t, chunkProver.healthCheckSuccess(t))
expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, chunkProver.proverVersion)
expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", minProverVersion, chunkProver.proverVersion)
code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
assert.Equal(t, types.ErrJWTCommonErr, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, batchProver.proverVersion)
expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", minProverVersion, batchProver.proverVersion)
code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
assert.Equal(t, types.ErrJWTCommonErr, code)
assert.Equal(t, expectedErr, errors.New(errMsg))
@@ -299,7 +301,7 @@ func testOutdatedProverVersion(t *testing.T) {
func testValidProof(t *testing.T) {
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -382,7 +384,7 @@ func testValidProof(t *testing.T) {
func testInvalidProof(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -470,7 +472,7 @@ func testInvalidProof(t *testing.T) {
func testProofGeneratedFailed(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -571,7 +573,7 @@ func testProofGeneratedFailed(t *testing.T) {
func testTimeoutProof(t *testing.T) {
// Setup coordinator and ws server.
coordinatorURL := randomURL()
collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"darwinV2"})
collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, map[string]int64{"istanbul": forkNumberTwo})
defer func() {
collector.Stop()
assert.NoError(t, httpHandler.Shutdown(context.Background()))

View File

@@ -160,7 +160,7 @@ func (r *mockProver) getProverTask(t *testing.T, proofType message.ProofType) (*
resp, err := client.R().
SetHeader("Content-Type", "application/json").
SetHeader("Authorization", fmt.Sprintf("Bearer %s", token)).
SetBody(map[string]interface{}{"prover_height": 100, "task_types": []int{int(proofType)}}).
SetBody(map[string]interface{}{"prover_height": 100, "task_type": int(proofType)}).
SetResult(&result).
Post("http://" + r.coordinatorURL + "/coordinator/v1/get_task")
assert.NoError(t, err)

View File

@@ -4,8 +4,6 @@ import (
"encoding/json"
"os"
"path/filepath"
"scroll-tech/common/utils"
)
// DBConfig db config
@@ -31,11 +29,5 @@ func NewConfig(file string) (*DBConfig, error) {
return nil, err
}
// Override config with environment variables
err = utils.OverrideConfigWithEnv(cfg, "SCROLL_ROLLUP_DB_CONFIG")
if err != nil {
return nil, err
}
return cfg, nil
}

View File

@@ -59,20 +59,20 @@ func testResetDB(t *testing.T) {
cur, err := Current(pgDB)
assert.NoError(t, err)
// total number of tables.
assert.Equal(t, int64(24), cur)
assert.Equal(t, int64(22), cur)
}
func testMigrate(t *testing.T) {
assert.NoError(t, Migrate(pgDB))
cur, err := Current(pgDB)
assert.NoError(t, err)
assert.Equal(t, int64(24), cur)
assert.Equal(t, int64(22), cur)
}
func testRollback(t *testing.T) {
version, err := Current(pgDB)
assert.NoError(t, err)
assert.Equal(t, int64(24), version)
assert.Equal(t, int64(22), version)
assert.NoError(t, Rollback(pgDB, nil))

View File

@@ -1,23 +0,0 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE chunk
ADD COLUMN codec_version SMALLINT NOT NULL DEFAULT 0,
ADD COLUMN enable_compress BOOLEAN NOT NULL DEFAULT false;
ALTER TABLE batch
ADD COLUMN enable_compress BOOLEAN NOT NULL DEFAULT false;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS chunk
DROP COLUMN IF EXISTS enable_compress,
DROP COLUMN IF EXISTS codec_version;
ALTER TABLE IF EXISTS batch
DROP COLUMN IF EXISTS enable_compress;
-- +goose StatementEnd

View File

@@ -1,15 +0,0 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE batch
ADD COLUMN blob_bytes BYTEA;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS batch
DROP COLUMN IF EXISTS blob_bytes;
-- +goose StatementEnd

View File

@@ -1,14 +1,5 @@
bitbucket.org/liamstask/goose v0.0.0-20150115234039-8488cc47d90c h1:bkb2NMGo3/Du52wvYj9Whth5KZfMV6d3O0Vbr3nz/UE=
bitbucket.org/liamstask/goose v0.0.0-20150115234039-8488cc47d90c/go.mod h1:hSVuE3qU7grINVSwrmzHfpg9k87ALBk+XaualNyUzI4=
cloud.google.com/go v0.34.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
cloud.google.com/go v0.38.0/go.mod h1:990N+gfupTy94rShfmMCWGDn0LpTmnzTp2qbd1dvSRU=
cloud.google.com/go v0.43.0/go.mod h1:BOSR3VbTLkk6FDC/TcffxP4NF/FFBGA5ku+jvKOP7pg=
cloud.google.com/go v0.44.1/go.mod h1:iSa0KzasP4Uvy3f1mN/7PiObzGgflwredwwASm/v6AU=
cloud.google.com/go v0.44.2/go.mod h1:60680Gw3Yr4ikxnPRS/oxxkBccT6SA1yMk63TGekxKY=
cloud.google.com/go v0.45.1/go.mod h1:RpBamKRgapWJb87xiFSdk4g1CME7QZg3uwTez+TSTjc=
cloud.google.com/go v0.46.3/go.mod h1:a6bKKbmY7er1mI7TEI4lsAkts/mkhTSZK8w33B4RAg0=
cloud.google.com/go v0.50.0/go.mod h1:r9sluTvynVuxRIOHXQEHMFffphuXHOMZMycpNR5e6To=
cloud.google.com/go v0.51.0/go.mod h1:hWtGJ6gnXH+KgDv+V0zFGDvpi07n3z8ZNj3T1RW0Gcw=
cloud.google.com/go v0.110.10 h1:LXy9GEO+timppncPIAZoOj3l58LIU9k+kn48AN7IO3Y=
cloud.google.com/go v0.110.10/go.mod h1:v1OoFqYxiBkUrruItNM3eT4lLByNjxmJSV/xDKJNnic=
cloud.google.com/go v0.112.0/go.mod h1:3jEEVwZ/MHU4djK5t5RHuKOA/GbLddgTdVubX1qnPD4=
@@ -61,13 +52,9 @@ cloud.google.com/go/batch v1.8.0/go.mod h1:k8V7f6VE2Suc0zUM4WtoibNrA6D3dqBpB+++e
cloud.google.com/go/beyondcorp v1.0.3 h1:VXf9SnrnSmj2BF2cHkoTHvOUp8gjsz1KJFOMW7czdsY=
cloud.google.com/go/beyondcorp v1.0.3/go.mod h1:HcBvnEd7eYr+HGDd5ZbuVmBYX019C6CEXBonXbCVwJo=
cloud.google.com/go/beyondcorp v1.0.4/go.mod h1:Gx8/Rk2MxrvWfn4WIhHIG1NV7IBfg14pTKv1+EArVcc=
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
cloud.google.com/go/bigquery v1.57.1 h1:FiULdbbzUxWD0Y4ZGPSVCDLvqRSyCIO6zKV7E2nf5uA=
cloud.google.com/go/bigquery v1.57.1/go.mod h1:iYzC0tGVWt1jqSzBHqCr3lrRn0u13E8e+AqowBsDgug=
cloud.google.com/go/bigquery v1.59.1/go.mod h1:VP1UJYgevyTwsV7desjzNzDND5p6hZB+Z8gZJN1GQUc=
cloud.google.com/go/bigtable v1.2.0 h1:F4cCmA4nuV84V5zYQ3MKY+M1Cw1avHDuf3S/LcZPA9c=
cloud.google.com/go/bigtable v1.2.0/go.mod h1:JcVAOl45lrTmQfLj7T6TxyMzIN/3FGGcFm+2xVAli2o=
cloud.google.com/go/billing v1.17.4 h1:77/4kCqzH6Ou5CCDzNmqmboE+WvbwFBJmw1QZQz19AI=
cloud.google.com/go/billing v1.17.4/go.mod h1:5DOYQStCxquGprqfuid/7haD7th74kyMBHkjO/OvDtk=
cloud.google.com/go/billing v1.18.2/go.mod h1:PPIwVsOOQ7xzbADCwNe8nvK776QpfrOAUkvKjCUcpSE=
@@ -129,7 +116,6 @@ cloud.google.com/go/dataproc/v2 v2.4.0/go.mod h1:3B1Ht2aRB8VZIteGxQS/iNSJGzt9+CA
cloud.google.com/go/dataqna v0.8.4 h1:NJnu1kAPamZDs/if3bJ3+Wb6tjADHKL83NUWsaIp2zg=
cloud.google.com/go/dataqna v0.8.4/go.mod h1:mySRKjKg5Lz784P6sCov3p1QD+RZQONRMRjzGNcFd0c=
cloud.google.com/go/dataqna v0.8.5/go.mod h1:vgihg1mz6n7pb5q2YJF7KlXve6tCglInd6XO0JGOlWM=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.15.0 h1:0P9WcsQeTWjuD1H14JIY7XQscIPQ4Laje8ti96IC5vg=
cloud.google.com/go/datastore v1.15.0/go.mod h1:GAeStMBIt9bPS7jMJA85kgkpsMkvseWWXiaHya9Jes8=
cloud.google.com/go/datastream v1.10.3 h1:Z2sKPIB7bT2kMW5Uhxy44ZgdJzxzE5uKjavoW+EuHEE=
@@ -269,8 +255,6 @@ cloud.google.com/go/policytroubleshooter v1.10.3/go.mod h1:+ZqG3agHT7WPb4EBIRqUv
cloud.google.com/go/privatecatalog v0.9.4 h1:Vo10IpWKbNvc/z/QZPVXgCiwfjpWoZ/wbgful4Uh/4E=
cloud.google.com/go/privatecatalog v0.9.4/go.mod h1:SOjm93f+5hp/U3PqMZAHTtBtluqLygrDrVO8X8tYtG0=
cloud.google.com/go/privatecatalog v0.9.5/go.mod h1:fVWeBOVe7uj2n3kWRGlUQqR/pOd450J9yZoOECcQqJk=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
cloud.google.com/go/pubsub v1.33.0 h1:6SPCPvWav64tj0sVX/+npCBKhUi/UjJehy9op/V3p2g=
cloud.google.com/go/pubsub v1.33.0/go.mod h1:f+w71I33OMyxf9VpMVcZbnG5KSUkCOUHYpFd5U1GdRc=
cloud.google.com/go/pubsub v1.36.1/go.mod h1:iYjCa9EzWOoBiTdd4ps7QoMtMln5NwaZQpK1hbRfBDE=
@@ -324,8 +308,6 @@ cloud.google.com/go/spanner v1.56.0/go.mod h1:DndqtUKQAt3VLuV2Le+9Y3WTnq5cNKrnLb
cloud.google.com/go/speech v1.21.0 h1:qkxNao58oF8ghAHE1Eghen7XepawYEN5zuZXYWaUTA4=
cloud.google.com/go/speech v1.21.0/go.mod h1:wwolycgONvfz2EDU8rKuHRW3+wc9ILPsAWoikBEWavY=
cloud.google.com/go/speech v1.21.1/go.mod h1:E5GHZXYQlkqWQwY5xRSLHw2ci5NMQNG52FfMU1aZrIA=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
cloud.google.com/go/storage v1.30.1 h1:uOdMxAs8HExqBlnLtnQyP0YkvbiDpdGShGKtx6U/oNM=
cloud.google.com/go/storage v1.30.1/go.mod h1:NfxhC0UJE1aXSx7CIIbCf7y9HKT7BiccwkR7+P7gN8E=
cloud.google.com/go/storage v1.38.0/go.mod h1:tlUADB0mAb9BgYls9lq+8MGkfzOXuLrnHXlpHmvFJoY=
@@ -374,11 +356,6 @@ cloud.google.com/go/websecurityscanner v1.6.5/go.mod h1:QR+DWaxAz2pWooylsBF854/I
cloud.google.com/go/workflows v1.12.3 h1:qocsqETmLAl34mSa01hKZjcqAvt699gaoFbooGGMvaM=
cloud.google.com/go/workflows v1.12.3/go.mod h1:fmOUeeqEwPzIU81foMjTRQIdwQHADi/vEr1cx9R1m5g=
cloud.google.com/go/workflows v1.12.4/go.mod h1:yQ7HUqOkdJK4duVtMeBCAOPiN1ZF1E9pAMX51vpwB/w=
collectd.org v0.3.0 h1:iNBHGw1VvPJxH2B6RiFWFZ+vsjo1lCdRszBeOuwGi00=
collectd.org v0.3.0/go.mod h1:A/8DzQBkF6abtvrT2j/AU/4tiBgJWYyh0y/oB/4MlWE=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9 h1:VpgP7xuJadIUuKccphEpTJnWhS2jkQyMt6Y7pJCD7fY=
dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU=
github.com/Azure/azure-pipeline-go v0.2.1/go.mod h1:UGSo8XybXnIGZ3epmeBw7Jdz+HiUVpqIlpz/HKHylF4=
github.com/Azure/azure-pipeline-go v0.2.2 h1:6oiIS9yaG6XCCzhgAgKFfIWyo4LLCiDhZot6ltoThhY=
github.com/Azure/azure-pipeline-go v0.2.2/go.mod h1:4rQ/NZncSvGqNkkOsNpOU1tgoNuIlp9AfUH5G1tvCHc=
github.com/Azure/azure-sdk-for-go/sdk/azcore v1.1.0 h1:Ut0ZGdOwJDw0npYEg+TLlPls3Pq6JiZaP2/aGKir7Zw=
@@ -391,26 +368,12 @@ github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.4.1 h1:QSdcrd/UFJv6Bp/Cf
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob v0.4.1/go.mod h1:eZ4g6GUvXiGulfIbbhh1Xr4XwUYaYaWMqzGD/284wCA=
github.com/Azure/azure-storage-blob-go v0.7.0 h1:MuueVOYkufCxJw5YZzF842DY2MBsp+hLuh2apKY0mck=
github.com/Azure/azure-storage-blob-go v0.7.0/go.mod h1:f9YQKtsG1nMisotuTPpO0tjNuEjKRYAcJU8/ydDI++4=
github.com/Azure/go-autorest v14.2.0+incompatible h1:V5VMDjClD3GiElqLWO7mz2MxNAK/vTfRHdAubSIPRgs=
github.com/Azure/go-autorest v14.2.0+incompatible/go.mod h1:r+4oMnoxhatjLLJ6zxSWATqVooLgysK6ZNox3g/xq24=
github.com/Azure/go-autorest/autorest/adal v0.9.23 h1:Yepx8CvFxwNKpH6ja7RZ+sKX+DWYNldbLiALMC3BTz8=
github.com/Azure/go-autorest/autorest/adal v0.9.23/go.mod h1:5pcMqFkdPhviJdlEy3kC/v1ZLnQl0MH6XA5YCcMhy4c=
github.com/Azure/go-autorest/autorest/date v0.3.0 h1:7gUk1U5M/CQbp9WoqinNzJar+8KY+LPI6wiWrP/myHw=
github.com/Azure/go-autorest/autorest/date v0.3.0/go.mod h1:BI0uouVdmngYNUzGWeSYnokU+TrmwEsOqdt8Y6sso74=
github.com/Azure/go-autorest/autorest/mocks v0.4.1 h1:K0laFcLE6VLTOwNgSxaGbUcLPuGXlNkbVvq4cW4nIHk=
github.com/Azure/go-autorest/autorest/mocks v0.4.1/go.mod h1:LTp+uSrOhSkaKrUy935gNZuuIPPVsHlr9DSOxSayd+k=
github.com/Azure/go-autorest/logger v0.2.1 h1:IG7i4p/mDa2Ce4TRyAO8IHnVhAVF3RFU+ZtXWSmf4Tg=
github.com/Azure/go-autorest/logger v0.2.1/go.mod h1:T9E3cAhj2VqvPOtCYAvby9aBXkZmbF5NWuPV8+WeEW8=
github.com/Azure/go-autorest/tracing v0.6.0 h1:TYi4+3m5t6K48TGI9AUdb+IzbnSxvnvUMfuitfgcfuo=
github.com/Azure/go-autorest/tracing v0.6.0/go.mod h1:+vhtPC754Xsa23ID7GlGsrdKBpUA79WCAKPPZVC2DeU=
github.com/AzureAD/microsoft-authentication-library-for-go v0.6.0 h1:XMEdVDFxgulDDl0lQmAZS6j8gRQ/0pJ+ZpXH2FHVtDc=
github.com/AzureAD/microsoft-authentication-library-for-go v0.6.0/go.mod h1:BDJ5qMFKx9DugEg3+uQSDCdbYPr5s9vBTrL9P8TpqOU=
github.com/BurntSushi/toml v1.3.2 h1:o7IhLm0Msx3BaB+n3Ag7L8EVlByGnpq14C4YWiu/gL8=
github.com/BurntSushi/toml v1.3.2/go.mod h1:CxXYINrC8qIiEnFrOxCa7Jy5BFHlXnUU2pbicEuybxQ=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802 h1:1BDTz0u9nC3//pOCMdNH+CiXJVYJh5UQNCOBG7jbELc=
github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo=
github.com/DATA-DOG/go-sqlmock v1.3.3 h1:CWUqKXe0s8A2z6qCgkP4Kru7wC11YoAnoupUKFDnH08=
github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=
@@ -421,15 +384,11 @@ github.com/PuerkitoBio/purell v1.1.1 h1:WEQqlqaGbrPkxLJWfBwQmfEAE1Z7ONdDLqrN38tN
github.com/PuerkitoBio/purell v1.1.1/go.mod h1:c11w/QuzBsJSee3cPx9rAFu61PvFxuPbtSwDGJws/X0=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578 h1:d+Bc7a5rLufV/sSk/8dngufqelfh6jnri85riMAaF/M=
github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdkoq3SwY9Y+13GIhn11/XLaGBb4BfwItxLd5jeuXE=
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6 h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8=
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
github.com/aead/siphash v1.0.1 h1:FwHfE/T45KPKYuuSAKyyvE+oPWcaQ+CUmFW0bPlM+kg=
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558=
github.com/agnivade/levenshtein v1.0.1 h1:3oJU7J3FGFmyhn8KHjmVaZCN5hxTr7GxgRue+sxIXdQ=
github.com/agnivade/levenshtein v1.0.1/go.mod h1:CURSv5d9Uaml+FovSIICkLbAUZ9S4RqaHDIsdSBg7lM=
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af h1:wVe6/Ea46ZMeNkQjjBW6xcqyQA/j5e0D6GytH95g0gQ=
github.com/ajstarks/svgo v0.0.0-20180226025133-644b8db467af/go.mod h1:K08gAheRH3/J6wwsYMMT4xOr94bZjxIelGM0+d/wbFw=
github.com/akavel/rsrc v0.10.2 h1:Zxm8V5eI1hW4gGaYsJQUhxpjkENuG91ki8B4zCrvEsw=
github.com/akavel/rsrc v0.10.2/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY=
@@ -439,14 +398,10 @@ github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuy
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137 h1:s6gZFSlWYmbqAuRjVTiNNhvNRfY2Wxp9nhfyel4rklc=
github.com/alecthomas/units v0.0.0-20211218093645-b94a6e3cc137/go.mod h1:OMCwj8VM1Kc9e19TLln2VL61YJF0x1XFtfdL4JdbSyE=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883 h1:bvNMNQO63//z+xNgfBlViaCIJKLlCJ6/fmUseuG0wVQ=
github.com/andreyvit/diff v0.0.0-20170406064948-c7f18ee00883/go.mod h1:rCTlJbsFo29Kk6CurOXKm700vrz8f0KW0JNfpkRJY/8=
github.com/antihax/optional v1.0.0 h1:xK2lYat7ZLaVVcIuj82J8kIro4V6kDe0AUDFboUCwcg=
github.com/antihax/optional v1.0.0/go.mod h1:uupD/76wgC+ih3iEmQUL+0Ugr19nfwCT1kdvxnR2qWY=
github.com/antlr/antlr4/runtime/Go/antlr v1.4.10 h1:yL7+Jz0jTC6yykIK/Wh74gnTJnrGr5AyrNMXuA0gves=
github.com/antlr/antlr4/runtime/Go/antlr v1.4.10/go.mod h1:F7bn7fEU90QkQ3tnmaTx3LTKLEDqnwWODIYppRQ5hnY=
github.com/apache/arrow/go/arrow v0.0.0-20191024131854-af6fa24be0db h1:nxAtV4VajJDhKysp2kdcJZsq8Ss1xSA0vZTkVHHJd0E=
github.com/apache/arrow/go/arrow v0.0.0-20191024131854-af6fa24be0db/go.mod h1:VTxUBvSJ3s3eHAg65PNgrsn5BtqCRPdmyXh6rAfdxN0=
github.com/apache/arrow/go/v12 v12.0.0 h1:xtZE63VWl7qLdB0JObIXvvhGjoVNrQ9ciIHG2OK5cmc=
github.com/apache/arrow/go/v12 v12.0.0/go.mod h1:d+tV/eHZZ7Dz7RPrFKtPK02tpr+c9/PEd/zm8mDS9Vg=
github.com/apache/arrow/go/v14 v14.0.2/go.mod h1:u3fgh3EdgN/YQ8cVQRguVW3R+seMybFg8QBQ5LU+eBY=
@@ -466,12 +421,8 @@ github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj
github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
github.com/aws/aws-sdk-go-v2 v1.2.0/go.mod h1:zEQs02YRBw1DjK0PoJv3ygDYOFTre1ejlJWl8FwAuQo=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10 h1:dK82zF6kkPeCo8J1e+tGx4JdvDIQzj7ygIoLg8WMuGs=
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream v1.4.10/go.mod h1:VeTZetY5KRJLuD/7fkQXMU6Mw7H5m/KP2J5Iy9osMno=
github.com/aws/aws-sdk-go-v2/config v1.1.1/go.mod h1:0XsVy9lBI/BCXm+2Tuvt39YmdHwS5unDQmxZOYe8F5Y=
github.com/aws/aws-sdk-go-v2/credentials v1.1.1/go.mod h1:mM2iIjwl7LULWtS6JCACyInboHirisUUdkBPoTHMOUo=
github.com/aws/aws-sdk-go-v2/feature/ec2/imds v1.0.2/go.mod h1:3hGg3PpiEjHnrkrlasTfxFqUsZ2GCk/fMUn4CbKgSkM=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.56 h1:kFDCPqqVvb9vYcW82L7xYfrBGpuxXQ/8A/zYVayRQK4=
github.com/aws/aws-sdk-go-v2/feature/s3/manager v1.11.56/go.mod h1:FoSBuessadgy8Cqp9gQF8U5rzi1XVQhiEJ6su2/kBEE=
github.com/aws/aws-sdk-go-v2/internal/v4a v1.0.22 h1:lTqBRUuy8oLhBsnnVZf14uRbIHPHCrGqg4Plc8gU/1U=
@@ -480,32 +431,21 @@ github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11 h1:y2+VQzC
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding v1.9.11/go.mod h1:iV4q2hsqtNECrfmlXyord9u4zyuFEJX9eLgLpSPzWA8=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.25 h1:B/hO3jfWRm7hP00UeieNlI5O2xP5WJ27tyJG5lzc7AM=
github.com/aws/aws-sdk-go-v2/service/internal/checksum v1.1.25/go.mod h1:54K1zgxK/lai3a4HosE4IKBwZsP/5YAJ6dzJfwsjJ0U=
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url v1.0.2/go.mod h1:45MfaXZ0cNbeuT0KQ1XJylq8A6+OpVV2E5kvY/Kq+u8=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.24 h1:i4RH8DLv/BHY0fCrXYQDr+DGnWzaxB3Ee/esxUaSavk=
github.com/aws/aws-sdk-go-v2/service/internal/s3shared v1.13.24/go.mod h1:N8X45/o2cngvjCYi2ZnvI0P4mU4ZRJfEYC3maCSsPyw=
github.com/aws/aws-sdk-go-v2/service/route53 v1.1.1 h1:cKr6St+CtC3/dl/rEBJvlk7A/IN5D5F02GNkGzfbtVU=
github.com/aws/aws-sdk-go-v2/service/route53 v1.1.1/go.mod h1:rLiOUrPLW/Er5kRcQ7NkwbjlijluLsrIbu/iyl35RO4=
github.com/aws/aws-sdk-go-v2/service/s3 v1.30.6 h1:zzTm99krKsFcF4N7pu2z17yCcAZpQYZ7jnJZPIgEMXE=
github.com/aws/aws-sdk-go-v2/service/s3 v1.30.6/go.mod h1:PudwVKUTApfm0nYaPutOXaKdPKTlZYClGBQpVIRdcbs=
github.com/aws/aws-sdk-go-v2/service/sso v1.1.1/go.mod h1:SuZJxklHxLAXgLTc1iFXbEWkXs7QRTQpCLGaKIprQW0=
github.com/aws/aws-sdk-go-v2/service/sts v1.1.1/go.mod h1:Wi0EBZwiz/K44YliU0EKxqTCJGUfYTWXrrBwkq736bM=
github.com/aws/smithy-go v1.1.0/go.mod h1:EzMw8dbp/YJL4A5/sbhGddag+NPT7q084agLbB9LgIw=
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bitly/go-hostpool v0.1.0 h1:XKmsF6k5el6xHG3WPJ8U0Ku/ye7njX7W81Ng7O2ioR0=
github.com/bitly/go-simplejson v0.5.0 h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y=
github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
github.com/bmizerany/assert v0.0.0-20160611221934-b7ed37b82869 h1:DDGfHa7BWjL4YnC6+E63dPcxHo2sUxDIu8g3QgEJdRY=
github.com/bmizerany/pat v0.0.0-20170815010413-6226ea591a40 h1:y4B3+GPxKlrigF1ha5FFErxK+sr6sWxQovRMzwMhejo=
github.com/bmizerany/pat v0.0.0-20170815010413-6226ea591a40/go.mod h1:8rLXio+WjiTceGBHIoTvn60HIbs7Hm7bcHjyrSqYB9c=
github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4=
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
github.com/btcsuite/btcd/btcec/v2 v2.2.0 h1:fzn1qaOt32TuLjFlkzYSsBC35Q3KUjT1SwPxiMSCF5k=
github.com/btcsuite/btcd/btcec/v2 v2.2.0/go.mod h1:U7MHm051Al6XmscBQ0BoNydpOTsFAn707034b5nY8zU=
github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U=
github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc=
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f h1:bAs4lUbRJpnnkd9VhRV3jjAVU7DJVjMaK+IsvSeZvFo=
github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d h1:yJzD/yFppdVCf6ApMkVy8cUxV0XrxdP9rVf6D87/Mng=
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd h1:R/opQEbFEy9JGkIguV40SvRY1uliPX8ifOvi6ICsFCw=
@@ -513,18 +453,10 @@ github.com/btcsuite/goleveldb v0.0.0-20160330041536-7834afc9e8cd h1:qdGvebPBDuYD
github.com/btcsuite/snappy-go v0.0.0-20151229074030-0bdef8d06723 h1:ZA/jbKoGcVAnER6pCHPEkGdZOV7U1oLUedErBHCUMs0=
github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792 h1:R8vQdOQdZ9Y3SkEwmHoWBmX1DNXhXZqlTpq6s4tyJGc=
github.com/btcsuite/winsvc v1.0.0 h1:J9B4L7e3oqhXOcm+2IuNApwzQec85lE+QaikUcCs+dk=
github.com/c-bata/go-prompt v0.2.2 h1:uyKRz6Z6DUyj49QVijyM339UJV9yhbr70gESwbNU3e0=
github.com/c-bata/go-prompt v0.2.2/go.mod h1:VzqtzE2ksDBcdln8G7mk2RX9QyGjH+OVqOCSiVIqS34=
github.com/census-instrumentation/opencensus-proto v0.4.1 h1:iKLQ0xPNFxR/2hzXZMrBo8f1j86j5WHzznCCQxV/b8g=
github.com/census-instrumentation/opencensus-proto v0.4.1/go.mod h1:4T9NM4+4Vw91VeyqjLS6ao50K5bOcLKN6Q42XnYaRYw=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chzyer/logex v1.1.10 h1:Swpa1K6QvQznwJRcfTfQJmTE72DqScAa40E+fbHEXEE=
github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWRnGsAI=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e h1:fY5BOSpyZCqRo5OhCuC+XN+r/bBCmeuuJtjz+bCNIf8=
github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1 h1:q763qf9huN11kDQavWsoZXJNW3xEE4JJyHa5Q25/sd8=
github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/cilium/ebpf v0.9.1 h1:64sn2K3UKw8NbP/blsixRpF3nXuyhz/VjRlRzvlBRu4=
github.com/cilium/ebpf v0.9.1/go.mod h1:+OhNOIXx/Fnu1IE8bJz2dzOA+VSfyTfdNUVdlQnxUFY=
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
@@ -583,27 +515,16 @@ github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSV
github.com/coreos/go-systemd/v22 v22.5.0 h1:RrqgGjYQKalulkV8NGVIfkXQf6YYmOyiJKk8iXXhfZs=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f h1:lBNOc5arjvs8E5mO2tbpBpLoyyu8B6e44T7hJy6potg=
github.com/cpuguy83/go-md2man/v2 v2.0.0-20190314233015-f79a8a8ca69d/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c h1:/ovYnF02fwL0kvspmy9AuyKg1JhdTRUgPw4nUxd9oZM=
github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4=
github.com/danieljoos/wincred v1.2.0 h1:ozqKHaLK0W/ii4KVbbvluM91W2H3Sh0BncbUNPS7jLE=
github.com/danieljoos/wincred v1.2.0/go.mod h1:FzQLLMKBFdvu+osBrnFODiv32YGwCfx0SkRa/eYHgec=
github.com/dave/jennifer v1.2.0 h1:S15ZkFMRoJ36mGAQgWL1tnr0NQJh9rZ8qatseX/VbBc=
github.com/dave/jennifer v1.2.0/go.mod h1:fIb+770HOpJ2fmN9EPPKOqm1vMGhB+TwXKMZhrIygKg=
github.com/dchest/blake512 v1.0.0 h1:oDFEQFIqFSeuA34xLtXZ/rWxCXdSjirjzPhey5EUvmA=
github.com/dchest/blake512 v1.0.0/go.mod h1:FV1x7xPPLWukZlpDpWQ88rF/SFwZ5qbskrzhLMB92JI=
github.com/deckarep/golang-set v0.0.0-20180603214616-504e848d77ea/go.mod h1:93vsz/8Wt4joVM7c2AVqh+YRMiUSc14yDtF28KmMOgQ=
github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0=
github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs=
github.com/deepmap/oapi-codegen v1.6.0/go.mod h1:ryDa9AgbELGeB+YEXE1dR53yAjHwFvE9iAUlWl9Al3M=
github.com/deepmap/oapi-codegen v1.8.2 h1:SegyeYGcdi0jLLrpbCMoJxnUUn8GBXHsvr4rbzjuhfU=
github.com/deepmap/oapi-codegen v1.8.2/go.mod h1:YLgSKSDv/bZQB7N4ws6luhozi3cEdRktEqrX88CvjIw=
github.com/denisenkom/go-mssqldb v0.0.0-20191128021309-1d7a30a10f73 h1:OGNva6WhsKst5OZf7eZOklDztV3hwtTHovdrLHV+MsA=
github.com/dgrijalva/jwt-go v3.2.0+incompatible h1:7qlOGliEKZXTDg6OTjfoBKDXWrumCAMpl/TFQ4/5kLM=
github.com/dgryski/go-bitstream v0.0.0-20180413035011-3522498ce2c8 h1:akOQj8IVgoeFfBTzGOEQakCYshWD6RNo1M5pivFXt70=
github.com/dgryski/go-bitstream v0.0.0-20180413035011-3522498ce2c8/go.mod h1:VMaSuZ+SZcx/wljOQKvp5srsbCiKDEb6K2wC4+PiBmQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954 h1:RMLoZVzv4GliuWafOuPuQDKSm1SJph7uCRnnS61JAn4=
github.com/dimchansky/utfbom v1.1.1 h1:vV6w1AhK4VMnhBno/TPVCoK9U/LP0PkLCS9tbxHdi/U=
github.com/dimchansky/utfbom v1.1.1/go.mod h1:SxdoEBH5qIqFocHMyGOXVAybYJdr71b1Q/j0mACtrfE=
@@ -611,17 +532,10 @@ github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91 h1:Izz0+t1Z5nI16
github.com/dlclark/regexp2 v1.4.1-0.20201116162257-a2a8dda75c91/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/docker/cli-docs-tool v0.6.0 h1:Z9x10SaZgFaB6jHgz3OWooynhSa40CsWkpe5hEnG/qA=
github.com/docker/cli-docs-tool v0.6.0/go.mod h1:zMjqTFCU361PRh8apiXzeAZ1Q/xupbIwTusYpzCXS/o=
github.com/docker/docker v1.4.2-0.20180625184442-8e610b2b55bf/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docopt/docopt-go v0.0.0-20180111231733-ee0de3bc6815 h1:bWDMxwH3px2JBh6AyO7hdCn/PkvCZXii8TGj7sbtEbQ=
github.com/dop251/goja v0.0.0-20211011172007-d99e4b8cbf48 h1:iZOop7pqsg+56twTopWgwCGxdB5SI2yDO8Ti7eTRliQ=
github.com/dop251/goja v0.0.0-20211011172007-d99e4b8cbf48/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk=
github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf h1:Yt+4K30SdjOkRoRRm3vYNQgR+/ZIy0RmeUDZo7Y8zeQ=
github.com/dop251/goja v0.0.0-20220405120441-9037c2b61cbf/go.mod h1:R9ET47fwRVRPZnOGvHxxhuZcbrMCuiqOz3Rlrh4KSnk=
github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7 h1:tYwu/z8Y0NkkzGEh3z21mSWggMg4LwLRFucLS7TjARg=
github.com/dop251/goja_nodejs v0.0.0-20210225215109-d91c329300e7/go.mod h1:hn7BA7c8pLvoGndExHudxTDKZ84Pyvv+90pbBjbTz0Y=
github.com/dvsekhvalnov/jose2go v0.0.0-20170216131308-f21a8cedbbae h1:UTOyRlLeWJrZx+ynml6q6qzZ1uDkJe/0Z5CMZRbEIJg=
github.com/eclipse/paho.mqtt.golang v1.2.0 h1:1F8mhG9+aO5/xpdtFkW4SxOJB67ukuDC3t2y2qayIX0=
github.com/eclipse/paho.mqtt.golang v1.2.0/go.mod h1:H9keYFcgq3Qr5OUJm/JZI/i6U7joQ8SYLhZwfeOo6Ts=
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.11.1 h1:wSUXTlLfiAQRWs2F+p+EKOY9rUyis1MyGqJ2DIk5HpM=
github.com/envoyproxy/go-control-plane v0.11.1/go.mod h1:uhMcXKCQMEJHiAb0w+YGefQLaTEw+YhGluxZkrTmD0g=
@@ -639,118 +553,66 @@ github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5Kwzbycv
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg=
github.com/fjl/gencodec v0.0.0-20220412091415-8bb9e558978c h1:CndMRAH4JIwxbW8KYq6Q+cGWcGHz0FjGR3QqcInWcW0=
github.com/fjl/gencodec v0.0.0-20220412091415-8bb9e558978c/go.mod h1:AzA8Lj6YtixmJWL+wkKoBGsLWy9gFrAzi4g+5bCKwpY=
github.com/fjl/memsize v0.0.0-20190710130421-bcb5799ab5e5/go.mod h1:VvhXpOYNQvB+uIk2RvXzuaQtkQJzzIx6lSBe1xv7hi0=
github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90 h1:WXb3TSNmHp2vHoCroCIB1foO/yQ36swABL8aOVeDpgg=
github.com/fogleman/gg v1.2.1-0.20190220221249-0403632d5b90/go.mod h1:R/bRT+9gY/C5z7JzPU0zXsXHKM4/ayA+zqcVNZzPa1k=
github.com/form3tech-oss/jwt-go v3.2.3+incompatible h1:7ZaBxOI7TMoYBfyA3cQHErNNyAWIKUMIwqxEtgHOs5c=
github.com/form3tech-oss/jwt-go v3.2.3+incompatible/go.mod h1:pbq4aXjuKjdthFRnoDwaVPLA+WlJuPGy+QneDUgJi2k=
github.com/fxamacker/cbor/v2 v2.4.0 h1:ri0ArlOR+5XunOP8CRUowT0pSJOwhW098ZCUyskZD88=
github.com/fxamacker/cbor/v2 v2.4.0/go.mod h1:TA1xS00nchWmaBnEIxPSE5oHLuJBAVvqrtAnWBwBCVo=
github.com/garslo/gogen v0.0.0-20170306192744-1d203ffc1f61 h1:IZqZOB2fydHte3kUgxrzK5E1fW7RQGeDwE8F/ZZnUYc=
github.com/garslo/gogen v0.0.0-20170306192744-1d203ffc1f61/go.mod h1:Q0X6pkwTILDlzrGEckF6HKjXe48EgsY/l7K7vhY4MW8=
github.com/getkin/kin-openapi v0.53.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4=
github.com/getkin/kin-openapi v0.61.0 h1:6awGqF5nG5zkVpMsAih1QH4VgzS8phTxECUWIFo7zko=
github.com/getkin/kin-openapi v0.61.0/go.mod h1:7Yn5whZr5kJi6t+kShccXS8ae1APpYTW6yheSwk8Yi4=
github.com/getsentry/sentry-go v0.11.0 h1:qro8uttJGvNAMr5CLcFI9CHR0aDzXl0Vs3Pmw/oTPg8=
github.com/getsentry/sentry-go v0.11.0/go.mod h1:KBQIxiZAetw62Cj8Ri964vAEWVdgfaUCn30Q3bCvANo=
github.com/ghodss/yaml v1.0.0 h1:wQHKEahhL6wmXdzwWG11gIVCkOv05bNOh+Rxn0yngAk=
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd h1:r04MMPyLHj/QwZuMJ5+7tJcBr1AQjpiAK/rZWRrQT7o=
github.com/glycerine/go-unsnap-stream v0.0.0-20180323001048-9f0cb55181dd/go.mod h1:/20jfyN9Y5QPEAprSgKAUr+glWDY39ZiUEAYOEv5dsE=
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31 h1:gclg6gY70GLy3PbkQ1AERPfmLMMagS60DKF78eWwLn8=
github.com/glycerine/goconvey v0.0.0-20190410193231-58a59202ab31/go.mod h1:Ogl1Tioa0aV7gstGFO7KhffUsb9M4ydbEbbxpcEDc24=
github.com/go-chi/chi/v5 v5.0.0 h1:DBPx88FjZJH3FsICfDAfIfnb7XxKIYVGG6lOPlhENAg=
github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1 h1:QbL/5oDUmRBzO9/Z7Seo6zf912W/a6Sr4Eu0G/3Jho0=
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72 h1:b+9H1GAsx5RsjvDFLoS5zkNBzIQMuVKUYQDmxU3N5XE=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU=
github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0=
github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/go-sql-driver/mysql v1.4.1/go.mod h1:zAC/RDZ24gD3HViQzih4MyKcchzm+sOG5ZlKdlhCg5w=
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 h1:p104kn46Q8WdvHunIJ9dAyjPVtrBPhSr3KT2yUst43I=
github.com/gobwas/glob v0.2.3 h1:A4xDbljILXROh+kObIiy5kIaPYD8e96x1tgBhUI5J+Y=
github.com/gobwas/glob v0.2.3/go.mod h1:d3Ez4x06l9bZtSvzIay5+Yzi0fmZzPgnTbPcKjJAkT8=
github.com/godbus/dbus/v5 v5.0.4/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/godbus/dbus/v5 v5.1.0 h1:4KLkAxT3aOY8Li4FRJe/KvhoNFFxo0m6fNuFUO8QJUk=
github.com/godbus/dbus/v5 v5.1.0/go.mod h1:xhWf0FNVPg57R7Z0UbKHbJfkEywrmjJnf7w5xrFpKfA=
github.com/gofrs/uuid v3.3.0+incompatible h1:8K4tyRfvU1CYPgJsveYFQMhpFd/wXNM7iK6rR7UHz84=
github.com/gofrs/uuid v3.3.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/gogo/protobuf v1.3.1/go.mod h1:SlYgWuQ5SjCEi6WLHjHCa1yvBfUnHcTbrrZtXPKa29o=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9 h1:au07oEsX2xN0ktxqI+Sida1w446QrXBRJ0nee3SNZlA=
github.com/golang-sql/civil v0.0.0-20220223132316-b832511892a9/go.mod h1:8vg3r2VgvsThLBIFL93Qb5yWzgyZWhEmBwUJWevAkK0=
github.com/golang-sql/sqlexp v0.1.0 h1:ZCD6MBpcuOVfGVqsEmY5/4FtYiKz6tSyUv9LPEDei6A=
github.com/golang-sql/sqlexp v0.1.0/go.mod h1:J4ad9Vo8ZCWQ2GMrC4UCQy1JpCbwU9m3EOqtpKwwwHI=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0 h1:DACJavvAHhabrF08vX0COfcOBJRhZ8lUbR+ZWIs0Y5g=
github.com/golang/freetype v0.0.0-20170609003504-e2365dfdc4a0/go.mod h1:E/TSTwGwJL78qG/PmXZO1EjYhfJinVAhrmmHX6Z8B9k=
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec h1:lJwO/92dFXWeXOZdoGXgptLmNLwynMSHUmU6besqtiw=
github.com/golang/geo v0.0.0-20190916061304-5b978397cfec/go.mod h1:QZ0nwyI2jOfgRAoBvP+ab5aRr7c9x7lhGEJrKvBwjWI=
github.com/golang/glog v1.2.0/go.mod h1:6AhwSGph0fcJtXVM/PEHPqZlFeoLxhs7/t5UDAwmO+w=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A=
github.com/golang/mock v1.3.1/go.mod h1:sBzyDLLjw3U8JLTeZvSv8jJB+tU5PVekmnlKIyFUx0Y=
github.com/golang/mock v1.6.0 h1:ErTB+efbowRARo13NNdxyJji2egdxLGQhRaY+DUumQc=
github.com/golang/mock v1.6.0/go.mod h1:p6yTPP+5HYm5mzsMV8JkE6ZKdX+/wYM6Hr+LicevLPs=
github.com/golang/protobuf v1.4.3/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/snappy v0.0.0-20180518054509-2e65f85255db/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219 h1:utua3L2IbQJmauC5IXdEA547bcoU5dozgQAfc8Onsg4=
github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y=
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/btree v1.0.1 h1:gK4Kx5IaGY9CD5sPJ36FHiBJ6ZXl0kilRiiCj+jdYp4=
github.com/google/btree v1.0.1/go.mod h1:xXMiIv4Fb/0kKde4SpL7qlzvu5cMJDRkFDxJfI9uaxA=
github.com/google/cel-go v0.12.6 h1:kjeKudqV0OygrAqA9fX6J55S8gj+Jre2tckIm5RoG4M=
github.com/google/cel-go v0.12.6/go.mod h1:Jk7ljRzLBhkmiAwBoUxB1sZSCVBAzkqPF25olK/iRDw=
github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v2.0.8+incompatible h1:ivUb1cGomAB101ZM1T0nOiWz9pSrTMoa9+EiY7igmkM=
github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-containerregistry v0.14.0 h1:z58vMqHxuwvAsVwvKEkmVBz2TlgBgH5k6koEXBtlYkw=
github.com/google/go-containerregistry v0.14.0/go.mod h1:aiJ2fp/SXvkWgmYHioXnbMdlgB8eXiiYOY55gfN91Wk=
github.com/google/go-pkcs11 v0.2.1-0.20230907215043-c6f79328ddf9/go.mod h1:6eQoGcuNJpa7jnd5pMGdkSaQpNDYvPlXWMcjXXThLlY=
github.com/google/gofuzz v1.1.1-0.20200604201612-c04b05f3adfa/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible h1:/CP5g8u/VJHijgedC/Legn3BAbAaWPgecwXBIDzw5no=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
github.com/google/martian/v3 v3.3.2 h1:IqNFLAmvJOgVlpdEBiQbDc2EwKW77amAycfTuWKdfvw=
github.com/google/martian/v3 v3.3.2/go.mod h1:oBOf6HBosgwRXnUGWUB05QECsc6uvmMiJ3+6W4l/CUk=
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20190515194954-54271f7e092f/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
github.com/google/pprof v0.0.0-20191218002539-d4f498aebedc/go.mod h1:ZgVRPoUq/hfqzAqh7sHMqb3I9Rq5C59dIz2SbBwJ4eM=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38 h1:yAJXTCF9TqKcTiHJAE8dj7HMvPfh66eeA2JYW7eFpSE=
github.com/google/pprof v0.0.0-20210407192527-94a9f03dee38/go.mod h1:kpwsk12EmLew5upagYY7GY0pfYCcupk39gWOCRROcvE=
github.com/google/renameio v0.1.0 h1:GOZbcHa3HfsPKPlmyPyN2KEohoMXOhdMbHrvbpl2QaA=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/google/s2a-go v0.1.7 h1:60BLSyTrOV4/haCDW4zb1guZItoSq8foHCXrAnjBo/o=
github.com/google/s2a-go v0.1.7/go.mod h1:50CgR4k1jNlWBu4UfS4AcfhVe1r6pdZPygJ3R8F0Qdw=
github.com/google/subcommands v1.2.0 h1:vWQspBTo2nEqTUFita5/KeEWlUL8kQObDFbub/EN9oE=
github.com/google/uuid v1.1.2/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/google/uuid v1.1.5/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/googleapis/enterprise-certificate-proxy v0.3.2 h1:Vie5ybvEvT75RniqhfFxPRy3Bf7vr3h0cechB90XaQs=
github.com/googleapis/enterprise-certificate-proxy v0.3.2/go.mod h1:VLSiSSBs/ksPL8kq3OBOQ6WRI2QnaFynd1DCjZ62+V0=
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56etFpas=
github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU=
github.com/googleapis/gax-go/v2 v2.12.1/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc=
github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc=
github.com/googleapis/gax-go/v2 v2.12.3/go.mod h1:AKloxT6GtNbaLm8QTNSidHUVsHYcBHwWRvkNFJUQcS4=
github.com/googleapis/google-cloud-go-testing v0.0.0-20210719221736-1c9a4c676720/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
github.com/gotestyourself/gotestyourself v1.4.0 h1:CDSlSIuRL/Fsc72Ln5lMybtrCvSRDddsHsDRG/nP7Rg=
github.com/gotestyourself/gotestyourself v1.4.0/go.mod h1:zZKM6oeNM8k+FRljX1mnzVYeS8wiGgQyvST1/GafPbY=
github.com/graph-gophers/graphql-go v0.0.0-20201113091052-beb923fada29 h1:sezaKhEfPFg8W0Enm61B9Gs911H8iesGY5R8NDPtd1M=
github.com/graph-gophers/graphql-go v0.0.0-20201113091052-beb923fada29/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc=
github.com/graph-gophers/graphql-go v1.3.0 h1:Eb9x/q6MFpCLz7jBCiP/WTxjSDrYLR1QY41SORZyNJ0=
github.com/graph-gophers/graphql-go v1.3.0/go.mod h1:9CQHMSxwO4MprSdzoIEobiHpoLtHm77vfxsvsIN5Vuc=
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7 h1:pdN6V1QBWetyv/0+wjACpqVH+eVULgEjkurDLq3goeM=
github.com/gregjones/httpcache v0.0.0-20180305231024-9cad4c3443a7/go.mod h1:FecbI9+v66THATjSRHfNgh1IVFe/9kFxbXtjV0ctIMA=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0 h1:Ovs26xHkKqVztRpIrF/92BcuyuQ/YW4NSIpoGtfXNho=
@@ -782,7 +644,6 @@ github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/hcl/v2 v2.19.1 h1://i05Jqznmb2EXqa39Nsvyan2o5XyMowW5fnCKW5RPI=
github.com/hashicorp/hcl/v2 v2.19.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE=
@@ -791,36 +652,15 @@ github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/
github.com/hashicorp/memberlist v0.5.0/go.mod h1:yvyXLpo0QaGE59Y7hDTsTzDD25JYBZ4mHgHUZ8lrOI0=
github.com/hashicorp/serf v0.10.1/go.mod h1:yL2t6BqATOLGc5HF7qbFkTfXoPIY0WZdWHfEvMqbG+4=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
github.com/huin/goupnp v1.0.2/go.mod h1:0dxJBVBHqTMjIUMkESDTNgOOx/Mw5wYIfyFmdzSamkM=
github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150 h1:vlNjIqmUZ9CMAWsbURYl3a6wZbw7q5RHVvlXTNS/Bs8=
github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150/go.mod h1:PpLOETDnJ0o3iZrZfqZzyLl6l7F3c6L1oWn7OICBi6o=
github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6 h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/iden3/go-iden3-crypto v0.0.15/go.mod h1:dLpM4vEPJ3nDHzhWFXDjzkn1qHoBeOT/3UEhXsEsP3E=
github.com/influxdata/flux v0.65.1 h1:77BcVUCzvN5HMm8+j9PRBQ4iZcu98Dl4Y9rf+J5vhnc=
github.com/influxdata/flux v0.65.1/go.mod h1:J754/zds0vvpfwuq7Gc2wRdVwEodfpCFM7mYlOw2LqY=
github.com/influxdata/influxdb v1.8.3 h1:WEypI1BQFTT4teLM+1qkEcvUi0dAvopAI/ir0vAiBg8=
github.com/influxdata/influxdb v1.8.3/go.mod h1:JugdFhsvvI8gadxOI6noqNeeBHvWNTbfYGtiAn+2jhI=
github.com/influxdata/influxdb-client-go/v2 v2.4.0 h1:HGBfZYStlx3Kqvsv1h2pJixbCl/jhnFtxpKFAv9Tu5k=
github.com/influxdata/influxdb-client-go/v2 v2.4.0/go.mod h1:vLNHdxTJkIf2mSLvGrpj8TCcISApPoXkaxP8g9uRlW8=
github.com/influxdata/influxql v1.1.1-0.20200828144457-65d3ef77d385 h1:ED4e5Cc3z5vSN2Tz2GkOHN7vs4Sxe2yds6CXvDnvZFE=
github.com/influxdata/influxql v1.1.1-0.20200828144457-65d3ef77d385/go.mod h1:gHp9y86a/pxhjJ+zMjNXiQAA197Xk9wLxaz+fGG+kWk=
github.com/influxdata/line-protocol v0.0.0-20180522152040-32c6aa80de5e/go.mod h1:4kt73NQhadE3daL3WhR5EJ/J2ocX0PZzwxQ0gXJ7oFE=
github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo=
github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097 h1:vilfsDSy7TDxedi9gyBkMvAirat/oRcL0lFdJBf6tdM=
github.com/influxdata/line-protocol v0.0.0-20210311194329-9aa0e372d097/go.mod h1:xaLFMmpvUxqXtVkUJfg9QmT88cDaCJ3ZKgdZ78oO8Qo=
github.com/influxdata/promql/v2 v2.12.0 h1:kXn3p0D7zPw16rOtfDR+wo6aaiH8tSMfhPwONTxrlEc=
github.com/influxdata/promql/v2 v2.12.0/go.mod h1:fxOPu+DY0bqCTCECchSRtWfc+0X19ybifQhZoQNF5D8=
github.com/influxdata/roaring v0.4.13-0.20180809181101-fc520f41fab6 h1:UzJnB7VRL4PSkUJHwsyzseGOmrO/r4yA+AuxGJxiZmA=
github.com/influxdata/roaring v0.4.13-0.20180809181101-fc520f41fab6/go.mod h1:bSgUQ7q5ZLSO+bKBGqJiCBGAl+9DxyW63zLTujjUlOE=
github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9 h1:MHTrDWmQpHq/hkq+7cw9oYAt2PqUw52TZazRA0N7PGE=
github.com/influxdata/tdigest v0.0.0-20181121200506-bf2b5ad3c0a9/go.mod h1:Js0mqiSBE6Ffsg94weZZ2c+v/ciT8QRHFOap7EKDrR0=
github.com/influxdata/usage-client v0.0.0-20160829180054-6d3895376368 h1:+TUUmaFa4YD1Q+7bH9o5NCHQGPMqZCYJiNW6lIIS9z4=
github.com/influxdata/usage-client v0.0.0-20160829180054-6d3895376368/go.mod h1:Wbbw6tYNvwa5dlB6304Sd+82Z3f7PmVZHVKU637d4po=
github.com/intel/goresctrl v0.3.0 h1:K2D3GOzihV7xSBedGxONSlaw/un1LZgWsc9IfqipN4c=
github.com/intel/goresctrl v0.3.0/go.mod h1:fdz3mD85cmP9sHD8JUlrNWAxvwM86CrbmVXltEKd7zk=
github.com/jackpal/go-nat-pmp v1.0.2-0.20160603034137-1fa385a6f458/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc=
github.com/jedisct1/go-minisign v0.0.0-20190909160543-45766022959e h1:UvSe12bq+Uj2hWd8aOlwPmoZ+CITRFrdit+sDGfAg8U=
github.com/jedisct1/go-minisign v0.0.0-20190909160543-45766022959e/go.mod h1:G1CVv03EnqU1wYL2dFwXxW2An0az9JTl/ZsqXQeBlkU=
github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89 h1:12K8AlpT0/6QUXSfV0yi4Q0jkbq8NDtIKFtF61AoqV0=
@@ -834,23 +674,11 @@ github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2E
github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4=
github.com/jrick/logrotate v1.0.0 h1:lQ1bL/n9mBNeIXoTUoYRlK4dHuNJVofX9oWqBtPnSzI=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1:6v2b51hI/fHJwM22ozAgKL4VKDeJcHhJFhtBdhmNjmU=
github.com/jstemmer/go-junit-report v0.9.1 h1:6QPYqodiu3GuPL+7mfx+NwDdp2eTkp9IfEUpgAwUN0o=
github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk=
github.com/jsternberg/zap-logfmt v1.0.0 h1:0Dz2s/eturmdUS34GM82JwNEdQ9hPoJgqptcEKcbpzY=
github.com/jsternberg/zap-logfmt v1.0.0/go.mod h1:uvPs/4X51zdkcm5jXl5SYoN+4RK21K8mysFmDaM/h+o=
github.com/juju/loggo v0.0.0-20190526231331-6e530bcce5d8 h1:UUHMLvzt/31azWTN/ifGWef4WUqvXk0iRqdhdy/2uzI=
github.com/julienschmidt/httprouter v1.3.0 h1:U0609e9tgbseu3rBINet9P48AI/D3oJs4dN7jwJOQ1U=
github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM=
github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5 h1:PJr+ZMXIecYc1Ey2zucXdR73SMBtgjPgwa31099IMv0=
github.com/jung-kurt/gofpdf v1.0.3-0.20190309125859-24315acbbda5/go.mod h1:7Id9E/uU8ce6rXgefFLlgrJj/GYY22cpxn+r32jIOes=
github.com/jwilder/encoding v0.0.0-20170811194829-b4e1701a28ef h1:2jNeR4YUziVtswNP9sEFAI913cVrzH85T+8Q6LpYbT0=
github.com/jwilder/encoding v0.0.0-20170811194829-b4e1701a28ef/go.mod h1:Ct9fl0F6iIOGgxJ5npU/IUOhOhqlVrGjyIZc8/MagT0=
github.com/karalabe/usb v0.0.0-20211005121534-4c5740d64559 h1:0VWDXPNE0brOek1Q8bLfzKkvOzwbQE/snjGojlCr8CY=
github.com/karalabe/usb v0.0.0-20211005121534-4c5740d64559/go.mod h1:Od972xHfMJowv7NGVDiWVxk2zxnWgjLlJzE+F4F7AGU=
github.com/karalabe/usb v0.0.2 h1:M6QQBNxF+CQ8OFvxrT90BA0qBOXymndZnk5q235mFc4=
github.com/karalabe/usb v0.0.2/go.mod h1:Od972xHfMJowv7NGVDiWVxk2zxnWgjLlJzE+F4F7AGU=
github.com/kisielk/errcheck v1.2.0/go.mod h1:/BMXB+zMLi60iA8Vv6Ksmxu/1UDYcXs4uQLJ+jE2L00=
github.com/kisielk/errcheck v1.5.0 h1:e8esj/e4R+SAOwFwN+n3zr0nYeCyeweozKfO23MvHzY=
github.com/kisielk/gotool v1.0.0 h1:AV2c/EiW3KqPNT9ZKl07ehoAGi4C5/01Cfbblndcapg=
github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46 h1:veS9QfglfvqAw2e+eeNT/SbGySq8ajECXJ9e4fPoLhY=
@@ -858,14 +686,7 @@ github.com/kisielk/sqlstruct v0.0.0-20201105191214-5f3e10d3ab46/go.mod h1:yyMNCy
github.com/kkdai/bstream v0.0.0-20161212061736-f391b8402d23 h1:FOOIBWrEkLgmlgGfMuZT83xIwfPDxEI2OHu6xUmJMFE=
github.com/klauspost/asmfmt v1.3.2 h1:4Ri7ox3EwapiOjCki+hw14RyKk201CN4rzyCJRFLpK4=
github.com/klauspost/asmfmt v1.3.2/go.mod h1:AG8TuvYojzulgDAMCnYn50l/5QV3Bs/tp6j0HLHbNSE=
github.com/klauspost/compress v1.4.0/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.17.2/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE=
github.com/klauspost/cpuid v0.0.0-20170728055534-ae7887de9fa5 h1:2U0HzY8BJ8hVwDKIzp7y4voR9CX/nvcfymLmg2UiOio=
github.com/klauspost/cpuid v0.0.0-20170728055534-ae7887de9fa5/go.mod h1:Pj4uuM528wm8OyEC2QMXAi2YiTZ96dNQPGgoMS4s3ek=
github.com/klauspost/crc32 v0.0.0-20161016154125-cb6bfca970f6 h1:KAZ1BW2TCmT6PRihDPpocIy1QTtsAsrx6TneU/4+CMg=
github.com/klauspost/crc32 v0.0.0-20161016154125-cb6bfca970f6/go.mod h1:+ZoRqAPRLkC4NPOvfYeR5KNOrY6TD+/sAC3HXPZgDYg=
github.com/klauspost/pgzip v1.0.2-0.20170402124221-0bf5dcad4ada h1:3L+neHp83cTjegPdCiOxVOJtRIy7/8RldvMTsyPYH10=
github.com/klauspost/pgzip v1.0.2-0.20170402124221-0bf5dcad4ada/go.mod h1:Ch1tH69qFZu15pkjo5kYi6mth2Zzwzt50oCQKQE9RUs=
github.com/knz/go-libedit v1.10.1 h1:0pHpWtx9vcvC0xGZqEQlQdfSQs7WRlAjuPvk3fOZDCo=
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
@@ -875,10 +696,6 @@ github.com/kylelemons/go-gypsy v1.0.0 h1:7/wQ7A3UL1bnqRMnZ6T8cwCOArfZCxFmb1iTxaO
github.com/kylelemons/go-gypsy v1.0.0/go.mod h1:chkXM0zjdpXOiqkCW1XcCHDfjfk14PH2KKkQWxfJUcU=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/labstack/echo/v4 v4.2.1 h1:LF5Iq7t/jrtUuSutNuiEWtB5eiHfZ5gSe2pcu5exjQw=
github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg=
github.com/labstack/gommon v0.3.0 h1:JEeO0bvc78PKdyHxloTKiF8BD5iGrH8T6MSeGvSgob0=
github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
github.com/lestrrat-go/backoff/v2 v2.0.8 h1:oNb5E5isby2kiro9AgdHLv5N5tint1AnDVVf2E2un5A=
github.com/lestrrat-go/backoff/v2 v2.0.8/go.mod h1:rHP/q/r9aT27n24JQLa7JhSQZCKBBOiM/uP402WwN8Y=
github.com/lestrrat-go/blackmagic v1.0.0 h1:XzdxDbuQTz0RZZEmdU7cnQxUtFUzgCSPq8RCz4BxIi4=
@@ -891,32 +708,20 @@ github.com/lestrrat-go/jwx v1.2.25 h1:tAx93jN2SdPvFn08fHNAhqFJazn5mBBOB8Zli0g0ot
github.com/lestrrat-go/jwx v1.2.25/go.mod h1:zoNuZymNl5lgdcu6P7K6ie2QRll5HVfF4xwxBBK1NxY=
github.com/lestrrat-go/option v1.0.0 h1:WqAWL8kh8VcSoD6xjSH34/1m8yxluXQbDeKNfvFeEO4=
github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmtT7T5nBBp3I=
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3 h1:jUp75lepDg0phMUJBCmvaeFDldD2N3S1lBuPwUTszio=
github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3/go.mod h1:3r6x7q95whyfWQpmGZTu3gk3v2YkMi05HEzl7Tf7YEo=
github.com/lyft/protoc-gen-star/v2 v2.0.3/go.mod h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk=
github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd h1:HvFwW+cm9bCbZ/+vuGNq7CRWXql8c0y8nGeYpqmpvmk=
github.com/matryer/moq v0.0.0-20190312154309-6cfb0558e1bd/go.mod h1:9ELz6aaclSIGnZBoaSLZ3NAl1VTufbOrXBPvtcy6WiQ=
github.com/mattn/go-colorable v0.0.9/go.mod h1:9vuHe8Xs5qXnSaW/c/ABM9alt+Vo+STaOChaDxuIBZU=
github.com/mattn/go-colorable v0.1.4/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
github.com/mattn/go-colorable v0.1.6/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.7/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.9/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-colorable v0.1.12/go.mod h1:u5H1YNBxpqRaxsYJYSkiCWKzEfiAb1Gb520KVy5xxl4=
github.com/mattn/go-ieproxy v0.0.0-20190610004146-91bb50d98149/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc=
github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d h1:oNAwILwmgWKFpuU+dXvI6dl9jG2mAWAZLX3r9s0PPiw=
github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HNzdxOmlERGGEc4v/dMssOfmp2p5bT/okiKFFc=
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-runewidth v0.0.3/go.mod h1:LwmH8dsx7+W8Uxz3IHJYH5QSwggIsqBzpuz5H//U1FU=
github.com/mattn/go-sqlite3 v1.11.0/go.mod h1:FPy6KqzDD04eiIsT53CuJW3U88zkxoIYsOqkbpncsNc=
github.com/mattn/go-tty v0.0.0-20180907095812-13ff1204f104 h1:d8RFOZ2IiFtFWBcKEHAFYJcPTf0wY5q0exFNJZVWa1U=
github.com/mattn/go-tty v0.0.0-20180907095812-13ff1204f104/go.mod h1:XPvLUNfbS4fJH25nqRHfWLMa1ONC8Amw+mIA639KxkE=
github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0 h1:jWpvCLoY8Z/e3VKvlsiIGKtc+UG6U5vzxaoagmhXfyg=
github.com/matttproud/golang_protobuf_extensions/v2 v2.0.0/go.mod h1:QUyp042oQthUoa9bqDv0ER0wrtXnBruoNd7aNjkbP+k=
github.com/microsoft/go-mssqldb v1.6.0 h1:mM3gYdVwEPFrlg/Dvr2DNVEgYFG7L42l+dGc67NNNpc=
@@ -944,8 +749,6 @@ github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:F
github.com/mmcloughlin/profile v0.1.1 h1:jhDmAqPyebOsVDOCICJoINoLb/AnLBaUw58nFzxWS2w=
github.com/moby/sys/mount v0.3.3 h1:fX1SVkXFJ47XWDoeFW4Sq7PdQJnV2QIDZAqjNqgEjUs=
github.com/moby/sys/mount v0.3.3/go.mod h1:PBaEorSNTLG5t/+4EgukEQVlAvVEc6ZjTySwKdqp5K0=
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae h1:VeRdUYdCw49yizlSbMEn2SZ+gT+3IUKx8BqxyQdz+BY=
github.com/mschoch/smat v0.0.0-20160514031455-90eadee771ae/go.mod h1:qAyveg+e4CE+eKJXWVjKXM4ck2QobLqTDytGJbLLhJg=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f h1:KUppIJq7/+SVif2QVs3tOP0zanoHgBEVAwHxUSIzRqU=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mxk/go-flowrate v0.0.0-20140419014527-cca7078d478f h1:y5//uYreIhSUg3J1GEMiLbxo1LJaP8RfCpH6pymGZus=
@@ -967,33 +770,22 @@ github.com/open-policy-agent/opa v0.42.2 h1:qocVAKyjrqMjCqsU02S/gHyLr4AQQ9xMtuV1
github.com/open-policy-agent/opa v0.42.2/go.mod h1:MrmoTi/BsKWT58kXlVayBb+rYVeaMwuBm3nYAN3923s=
github.com/opencontainers/runtime-tools v0.9.1-0.20221107090550-2e043c6bd626 h1:DmNGcqH3WDbV5k8OJ+esPWbqUOX5rMLR2PMvziDMJi0=
github.com/opencontainers/runtime-tools v0.9.1-0.20221107090550-2e043c6bd626/go.mod h1:BRHJJd0E+cx42OybVYSgUvZmU0B8P9gZuRXlZUP7TKI=
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/opentracing/opentracing-go v1.0.3-0.20180606204148-bd9c31933947/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU=
github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w=
github.com/package-url/packageurl-go v0.1.1-0.20220428063043-89078438f170 h1:DiLBVp4DAcZlBVBEtJpNWZpZVq0AEeCY7Hqk8URVs4o=
github.com/package-url/packageurl-go v0.1.1-0.20220428063043-89078438f170/go.mod h1:uQd4a7Rh3ZsVg5j0lNyAfyxIeGde9yrlhjF78GzeW0c=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/paulbellamy/ratecounter v0.2.0 h1:2L/RhJq+HA8gBQImDXtLPrDXK5qAj6ozWVK/zFXVJGs=
github.com/paulbellamy/ratecounter v0.2.0/go.mod h1:Hfx1hDpSGoqxkVVpBi/IlYD7kChlfo5C6hzIHwPqfFE=
github.com/peterbourgon/diskv v2.0.1+incompatible h1:UBdAOUP5p4RWqPBg048CAvpKN+vxiaj6gdUUzhl4XmI=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
github.com/peterh/liner v1.0.1-0.20180619022028-8c1271fcf47f/go.mod h1:xIteQHvHuaLYG9IFj6mSxM0fCKrs34IrEQUhOYuGPHc=
github.com/peterh/liner v1.1.1-0.20190123174540-a2c9a5303de7 h1:oYW+YCJ1pachXTQmzR3rNLYGGz4g/UgFcjb28p/viDM=
github.com/peterh/liner v1.1.1-0.20190123174540-a2c9a5303de7/go.mod h1:CRroGNssyjTd/qIG2FyxByd2S8JEAZXBl4qUrZf8GS0=
github.com/philhofer/fwd v1.0.0 h1:UbZqGr5Y38ApvM/V/jEljVxwocdweyH+vmYvRPBnbqQ=
github.com/philhofer/fwd v1.0.0/go.mod h1:gk3iGcWd9+svBvR0sR+KPcfE+RNWozjowpeBVG3ZVNU=
github.com/pierrec/lz4 v2.0.5+incompatible h1:2xWsjqPFWcplujydGg4WmhC/6fZqK42wMM8aXeqhl0I=
github.com/pierrec/lz4 v2.0.5+incompatible/go.mod h1:pdkljMzZIN41W+lC3N2tnIh5sFi+IEE17M5jbnwPHcY=
github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4 h1:Qj1ukM4GlMWXNdMBuXcXfz/Kw9s1qm0CLY32QxuSImI=
github.com/pkg/browser v0.0.0-20210115035449-ce105d075bb4/go.mod h1:N6UoU20jOqggOuDwUaBQpluzLNDqif3kq9z2wpdYEfQ=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgcC4zIxODThtZNPirFr42+A=
github.com/pkg/profile v1.5.0 h1:042Buzk+NhDI+DeSAA62RwJL8VAuZUMQZUjCsRz1Mug=
github.com/pkg/profile v1.5.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk=
github.com/pkg/term v0.0.0-20180730021639-bffc007b7fd5 h1:tFwafIEMf0B7NlcxV/zJ6leBIa81D3hgGSgsE5hCkOQ=
github.com/pkg/term v0.0.0-20180730021639-bffc007b7fd5/go.mod h1:eCbImbZ95eXtAUIbLAuAVnBnwf83mjf6QIVH8SHYwqQ=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8cTqKc=
@@ -1005,30 +797,17 @@ github.com/prometheus/common v0.9.1/go.mod h1:yhUN8i9wzaXS3w1O07YhxHEBxD+W35wd8b
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0 h1:MkV+77GLUNo5oJ0jf870itWm3D0Sjh7+Za9gazKc5LQ=
github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
github.com/retailnext/hllpp v1.0.1-0.20180308014038-101a6d2f8b52 h1:RnWNS9Hlm8BIkjr6wx8li5abe0fr73jljLycdfemTp0=
github.com/retailnext/hllpp v1.0.1-0.20180308014038-101a6d2f8b52/go.mod h1:RDpi1RftBQPUCDRw6SmxeaREsAaRKnOclghuzp/WRzc=
github.com/rogpeppe/fastuuid v1.2.0 h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/sagikazarmark/crypt v0.19.0/go.mod h1:c6vimRziqqERhtSe0MhIvzE1w54FrCHtrXb5NH/ja78=
github.com/scroll-tech/da-codec v0.0.0-20240605080813-32bfc9fccde7/go.mod h1:1wWYii0OPwd5kw+xrz0PFgS420xNadrNF1x/ELJT+TM=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
github.com/segmentio/kafka-go v0.2.0 h1:HtCSf6B4gN/87yc5qTl7WsxPKQIIGXLPPM1bMCPOsoY=
github.com/segmentio/kafka-go v0.2.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ=
github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo=
github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636 h1:aSISeOcal5irEhJd1M+IrApc0PdcN7e7Aj4yuEnOrfQ=
github.com/shurcooL/go v0.0.0-20200502201357-93f07166e636/go.mod h1:TDJrrUr11Vxrven61rcy3hJMUqaf/CLWYhHNPmT14Lk=
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749 h1:bUGsEnyNbVPw06Bs80sCeARAlK8lhwqGyi6UT8ymuGk=
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
github.com/shurcooL/sanitized_anchor_name v1.0.0 h1:PdmoCO6wvbs+7yrJyMORt4/BmY5IYyJwS/kOiWx8mHo=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546 h1:pXY9qYc/MP5zdvqWEUH6SjNiu7VhSjuVFTFiTcphaLU=
github.com/shurcooL/vfsgen v0.0.0-20200824052919-0d455de96546/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
github.com/sirupsen/logrus v1.7.0/go.mod h1:yWOB1SBYBC5VeMP7gHvWumXLIWorT60ONWic61uBYv0=
@@ -1037,23 +816,17 @@ github.com/sirupsen/logrus v1.9.0/go.mod h1:naHLuLoDiP4jHNo9R0sCBMtWGeIprob74mVs
github.com/soheilhy/cmux v0.1.5 h1:jjzc5WVemNEDTLwv9tlmemhC73tI08BNOIGwBOo10Js=
github.com/soheilhy/cmux v0.1.5/go.mod h1:T7TcVDs9LWfQgPlPsdngu6I6QIoyIFZDDC6sNE1GqG0=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72 h1:qLC7fQah7D6K1B0ujays3HV9gkFtllcxhzImRR7ArPQ=
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
github.com/spiffe/go-spiffe/v2 v2.1.1 h1:RT9kM8MZLZIsPTH+HKQEP5yaAk3yd/VBzlINaRjXs8k=
github.com/spiffe/go-spiffe/v2 v2.1.1/go.mod h1:5qg6rpqlwIub0JAiF1UK9IMD6BpPTmvG6yfSgDBs5lg=
github.com/status-im/keycard-go v0.0.0-20190316090335-8537d3370df4/go.mod h1:RZLeN1LMWmRsyYjvAu+I6Dm9QmlDaIIt+Y+4Kd7Tp+Q=
github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980 h1:lIOOHPEbXzO3vnmx2gok1Tfs31Q8GQqKLc8vVqyQq/I=
github.com/stefanberger/go-pkcs11uri v0.0.0-20201008174630-78d3cae3a980/go.mod h1:AO3tvPzVZ/ayst6UlUKUv6rcPQInYe3IknH3jYhAKu8=
github.com/stoewer/go-strcase v1.2.0 h1:Z2iHWqGXH00XYgqDmNgQbIBxf3wrNq0F3feEy0ainaU=
github.com/stretchr/objx v0.5.2 h1:xuMeJ0Sdp5ZMRXx/aWO6RZxdr3beISkG5/G/aIRr3pY=
github.com/stretchr/testify v1.2.0/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.7.2/go.mod h1:R6va5+xMeoiuVRoj+gSkQ7d3FALtqAAGI1FQKckRals=
github.com/supranational/blst v0.3.11-0.20230124161941-ca03e11a3ff2/go.mod h1:jZJtfjgudtNl4en1tzwPIV3KjUnQUvG3/j+w+fVonLw=
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635 h1:kdXcSzyDtseVEc4yCz2qF8ZrQvIDBJLl4S1c3GCXmoI=
github.com/syndtr/gocapability v0.0.0-20200815063812-42c35b437635/go.mod h1:hkRG7XYTFWNJGYcbNJQlaLq0fg1yr4J4t/NcTQtrfww=
github.com/tchap/go-patricia/v2 v2.3.1 h1:6rQp39lgIYZ+MHmdEq4xzuk1t7OdC35z/xm0BGhTkes=
github.com/tchap/go-patricia/v2 v2.3.1/go.mod h1:VZRHKAb53DLaG+nA9EaYYiaEx6YztwDlLElMsnSHD4k=
github.com/tinylib/msgp v1.0.2 h1:DfdQrzQa7Yh2es9SuLkixqxuXS2SxsdYn0KbdrOGWD8=
github.com/tinylib/msgp v1.0.2/go.mod h1:+d+yLhGm8mzTaHzB+wgMYrodPfmZrzkirds8fDWklFE=
github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802 h1:uruHq4dN7GR16kFc5fp3d1RIYzJW5onx8Ybykw2YQFA=
github.com/tmc/grpc-websocket-proxy v0.0.0-20201229170055-e5319fda7802/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/tonistiigi/go-actions-cache v0.0.0-20220404170428-0bdeb6e1eac7 h1:8eY6m1mjgyB8XySUR7WvebTM8D/Vs86jLJzD/Tw7zkc=
@@ -1061,16 +834,9 @@ github.com/tonistiigi/go-actions-cache v0.0.0-20220404170428-0bdeb6e1eac7/go.mod
github.com/tonistiigi/go-archvariant v1.0.0 h1:5LC1eDWiBNflnTF1prCiX09yfNHIxDC/aukdhCdTyb0=
github.com/tonistiigi/go-archvariant v1.0.0/go.mod h1:TxFmO5VS6vMq2kvs3ht04iPXtu2rUT/erOnGFYfk5Ho=
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs=
github.com/ugorji/go v1.2.7 h1:qYhyWUUd6WbiM+C6JZAUkIJt/1WrjzNHY9+KCIjVqTo=
github.com/urfave/cli v1.22.12 h1:igJgVw1JdKH+trcLWLeLwZjU9fEfPesQ+9/e4MQ44S8=
github.com/urfave/cli v1.22.12/go.mod h1:sSBEIC79qR6OvcmsD4U3KABeOTxDqQtdDnaFuUN30b8=
github.com/urfave/cli/v2 v2.3.0/go.mod h1:LJmUH05zAU44vOAcrfzZQKsZbVcdbOG8rtL3/XcUArI=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/valyala/fasttemplate v1.2.1 h1:TVEnxayobAdVkhQfrfes2IzOB6o+z4roRkPF52WA1u4=
github.com/valyala/fasttemplate v1.2.1/go.mod h1:KHLXt3tVN2HBp8eijSv/kGJopbvo7S+qRAEEKiv+SiQ=
github.com/vektah/gqlparser/v2 v2.4.5 h1:C02NsyEsL4TXJB7ndonqTfuQOL4XPIu0aAWugdmTgmc=
github.com/vektah/gqlparser/v2 v2.4.5/go.mod h1:flJWIR04IMQPGz+BXLrORkrARBxv/rtyIAFvd/MceW0=
github.com/veraison/go-cose v1.0.0-rc.1 h1:4qA7dbFJGvt7gcqv5MCIyCQvN+NpHFPkW7do3EeDLb8=
@@ -1079,8 +845,6 @@ github.com/vishvananda/netlink v1.2.1-beta.2 h1:Llsql0lnQEbHj0I1OuKyp8otXp0r3q0m
github.com/vishvananda/netlink v1.2.1-beta.2/go.mod h1:twkDnbuQxJYemMlGd4JFIcuhgX83tXhKS2B/PRMpOho=
github.com/vishvananda/netns v0.0.0-20210104183010-2eb08e3e575f h1:p4VB7kIXpOQvVn1ZaTIVp+3vuYAXFe3OJEvjbUYJLaA=
github.com/vishvananda/netns v0.0.0-20210104183010-2eb08e3e575f/go.mod h1:DD4vA1DwXk04H54A1oHXtwZmA0grkVMdPxx/VGLCah0=
github.com/willf/bitset v1.1.3 h1:ekJIKh6+YbUIVt9DfNbkR5d6aFcFTLDRyJNAACURBg8=
github.com/willf/bitset v1.1.3/go.mod h1:RjeCKbqT1RxIR/KWY6phxZiaY1IyutSBfGjNPySAYV4=
github.com/x448/float16 v0.8.4 h1:qLwI1I70+NjRFUR3zs1JPUCgaCXSh3SW62uAKT1mSBM=
github.com/x448/float16 v0.8.4/go.mod h1:14CWIYCyZA/cWjXOioeEpHeN/83MdbZDRQHoFcYsOfg=
github.com/xhit/go-str2duration v1.2.0 h1:BcV5u025cITWxEQKGWr1URRzrcXtu7uk8+luz3Yuhwc=
@@ -1088,8 +852,6 @@ github.com/xhit/go-str2duration v1.2.0/go.mod h1:3cPSlfZlUHVlneIVfePFWcJZsuwf+P1
github.com/xhit/go-str2duration/v2 v2.1.0 h1:lxklc02Drh6ynqX+DdPyp5pCKLUQpRT8bp8Ydu2Bstc=
github.com/xhit/go-str2duration/v2 v2.1.0/go.mod h1:ohY8p+0f07DiV6Em5LKB0s2YpLtXVyJfNt1+BlmyAsU=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2 h1:eY9dn8+vbi4tKz5Qo6v2eYzo7kUS51QINcR5jNpbZS8=
github.com/xlab/treeprint v0.0.0-20180616005107-d6fb6747feb6 h1:YdYsPAZ2pC6Tow/nPZOPQ96O3hm/ToAkGsPLzedXERk=
github.com/xlab/treeprint v0.0.0-20180616005107-d6fb6747feb6/go.mod h1:ce1O1j6UtZfjr22oyGxGLbauSBp2YVXpARAosm7dHBg=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77 h1:ESFSdwYZvkeru3RtdrYueztKhOBCSAAzS4Gf+k0tEow=
github.com/yashtewari/glob-intersection v0.1.0 h1:6gJvMYQlTDOL3dMsPF6J0+26vwX9MB8/1q3uAdhmTrg=
github.com/yashtewari/glob-intersection v0.1.0/go.mod h1:LK7pIC3piUjovexikBbJ26Yml7g8xa5bsjfx2v1fwok=
@@ -1126,9 +888,6 @@ go.etcd.io/etcd/server/v3 v3.5.5 h1:jNjYm/9s+f9A9r6+SC4RvNaz6AqixpOvhrFdT0PvIj0=
go.etcd.io/etcd/server/v3 v3.5.5/go.mod h1:rZ95vDw/jrvsbj9XpTqPrTAB9/kzchVdhRirySPkUBc=
go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1 h1:A/5uWzF44DlIgdm/PQFwfMkW0JX+cIcQi/SwLAmZP5M=
go.mozilla.org/pkcs7 v0.0.0-20200128120323-432b2356ecb1/go.mod h1:SNgMg+EgDFwmvSmLRTNKC5fegJjB7v23qTQ0XLGUNHk=
go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU=
go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8=
go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.48.0/go.mod h1:tIKj3DbO8N9Y2xo52og3irLsPI4GW02DSMtrVgNMgxg=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0 h1:4Pp6oUg3+e/6M4C0A/3kJ2VYa++dsWVTtGgLVj5xtHg=
go.opentelemetry.io/contrib/instrumentation/google.golang.org/grpc/otelgrpc v0.49.0/go.mod h1:Mjt1i1INqiaoZOMGR1RIUJN+i3ChKoFRqzrRQhlkbs0=
@@ -1146,153 +905,61 @@ go.opentelemetry.io/otel/sdk v1.21.0/go.mod h1:Nna6Yv7PWTdgJHVRD9hIYywQBRx7pbox6
go.opentelemetry.io/otel/sdk v1.22.0/go.mod h1:iu7luyVGYovrRpe2fmj3CVKouQNdTOkxtLzPvPz1DOc=
go.opentelemetry.io/otel/trace v1.22.0/go.mod h1:RbbHXVqKES9QhzZq/fE5UnOSILqRt40a21sPw2He1xo=
go.opentelemetry.io/otel/trace v1.23.0/go.mod h1:GSGTbIClEsuZrGIzoEHqsVfxgn5UkggkflQwDScNUsk=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/goleak v1.1.11/go.mod h1:cwTWslyiVhfpKIDGSZEM2HlOvcqm+tG4zioyIeLoqMQ=
go.uber.org/multierr v1.6.0/go.mod h1:cdWPpRnG4AhwMwsgIHip0KRBQjJy5kYEpYjJxpXp9iU=
go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go.uber.org/zap v1.19.0 h1:mZQZefskPPCMIBCSEH0v2/iUqqLrYtaeqwD6FUGUnFE=
go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI=
go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190909091759-094676da4a83/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190923035154-9ee001bba392/go.mod h1:/lpIB1dKB+9EgE3H3cr1v9wB50oz8l4C4h62xy7jSTY=
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad/go.mod h1:jdWPYTVW3xRLrWPugEBEK3UY2ZEsg3UU495nc5E+M+I=
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
golang.org/x/crypto v0.21.0/go.mod h1:0BP7YvVV9gBbVKyeTG0Gyn+gZm94bibOW5BjDEYAOMs=
golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8=
golang.org/x/exp v0.0.0-20180321215751-8460e604b9de/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20180807140117-3d87b88a115f/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190125153040-c74c464bbbf2/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
golang.org/x/exp v0.0.0-20191129062945-2f5052295587/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4=
golang.org/x/image v0.0.0-20180708004352-c73c2afc3b81/go.mod h1:ux5Hcp/YLpHSI86hEcLt0YII63i6oz57MZXIpbrjZUs=
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b h1:+qEpEAPhDZ1o0x3tHzZTQDArnOixOzGD9HUJfcg0mb4=
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
golang.org/x/lint v0.0.0-20190301231843-5614ed5bae6f/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3 h1:XQyxROzUlZH+WIQwySDgnISgOivlhjIEwaQaJEJrrN0=
golang.org/x/lint v0.0.0-20190409202823-959b441ac422/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190909230951-414d861bb4ac/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f h1:J5lckAjkw6qYlOZNj90mLYNTEKDvWeuc1yieZ8qUzUE=
golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRuDixDT3tpyyb+LUpUlRWLxfhWrs=
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028 h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs=
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028/go.mod h1:E/iHnbuqvinMTCcRqshq8CkpyQDoeVncDDYHnLhea+o=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.0/go.mod h1:0QHyrYULN0/3qlju5TqG8bIK38QM8yzMo5ekMj3DlcY=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/mod v0.4.2/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA=
golang.org/x/mod v0.8.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs=
golang.org/x/mod v0.14.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c=
golang.org/x/net v0.0.0-20190108225652-1e06a53dbb7e/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190501004415-9ce7a6920f09/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190503192946-f4e77d36d62c/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190628185345-da137c7871d7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190724013045-ca1201d0de80/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190923162816-aa69164e4478/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20191209160850-c0dbc17a3553/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200822124328-c89045814202/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20201110031124-69a78807bb2b/go.mod h1:sp8m0HH+o8qH0wwXwYZr8TS3Oi6o0r6Gce1SSxlDquU=
golang.org/x/net v0.0.0-20210119194325-5f4716e94777/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210220033124-5f55cee0dc0d/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20210405180319-a5a99cb37ef4/go.mod h1:p54w0d4576C0XHj96bSt6lcn1PtDYWL6XObtHCRCNQM=
golang.org/x/net v0.0.0-20210410081132-afb366fc7cd1/go.mod h1:9tjilg8BloeKEkVJvy7fQ90B1CfIiPueXVOjqfkSzI8=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.16.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.17.0/go.mod h1:NxSsAGuq816PNPmqtQdLE42eU2Fs7NoRIZrHJAlaCOE=
golang.org/x/net v0.20.0/go.mod h1:z8BVo6PvndSri0LbOE3hAn0apkU+1YvI6E70E9jsnvY=
golang.org/x/net v0.22.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.17.0/go.mod h1:OzPDGQiuQMguemayvdylqddI7qcD9lnSDb+1FiwQ5HA=
golang.org/x/oauth2 v0.18.0 h1:09qnuIAgzdx1XplqJvW6CQqMCtGZykZWcXzPMPUusvI=
golang.org/x/oauth2 v0.18.0/go.mod h1:Wf7knwG0MPoWIMMBgFlEaSUDaKskp0dCfrlJRJXbBi8=
golang.org/x/sync v0.0.0-20190227155943-e225da77a7e6/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.6.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180823144017-11551d06cbcc/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190312061237-fead79001313/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190502145724-3ef323f4f1fd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190507160741-ecd444e8653b/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190606165138-5da285871e9c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190624142023-c5567b49c5d0/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190726091711-fc99dfbffb4e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200107162124-548cf772de50/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200122134326-e047566fdf82/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210303074136-134d130e1a04/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210330210617-4fbd30eecc44/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210510120138-977fb7262007/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20210927094055-39ccf1dd6fa6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220503163025-988cb79eb6c6/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.19.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2 h1:IRJeR9r1pYWsHKTRe/IInb7lYvbBVIqOgsX/u0mbOWY=
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.4/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.5/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.8/go.mod h1:E6s5w1FMmriuDzIBO73fBruAKo1PCIq6d2Q6DHfQ8WQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20201208040808-7e3f01d25324/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180525024113-a5b4c53f6e8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20181030221726-6c7e314b6563/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190206041539-40960b6deb8e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190312151545-0bb0c0a6e846/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190312170243-e65039ee4138/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425150028-36563e24a262/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190506145303-2d16b83fe98c/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190606124116-d0a3d012864b/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190628153133-6cdbf07be9d0/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190816200558-6889da9d5479/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190907020128-2ca718005c18/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20190911174233-4f2ddba30aff/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191012152004-8de300cfc20a/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191113191852-77e3bb0ad9e7/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191115202509-3a792d9c32b2/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191216173652-a0e659d51361/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20191227053925-7b8e75db28f4/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.0.0-20200108203644-89082a384178/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/tools v0.1.5/go.mod h1:o0xws9oXOQQZyjljx8fwUC0k7L1pTE6eaCbjGeHmOkk=
golang.org/x/tools v0.6.0/go.mod h1:Xwgl3UAJ/d3gWutnCtw505GrjyAbvKui8lOU390QaIU=
golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58=
@@ -1300,22 +967,6 @@ golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk=
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
gonum.org/v1/gonum v0.6.0 h1:DJy6UzXbahnGUf1ujUNkh/NEtK14qMo2nvlBPs4U5yw=
gonum.org/v1/gonum v0.6.0/go.mod h1:9mxDZsDKxgMAuccQkewq682L+0eCu4dCN2yonUJTCLU=
gonum.org/v1/netlib v0.0.0-20181029234149-ec6d1f5cefe6/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0 h1:OE9mWmgKkjJyEmDAAtGMPjXu+YNeGvK9VTSHY6+Qihc=
gonum.org/v1/netlib v0.0.0-20190313105609-8cb42192e0e0/go.mod h1:wa6Ws7BG/ESfp6dHfk7C6KdzKA7wR7u/rKwOGE66zvw=
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b h1:Qh4dB5D/WpoUUp3lSod7qgoyEHbDGPUWjIbnqdqqe1k=
gonum.org/v1/plot v0.0.0-20190515093506-e2840ee46a6b/go.mod h1:Wt8AAjI+ypCyYX3nZBvf6cAIx93T+c/OS2HFAYskSZc=
google.golang.org/api v0.4.0/go.mod h1:8k5glujaEP+g9n7WNsDg8QP6cUVNI86fCNMcbazEtwE=
google.golang.org/api v0.7.0/go.mod h1:WtwebWUNSVBH/HAw79HIFXZNqEvBhG+Ra+ax0hx3E3M=
google.golang.org/api v0.8.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.9.0/go.mod h1:o4eAsZoiT+ibD93RtjEohWalFOjRDx6CVaqeizhEnKg=
google.golang.org/api v0.13.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.14.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.15.0/go.mod h1:iLdEw5Ide6rF15KTC1Kkl0iskquN2gFfn9o9XIsbkAI=
google.golang.org/api v0.149.0 h1:b2CqT6kG+zqJIVKRQ3ELJVLN1PwHZ6DJ3dW8yl82rgY=
google.golang.org/api v0.149.0/go.mod h1:Mwn1B7JTXrzXtnvmzQE2BD6bYZQ8DShKZDZbeN9I7qI=
google.golang.org/api v0.162.0/go.mod h1:6SulDkfoBIg4NFmCuZ39XeeAgSHCPecfSUuDyYlAHs0=
@@ -1323,22 +974,7 @@ google.golang.org/api v0.166.0/go.mod h1:4FcBc686KFi7QI/U51/2GKKevfZMpM17sCdibqe
google.golang.org/api v0.167.0/go.mod h1:4FcBc686KFi7QI/U51/2GKKevfZMpM17sCdibqe/bSA=
google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg=
google.golang.org/api v0.171.0/go.mod h1:Hnq5AHm4OTMt2BUVjael2CWZFD6vksJdWCWiUAmjC9o=
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190425155659-357c62f0e4bb/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190502173448-54afdca5d873/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
google.golang.org/genproto v0.0.0-20190716160619-c506a9f90610/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190801165951-fa694d86fc64/go.mod h1:DMBHOl98Agz4BDEuKkezgsaosCRResVns1a3J2ZsMNc=
google.golang.org/genproto v0.0.0-20190911173649-1774047e7e51/go.mod h1:IbNlFCBrqXvoKpeg0TB2l7cyZUmoaFKYIwrEpbDKLA8=
google.golang.org/genproto v0.0.0-20191108220845-16a3f7862a1a/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191115194625-c23dd37a84c9/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191216164720-4f79533eabd1/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20191230161307-f3c370f40bfb/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20200108215221-bd8f9a0ef82f/go.mod h1:n3cpQtvxv34hfy77yVDNjmbRyujviMdxYliBSkLhpCc=
google.golang.org/genproto v0.0.0-20231030173426-d783a09b4405/go.mod h1:3WDQMjmJk36UQhjQ89emUzb1mdaHcPeeAh4SCBKznB4=
google.golang.org/genproto v0.0.0-20231120223509-83a465c0220f h1:Vn+VyHU5guc9KjB5KrjI2q0wCOWEOIh0OEsleqakHJg=
google.golang.org/genproto v0.0.0-20231120223509-83a465c0220f/go.mod h1:nWSwAFPb+qfNJXsoeO3Io7zf4tMSfN8EA8RlDA04GhY=
@@ -1366,9 +1002,6 @@ google.golang.org/genproto/googleapis/rpc v0.0.0-20240304161311-37d4d3c04a78/go.
google.golang.org/genproto/googleapis/rpc v0.0.0-20240311132316-a219d84964c2/go.mod h1:UCOku4NytXMJuLQE5VuqA5lX3PcHCBo8pxNyvkf4xBs=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c h1:lfpJ/2rWPa/kJgxyyXM8PrNnfCzcmxJ265mADgwmvLI=
google.golang.org/genproto/googleapis/rpc v0.0.0-20240314234333-6e1732d8331c/go.mod h1:WtryC6hu0hhx87FDGxWCDptyssuo68sk10vYjF+T9fY=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.26.0/go.mod h1:qbnxyOmOxrQa7FizSgH+ReBfzJrCY1pSN7KXBS8abTk=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
google.golang.org/grpc v1.37.0/go.mod h1:NREThFqKR1f3iQ6oBuvc5LadQuXVGo9rkm5ZGrQdJfM=
google.golang.org/grpc v1.61.0/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFLBNJs=
@@ -1392,14 +1025,9 @@ gopkg.in/olebedev/go-duktape.v3 v3.0.0-20200619000410-60c24ae608a6/go.mod h1:uAJ
gopkg.in/resty.v1 v1.12.0 h1:CuXP0Pjfw9rOuY6EP+UvtNvt5DSqHpIxILZKT/quCZI=
gopkg.in/square/go-jose.v2 v2.5.1 h1:7odma5RETjNHWJnR32wx8t+Io4djHE1PqxCFx3iiZ2w=
gopkg.in/square/go-jose.v2 v2.5.1/go.mod h1:M9dMgbHiYLoDGQrXy7OpJDJWiKiU//h+vD76mk0e1AI=
gopkg.in/yaml.v2 v2.2.3/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.5/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gotest.tools v1.4.0/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
honnef.co/go/tools v0.0.0-20190106161140-3f1c8253044a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069zDB68us2sMGsfkFJO0iZs=
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
k8s.io/component-base v0.26.7 h1:uqsOyZh0Zqoaup8tmHa491D/CvgFdGUs+X2H/inNUKM=
k8s.io/component-base v0.26.7/go.mod h1:CZe1HTmX/DQdeBrb9XYOXzs96jXth8ZbFvhLMsoJLUg=
k8s.io/cri-api v0.27.1 h1:KWO+U8MfI9drXB/P4oU9VchaWYOlwDglJZVHWMpTT3Q=
@@ -1413,8 +1041,6 @@ kernel.org/pub/linux/libs/security/libcap/cap v1.2.67/go.mod h1:GkntoBuwffz19qtd
kernel.org/pub/linux/libs/security/libcap/psx v1.2.67 h1:NxbXJ7pDVq0FKBsqjieT92QDXI2XaqH2HAi4QcCOHt8=
kernel.org/pub/linux/libs/security/libcap/psx v1.2.67/go.mod h1:+l6Ee2F59XiJ2I6WR5ObpC1utCQJZ/VLsEbQCD8RG24=
nullprogram.com/x/optparse v1.0.0 h1:xGFgVi5ZaWOnYdac2foDT3vg0ZZC9ErXFV57mr4OHrI=
rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
rsc.io/pdf v0.1.1 h1:k1MczvYDUvJBe93bYd7wrZLLUEcLZAuF824/I4e5Xr4=
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.37 h1:fAPTNEpzQMOLMGwOHNbUkR2xXTQwMJOZYNx+/mLlOh0=
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.37/go.mod h1:vfnxT4FXNT8eGvO+xi/DsyC/qHmdujqwrUa1WSspCsk=

prover/Cargo.lock (generated, 406 changed lines)
View File

@@ -28,10 +28,44 @@ dependencies = [
"cpufeatures",
]
[[package]]
name = "aggregator"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.11.0",
]
[[package]]
name = "aggregator"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
@@ -62,40 +96,6 @@ dependencies = [
"zkevm-circuits 0.12.0",
]
[[package]]
name = "aggregator"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"ark-std 0.3.0",
"bitstream-io",
"c-kzg",
"ctor 0.1.26",
"encoder",
"env_logger 0.10.2",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"num-bigint",
"once_cell",
"rand",
"revm-precompile",
"revm-primitives",
"serde",
"serde_json",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"zkevm-circuits 0.13.0",
]
[[package]]
name = "ahash"
version = "0.8.11"
@@ -632,10 +632,37 @@ version = "3.16.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "79296716171880943b8470b5f8d03aa55eb2e645a4874bdbb28adb49162e012c"
[[package]]
name = "bus-mapping"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-providers 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ethers-signers",
"external-tracer 0.11.0",
"gadgets 0.11.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"poseidon-circuit",
"rand",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.12.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -657,31 +684,6 @@ dependencies = [
"strum_macros 0.25.3",
]
[[package]]
name = "bus-mapping"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-providers 2.0.7 (registry+https://github.com/rust-lang/crates.io-index)",
"ethers-signers",
"gadgets 0.13.0",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"poseidon-circuit",
"revm-precompile",
"serde",
"serde_json",
"strum 0.25.0",
"strum_macros 0.25.3",
]
[[package]]
name = "byte-slice-cast"
version = "1.2.2"
@@ -1309,8 +1311,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1327,6 +1329,7 @@ dependencies = [
"revm-primitives",
"serde",
"serde_json",
"serde_stacker",
"serde_with",
"sha3 0.10.8",
"strum 0.25.0",
@@ -1337,8 +1340,8 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"base64 0.13.1",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -1557,11 +1560,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.12.0",
"geth-utils 0.12.0",
"eth-types 0.11.0",
"geth-utils 0.11.0",
"log",
"serde",
"serde_json",
@@ -1570,11 +1573,11 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.13.0",
"geth-utils 0.13.0",
"eth-types 0.12.0",
"geth-utils 0.12.0",
"log",
"serde",
"serde_json",
@@ -1787,10 +1790,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.12.0",
"eth-types 0.11.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1799,10 +1802,10 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.13.0",
"eth-types 0.12.0",
"halo2_proofs",
"poseidon-base",
"sha3 0.10.8",
@@ -1822,8 +1825,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"env_logger 0.10.2",
"gobuild",
@@ -1832,8 +1835,8 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"env_logger 0.10.2",
"gobuild",
@@ -2670,10 +2673,25 @@ dependencies = [
"subtle",
]
[[package]]
name = "mock"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"external-tracer 0.11.0",
"itertools 0.11.0",
"log",
"rand",
"rand_chacha",
]
[[package]]
name = "mock"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.12.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
@@ -2686,24 +2704,23 @@ dependencies = [
]
[[package]]
name = "mock"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
name = "mpt-zktrie"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"external-tracer 0.13.0",
"itertools 0.11.0",
"eth-types 0.11.0",
"halo2curves",
"hex",
"log",
"rand",
"rand_chacha",
"num-bigint",
"poseidon-base",
"zktrie",
]
[[package]]
name = "mpt-zktrie"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"eth-types 0.12.0",
"halo2curves",
@@ -2711,21 +2728,7 @@ dependencies = [
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "mpt-zktrie"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"eth-types 0.13.0",
"halo2curves",
"hex",
"log",
"num-bigint",
"poseidon-base",
"zktrie 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie",
]
[[package]]
@@ -3291,8 +3294,8 @@ dependencies = [
"http 1.1.0",
"log",
"once_cell",
"prover 0.11.0",
"prover 0.12.0",
"prover 0.13.0",
"rand",
"reqwest 0.12.4",
"reqwest-middleware",
@@ -3306,10 +3309,44 @@ dependencies = [
"tokio",
]
[[package]]
name = "prover"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"aggregator 0.11.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.11.0",
"chrono",
"dotenvy",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.11.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.11.0",
]
[[package]]
name = "prover"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"aggregator 0.12.0",
"anyhow",
@@ -3340,40 +3377,6 @@ dependencies = [
"zkevm-circuits 0.12.0",
]
[[package]]
name = "prover"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"aggregator 0.13.0",
"anyhow",
"base64 0.13.1",
"blake2",
"bus-mapping 0.13.0",
"chrono",
"dotenvy",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"git-version",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"log4rs",
"mpt-zktrie 0.13.0",
"num-bigint",
"rand",
"rand_xorshift",
"serde",
"serde_derive",
"serde_json",
"serde_stacker",
"sha2",
"snark-verifier",
"snark-verifier-sdk",
"zkevm-circuits 0.13.0",
]
[[package]]
name = "psm"
version = "0.1.21"
@@ -5335,10 +5338,52 @@ dependencies = [
"syn 2.0.66",
]
[[package]]
name = "zkevm-circuits"
version = "0.11.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.5#6ea8fb3fad4d8a8bfe873e18e2f881ad1c807ded"
dependencies = [
"array-init",
"bus-mapping 0.11.0",
"either",
"env_logger 0.10.2",
"eth-types 0.11.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"ff",
"gadgets 0.11.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.11.0",
"mpt-zktrie 0.11.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.12.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.2#6f7b46a3b1ccf9dc448735e8455e1ac6f9e30643"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.12.0#6a1f65a1f99429f3725ef4d6788f5643bb61aa6f"
dependencies = [
"array-init",
"bus-mapping 0.12.0",
@@ -5377,64 +5422,13 @@ dependencies = [
"subtle",
]
[[package]]
name = "zkevm-circuits"
version = "0.13.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.13.1#4009e5593f13ba73f64f556011ee5ef47bc4ebf3"
dependencies = [
"array-init",
"bus-mapping 0.13.0",
"either",
"env_logger 0.10.2",
"eth-types 0.13.0",
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
"ethers-signers",
"ff",
"gadgets 0.13.0",
"halo2-base",
"halo2-ecc",
"halo2-mpt-circuits",
"halo2_gadgets",
"halo2_proofs",
"hex",
"itertools 0.11.0",
"log",
"misc-precompiled-circuit",
"mock 0.13.0",
"mpt-zktrie 0.13.0",
"num",
"num-bigint",
"poseidon-circuit",
"rand",
"rand_chacha",
"rand_xorshift",
"rayon",
"serde",
"serde_json",
"sha3 0.10.8",
"snark-verifier",
"snark-verifier-sdk",
"strum 0.25.0",
"strum_macros 0.25.3",
"subtle",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=main#23181f209e94137f74337b150179aeb80c72e7c8"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=main)",
]
[[package]]
name = "zktrie"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"gobuild",
"zktrie_rust 0.3.0 (git+https://github.com/scroll-tech/zktrie.git?branch=v0.9)",
"zktrie_rust",
]
[[package]]
@@ -5451,20 +5445,6 @@ dependencies = [
"strum_macros 0.24.3",
]
[[package]]
name = "zktrie_rust"
version = "0.3.0"
source = "git+https://github.com/scroll-tech/zktrie.git?branch=v0.9#460b8c22af65b7809164548cba1e0253b6db5a70"
dependencies = [
"hex",
"lazy_static",
"num",
"num-derive",
"num-traits",
"strum 0.24.1",
"strum_macros 0.24.3",
]
[[package]]
name = "zstd"
version = "0.13.0"

View File

@@ -29,8 +29,8 @@ ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin_v2 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_curie = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.5", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.0", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"

View File

@@ -3,7 +3,7 @@
"keystore_path": "keystore.json",
"keystore_password": "prover-pwd",
"db_path": "unique-db-path-for-prover-1",
"prover_type": 2,
"proof_type": 2,
"low_version_circuit": {
"hard_fork_name": "bernoulli",
"params_path": "params",

View File

@@ -14,8 +14,6 @@ use types::*;
use crate::{config::Config, key_signer::KeySigner};
pub use errors::ProofStatusNotOKError;
pub struct CoordinatorClient<'a> {
api: Api,
token: Option<String>,

View File

@@ -1,6 +1,4 @@
use crate::{coordinator_client::ProofStatusNotOKError, types::ProofStatus};
use super::{errors::*, types::*};
use super::types::*;
use anyhow::{bail, Result};
use core::time::Duration;
use reqwest::{header::CONTENT_TYPE, Url};
@@ -78,23 +76,7 @@ impl Api {
token: &String,
) -> Result<Response<SubmitProofResponseData>> {
let method = "/coordinator/v1/submit_proof";
let response = self
.post_with_token::<SubmitProofRequest, Response<SubmitProofResponseData>>(
method, req, token,
)
.await?;
// when the request's status is already not ok, mark the error returned from the
// coordinator so that it can be ignored later.
if response.errcode == ErrorCode::ErrCoordinatorHandleZkProofFailure
&& req.status != ProofStatus::Ok
&& response
.errmsg
.contains("validator failure proof msg status not ok")
{
return Err(anyhow::anyhow!(ProofStatusNotOKError));
}
Ok(response)
self.post_with_token(method, req, token).await
}
async fn post_with_token<Req, Resp>(

View File

@@ -1,5 +1,4 @@
use serde::{Deserialize, Deserializer};
use std::fmt;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ErrorCode {
@@ -52,14 +51,3 @@ impl<'de> Deserialize<'de> for ErrorCode {
Ok(ErrorCode::from_i32(v))
}
}
// ====================================================
#[derive(Debug, Clone)]
pub struct ProofStatusNotOKError;
impl fmt::Display for ProofStatusNotOKError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "proof status not ok")
}
}

View File

@@ -1,5 +1,4 @@
#![feature(lazy_cell)]
#![feature(core_intrinsics)]
mod config;
mod coordinator_client;
@@ -38,7 +37,7 @@ struct Args {
log_file: Option<String>,
}
fn start() -> Result<()> {
fn main() -> Result<(), Box<dyn std::error::Error>> {
let args = Args::parse();
if args.version {
@@ -77,10 +76,3 @@ fn start() -> Result<()> {
Ok(())
}
fn main() {
let result = start();
if let Err(e) = result {
log::error!("main exit with error {:#}", e)
}
}

View File

@@ -1,4 +1,4 @@
use super::{coordinator_client::ProofStatusNotOKError, prover::Prover, task_cache::TaskCache};
use super::{prover::Prover, task_cache::TaskCache};
use anyhow::{Context, Result};
use std::rc::Rc;
@@ -16,11 +16,7 @@ impl<'a> TaskProcessor<'a> {
loop {
log::info!("start a new round.");
if let Err(err) = self.prove_and_submit() {
if err.is::<ProofStatusNotOKError>() {
log::info!("proof status not ok, downgrade level to info.");
} else {
log::error!("encounter error: {:#}", err);
}
log::error!("encounter error: {:#}", err);
} else {
log::info!("prove & submit succeed.");
}
@@ -58,18 +54,11 @@ impl<'a> TaskProcessor<'a> {
);
let result = match self.prover.prove_task(&task_wrapper.task) {
Ok(proof_detail) => self.prover.submit_proof(proof_detail, &task_wrapper.task),
Err(error) => {
log::error!(
"failed to prove task, id: {}, error: {:#}",
&task_wrapper.task.id,
error
);
self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::NoPanic,
error,
)
}
Err(error) => self.prover.submit_error(
&task_wrapper.task,
super::types::ProofFailureType::NoPanic,
error,
),
};
return result;
}

View File

@@ -54,7 +54,7 @@ impl Default for TaskType {
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProverType {
Chunk,
Batch,

View File

@@ -1,6 +1,5 @@
mod common;
mod curie;
mod darwin;
mod darwin_v2;
use super::geth_client::GethClient;
use crate::{
@@ -9,8 +8,8 @@ use crate::{
utils::get_task_types,
};
use anyhow::{bail, Result};
use curie::CurieHandler;
use darwin::DarwinHandler;
use darwin_v2::DarwinV2Handler;
use std::{cell::RefCell, collections::HashMap, rc::Rc};
type HardForkName = String;
@@ -39,7 +38,7 @@ pub struct CircuitsHandlerProvider<'a> {
geth_client: Option<Rc<RefCell<GethClient>>>,
circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>,
current_fork_name: Option<HardForkName>,
current_hard_fork_name: Option<HardForkName>,
current_circuit: Option<Rc<Box<dyn CircuitsHandler>>>,
}
@@ -61,7 +60,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
&config.low_version_circuit.hard_fork_name
);
AssetsDirEnvConfig::enable_first();
DarwinHandler::new(
CurieHandler::new(
prover_type,
&config.low_version_circuit.params_path,
&config.low_version_circuit.assets_path,
@@ -84,7 +83,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
&config.high_version_circuit.hard_fork_name
);
AssetsDirEnvConfig::enable_second();
DarwinV2Handler::new(
DarwinHandler::new(
prover_type,
&config.high_version_circuit.params_path,
&config.high_version_circuit.assets_path,
@@ -103,7 +102,7 @@ impl<'a> CircuitsHandlerProvider<'a> {
config,
geth_client,
circuits_handler_builder_map: m,
current_fork_name: None,
current_hard_fork_name: None,
current_circuit: None,
};
@@ -114,12 +113,13 @@ impl<'a> CircuitsHandlerProvider<'a> {
&mut self,
hard_fork_name: &String,
) -> Result<Rc<Box<dyn CircuitsHandler>>> {
match &self.current_fork_name {
Some(fork_name) if fork_name == hard_fork_name => {
match &self.current_hard_fork_name {
Some(name) if name == hard_fork_name => {
log::info!("get circuits handler from cache");
if let Some(handler) = &self.current_circuit {
Ok(handler.clone())
} else {
log::error!("missing cached handler, there must be something wrong.");
bail!("missing cached handler, there must be something wrong.")
}
}
@@ -131,11 +131,12 @@ impl<'a> CircuitsHandlerProvider<'a> {
log::info!("building circuits handler for {hard_fork_name}");
let handler = builder(self.prover_type, self.config, self.geth_client.clone())
.expect("failed to build circuits handler");
self.current_fork_name = Some(hard_fork_name.clone());
self.current_hard_fork_name = Some(hard_fork_name.clone());
let rc_handler = Rc::new(handler);
self.current_circuit = Some(rc_handler.clone());
Ok(rc_handler)
} else {
log::error!("missing builder, there must be something wrong.");
bail!("missing builder, there must be something wrong.")
}
}

View File

@@ -1,33 +0,0 @@
use std::{collections::BTreeMap, rc::Rc};
use crate::types::ProverType;
use once_cell::sync::OnceCell;
use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};
static mut PARAMS_MAP: OnceCell<Rc<BTreeMap<u32, ParamsKZG<Bn256>>>> = OnceCell::new();
pub fn get_params_map_instance<'a, F>(load_params_func: F) -> &'a BTreeMap<u32, ParamsKZG<Bn256>>
where
F: FnOnce() -> BTreeMap<u32, ParamsKZG<Bn256>>,
{
unsafe {
PARAMS_MAP.get_or_init(|| {
let params_map = load_params_func();
Rc::new(params_map)
})
}
}
pub fn get_degrees<F>(prover_types: &std::collections::HashSet<ProverType>, f: F) -> Vec<u32>
where
F: FnMut(&ProverType) -> Vec<u32>,
{
prover_types
.iter()
.flat_map(f)
.collect::<std::collections::HashSet<u32>>()
.into_iter()
.collect()
}

View File

@@ -1,4 +1,4 @@
use super::{common::*, CircuitsHandler};
use super::CircuitsHandler;
use crate::{
geth_client::GethClient,
types::{ProverType, TaskType},
@@ -10,29 +10,21 @@ use serde::Deserialize;
use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use prover_darwin_v2::{
aggregator::Prover as BatchProver,
check_chunk_hashes,
common::Prover as CommonProver,
config::{AGG_DEGREES, ZKEVM_DEGREES},
zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
ChunkProof, ChunkProvingTask,
use prover_curie::{
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
};
// Only used for debugging.
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());
#[derive(Debug, Clone, Deserialize)]
#[derive(Deserialize)]
pub struct BatchTaskDetail {
pub chunk_infos: Vec<ChunkInfo>,
#[serde(flatten)]
pub batch_proving_task: BatchProvingTask,
pub chunk_proofs: Vec<ChunkProof>,
}
type BundleTaskDetail = BundleProvingTask;
#[derive(Debug, Clone, Deserialize)]
#[derive(Deserialize)]
pub struct ChunkTaskDetail {
pub block_hashes: Vec<CommonHash>,
}
@@ -42,66 +34,33 @@ fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
}
#[derive(Default)]
pub struct DarwinV2Handler {
chunk_prover: Option<RefCell<ChunkProver<'static>>>,
batch_prover: Option<RefCell<BatchProver<'static>>>,
pub struct CurieHandler {
chunk_prover: Option<RefCell<ChunkProver>>,
batch_prover: Option<RefCell<BatchProver>>,
geth_client: Option<Rc<RefCell<GethClient>>>,
}
impl DarwinV2Handler {
pub fn new_multi(
prover_types: Vec<ProverType>,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
let class_name = std::intrinsics::type_name::<Self>();
let prover_types_set = prover_types
.into_iter()
.collect::<std::collections::HashSet<ProverType>>();
let mut handler = Self {
batch_prover: None,
chunk_prover: None,
geth_client,
};
let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
ProverType::Chunk => ZKEVM_DEGREES.clone(),
ProverType::Batch => AGG_DEGREES.clone(),
});
let params_map = get_params_map_instance(|| {
log::info!(
"calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
class_name,
prover_types_set,
degrees
);
CommonProver::load_params_map(params_dir, &degrees)
});
for prover_type in prover_types_set {
match prover_type {
ProverType::Chunk => {
handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
params_map, assets_dir,
)));
}
ProverType::Batch => {
handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
params_map, assets_dir,
)))
}
}
}
Ok(handler)
}
impl CurieHandler {
pub fn new(
prover_type: ProverType,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
match prover_type {
ProverType::Chunk => Ok(Self {
chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
batch_prover: None,
geth_client,
}),
ProverType::Batch => Ok(Self {
batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
chunk_prover: None,
geth_client,
}),
}
}
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
@@ -124,15 +83,11 @@ impl DarwinV2Handler {
Ok(serde_json::to_string(&chunk_proof)?)
}
fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
fn gen_batch_proof_raw(
&self,
chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)>,
) -> Result<BatchProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.batch_proving_task.chunk_proofs.clone())
.collect();
let chunk_proofs: Vec<ChunkProof> =
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
@@ -142,11 +97,11 @@ impl DarwinV2Handler {
bail!("non-match chunk protocol")
}
check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
let batch_proof = prover.borrow_mut().gen_batch_proof(
batch_task_detail.batch_proving_task,
None,
self.get_output_dir(),
)?;
let batch = BatchProvingTask { chunk_proofs };
let batch_proof =
prover
.borrow_mut()
.gen_agg_evm_proof(batch, None, self.get_output_dir())?;
return Ok(batch_proof);
}
@@ -155,32 +110,12 @@ impl DarwinV2Handler {
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_batch_proof for task {}", task.id);
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
let batch_proof = self.gen_batch_proof_raw(batch_task_detail)?;
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
self.gen_chunk_hashes_proofs(task)?;
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
Ok(serde_json::to_string(&batch_proof)?)
}
fn gen_bundle_proof_raw(&self, bundle_task_detail: BundleTaskDetail) -> Result<BundleProof> {
if let Some(prover) = self.batch_prover.as_ref() {
let bundle_proof = prover.borrow_mut().gen_bundle_proof(
bundle_task_detail,
None,
self.get_output_dir(),
)?;
return Ok(bundle_proof);
}
unreachable!("please check errors in proof_type logic")
}
fn gen_bundle_proof(&self, task: &crate::types::Task) -> Result<String> {
log::info!("[circuit] gen_bundle_proof for task {}", task.id);
let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&task.task_data)?;
let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail)?;
Ok(serde_json::to_string(&bundle_proof)?)
}
fn get_output_dir(&self) -> Option<&str> {
OUTPUT_DIR.as_deref()
}
@@ -190,6 +125,17 @@ impl DarwinV2Handler {
self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
}
fn gen_chunk_hashes_proofs(&self, task: &Task) -> Result<Vec<(ChunkInfo, ChunkProof)>> {
let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
Ok(batch_task_detail
.chunk_infos
.clone()
.into_iter()
.zip(batch_task_detail.chunk_proofs.clone())
.collect())
}
fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> {
if block_hashes.is_empty() {
log::error!("[prover] failed to get sorted traces: block_hashes are empty");
@@ -244,7 +190,7 @@ impl DarwinV2Handler {
}
}
impl CircuitsHandler for DarwinV2Handler {
impl CircuitsHandler for CurieHandler {
fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
match task_type {
TaskType::Chunk => self
@@ -254,11 +200,8 @@ impl CircuitsHandler for DarwinV2Handler {
TaskType::Batch => self
.batch_prover
.as_ref()
.and_then(|prover| prover.borrow().get_batch_vk()),
TaskType::Bundle => self
.batch_prover
.as_ref()
.and_then(|prover| prover.borrow().get_bundle_vk()),
.and_then(|prover| prover.borrow().get_vk()),
TaskType::Bundle => None,
_ => unreachable!(),
}
}
@@ -267,7 +210,6 @@ impl CircuitsHandler for DarwinV2Handler {
match task_type {
TaskType::Chunk => self.gen_chunk_proof(task),
TaskType::Batch => self.gen_batch_proof(task),
TaskType::Bundle => self.gen_bundle_proof(task),
_ => unreachable!(),
}
}
@@ -279,11 +221,7 @@ impl CircuitsHandler for DarwinV2Handler {
mod tests {
use super::*;
use crate::zk_circuits_handler::utils::encode_vk;
use ethers_core::types::H256;
use prover_darwin_v2::{
aggregator::eip4844, utils::chunk_trace_to_witness_block, BatchData, BatchHeader,
MAX_AGG_SNARKS,
};
use prover_curie::utils::chunk_trace_to_witness_block;
use std::{path::PathBuf, sync::LazyLock};
#[ctor::ctor]
@@ -294,7 +232,7 @@ mod tests {
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("DARWIN_V2_TEST_DIR")
std::env::var("CURIE_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
@@ -306,9 +244,9 @@ mod tests {
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
@@ -318,27 +256,20 @@ mod tests {
#[test]
fn test_circuits() -> Result<()> {
let bi_handler = DarwinV2Handler::new_multi(
vec![ProverType::Chunk, ProverType::Batch],
&PARAMS_PATH,
&ASSETS_PATH,
None,
)?;
let chunk_handler = CurieHandler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_handler = bi_handler;
let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
let chunk_dir_paths = get_chunk_dir_paths()?;
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
let mut chunk_traces = vec![];
let mut chunk_infos = vec![];
let mut chunk_proofs = vec![];
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
let chunk_id = format!("chunk_proof{}", id + 1);
log::info!("start to process {chunk_id}");
let chunk_trace = read_chunk_trace(chunk_path)?;
chunk_traces.push(chunk_trace.clone());
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
chunk_infos.push(chunk_info);
@@ -349,96 +280,30 @@ mod tests {
chunk_proofs.push(chunk_proof);
}
let batch_handler = chunk_handler;
let batch_handler = CurieHandler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
let batch_task_detail = make_batch_task_detail(chunk_traces, chunk_proofs, None);
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
log::info!("start to prove batch");
let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail)?;
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
let proof_data = serde_json::to_string(&batch_proof)?;
dump_proof("batch_proof".to_string(), proof_data)?;
Ok(())
}
// copied from https://github.com/scroll-tech/scroll-prover/blob/main/integration/src/prove.rs
fn get_blob_from_chunks(chunks: &[ChunkInfo]) -> Vec<u8> {
let num_chunks = chunks.len();
let padded_chunk =
ChunkInfo::mock_padded_chunk_info_for_testing(chunks.last().as_ref().unwrap());
let chunks_with_padding = [
chunks.to_vec(),
vec![padded_chunk; MAX_AGG_SNARKS - num_chunks],
]
.concat();
let batch_data = BatchData::<{ MAX_AGG_SNARKS }>::new(chunks.len(), &chunks_with_padding);
let batch_bytes = batch_data.get_batch_data_bytes();
let blob_bytes = eip4844::get_blob_bytes(&batch_bytes);
log::info!("blob_bytes len {}", blob_bytes.len());
blob_bytes
}
// TODO: chunk_infos can be extracted from chunk_proofs.
// Still needed?
fn make_batch_task_detail(
chunk_traces: Vec<Vec<BlockTrace>>,
chunk_proofs: Vec<ChunkProof>,
last_batcher_header: Option<BatchHeader<{ MAX_AGG_SNARKS }>>,
) -> BatchTaskDetail {
// dummy parent batch hash
let dummy_parent_batch_hash = H256([
0xab, 0xac, 0xad, 0xae, 0xaf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
]);
let chunk_infos: Vec<_> = chunk_proofs.iter().map(|p| p.chunk_info.clone()).collect();
let l1_message_popped = chunk_traces
.iter()
.flatten()
.map(|chunk| chunk.num_l1_txs())
.sum();
let last_block_timestamp = chunk_traces.last().map_or(0, |block_traces| {
block_traces
.last()
.map_or(0, |block_trace| block_trace.header.timestamp.as_u64())
});
let blob_bytes = get_blob_from_chunks(&chunk_infos);
let batch_header = BatchHeader::construct_from_chunks(
last_batcher_header.map_or(4, |header| header.version),
last_batcher_header.map_or(123, |header| header.batch_index + 1),
l1_message_popped,
last_batcher_header.map_or(l1_message_popped, |header| {
header.total_l1_message_popped + l1_message_popped
}),
last_batcher_header.map_or(dummy_parent_batch_hash, |header| header.batch_hash()),
last_block_timestamp,
&chunk_infos,
&blob_bytes,
);
BatchTaskDetail {
chunk_infos,
batch_proving_task: BatchProvingTask {
chunk_proofs,
batch_header,
blob_bytes,
},
}
}
fn check_vk(proof_type: TaskType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", proof_type);
let vk_from_file = read_vk(proof_type).unwrap();
fn check_vk(task_type: TaskType, vk: Vec<u8>, info: &str) {
log::info!("check_vk, {:?}", task_type);
let vk_from_file = read_vk(task_type).unwrap();
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
}
fn read_vk(proof_type: TaskType) -> Result<String> {
log::info!("read_vk, {:?}", proof_type);
let vk_file = match proof_type {
fn read_vk(task_type: TaskType) -> Result<String> {
log::info!("read_vk, {:?}", task_type);
let vk_file = match task_type {
TaskType::Chunk => CHUNK_VK_PATH.clone(),
TaskType::Batch => BATCH_VK_PATH.clone(),
TaskType::Bundle => todo!(),
TaskType::Bundle => unreachable!(),
TaskType::Undefined => unreachable!(),
};

View File

@@ -1,4 +1,4 @@
use super::{common::*, CircuitsHandler};
use super::CircuitsHandler;
use crate::{
geth_client::GethClient,
types::{ProverType, TaskType},
@@ -11,11 +11,7 @@ use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use prover_darwin::{
aggregator::Prover as BatchProver,
check_chunk_hashes,
common::Prover as CommonProver,
config::{AGG_DEGREES, ZKEVM_DEGREES},
zkevm::Prover as ChunkProver,
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver,
BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
ChunkProof, ChunkProvingTask,
};
@@ -43,65 +39,32 @@ fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
#[derive(Default)]
pub struct DarwinHandler {
chunk_prover: Option<RefCell<ChunkProver<'static>>>,
batch_prover: Option<RefCell<BatchProver<'static>>>,
chunk_prover: Option<RefCell<ChunkProver>>,
batch_prover: Option<RefCell<BatchProver>>,
geth_client: Option<Rc<RefCell<GethClient>>>,
}
impl DarwinHandler {
pub fn new_multi(
prover_types: Vec<ProverType>,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
let class_name = std::intrinsics::type_name::<Self>();
let prover_types_set = prover_types
.into_iter()
.collect::<std::collections::HashSet<ProverType>>();
let mut handler = Self {
batch_prover: None,
chunk_prover: None,
geth_client,
};
let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
ProverType::Chunk => ZKEVM_DEGREES.clone(),
ProverType::Batch => AGG_DEGREES.clone(),
});
let params_map = get_params_map_instance(|| {
log::info!(
"calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
class_name,
prover_types_set,
degrees
);
CommonProver::load_params_map(params_dir, &degrees)
});
for prover_type in prover_types_set {
match prover_type {
ProverType::Chunk => {
handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
params_map, assets_dir,
)));
}
ProverType::Batch => {
handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
params_map, assets_dir,
)))
}
}
}
Ok(handler)
}
pub fn new(
prover_type: ProverType,
params_dir: &str,
assets_dir: &str,
geth_client: Option<Rc<RefCell<GethClient>>>,
) -> Result<Self> {
Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
match prover_type {
ProverType::Chunk => Ok(Self {
chunk_prover: Some(RefCell::new(ChunkProver::from_dirs(params_dir, assets_dir))),
batch_prover: None,
geth_client,
}),
ProverType::Batch => Ok(Self {
batch_prover: Some(RefCell::new(BatchProver::from_dirs(params_dir, assets_dir))),
chunk_prover: None,
geth_client,
}),
}
}
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
@@ -290,7 +253,7 @@ mod tests {
static DEFAULT_WORK_DIR: &str = "/assets";
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
std::env::var("DARWIN_TEST_DIR")
std::env::var("CURIE_TEST_DIR")
.unwrap_or(String::from(DEFAULT_WORK_DIR))
.trim_end_matches('/')
.to_string()
@@ -302,9 +265,9 @@ mod tests {
static BATCH_DIR_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
static BATCH_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
static CHUNK_VK_PATH: LazyLock<String> =
LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
#[test]
fn it_works() {
@@ -314,14 +277,9 @@ mod tests {
#[test]
fn test_circuits() -> Result<()> {
let bi_handler = DarwinHandler::new_multi(
vec![ProverType::Chunk, ProverType::Batch],
&PARAMS_PATH,
&ASSETS_PATH,
None,
)?;
let chunk_handler =
DarwinHandler::new(ProverType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
let chunk_handler = bi_handler;
let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
@@ -344,7 +302,8 @@ mod tests {
chunk_proofs.push(chunk_proof);
}
let batch_handler = chunk_handler;
let batch_handler =
DarwinHandler::new(ProverType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
let batch_task_detail = make_batch_task_detail(chunk_infos, chunk_proofs);

View File

@@ -24,12 +24,7 @@
"l1_base_fee_default": 15000000000,
"l1_blob_base_fee_default": 1
},
"gas_oracle_sender_signer_config": {
"signer_type": "PrivateKey",
"private_key_signer_config": {
"private_key": "1313131313131313131313131313131313131313131313131313131313131313"
}
}
"gas_oracle_sender_private_key": "1313131313131313131313131313131313131313131313131313131313131313"
}
},
"l2_config": {
@@ -65,24 +60,9 @@
"enable_test_env_bypass_features": true,
"finalize_batch_without_proof_timeout_sec": 7200,
"finalize_bundle_without_proof_timeout_sec": 7200,
"gas_oracle_sender_signer_config": {
"signer_type": "PrivateKey",
"private_key_signer_config": {
"private_key": "1313131313131313131313131313131313131313131313131313131313131313"
}
},
"commit_sender_signer_config": {
"signer_type": "PrivateKey",
"private_key_signer_config": {
"private_key": "1414141414141414141414141414141414141414141414141414141414141414"
}
},
"finalize_sender_signer_config": {
"signer_type": "PrivateKey",
"private_key_signer_config": {
"private_key": "1515151515151515151515151515151515151515151515151515151515151515"
}
},
"gas_oracle_sender_private_key": "1313131313131313131313131313131313131313131313131313131313131313",
"commit_sender_private_key": "1414141414141414141414141414141414141414141414141414141414141414",
"finalize_sender_private_key": "1515151515151515151515151515151515151515151515151515151515151515",
"l1_commit_gas_limit_multiplier": 1.2
},
"chunk_proposer_config": {

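For orientation, a minimal sketch of the nested signer shape used above (signer_type plus private_key_signer_config.private_key). The struct and field names are illustrative, inferred from these JSON keys and from the field paths asserted in the config test at the end of this compare; they are not copied from the repository's source.

package main

import (
	"encoding/json"
	"fmt"
)

// Illustrative structs for the nested signer config shown in the hunk above.
type PrivateKeySignerConfig struct {
	PrivateKey string `json:"private_key"`
}

type SignerConfig struct {
	SignerType             string                 `json:"signer_type"`
	PrivateKeySignerConfig PrivateKeySignerConfig `json:"private_key_signer_config"`
}

func main() {
	raw := `{
	  "signer_type": "PrivateKey",
	  "private_key_signer_config": {
	    "private_key": "1313131313131313131313131313131313131313131313131313131313131313"
	  }
	}`

	var sc SignerConfig
	if err := json.Unmarshal([]byte(raw), &sc); err != nil {
		panic(err)
	}
	// Prints the signer type and the key taken from the sample relayer config above.
	fmt.Println(sc.SignerType, sc.PrivateKeySignerConfig.PrivateKey)
}

Running this prints "PrivateKey" followed by the 1313... value from the sample config, which is the same value the config test later overrides through an environment variable.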
View File

@@ -9,10 +9,9 @@ require (
github.com/gin-gonic/gin v1.9.1
github.com/go-resty/resty/v2 v2.7.0
github.com/holiman/uint256 v1.2.4
github.com/mitchellh/mapstructure v1.5.0
github.com/prometheus/client_golang v1.16.0
github.com/scroll-tech/da-codec v0.1.1-0.20240819100936-c6af3bbe7068
github.com/scroll-tech/go-ethereum v1.10.14-0.20240821075135-bdd1b005d40f
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6
github.com/smartystreets/goconvey v1.8.0
github.com/spf13/viper v1.19.0
github.com/stretchr/testify v1.9.0
@@ -71,6 +70,7 @@ require (
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/mitchellh/pointerstructure v1.2.0 // indirect
github.com/mmcloughlin/addchain v0.4.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect

View File

@@ -248,10 +248,10 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/scroll-tech/da-codec v0.1.1-0.20240819100936-c6af3bbe7068 h1:KyTp4aedcpjr/rbntrmlhUxjrDYu1Q02QDLaF5vqpxs=
github.com/scroll-tech/da-codec v0.1.1-0.20240819100936-c6af3bbe7068/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240821075135-bdd1b005d40f h1:0XhY20/Sh2UCroZqD4orK7eDElQD2XK4GLrTbPmUBpw=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240821075135-bdd1b005d40f/go.mod h1:jLTGZ5iL5T7g1BEWrQXVIR+wutJFDTVs/mCfjAlrhrA=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb h1:uOKdmDT0LsuS3gfynEjR4zA3Ooh6p2Z3O+IMRj2r8LA=
github.com/scroll-tech/da-codec v0.0.0-20240730031611-1b736159d5cb/go.mod h1:D6XEESeNVJkQJlv3eK+FyR+ufPkgVQbJzERylQi53Bs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6 h1:Q8YyvrcPIcXQwE4ucm4bqmPh6TP6IB1GUTXripf2WyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240626125436-418bc6f728b6/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=

View File

@@ -2,87 +2,47 @@ package config
import (
"fmt"
"reflect"
"scroll-tech/common/database"
"strings"
"github.com/mitchellh/mapstructure"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/rpc"
"github.com/spf13/viper"
)
// Config load configuration items.
type Config struct {
L1Config *L1Config `json:"l1_config"`
L2Config *L2Config `json:"l2_config"`
DBConfig *database.Config `json:"db_config"`
L1Config *L1Config `mapstructure:"l1_config"`
L2Config *L2Config `mapstructure:"l2_config"`
DBConfig *database.Config `mapstructure:"db_config"`
}
// NewConfig returns a new instance of Config.
func NewConfig(file string) (*Config, error) {
v := viper.New()
v.SetConfigFile(file)
v.SetConfigType("json")
fmt.Printf("Loading config from file: %s\n", file)
v.SetEnvPrefix("SCROLL_ROLLUP")
v.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
viper.SetConfigFile(file)
viper.SetConfigType("json")
viper.SetEnvPrefix("SCROLL_ROLLUP")
viper.AutomaticEnv()
v.AutomaticEnv()
if err := v.ReadInConfig(); err != nil {
if err := viper.ReadInConfig(); err != nil {
fmt.Printf("Error reading config file: %v\n", err)
return nil, err
}
fmt.Println("Successfully read config file")
cfg := &Config{}
decoderConfig := &mapstructure.DecoderConfig{
TagName: "json",
Result: cfg,
DecodeHook: mapstructure.ComposeDecodeHookFunc(
func(from reflect.Type, to reflect.Type, data interface{}) (interface{}, error) {
if to == reflect.TypeOf(rpc.BlockNumber(0)) {
var bn rpc.BlockNumber
err := bn.UnmarshalJSON([]byte(fmt.Sprintf("%v", data)))
if err != nil {
return nil, fmt.Errorf("invalid block number, data: %v, error: %v", data, err)
}
return bn, nil
}
if to == reflect.TypeOf(common.Address{}) {
s, ok := data.(string)
if !ok {
return nil, fmt.Errorf("invalid address, data: %v", data)
}
return common.HexToAddress(s), nil
}
if to == reflect.TypeOf(common.Hash{}) {
s, ok := data.(string)
if !ok {
return nil, fmt.Errorf("invalid hash, data: %v", data)
}
return common.HexToHash(s), nil
}
return data, nil
},
),
}
decoder, err := mapstructure.NewDecoder(decoderConfig)
if err != nil {
var cfg Config
if err := viper.Unmarshal(&cfg); err != nil {
fmt.Printf("Error unmarshaling config: %v\n", err)
return nil, err
}
fmt.Println("Successfully unmarshaled config")
if err := decoder.Decode(v.AllSettings()); err != nil {
return nil, err
}
fmt.Printf("Config: %+v\n", cfg)
fmt.Printf("L1 Config: %+v\n", cfg.L1Config)
fmt.Printf("L2 Config: %+v\n", cfg.L2Config)
fmt.Printf("DB Config: %+v\n", cfg.DBConfig)
fmt.Printf("L1 Endpoint: %s\n", cfg.L1Config.Endpoint)
fmt.Printf("L2 Endpoint: %s\n", cfg.L2Config.Endpoint)
fmt.Printf("DB DSN: %s\n", cfg.DBConfig.DSN)
if err := v.Unmarshal(cfg, viper.DecodeHook(decoderConfig.DecodeHook)); err != nil {
return nil, err
}
return cfg, nil
return &cfg, nil
}
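
For reference, the instance-based variant of NewConfig above lets any JSON key be overridden through an environment variable: the key path is upper-cased, dots become underscores, and the SCROLL_ROLLUP prefix is prepended. A minimal sketch of that mapping (not part of the changeset), assuming a hypothetical config with a db_config.dsn entry:

package main

import (
    "fmt"
    "os"
    "strings"

    "github.com/spf13/viper"
)

func main() {
    // "db_config.dsn" -> upper-cased, "." replaced by "_", prefixed -> SCROLL_ROLLUP_DB_CONFIG_DSN.
    os.Setenv("SCROLL_ROLLUP_DB_CONFIG_DSN", "postgres://user:pass@localhost:5432/scroll")

    v := viper.New()
    v.SetEnvPrefix("SCROLL_ROLLUP")
    v.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
    v.AutomaticEnv()

    // Resolves from the environment even before a config file is merged in.
    fmt.Println(v.GetString("db_config.dsn"))
}

This is the same mechanism the config test below exercises with keys such as SCROLL_ROLLUP_DB_CONFIG_DSN.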

View File

@@ -55,31 +55,4 @@ func TestConfig(t *testing.T) {
_, err = NewConfig(tmpFile.Name())
assert.Error(t, err)
})
t.Run("Override config value", func(t *testing.T) {
cfg, err := NewConfig("../../conf/config.json")
assert.NoError(t, err)
os.Setenv("SCROLL_ROLLUP_DB_CONFIG_DSN", "postgres://test:test@postgresql:5432/scroll?sslmode=disable")
os.Setenv("SCROLL_ROLLUP_L1_CONFIG_RELAYER_CONFIG_GAS_ORACLE_SENDER_SIGNER_CONFIG_PRIVATE_KEY_SIGNER_CONFIG_PRIVATE_KEY", "1616161616161616161616161616161616161616161616161616161616161616")
os.Setenv("SCROLL_ROLLUP_L2_CONFIG_RELAYER_CONFIG_GAS_ORACLE_SENDER_SIGNER_CONFIG_PRIVATE_KEY_SIGNER_CONFIG_PRIVATE_KEY", "1717171717171717171717171717171717171717171717171717171717171717")
os.Setenv("SCROLL_ROLLUP_L2_CONFIG_RELAYER_CONFIG_COMMIT_SENDER_SIGNER_CONFIG_PRIVATE_KEY_SIGNER_CONFIG_PRIVATE_KEY", "1818181818181818181818181818181818181818181818181818181818181818")
os.Setenv("SCROLL_ROLLUP_L2_CONFIG_RELAYER_CONFIG_FINALIZE_SENDER_SIGNER_CONFIG_PRIVATE_KEY_SIGNER_CONFIG_PRIVATE_KEY", "1919191919191919191919191919191919191919191919191919191919191919")
cfg2, err := NewConfig("../../conf/config.json")
assert.NoError(t, err)
assert.NotEqual(t, cfg.DBConfig.DSN, cfg2.DBConfig.DSN)
assert.NotEqual(t, cfg.L1Config.RelayerConfig.GasOracleSenderSignerConfig, cfg2.L1Config.RelayerConfig.GasOracleSenderSignerConfig)
assert.NotEqual(t, cfg.L2Config.RelayerConfig.GasOracleSenderSignerConfig, cfg2.L2Config.RelayerConfig.GasOracleSenderSignerConfig)
assert.NotEqual(t, cfg.L2Config.RelayerConfig.CommitSenderSignerConfig, cfg2.L2Config.RelayerConfig.CommitSenderSignerConfig)
assert.NotEqual(t, cfg.L2Config.RelayerConfig.FinalizeSenderSignerConfig, cfg2.L2Config.RelayerConfig.FinalizeSenderSignerConfig)
assert.Equal(t, cfg2.DBConfig.DSN, "postgres://test:test@postgresql:5432/scroll?sslmode=disable")
assert.Equal(t, "1414141414141414141414141414141414141414141414141414141414141414", cfg.L2Config.RelayerConfig.CommitSenderSignerConfig.PrivateKeySignerConfig.PrivateKey)
assert.Equal(t, "1616161616161616161616161616161616161616161616161616161616161616", cfg2.L1Config.RelayerConfig.GasOracleSenderSignerConfig.PrivateKeySignerConfig.PrivateKey)
assert.Equal(t, "1717171717171717171717171717171717171717171717171717171717171717", cfg2.L2Config.RelayerConfig.GasOracleSenderSignerConfig.PrivateKeySignerConfig.PrivateKey)
assert.Equal(t, "1818181818181818181818181818181818181818181818181818181818181818", cfg2.L2Config.RelayerConfig.CommitSenderSignerConfig.PrivateKeySignerConfig.PrivateKey)
assert.Equal(t, "1919191919191919191919191919191919191919191919191919191919191919", cfg2.L2Config.RelayerConfig.FinalizeSenderSignerConfig.PrivateKeySignerConfig.PrivateKey)
})
}

View File

@@ -1,7 +1,12 @@
package config
import (
"crypto/ecdsa"
"encoding/json"
"fmt"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/rpc"
)
@@ -54,11 +59,10 @@ type RelayerConfig struct {
ChainMonitor *ChainMonitor `json:"chain_monitor"`
// L1CommitGasLimitMultiplier multiplier for fallback gas limit in commitBatch txs
L1CommitGasLimitMultiplier float64 `json:"l1_commit_gas_limit_multiplier,omitempty"`
// Configs of transaction signers (GasOracle, Commit, Finalize)
GasOracleSenderSignerConfig *SignerConfig `json:"gas_oracle_sender_signer_config"`
CommitSenderSignerConfig *SignerConfig `json:"commit_sender_signer_config"`
FinalizeSenderSignerConfig *SignerConfig `json:"finalize_sender_signer_config"`
// The private key of the relayer
GasOracleSenderPrivateKey *ecdsa.PrivateKey `json:"-"`
CommitSenderPrivateKey *ecdsa.PrivateKey `json:"-"`
FinalizeSenderPrivateKey *ecdsa.PrivateKey `json:"-"`
// Indicates if bypass features specific to testing environments are enabled.
EnableTestEnvBypassFeatures bool `json:"enable_test_env_bypass_features"`
@@ -68,22 +72,12 @@ type RelayerConfig struct {
FinalizeBundleWithoutProofTimeoutSec uint64 `json:"finalize_bundle_without_proof_timeout_sec"`
}
// AlternativeGasTokenConfig The configuration for handling token exchange rates when updating the gas price oracle.
type AlternativeGasTokenConfig struct {
Enabled bool `json:"enabled"`
Mode string `json:"mode"`
FixedExchangeRate float64 `json:"fixed_exchange_rate"` // fixed exchange rate of L2 gas token / L1 gas token
TokenSymbolPair string `json:"token_symbol_pair"` // The pair should be L2 gas token symbol + L1 gas token symbol
}
// GasOracleConfig The config for updating gas price oracle.
type GasOracleConfig struct {
// MinGasPrice stores the minimum gas price to set.
MinGasPrice uint64 `json:"min_gas_price"`
// GasPriceDiff is the minimum percentage of gas price difference to update gas oracle.
GasPriceDiff uint64 `json:"gas_price_diff"`
// AlternativeGasTokenConfig The configuration for handling token exchange rates when updating the gas price oracle.
AlternativeGasTokenConfig *AlternativeGasTokenConfig `json:"alternative_gas_token_config"`
// The following configs are only for updating L1 gas price, used for sender in L2.
// The weight for L1 base fee.
@@ -96,20 +90,77 @@ type GasOracleConfig struct {
L1BlobBaseFeeDefault uint64 `json:"l1_blob_base_fee_default"`
}
// SignerConfig - signer configuration, containing the signer type and the config corresponding to that type
type SignerConfig struct {
SignerType string `json:"signer_type"` // type of signer, either PrivateKey or RemoteSigner
PrivateKeySignerConfig *PrivateKeySignerConfig `json:"private_key_signer_config"`
RemoteSignerConfig *RemoteSignerConfig `json:"remote_signer_config"`
// relayerConfigAlias RelayerConfig alias name
type relayerConfigAlias RelayerConfig
func convertAndCheck(key string, uniqueAddressesSet map[string]struct{}) (*ecdsa.PrivateKey, error) {
if key == "" {
return nil, nil
}
privKey, err := crypto.ToECDSA(common.FromHex(key))
if err != nil {
return nil, err
}
addr := crypto.PubkeyToAddress(privKey.PublicKey).Hex()
if _, exists := uniqueAddressesSet[addr]; exists {
return nil, fmt.Errorf("detected duplicated address for private key: %s", addr)
}
uniqueAddressesSet[addr] = struct{}{}
return privKey, nil
}
// PrivateKeySignerConfig - config of private signer, contains private key
type PrivateKeySignerConfig struct {
PrivateKey string `json:"private_key"` // private key of signer in case of PrivateKey signerType
// UnmarshalJSON unmarshals the relayer_config struct.
func (r *RelayerConfig) UnmarshalJSON(input []byte) error {
var privateKeysConfig struct {
relayerConfigAlias
GasOracleSenderPrivateKey string `json:"gas_oracle_sender_private_key"`
CommitSenderPrivateKey string `json:"commit_sender_private_key"`
FinalizeSenderPrivateKey string `json:"finalize_sender_private_key"`
}
var err error
if err = json.Unmarshal(input, &privateKeysConfig); err != nil {
return fmt.Errorf("failed to unmarshal private keys config: %w", err)
}
*r = RelayerConfig(privateKeysConfig.relayerConfigAlias)
uniqueAddressesSet := make(map[string]struct{})
r.GasOracleSenderPrivateKey, err = convertAndCheck(privateKeysConfig.GasOracleSenderPrivateKey, uniqueAddressesSet)
if err != nil {
return fmt.Errorf("error converting and checking gas oracle sender private key: %w", err)
}
r.CommitSenderPrivateKey, err = convertAndCheck(privateKeysConfig.CommitSenderPrivateKey, uniqueAddressesSet)
if err != nil {
return fmt.Errorf("error converting and checking commit sender private key: %w", err)
}
r.FinalizeSenderPrivateKey, err = convertAndCheck(privateKeysConfig.FinalizeSenderPrivateKey, uniqueAddressesSet)
if err != nil {
return fmt.Errorf("error converting and checking finalize sender private key: %w", err)
}
return nil
}
// RemoteSignerConfig - config of remote signer, contains the signer address and the remote signer URL
type RemoteSignerConfig struct {
RemoteSignerUrl string `json:"remote_signer_url"` // remote signer url (web3signer) in case of RemoteSigner signerType
SignerAddress string `json:"signer_address"` // address of signer
// MarshalJSON marshals the RelayerConfig, converting the private keys back to hex strings.
func (r *RelayerConfig) MarshalJSON() ([]byte, error) {
privateKeysConfig := struct {
relayerConfigAlias
// The private key of the relayer
GasOracleSenderPrivateKey string `json:"gas_oracle_sender_private_key"`
CommitSenderPrivateKey string `json:"commit_sender_private_key"`
FinalizeSenderPrivateKey string `json:"finalize_sender_private_key"`
}{}
privateKeysConfig.relayerConfigAlias = relayerConfigAlias(*r)
privateKeysConfig.GasOracleSenderPrivateKey = common.Bytes2Hex(crypto.FromECDSA(r.GasOracleSenderPrivateKey))
privateKeysConfig.CommitSenderPrivateKey = common.Bytes2Hex(crypto.FromECDSA(r.CommitSenderPrivateKey))
privateKeysConfig.FinalizeSenderPrivateKey = common.Bytes2Hex(crypto.FromECDSA(r.FinalizeSenderPrivateKey))
return json.Marshal(&privateKeysConfig)
}
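
Taken together, the UnmarshalJSON/MarshalJSON pair above keeps the three sender keys as hex strings on disk while holding parsed *ecdsa.PrivateKey values in memory, and convertAndCheck rejects any two senders that resolve to the same address. A rough usage sketch against the private-key variant of RelayerConfig shown in this hunk (illustrative only: it assumes it runs inside the rollup module, since the config package is internal, and it omits all other RelayerConfig fields):

package main

import (
    "encoding/json"
    "fmt"
    "log"

    "github.com/scroll-tech/go-ethereum/crypto"

    "scroll-tech/rollup/internal/config"
)

func main() {
    raw := []byte(`{
        "gas_oracle_sender_private_key": "1313131313131313131313131313131313131313131313131313131313131313",
        "commit_sender_private_key": "1414141414141414141414141414141414141414141414141414141414141414"
    }`)

    var rc config.RelayerConfig
    if err := json.Unmarshal(raw, &rc); err != nil {
        // Reusing one key for two senders would be rejected here with a duplicated-address error.
        log.Fatal(err)
    }
    fmt.Println(crypto.PubkeyToAddress(rc.GasOracleSenderPrivateKey.PublicKey).Hex())
}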

View File

@@ -10,6 +10,7 @@ import (
"github.com/prometheus/client_golang/prometheus"
"github.com/scroll-tech/go-ethereum/accounts/abi"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
@@ -21,7 +22,6 @@ import (
"scroll-tech/rollup/internal/config"
"scroll-tech/rollup/internal/controller/sender"
"scroll-tech/rollup/internal/orm"
rutils "scroll-tech/rollup/internal/utils"
)
// Layer1Relayer is responsible for updating L1 gas price oracle contract on L2.
@@ -57,9 +57,10 @@ func NewLayer1Relayer(ctx context.Context, db *gorm.DB, cfg *config.RelayerConfi
switch serviceType {
case ServiceTypeL1GasOracle:
gasOracleSender, err = sender.NewSender(ctx, cfg.SenderConfig, cfg.GasOracleSenderSignerConfig, "l1_relayer", "gas_oracle_sender", types.SenderTypeL1GasOracle, db, reg)
gasOracleSender, err = sender.NewSender(ctx, cfg.SenderConfig, cfg.GasOracleSenderPrivateKey, "l1_relayer", "gas_oracle_sender", types.SenderTypeL1GasOracle, db, reg)
if err != nil {
return nil, fmt.Errorf("new gas oracle sender failed, err: %w", err)
addr := crypto.PubkeyToAddress(cfg.GasOracleSenderPrivateKey.PublicKey)
return nil, fmt.Errorf("new gas oracle sender failed for address %s, err: %v", addr.Hex(), err)
}
// Ensure test features aren't enabled on the scroll mainnet.
@@ -152,31 +153,6 @@ func (r *Layer1Relayer) ProcessGasPriceOracle() {
baseFee = block.BaseFee
}
// include the token exchange rate in the fee data if the alternative gas token is enabled
if r.cfg.GasOracleConfig.AlternativeGasTokenConfig != nil && r.cfg.GasOracleConfig.AlternativeGasTokenConfig.Enabled {
// The exchange rate represents the number of native tokens on L1 required to exchange for 1 native token on L2.
var exchangeRate float64
switch r.cfg.GasOracleConfig.AlternativeGasTokenConfig.Mode {
case "Fixed":
exchangeRate = r.cfg.GasOracleConfig.AlternativeGasTokenConfig.FixedExchangeRate
case "BinanceApi":
exchangeRate, err = rutils.GetExchangeRateFromBinanceApi(r.cfg.GasOracleConfig.AlternativeGasTokenConfig.TokenSymbolPair, 5)
if err != nil {
log.Error("Failed to get gas token exchange rate from Binance api", "tokenSymbolPair", r.cfg.GasOracleConfig.AlternativeGasTokenConfig.TokenSymbolPair, "err", err)
return
}
default:
log.Error("Invalid alternative gas token mode", "mode", r.cfg.GasOracleConfig.AlternativeGasTokenConfig.Mode)
return
}
if exchangeRate == 0 {
log.Error("Invalid exchange rate", "exchangeRate", exchangeRate)
return
}
baseFee = uint64(math.Ceil(float64(baseFee) / exchangeRate))
blobBaseFee = uint64(math.Ceil(float64(blobBaseFee) / exchangeRate))
}
if r.shouldUpdateGasOracle(baseFee, blobBaseFee, isCurie) {
// It indicates the committing batch has been stuck for a long time, it's likely that the L1 gas fee spiked.
// If we are not committing batches due to high fees then we shouldn't update fees to prevent users from paying high l1_data_fee
@@ -191,11 +167,6 @@ func (r *Layer1Relayer) ProcessGasPriceOracle() {
} else if err != nil {
return
}
if err != nil {
fmt.Println(111)
}
var data []byte
if isCurie {
data, err = r.l1GasOracleABI.Pack("setL1BaseFeeAndBlobBaseFee", new(big.Int).SetUint64(baseFee), new(big.Int).SetUint64(blobBaseFee))
@@ -319,6 +290,5 @@ func (r *Layer1Relayer) commitBatchReachTimeout() (bool, error) {
return false, err
}
// len(batches) == 0 probably shouldn't ever happen, but need to check this
// Also, we should check if it's a genesis batch. If so, skip the timeout check.
return len(batches) == 0 || (batches[0].Index != 0 && utils.NowUTC().Sub(*batches[0].CommittedAt) > time.Duration(r.cfg.GasOracleConfig.CheckCommittedBatchesWindowMinutes)*time.Minute), nil
return len(batches) == 0 || utils.NowUTC().Sub(*batches[0].CommittedAt) > time.Duration(r.cfg.GasOracleConfig.CheckCommittedBatchesWindowMinutes)*time.Minute, nil
}
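
The alternative-gas-token branch removed above rescales the L1 fees by the configured exchange rate (L1 tokens per one L2 token) before pushing them to the oracle, so the stored values end up denominated in the L2 gas token. A small worked sketch with hypothetical numbers:

package main

import (
    "fmt"
    "math"
)

func main() {
    // Hypothetical: 1 L2 gas token trades for 0.0005 L1 tokens.
    exchangeRate := 0.0005
    l1BaseFee := uint64(30_000_000_000) // 30 gwei, denominated in the L1 token

    // Same conversion as the removed branch: re-denominate the fee in the L2 gas token.
    oracleBaseFee := uint64(math.Ceil(float64(l1BaseFee) / exchangeRate))
    fmt.Println(oracleBaseFee) // 60000000000000
}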

View File

@@ -4,7 +4,6 @@ import (
"context"
"errors"
"fmt"
"math"
"math/big"
"sort"
"strings"
@@ -17,7 +16,6 @@ import (
"github.com/scroll-tech/da-codec/encoding/codecv1"
"github.com/scroll-tech/da-codec/encoding/codecv2"
"github.com/scroll-tech/da-codec/encoding/codecv3"
"github.com/scroll-tech/da-codec/encoding/codecv4"
"github.com/scroll-tech/go-ethereum/accounts/abi"
"github.com/scroll-tech/go-ethereum/common"
gethTypes "github.com/scroll-tech/go-ethereum/core/types"
@@ -76,33 +74,15 @@ type Layer2Relayer struct {
// NewLayer2Relayer will return a new instance of Layer2Relayer
func NewLayer2Relayer(ctx context.Context, l2Client *ethclient.Client, db *gorm.DB, cfg *config.RelayerConfig, chainCfg *params.ChainConfig, initGenesis bool, serviceType ServiceType, reg prometheus.Registerer) (*Layer2Relayer, error) {
var gasOracleSender, commitSender, finalizeSender *sender.Sender
var err error
// check that all 3 signer addresses are different, because senders sharing an address would conflict in nonce management
gasOracleSenderAddr, err := addrFromSignerConfig(cfg.GasOracleSenderSignerConfig)
if err != nil {
return nil, fmt.Errorf("failed to parse addr from gas oracle signer config, err: %v", err)
}
commitSenderAddr, err := addrFromSignerConfig(cfg.CommitSenderSignerConfig)
if err != nil {
return nil, fmt.Errorf("failed to parse addr from commit sender config, err: %v", err)
}
finalizeSenderAddr, err := addrFromSignerConfig(cfg.FinalizeSenderSignerConfig)
if err != nil {
return nil, fmt.Errorf("failed to parse addr from finalize sender config, err: %v", err)
}
if gasOracleSenderAddr == commitSenderAddr || gasOracleSenderAddr == finalizeSenderAddr || commitSenderAddr == finalizeSenderAddr {
return nil, fmt.Errorf("gas oracle, commit, and finalize sender addresses must be different. Got: Gas Oracle=%s, Commit=%s, Finalize=%s",
gasOracleSenderAddr.Hex(), commitSenderAddr.Hex(), finalizeSenderAddr.Hex())
}
switch serviceType {
case ServiceTypeL2GasOracle:
gasOracleSender, err = sender.NewSender(ctx, cfg.SenderConfig, cfg.GasOracleSenderSignerConfig, "l2_relayer", "gas_oracle_sender", types.SenderTypeL2GasOracle, db, reg)
gasOracleSender, err = sender.NewSender(ctx, cfg.SenderConfig, cfg.GasOracleSenderPrivateKey, "l2_relayer", "gas_oracle_sender", types.SenderTypeL2GasOracle, db, reg)
if err != nil {
return nil, fmt.Errorf("new gas oracle sender failed, err: %w", err)
addr := crypto.PubkeyToAddress(cfg.GasOracleSenderPrivateKey.PublicKey)
return nil, fmt.Errorf("new gas oracle sender failed for address %s, err: %w", addr.Hex(), err)
}
// Ensure test features aren't enabled on the ethereum mainnet.
@@ -111,14 +91,16 @@ func NewLayer2Relayer(ctx context.Context, l2Client *ethclient.Client, db *gorm.
}
case ServiceTypeL2RollupRelayer:
commitSender, err = sender.NewSender(ctx, cfg.SenderConfig, cfg.CommitSenderSignerConfig, "l2_relayer", "commit_sender", types.SenderTypeCommitBatch, db, reg)
commitSender, err = sender.NewSender(ctx, cfg.SenderConfig, cfg.CommitSenderPrivateKey, "l2_relayer", "commit_sender", types.SenderTypeCommitBatch, db, reg)
if err != nil {
return nil, fmt.Errorf("new commit sender failed, err: %w", err)
addr := crypto.PubkeyToAddress(cfg.CommitSenderPrivateKey.PublicKey)
return nil, fmt.Errorf("new commit sender failed for address %s, err: %w", addr.Hex(), err)
}
finalizeSender, err = sender.NewSender(ctx, cfg.SenderConfig, cfg.FinalizeSenderSignerConfig, "l2_relayer", "finalize_sender", types.SenderTypeFinalizeBatch, db, reg)
finalizeSender, err = sender.NewSender(ctx, cfg.SenderConfig, cfg.FinalizeSenderPrivateKey, "l2_relayer", "finalize_sender", types.SenderTypeFinalizeBatch, db, reg)
if err != nil {
return nil, fmt.Errorf("new finalize sender failed, err: %w", err)
addr := crypto.PubkeyToAddress(cfg.FinalizeSenderPrivateKey.PublicKey)
return nil, fmt.Errorf("new finalize sender failed for address %s, err: %w", addr.Hex(), err)
}
// Ensure test features aren't enabled on the ethereum mainnet.
@@ -218,7 +200,7 @@ func (r *Layer2Relayer) initializeGenesis() error {
err = r.db.Transaction(func(dbTX *gorm.DB) error {
var dbChunk *orm.Chunk
dbChunk, err = r.chunkOrm.InsertChunk(r.ctx, chunk, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.ChunkMetrics{}, dbTX)
dbChunk, err = r.chunkOrm.InsertChunk(r.ctx, chunk, encoding.CodecV0, rutils.ChunkMetrics{}, dbTX)
if err != nil {
return fmt.Errorf("failed to insert chunk: %v", err)
}
@@ -235,7 +217,7 @@ func (r *Layer2Relayer) initializeGenesis() error {
}
var dbBatch *orm.Batch
dbBatch, err = r.batchOrm.InsertBatch(r.ctx, batch, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.BatchMetrics{}, dbTX)
dbBatch, err = r.batchOrm.InsertBatch(r.ctx, batch, encoding.CodecV0, rutils.BatchMetrics{}, dbTX)
if err != nil {
return fmt.Errorf("failed to insert batch: %v", err)
}
@@ -325,32 +307,6 @@ func (r *Layer2Relayer) ProcessGasPriceOracle() {
return
}
suggestGasPriceUint64 := uint64(suggestGasPrice.Int64())
// include the token exchange rate in the fee data if the alternative gas token is enabled
if r.cfg.GasOracleConfig.AlternativeGasTokenConfig != nil && r.cfg.GasOracleConfig.AlternativeGasTokenConfig.Enabled {
// The exchange rate represents the number of native tokens on L1 required to exchange for 1 native token on L2.
var exchangeRate float64
switch r.cfg.GasOracleConfig.AlternativeGasTokenConfig.Mode {
case "Fixed":
exchangeRate = r.cfg.GasOracleConfig.AlternativeGasTokenConfig.FixedExchangeRate
case "BinanceApi":
exchangeRate, err = rutils.GetExchangeRateFromBinanceApi(r.cfg.GasOracleConfig.AlternativeGasTokenConfig.TokenSymbolPair, 5)
if err != nil {
log.Error("Failed to get gas token exchange rate from Binance api", "tokenSymbolPair", r.cfg.GasOracleConfig.AlternativeGasTokenConfig.TokenSymbolPair, "err", err)
return
}
default:
log.Error("Invalid alternative gas token mode", "mode", r.cfg.GasOracleConfig.AlternativeGasTokenConfig.Mode)
return
}
if exchangeRate == 0 {
log.Error("Invalid exchange rate", "exchangeRate", exchangeRate)
return
}
suggestGasPriceUint64 = uint64(math.Ceil(float64(suggestGasPriceUint64) * exchangeRate))
suggestGasPrice = new(big.Int).SetUint64(suggestGasPriceUint64)
}
expectedDelta := r.lastGasPrice * r.gasPriceDiff / gasPriceDiffPrecision
if r.lastGasPrice > 0 && expectedDelta == 0 {
expectedDelta = 1
@@ -422,34 +378,28 @@ func (r *Layer2Relayer) ProcessPendingBatches() {
var calldata []byte
var blob *kzg4844.Blob
if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV0 {
if !r.chainCfg.IsBernoulli(new(big.Int).SetUint64(dbChunks[0].StartBlockNumber)) { // codecv0
calldata, err = r.constructCommitBatchPayloadCodecV0(dbBatch, dbParentBatch, dbChunks, chunks)
if err != nil {
log.Error("failed to construct commitBatch payload codecv0", "index", dbBatch.Index, "err", err)
return
}
} else if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV1 {
} else if !r.chainCfg.IsCurie(new(big.Int).SetUint64(dbChunks[0].StartBlockNumber)) { // codecv1
calldata, blob, err = r.constructCommitBatchPayloadCodecV1(dbBatch, dbParentBatch, dbChunks, chunks)
if err != nil {
log.Error("failed to construct commitBatch payload codecv1", "index", dbBatch.Index, "err", err)
return
}
} else if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV2 {
} else if !r.chainCfg.IsDarwin(dbChunks[0].StartBlockTime) { // codecv2
calldata, blob, err = r.constructCommitBatchPayloadCodecV2(dbBatch, dbParentBatch, dbChunks, chunks)
if err != nil {
log.Error("failed to construct commitBatch payload codecv2", "index", dbBatch.Index, "err", err)
return
}
} else if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV3 {
} else { // codecv3
calldata, blob, err = r.constructCommitBatchPayloadCodecV3(dbBatch, dbParentBatch, dbChunks, chunks)
if err != nil {
log.Error("failed to construct commitBatchWithBlobProof payload codecv3", "index", dbBatch.Index, "err", err)
return
}
} else if encoding.CodecVersion(dbBatch.CodecVersion) == encoding.CodecV4 {
calldata, blob, err = r.constructCommitBatchPayloadCodecV4(dbBatch, dbParentBatch, dbChunks, chunks)
if err != nil {
log.Error("failed to construct commitBatchWithBlobProof payload codecv4", "index", dbBatch.Index, "err", err)
log.Error("failed to construct commitBatch payload codecv3", "index", dbBatch.Index, "err", err)
return
}
}
@@ -489,18 +439,6 @@ func (r *Layer2Relayer) ProcessPendingBatches() {
log.Error("UpdateCommitTxHashAndRollupStatus failed", "hash", dbBatch.Hash, "index", dbBatch.Index, "err", err)
return
}
var maxBlockHeight uint64
var totalGasUsed uint64
for _, dbChunk := range dbChunks {
if dbChunk.EndBlockNumber > maxBlockHeight {
maxBlockHeight = dbChunk.EndBlockNumber
}
totalGasUsed += dbChunk.TotalL2TxGas
}
r.metrics.rollupL2RelayerCommitBlockHeight.Set(float64(maxBlockHeight))
r.metrics.rollupL2RelayerCommitThroughput.Add(float64(totalGasUsed))
r.metrics.rollupL2RelayerProcessPendingBatchSuccessTotal.Inc()
log.Info("Sent the commitBatch tx to layer1", "batch index", dbBatch.Index, "batch hash", dbBatch.Hash, "tx hash", txHash.String())
}
@@ -786,7 +724,7 @@ func (r *Layer2Relayer) finalizeBundle(bundle *orm.Bundle, withProof bool) error
}
}
calldata, err := r.constructFinalizeBundlePayloadCodecV3AndV4(dbBatch, aggProof)
calldata, err := r.constructFinalizeBundlePayloadCodecV3(dbBatch, aggProof)
if err != nil {
return fmt.Errorf("failed to construct finalizeBundle payload codecv3, index: %v, err: %w", dbBatch.Index, err)
}
@@ -1115,45 +1053,6 @@ func (r *Layer2Relayer) constructCommitBatchPayloadCodecV3(dbBatch *orm.Batch, d
return calldata, daBatch.Blob(), nil
}
func (r *Layer2Relayer) constructCommitBatchPayloadCodecV4(dbBatch *orm.Batch, dbParentBatch *orm.Batch, dbChunks []*orm.Chunk, chunks []*encoding.Chunk) ([]byte, *kzg4844.Blob, error) {
batch := &encoding.Batch{
Index: dbBatch.Index,
TotalL1MessagePoppedBefore: dbChunks[0].TotalL1MessagesPoppedBefore,
ParentBatchHash: common.HexToHash(dbParentBatch.Hash),
Chunks: chunks,
}
daBatch, createErr := codecv4.NewDABatch(batch, dbBatch.EnableCompress)
if createErr != nil {
return nil, nil, fmt.Errorf("failed to create DA batch: %w", createErr)
}
encodedChunks := make([][]byte, len(dbChunks))
for i, c := range dbChunks {
daChunk, createErr := codecv4.NewDAChunk(chunks[i], c.TotalL1MessagesPoppedBefore)
if createErr != nil {
return nil, nil, fmt.Errorf("failed to create DA chunk: %w", createErr)
}
encodedChunks[i] = daChunk.Encode()
}
blobDataProof, err := daBatch.BlobDataProofForPointEvaluation()
if err != nil {
return nil, nil, fmt.Errorf("failed to get blob data proof for point evaluation: %w", err)
}
skippedL1MessageBitmap, _, err := encoding.ConstructSkippedBitmap(batch.Index, batch.Chunks, batch.TotalL1MessagePoppedBefore)
if err != nil {
return nil, nil, fmt.Errorf("failed to construct skipped L1 message bitmap: %w", err)
}
calldata, packErr := r.l1RollupABI.Pack("commitBatchWithBlobProof", daBatch.Version, dbParentBatch.BatchHeader, encodedChunks, skippedL1MessageBitmap, blobDataProof)
if packErr != nil {
return nil, nil, fmt.Errorf("failed to pack commitBatchWithBlobProof: %w", packErr)
}
return calldata, daBatch.Blob(), nil
}
func (r *Layer2Relayer) constructFinalizeBatchPayloadCodecV0(dbBatch *orm.Batch, dbParentBatch *orm.Batch, aggProof *message.BatchProof) ([]byte, error) {
if aggProof != nil { // finalizeBatch with proof.
calldata, packErr := r.l1RollupABI.Pack(
@@ -1282,7 +1181,7 @@ func (r *Layer2Relayer) constructFinalizeBatchPayloadCodecV2(dbBatch *orm.Batch,
return calldata, nil
}
func (r *Layer2Relayer) constructFinalizeBundlePayloadCodecV3AndV4(dbBatch *orm.Batch, aggProof *message.BundleProof) ([]byte, error) {
func (r *Layer2Relayer) constructFinalizeBundlePayloadCodecV3(dbBatch *orm.Batch, aggProof *message.BundleProof) ([]byte, error) {
if aggProof != nil { // finalizeBundle with proof.
calldata, packErr := r.l1RollupABI.Pack(
"finalizeBundleWithProof",
@@ -1325,21 +1224,3 @@ func (r *Layer2Relayer) StopSenders() {
r.finalizeSender.Stop()
}
}
func addrFromSignerConfig(config *config.SignerConfig) (common.Address, error) {
switch config.SignerType {
case sender.PrivateKeySignerType:
privKey, err := crypto.ToECDSA(common.FromHex(config.PrivateKeySignerConfig.PrivateKey))
if err != nil {
return common.Address{}, fmt.Errorf("parse sender private key failed: %w", err)
}
return crypto.PubkeyToAddress(privKey.PublicKey), nil
case sender.RemoteSignerType:
if config.RemoteSignerConfig.SignerAddress == "" {
return common.Address{}, fmt.Errorf("signer address is empty")
}
return common.HexToAddress(config.RemoteSignerConfig.SignerAddress), nil
default:
return common.Address{}, fmt.Errorf("failed to determine signer address, unknown signer type: %v", config.SignerType)
}
}

View File

@@ -29,9 +29,6 @@ type l2RelayerMetrics struct {
rollupL2RelayerProcessPendingBundlesFinalizedSuccessTotal prometheus.Counter
rollupL2BundlesFinalizedConfirmedTotal prometheus.Counter
rollupL2BundlesFinalizedConfirmedFailedTotal prometheus.Counter
rollupL2RelayerCommitBlockHeight prometheus.Gauge
rollupL2RelayerCommitThroughput prometheus.Counter
}
var (
@@ -126,14 +123,6 @@ func initL2RelayerMetrics(reg prometheus.Registerer) *l2RelayerMetrics {
Name: "rollup_layer2_bundles_finalized_confirmed_failed_total",
Help: "Total number of failed confirmations for finalized bundles on layer2.",
}),
rollupL2RelayerCommitBlockHeight: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "rollup_l2_relayer_commit_block_height",
Help: "The latest block height committed by the L2 relayer",
}),
rollupL2RelayerCommitThroughput: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "rollup_l2_relayer_commit_throughput",
Help: "The cumulative gas used in blocks committed by the L2 relayer",
}),
}
})
return l2RelayerMetric

View File

@@ -79,9 +79,9 @@ func testL2RelayerProcessPendingBatches(t *testing.T) {
err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2})
assert.NoError(t, err)
chunkOrm := orm.NewChunk(db)
_, err = chunkOrm.InsertChunk(context.Background(), chunk1, rutils.CodecConfig{Version: codecVersion}, rutils.ChunkMetrics{})
_, err = chunkOrm.InsertChunk(context.Background(), chunk1, codecVersion, rutils.ChunkMetrics{})
assert.NoError(t, err)
_, err = chunkOrm.InsertChunk(context.Background(), chunk2, rutils.CodecConfig{Version: codecVersion}, rutils.ChunkMetrics{})
_, err = chunkOrm.InsertChunk(context.Background(), chunk2, codecVersion, rutils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
@@ -92,7 +92,7 @@ func testL2RelayerProcessPendingBatches(t *testing.T) {
}
batchOrm := orm.NewBatch(db)
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: codecVersion}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, codecVersion, rutils.BatchMetrics{})
assert.NoError(t, err)
relayer.ProcessPendingBatches()
@@ -128,9 +128,9 @@ func testL2RelayerProcessCommittedBatches(t *testing.T) {
err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2})
assert.NoError(t, err)
chunkOrm := orm.NewChunk(db)
_, err = chunkOrm.InsertChunk(context.Background(), chunk1, rutils.CodecConfig{Version: codecVersion}, rutils.ChunkMetrics{})
_, err = chunkOrm.InsertChunk(context.Background(), chunk1, codecVersion, rutils.ChunkMetrics{})
assert.NoError(t, err)
_, err = chunkOrm.InsertChunk(context.Background(), chunk2, rutils.CodecConfig{Version: codecVersion}, rutils.ChunkMetrics{})
_, err = chunkOrm.InsertChunk(context.Background(), chunk2, codecVersion, rutils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
@@ -141,7 +141,7 @@ func testL2RelayerProcessCommittedBatches(t *testing.T) {
}
batchOrm := orm.NewBatch(db)
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: codecVersion}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, codecVersion, rutils.BatchMetrics{})
assert.NoError(t, err)
err = batchOrm.UpdateRollupStatus(context.Background(), dbBatch.Hash, types.RollupCommitted)
@@ -197,7 +197,7 @@ func testL2RelayerProcessPendingBundles(t *testing.T) {
}
batchOrm := orm.NewBatch(db)
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: codecVersion}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, codecVersion, rutils.BatchMetrics{})
assert.NoError(t, err)
bundleOrm := orm.NewBundle(db)
@@ -259,9 +259,9 @@ func testL2RelayerFinalizeTimeoutBatches(t *testing.T) {
err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2})
assert.NoError(t, err)
chunkOrm := orm.NewChunk(db)
chunkDB1, err := chunkOrm.InsertChunk(context.Background(), chunk1, rutils.CodecConfig{Version: codecVersion}, rutils.ChunkMetrics{})
chunkDB1, err := chunkOrm.InsertChunk(context.Background(), chunk1, codecVersion, rutils.ChunkMetrics{})
assert.NoError(t, err)
chunkDB2, err := chunkOrm.InsertChunk(context.Background(), chunk2, rutils.CodecConfig{Version: codecVersion}, rutils.ChunkMetrics{})
chunkDB2, err := chunkOrm.InsertChunk(context.Background(), chunk2, codecVersion, rutils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
@@ -272,7 +272,7 @@ func testL2RelayerFinalizeTimeoutBatches(t *testing.T) {
}
batchOrm := orm.NewBatch(db)
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: codecVersion}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, codecVersion, rutils.BatchMetrics{})
assert.NoError(t, err)
err = batchOrm.UpdateRollupStatus(context.Background(), dbBatch.Hash, types.RollupCommitted)
@@ -326,9 +326,9 @@ func testL2RelayerFinalizeTimeoutBundles(t *testing.T) {
err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2})
assert.NoError(t, err)
chunkOrm := orm.NewChunk(db)
chunkDB1, err := chunkOrm.InsertChunk(context.Background(), chunk1, rutils.CodecConfig{Version: codecVersion}, rutils.ChunkMetrics{})
chunkDB1, err := chunkOrm.InsertChunk(context.Background(), chunk1, codecVersion, rutils.ChunkMetrics{})
assert.NoError(t, err)
chunkDB2, err := chunkOrm.InsertChunk(context.Background(), chunk2, rutils.CodecConfig{Version: codecVersion}, rutils.ChunkMetrics{})
chunkDB2, err := chunkOrm.InsertChunk(context.Background(), chunk2, codecVersion, rutils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
@@ -339,7 +339,7 @@ func testL2RelayerFinalizeTimeoutBundles(t *testing.T) {
}
batchOrm := orm.NewBatch(db)
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: codecVersion}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, codecVersion, rutils.BatchMetrics{})
assert.NoError(t, err)
err = batchOrm.UpdateRollupStatus(context.Background(), dbBatch.Hash, types.RollupCommitted)
@@ -411,7 +411,7 @@ func testL2RelayerCommitConfirm(t *testing.T) {
Chunks: []*encoding.Chunk{chunk1, chunk2},
}
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, rutils.BatchMetrics{})
assert.NoError(t, err)
batchHashes[i] = dbBatch.Hash
}
@@ -467,7 +467,7 @@ func testL2RelayerFinalizeBatchConfirm(t *testing.T) {
Chunks: []*encoding.Chunk{chunk1, chunk2},
}
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, rutils.BatchMetrics{})
assert.NoError(t, err)
batchHashes[i] = dbBatch.Hash
}
@@ -525,7 +525,7 @@ func testL2RelayerFinalizeBundleConfirm(t *testing.T) {
Chunks: []*encoding.Chunk{chunk1, chunk2},
}
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, rutils.BatchMetrics{})
assert.NoError(t, err)
batchHashes[i] = dbBatch.Hash
@@ -580,7 +580,7 @@ func testL2RelayerGasOracleConfirm(t *testing.T) {
}
batchOrm := orm.NewBatch(db)
dbBatch1, err := batchOrm.InsertBatch(context.Background(), batch1, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.BatchMetrics{})
dbBatch1, err := batchOrm.InsertBatch(context.Background(), batch1, encoding.CodecV0, rutils.BatchMetrics{})
assert.NoError(t, err)
batch2 := &encoding.Batch{
@@ -590,7 +590,7 @@ func testL2RelayerGasOracleConfirm(t *testing.T) {
Chunks: []*encoding.Chunk{chunk2},
}
dbBatch2, err := batchOrm.InsertBatch(context.Background(), batch2, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.BatchMetrics{})
dbBatch2, err := batchOrm.InsertBatch(context.Background(), batch2, encoding.CodecV0, rutils.BatchMetrics{})
assert.NoError(t, err)
// Create and set up the Layer2 Relayer.
@@ -742,9 +742,9 @@ func testGetBatchStatusByIndex(t *testing.T) {
err = l2BlockOrm.InsertL2Blocks(context.Background(), []*encoding.Block{block1, block2})
assert.NoError(t, err)
chunkOrm := orm.NewChunk(db)
_, err = chunkOrm.InsertChunk(context.Background(), chunk1, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.ChunkMetrics{})
_, err = chunkOrm.InsertChunk(context.Background(), chunk1, encoding.CodecV0, rutils.ChunkMetrics{})
assert.NoError(t, err)
_, err = chunkOrm.InsertChunk(context.Background(), chunk2, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.ChunkMetrics{})
_, err = chunkOrm.InsertChunk(context.Background(), chunk2, encoding.CodecV0, rutils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
@@ -755,7 +755,7 @@ func testGetBatchStatusByIndex(t *testing.T) {
}
batchOrm := orm.NewBatch(db)
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, rutils.CodecConfig{Version: encoding.CodecV0}, rutils.BatchMetrics{})
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, rutils.BatchMetrics{})
assert.NoError(t, err)
status, err := relayer.getBatchStatusByIndex(dbBatch)

View File

@@ -25,8 +25,8 @@ func (s *Sender) estimateLegacyGas(to *common.Address, data []byte, fallbackGasL
gasLimit, _, err := s.estimateGasLimit(to, data, nil, gasPrice, nil, nil, nil)
if err != nil {
log.Error("estimateLegacyGas estimateGasLimit failure", "gas price", gasPrice, "from", s.transactionSigner.GetAddr().String(),
"nonce", s.transactionSigner.GetNonce(), "to address", to.String(), "fallback gas limit", fallbackGasLimit, "error", err)
log.Error("estimateLegacyGas estimateGasLimit failure", "gas price", gasPrice, "from", s.auth.From.String(),
"nonce", s.auth.Nonce.Uint64(), "to address", to.String(), "fallback gas limit", fallbackGasLimit, "error", err)
if fallbackGasLimit == 0 {
return nil, err
}
@@ -56,7 +56,7 @@ func (s *Sender) estimateDynamicGas(to *common.Address, data []byte, baseFee uin
gasLimit, accessList, err := s.estimateGasLimit(to, data, nil, nil, gasTipCap, gasFeeCap, nil)
if err != nil {
log.Error("estimateDynamicGas estimateGasLimit failure",
"from", s.transactionSigner.GetAddr().String(), "nonce", s.transactionSigner.GetNonce(), "to address", to.String(),
"from", s.auth.From.String(), "nonce", s.auth.Nonce.Uint64(), "to address", to.String(),
"fallback gas limit", fallbackGasLimit, "error", err)
if fallbackGasLimit == 0 {
return nil, err
@@ -93,7 +93,7 @@ func (s *Sender) estimateBlobGas(to *common.Address, data []byte, sidecar *gethT
gasLimit, accessList, err := s.estimateGasLimit(to, data, sidecar, nil, gasTipCap, gasFeeCap, blobGasFeeCap)
if err != nil {
log.Error("estimateBlobGas estimateGasLimit failure",
"from", s.transactionSigner.GetAddr().String(), "nonce", s.transactionSigner.GetNonce(), "to address", to.String(),
"from", s.auth.From.String(), "nonce", s.auth.Nonce.Uint64(), "to address", to.String(),
"fallback gas limit", fallbackGasLimit, "error", err)
if fallbackGasLimit == 0 {
return nil, err
@@ -117,7 +117,7 @@ func (s *Sender) estimateBlobGas(to *common.Address, data []byte, sidecar *gethT
func (s *Sender) estimateGasLimit(to *common.Address, data []byte, sidecar *gethTypes.BlobTxSidecar, gasPrice, gasTipCap, gasFeeCap, blobGasFeeCap *big.Int) (uint64, *types.AccessList, error) {
msg := ethereum.CallMsg{
From: s.transactionSigner.GetAddr(),
From: s.auth.From,
To: to,
GasPrice: gasPrice,
GasTipCap: gasTipCap,
@@ -136,8 +136,7 @@ func (s *Sender) estimateGasLimit(to *common.Address, data []byte, sidecar *geth
return 0, nil, err
}
if s.config.TxType == LegacyTxType ||
s.transactionSigner.GetType() == RemoteSignerType { // web3signer doesn't support access list
if s.config.TxType == LegacyTxType {
return gasLimitWithoutAccessList, nil, nil
}

View File

@@ -3,6 +3,7 @@ package sender
import (
"bytes"
"context"
"crypto/ecdsa"
"errors"
"fmt"
"math/big"
@@ -11,6 +12,7 @@ import (
"github.com/holiman/uint256"
"github.com/prometheus/client_golang/prometheus"
"github.com/scroll-tech/go-ethereum/accounts/abi/bind"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/consensus/misc"
gethTypes "github.com/scroll-tech/go-ethereum/core/types"
@@ -65,15 +67,16 @@ type FeeData struct {
// Sender is the transaction sender used to send transactions to l1/l2 geth
type Sender struct {
config *config.SenderConfig
gethClient *gethclient.Client
client *ethclient.Client // The client to retrieve on chain data or send transaction.
transactionSigner *TransactionSigner
chainID *big.Int // The chain id of the endpoint
ctx context.Context
service string
name string
senderType types.SenderType
config *config.SenderConfig
gethClient *gethclient.Client
client *ethclient.Client // The client to retrieve on chain data or send transaction.
chainID *big.Int // The chain id of the endpoint
ctx context.Context
service string
name string
senderType types.SenderType
auth *bind.TransactOpts
db *gorm.DB
pendingTransactionOrm *orm.PendingTransaction
@@ -85,7 +88,7 @@ type Sender struct {
}
// NewSender returns a new instance of transaction sender
func NewSender(ctx context.Context, config *config.SenderConfig, signerConfig *config.SignerConfig, service, name string, senderType types.SenderType, db *gorm.DB, reg prometheus.Registerer) (*Sender, error) {
func NewSender(ctx context.Context, config *config.SenderConfig, priv *ecdsa.PrivateKey, service, name string, senderType types.SenderType, db *gorm.DB, reg prometheus.Registerer) (*Sender, error) {
if config.EscalateMultipleNum <= config.EscalateMultipleDen {
return nil, fmt.Errorf("invalid params, EscalateMultipleNum; %v, EscalateMultipleDen: %v", config.EscalateMultipleNum, config.EscalateMultipleDen)
}
@@ -100,17 +103,18 @@ func NewSender(ctx context.Context, config *config.SenderConfig, signerConfig *c
if err != nil {
return nil, fmt.Errorf("failed to get chain ID, err: %w", err)
}
transactionSigner, err := NewTransactionSigner(signerConfig, chainID)
auth, err := bind.NewKeyedTransactorWithChainID(priv, chainID)
if err != nil {
return nil, fmt.Errorf("failed to create transaction signer, err: %w", err)
return nil, fmt.Errorf("failed to create transactor with chain ID %v, err: %w", chainID, err)
}
// Set pending nonce
nonce, err := client.PendingNonceAt(ctx, transactionSigner.GetAddr())
nonce, err := client.PendingNonceAt(ctx, auth.From)
if err != nil {
return nil, fmt.Errorf("failed to get pending nonce for address %s, err: %w", transactionSigner.GetAddr(), err)
return nil, fmt.Errorf("failed to get pending nonce for address %s, err: %w", auth.From.Hex(), err)
}
transactionSigner.SetNonce(nonce)
auth.Nonce = big.NewInt(int64(nonce))
sender := &Sender{
ctx: ctx,
@@ -118,7 +122,7 @@ func NewSender(ctx context.Context, config *config.SenderConfig, signerConfig *c
gethClient: gethclient.New(rpcClient),
client: client,
chainID: chainID,
transactionSigner: transactionSigner,
auth: auth,
db: db,
pendingTransactionOrm: orm.NewPendingTransaction(db),
confirmCh: make(chan *Confirmation, 128),
@@ -142,7 +146,7 @@ func (s *Sender) GetChainID() *big.Int {
// Stop stops the sender module.
func (s *Sender) Stop() {
close(s.stopCh)
log.Info("sender stopped", "name", s.name, "service", s.service, "address", s.transactionSigner.GetAddr().String())
log.Info("sender stopped", "name", s.name, "service", s.service, "address", s.auth.From.String())
}
// ConfirmChan channel used to communicate with transaction sender
@@ -213,18 +217,18 @@ func (s *Sender) SendTransaction(contextID string, target *common.Address, data
if feeData, err = s.getFeeData(target, data, sidecar, baseFee, blobBaseFee, fallbackGasLimit); err != nil {
s.metrics.sendTransactionFailureGetFee.WithLabelValues(s.service, s.name).Inc()
log.Error("failed to get fee data", "from", s.transactionSigner.GetAddr().String(), "nonce", s.transactionSigner.GetNonce(), "fallback gas limit", fallbackGasLimit, "err", err)
log.Error("failed to get fee data", "from", s.auth.From.String(), "nonce", s.auth.Nonce.Uint64(), "fallback gas limit", fallbackGasLimit, "err", err)
return common.Hash{}, fmt.Errorf("failed to get fee data, err: %w", err)
}
if tx, err = s.createAndSendTx(feeData, target, data, sidecar, nil); err != nil {
s.metrics.sendTransactionFailureSendTx.WithLabelValues(s.service, s.name).Inc()
log.Error("failed to create and send tx (non-resubmit case)", "from", s.transactionSigner.GetAddr().String(), "nonce", s.transactionSigner.GetNonce(), "err", err)
log.Error("failed to create and send tx (non-resubmit case)", "from", s.auth.From.String(), "nonce", s.auth.Nonce.Uint64(), "err", err)
return common.Hash{}, fmt.Errorf("failed to create and send transaction, err: %w", err)
}
if err = s.pendingTransactionOrm.InsertPendingTransaction(s.ctx, contextID, s.getSenderMeta(), tx, blockNumber); err != nil {
log.Error("failed to insert transaction", "from", s.transactionSigner.GetAddr().String(), "nonce", s.transactionSigner.GetNonce(), "err", err)
log.Error("failed to insert transaction", "from", s.auth.From.String(), "nonce", s.auth.Nonce.Uint64(), "err", err)
return common.Hash{}, fmt.Errorf("failed to insert transaction, err: %w", err)
}
return tx.Hash(), nil
@@ -232,7 +236,7 @@ func (s *Sender) SendTransaction(contextID string, target *common.Address, data
func (s *Sender) createAndSendTx(feeData *FeeData, target *common.Address, data []byte, sidecar *gethTypes.BlobTxSidecar, overrideNonce *uint64) (*gethTypes.Transaction, error) {
var (
nonce = s.transactionSigner.GetNonce()
nonce = s.auth.Nonce.Uint64()
txData gethTypes.TxData
)
@@ -264,7 +268,7 @@ func (s *Sender) createAndSendTx(feeData *FeeData, target *common.Address, data
}
} else {
if target == nil {
log.Error("blob transaction to address cannot be nil", "address", s.transactionSigner.GetAddr().String(), "chainID", s.chainID.Uint64(), "nonce", s.transactionSigner.GetNonce())
log.Error("blob transaction to address cannot be nil", "address", s.auth.From.String(), "chainID", s.chainID.Uint64(), "nonce", s.auth.Nonce.Uint64())
return nil, errors.New("blob transaction to address cannot be nil")
}
@@ -285,15 +289,14 @@ func (s *Sender) createAndSendTx(feeData *FeeData, target *common.Address, data
}
// sign and send
tx := gethTypes.NewTx(txData)
signedTx, err := s.transactionSigner.SignTransaction(s.ctx, tx)
signedTx, err := s.auth.Signer(s.auth.From, gethTypes.NewTx(txData))
if err != nil {
log.Error("failed to sign tx", "address", s.transactionSigner.GetAddr().String(), "err", err)
log.Error("failed to sign tx", "address", s.auth.From.String(), "err", err)
return nil, err
}
if err = s.client.SendTransaction(s.ctx, signedTx); err != nil {
log.Error("failed to send tx", "tx hash", signedTx.Hash().String(), "from", s.transactionSigner.GetAddr().String(), "nonce", signedTx.Nonce(), "err", err)
log.Error("failed to send tx", "tx hash", signedTx.Hash().String(), "from", s.auth.From.String(), "nonce", signedTx.Nonce(), "err", err)
// Check if the error indicates a nonce issue, and reset the nonce.
// Only reset the nonce when the send is not a resubmission.
if strings.Contains(err.Error(), "nonce too low") && overrideNonce == nil {
@@ -322,19 +325,19 @@ func (s *Sender) createAndSendTx(feeData *FeeData, target *common.Address, data
// update nonce when it is not from resubmit
if overrideNonce == nil {
s.transactionSigner.SetNonce(nonce + 1)
s.auth.Nonce = big.NewInt(int64(nonce + 1))
}
return signedTx, nil
}
// resetNonce resets the nonce if sending a signed tx failed.
func (s *Sender) resetNonce(ctx context.Context) {
nonce, err := s.client.PendingNonceAt(ctx, s.transactionSigner.GetAddr())
nonce, err := s.client.PendingNonceAt(ctx, s.auth.From)
if err != nil {
log.Warn("failed to reset nonce", "address", s.transactionSigner.GetAddr().String(), "err", err)
log.Warn("failed to reset nonce", "address", s.auth.From.String(), "err", err)
return
}
s.transactionSigner.SetNonce(nonce)
s.auth.Nonce = big.NewInt(int64(nonce))
}
func (s *Sender) resubmitTransaction(tx *gethTypes.Transaction, baseFee, blobBaseFee uint64) (*gethTypes.Transaction, error) {
@@ -346,7 +349,7 @@ func (s *Sender) resubmitTransaction(tx *gethTypes.Transaction, baseFee, blobBas
txInfo := map[string]interface{}{
"tx_hash": tx.Hash().String(),
"tx_type": s.config.TxType,
"from": s.transactionSigner.GetAddr().String(),
"from": s.auth.From.String(),
"nonce": tx.Nonce(),
}
@@ -470,7 +473,7 @@ func (s *Sender) resubmitTransaction(tx *gethTypes.Transaction, baseFee, blobBas
s.metrics.resubmitTransactionTotal.WithLabelValues(s.service, s.name).Inc()
tx, err := s.createAndSendTx(&feeData, tx.To(), tx.Data(), tx.BlobTxSidecar(), &nonce)
if err != nil {
log.Error("failed to create and send tx (resubmit case)", "from", s.transactionSigner.GetAddr().String(), "nonce", nonce, "err", err)
log.Error("failed to create and send tx (resubmit case)", "from", s.auth.From.String(), "nonce", nonce, "err", err)
return nil, err
}
return tx, nil
@@ -512,7 +515,7 @@ func (s *Sender) checkPendingTransaction() {
err := s.db.Transaction(func(dbTX *gorm.DB) error {
// Update the status of the transaction to TxStatusConfirmed.
if err := s.pendingTransactionOrm.UpdatePendingTransactionStatusByTxHash(s.ctx, tx.Hash(), types.TxStatusConfirmed, dbTX); err != nil {
log.Error("failed to update transaction status by tx hash", "hash", tx.Hash().String(), "sender meta", s.getSenderMeta(), "from", s.transactionSigner.GetAddr().String(), "nonce", tx.Nonce(), "err", err)
log.Error("failed to update transaction status by tx hash", "hash", tx.Hash().String(), "sender meta", s.getSenderMeta(), "from", s.auth.From.String(), "nonce", tx.Nonce(), "err", err)
return err
}
// Update other transactions with the same nonce and sender address as failed.
@@ -569,7 +572,7 @@ func (s *Sender) checkPendingTransaction() {
"service", s.service,
"name", s.name,
"hash", tx.Hash().String(),
"from", s.transactionSigner.GetAddr().String(),
"from", s.auth.From.String(),
"nonce", tx.Nonce(),
"submitBlockNumber", txnToCheck.SubmitBlockNumber,
"currentBlockNumber", blockNumber,
@@ -577,7 +580,7 @@ func (s *Sender) checkPendingTransaction() {
if newTx, err := s.resubmitTransaction(tx, baseFee, blobBaseFee); err != nil {
s.metrics.resubmitTransactionFailedTotal.WithLabelValues(s.service, s.name).Inc()
log.Error("failed to resubmit transaction", "context ID", txnToCheck.ContextID, "sender meta", s.getSenderMeta(), "from", s.transactionSigner.GetAddr().String(), "nonce", tx.Nonce(), "err", err)
log.Error("failed to resubmit transaction", "context ID", txnToCheck.ContextID, "sender meta", s.getSenderMeta(), "from", s.auth.From.String(), "nonce", tx.Nonce(), "err", err)
} else {
err := s.db.Transaction(func(dbTX *gorm.DB) error {
// Update the status of the original transaction as replaced, while still checking its confirmation status.
@@ -620,7 +623,7 @@ func (s *Sender) getSenderMeta() *orm.SenderMeta {
return &orm.SenderMeta{
Name: s.name,
Service: s.service,
Address: s.transactionSigner.GetAddr(),
Address: s.auth.From,
Type: s.senderType,
}
}

View File

@@ -38,9 +38,7 @@ import (
)
var (
privateKeyString string
privateKey *ecdsa.PrivateKey
signerConfig *config.SignerConfig
cfg *config.Config
testApps *testcontainers.TestcontainerApps
txTypes = []string{"LegacyTx", "DynamicFeeTx", "DynamicFeeTx"}
@@ -55,9 +53,6 @@ func TestMain(m *testing.M) {
if testApps != nil {
testApps.Free()
}
if testAppsSignerTest != nil {
testAppsSignerTest.Free()
}
}()
m.Run()
}
@@ -70,14 +65,7 @@ func setupEnv(t *testing.T) {
var err error
cfg, err = config.NewConfig("../../../conf/config.json")
assert.NoError(t, err)
privateKeyString = "1212121212121212121212121212121212121212121212121212121212121212"
signerConfig = &config.SignerConfig{
SignerType: "PrivateKey",
PrivateKeySignerConfig: &config.PrivateKeySignerConfig{
PrivateKey: privateKeyString,
},
}
priv, err := crypto.HexToECDSA(privateKeyString)
priv, err := crypto.HexToECDSA("1212121212121212121212121212121212121212121212121212121212121212")
assert.NoError(t, err)
privateKey = priv
@@ -160,7 +148,7 @@ func testNewSender(t *testing.T) {
// exit by Stop()
cfgCopy1 := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy1.TxType = txType
newSender1, err := NewSender(context.Background(), &cfgCopy1, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
newSender1, err := NewSender(context.Background(), &cfgCopy1, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
newSender1.Stop()
@@ -168,7 +156,7 @@ func testNewSender(t *testing.T) {
cfgCopy2 := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy2.TxType = txType
subCtx, cancel := context.WithCancel(context.Background())
_, err = NewSender(subCtx, &cfgCopy2, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
_, err = NewSender(subCtx, &cfgCopy2, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
cancel()
}
@@ -182,7 +170,7 @@ func testSendAndRetrieveTransaction(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
hash, err := s.SendTransaction("0", &common.Address{}, nil, txBlob[i], 0)
@@ -218,7 +206,7 @@ func testFallbackGasLimit(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
cfgCopy.Confirmations = rpc.LatestBlockNumber
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
client, err := ethclient.Dial(cfgCopy.Endpoint)
@@ -274,7 +262,7 @@ func testResubmitZeroGasPriceTransaction(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
feeData := &FeeData{
gasPrice: big.NewInt(0),
@@ -314,7 +302,7 @@ func testAccessListTransactionGasLimit(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
l2GasOracleABI, err := bridgeAbi.L2GasPriceOracleMetaData.GetAbi()
@@ -355,7 +343,7 @@ func testResubmitNonZeroGasPriceTransaction(t *testing.T) {
cfgCopy.EscalateMultipleNum = 110
cfgCopy.EscalateMultipleDen = 100
cfgCopy.TxType = txType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
feeData := &FeeData{
gasPrice: big.NewInt(1000000000),
@@ -404,7 +392,7 @@ func testResubmitUnderpricedTransaction(t *testing.T) {
cfgCopy.EscalateMultipleNum = 109
cfgCopy.EscalateMultipleDen = 100
cfgCopy.TxType = txType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
feeData := &FeeData{
gasPrice: big.NewInt(1000000000),
@@ -441,7 +429,7 @@ func testResubmitDynamicFeeTransactionWithRisingBaseFee(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
patchGuard := gomonkey.ApplyMethodFunc(s.client, "SendTransaction", func(_ context.Context, _ *gethTypes.Transaction) error {
@@ -450,7 +438,7 @@ func testResubmitDynamicFeeTransactionWithRisingBaseFee(t *testing.T) {
defer patchGuard.Reset()
tx := gethTypes.NewTx(&gethTypes.DynamicFeeTx{
Nonce: s.transactionSigner.GetNonce(),
Nonce: s.auth.Nonce.Uint64(),
To: &common.Address{},
Data: nil,
Gas: 21000,
@@ -483,7 +471,7 @@ func testResubmitBlobTransactionWithRisingBaseFeeAndBlobBaseFee(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = DynamicFeeTxType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
patchGuard := gomonkey.ApplyMethodFunc(s.client, "SendTransaction", func(_ context.Context, _ *gethTypes.Transaction) error {
@@ -495,7 +483,7 @@ func testResubmitBlobTransactionWithRisingBaseFeeAndBlobBaseFee(t *testing.T) {
assert.NoError(t, err)
tx := gethTypes.NewTx(&gethTypes.BlobTx{
ChainID: uint256.MustFromBig(s.chainID),
Nonce: s.transactionSigner.GetNonce(),
Nonce: s.auth.Nonce.Uint64(),
GasTipCap: uint256.MustFromBig(big.NewInt(0)),
GasFeeCap: uint256.MustFromBig(big.NewInt(0)),
Gas: 21000,
@@ -551,7 +539,7 @@ func testResubmitNonceGappedTransaction(t *testing.T) {
// stop the background pending-transaction check
cfgCopy.CheckPendingTime = math.MaxUint32
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeUnknown, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
patchGuard1 := gomonkey.ApplyMethodFunc(s.client, "SendTransaction", func(_ context.Context, _ *gethTypes.Transaction) error {
@@ -600,7 +588,7 @@ func testCheckPendingTransactionTxConfirmed(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeCommitBatch, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeCommitBatch, db, nil)
assert.NoError(t, err)
patchGuard1 := gomonkey.ApplyMethodFunc(s.client, "SendTransaction", func(_ context.Context, _ *gethTypes.Transaction) error {
@@ -642,7 +630,7 @@ func testCheckPendingTransactionResubmitTxConfirmed(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
cfgCopy.EscalateBlocks = 0
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeFinalizeBatch, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeFinalizeBatch, db, nil)
assert.NoError(t, err)
patchGuard1 := gomonkey.ApplyMethodFunc(s.client, "SendTransaction", func(_ context.Context, _ *gethTypes.Transaction) error {
@@ -702,7 +690,7 @@ func testCheckPendingTransactionReplacedTxConfirmed(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
cfgCopy.EscalateBlocks = 0
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeL1GasOracle, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeL1GasOracle, db, nil)
assert.NoError(t, err)
patchGuard1 := gomonkey.ApplyMethodFunc(s.client, "SendTransaction", func(_ context.Context, _ *gethTypes.Transaction) error {
@@ -772,7 +760,7 @@ func testCheckPendingTransactionTxMultipleTimesWithOnlyOneTxPending(t *testing.T
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
cfgCopy.EscalateBlocks = 0
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeCommitBatch, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeCommitBatch, db, nil)
assert.NoError(t, err)
patchGuard1 := gomonkey.ApplyMethodFunc(s.client, "SendTransaction", func(_ context.Context, _ *gethTypes.Transaction) error {
@@ -849,7 +837,7 @@ func testBlobTransactionWithBlobhashOpContractCall(t *testing.T) {
cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = DynamicFeeTxType
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeL1GasOracle, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeL1GasOracle, db, nil)
assert.NoError(t, err)
defer s.Stop()
@@ -901,7 +889,7 @@ func testSendBlobCarryingTxOverLimit(t *testing.T) {
sqlDB, err := db.DB()
assert.NoError(t, err)
assert.NoError(t, migrate.ResetDB(sqlDB))
s, err := NewSender(context.Background(), &cfgCopy, signerConfig, "test", "test", types.SenderTypeCommitBatch, db, nil)
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeCommitBatch, db, nil)
assert.NoError(t, err)
for i := 0; i < int(cfgCopy.MaxPendingBlobTxs); i++ {
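The hunks above swap NewSender's third argument from a signer config back to a raw private key. For reference, a minimal sketch (not part of the diff) of the test-side setup this implies, assuming privateKey is an *ecdsa.PrivateKey parsed from a hex fixture; the fixture itself and NewSender's exact new signature are not shown in this diff, and the key below is an arbitrary placeholder.

// Sketch only: assumed shape of the privateKey fixture consumed by NewSender after this change.
privateKey, err := crypto.ToECDSA(common.FromHex("1212121212121212121212121212121212121212121212121212121212121212"))
assert.NoError(t, err)

cfgCopy := *cfg.L2Config.RelayerConfig.SenderConfig
cfgCopy.TxType = txType
s, err := NewSender(context.Background(), &cfgCopy, privateKey, "test", "test", types.SenderTypeUnknown, db, nil)
assert.NoError(t, err)
defer s.Stop()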


@@ -1,156 +0,0 @@
package sender
import (
"context"
"fmt"
"math/big"
"github.com/scroll-tech/go-ethereum/accounts/abi/bind"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
gethTypes "github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/rpc"
"scroll-tech/rollup/internal/config"
)
const (
// PrivateKeySignerType
PrivateKeySignerType = "PrivateKey"
// RemoteSignerType
RemoteSignerType = "RemoteSigner"
)
// TransactionSigner signs given transactions
type TransactionSigner struct {
config *config.SignerConfig
auth *bind.TransactOpts
rpcClient *rpc.Client
nonce uint64
addr common.Address
}
func NewTransactionSigner(config *config.SignerConfig, chainID *big.Int) (*TransactionSigner, error) {
switch config.SignerType {
case PrivateKeySignerType:
privKey, err := crypto.ToECDSA(common.FromHex(config.PrivateKeySignerConfig.PrivateKey))
if err != nil {
return nil, fmt.Errorf("parse sender private key failed: %w", err)
}
auth, err := bind.NewKeyedTransactorWithChainID(privKey, chainID)
if err != nil {
return nil, fmt.Errorf("failed to create transactor with chain ID %v, err: %w", chainID, err)
}
return &TransactionSigner{
config: config,
auth: auth,
addr: crypto.PubkeyToAddress(privKey.PublicKey),
}, nil
case RemoteSignerType:
if config.RemoteSignerConfig.SignerAddress == "" {
return nil, fmt.Errorf("failed to create RemoteSigner, signer address is empty")
}
rpcClient, err := rpc.Dial(config.RemoteSignerConfig.RemoteSignerUrl)
if err != nil {
return nil, fmt.Errorf("failed to dial rpc client, err: %w", err)
}
return &TransactionSigner{
config: config,
rpcClient: rpcClient,
addr: common.HexToAddress(config.RemoteSignerConfig.SignerAddress),
}, nil
default:
return nil, fmt.Errorf("failed to create new transaction signer, unknown type: %v", config.SignerType)
}
}
func (ts *TransactionSigner) SignTransaction(ctx context.Context, tx *gethTypes.Transaction) (*gethTypes.Transaction, error) {
switch ts.config.SignerType {
case PrivateKeySignerType:
signedTx, err := ts.auth.Signer(ts.addr, tx)
if err != nil {
log.Info("failed to sign tx", "address", ts.addr.String(), "err", err)
return nil, err
}
return signedTx, nil
case RemoteSignerType:
rpcTx, err := txDataToRpcTx(&ts.addr, tx)
if err != nil {
return nil, fmt.Errorf("failed to convert txData to rpc transaction, err: %w", err)
}
var result hexutil.Bytes
err = ts.rpcClient.CallContext(ctx, &result, "eth_signTransaction", rpcTx)
if err != nil {
log.Info("failed to call remote rpc", "err", err)
return nil, err
}
signedTx := new(gethTypes.Transaction)
if err := signedTx.UnmarshalBinary(result); err != nil {
return nil, err
}
return signedTx, nil
default:
// this shouldn't happen, because SignerType is checked during creation
return nil, fmt.Errorf("shouldn't happen, unknown signer type")
}
}
func (ts *TransactionSigner) SetNonce(nonce uint64) {
ts.nonce = nonce
}
func (ts *TransactionSigner) GetNonce() uint64 {
return ts.nonce
}
func (ts *TransactionSigner) GetAddr() common.Address {
return ts.addr
}
func (ts *TransactionSigner) GetType() string {
return ts.config.SignerType
}
// RpcTransaction transaction that will be send through rpc to web3Signer
type RpcTransaction struct {
From *common.Address `json:"from"`
To *common.Address `json:"to"`
Gas uint64 `json:"gas"`
GasPrice *big.Int `json:"gasPrice,omitempty"`
MaxPriorityFeePerGas *big.Int `json:"maxPriorityFeePerGas,omitempty"`
MaxFeePerGas *big.Int `json:"maxFeePerGas,omitempty"`
Nonce uint64 `json:"nonce"`
Value *big.Int `json:"value"`
Data string `json:"data"`
}
func txDataToRpcTx(from *common.Address, tx *gethTypes.Transaction) (*RpcTransaction, error) {
switch tx.Type() {
case gethTypes.LegacyTxType:
return &RpcTransaction{
From: from,
To: tx.To(),
Gas: tx.Gas(),
GasPrice: tx.GasPrice(),
Nonce: tx.Nonce(),
Value: tx.Value(),
Data: common.Bytes2Hex(tx.Data()),
}, nil
case gethTypes.DynamicFeeTxType:
return &RpcTransaction{
From: from,
To: tx.To(),
Gas: tx.Gas(),
MaxPriorityFeePerGas: tx.GasTipCap(),
MaxFeePerGas: tx.GasFeeCap(),
Nonce: tx.Nonce(),
Value: tx.Value(),
Data: common.Bytes2Hex(tx.Data()),
}, nil
default: // other tx types (BlobTx) currently not supported by web3signer
return nil, fmt.Errorf("failed to convert tx to RpcTransaction, unsupported tx type, %d", tx.Type())
}
}
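For context on the TransactionSigner API shown in the file above (dropped by this diff), here is a minimal, self-contained sketch of how it is constructed and used with the PrivateKey signer type. The helper name exampleSignWithPrivateKey, the chain ID, and the hex key are illustrative placeholders, not repository code.

// Usage sketch only (not part of the diff): signing a legacy transaction with the
// PrivateKey signer type defined by the TransactionSigner above.
package sender

import (
	"context"
	"math/big"

	"github.com/scroll-tech/go-ethereum/common"
	gethTypes "github.com/scroll-tech/go-ethereum/core/types"

	"scroll-tech/rollup/internal/config"
)

func exampleSignWithPrivateKey() (*gethTypes.Transaction, error) {
	signerCfg := &config.SignerConfig{
		SignerType: PrivateKeySignerType,
		PrivateKeySignerConfig: &config.PrivateKeySignerConfig{
			// Placeholder 32-byte hex key (same value the signer test uses).
			PrivateKey: "1212121212121212121212121212121212121212121212121212121212121212",
		},
	}
	signer, err := NewTransactionSigner(signerCfg, big.NewInt(1)) // chain ID 1 as an example
	if err != nil {
		return nil, err
	}
	signer.SetNonce(0)

	to := common.BytesToAddress([]byte{0, 1, 2, 3})
	tx := gethTypes.NewTx(&gethTypes.LegacyTx{
		Nonce:    signer.GetNonce(),
		GasPrice: big.NewInt(1000),
		Gas:      21000,
		To:       &to,
	})
	// For PrivateKeySignerType this signs locally; for RemoteSignerType the same call
	// would go through eth_signTransaction on the configured web3signer endpoint.
	return signer.SignTransaction(context.Background(), tx)
}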


@@ -1,122 +0,0 @@
package sender
import (
"context"
"math/big"
"os"
"testing"
"github.com/holiman/uint256"
"github.com/scroll-tech/go-ethereum/common"
gethTypes "github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/log"
"github.com/stretchr/testify/assert"
"scroll-tech/common/testcontainers"
"scroll-tech/rollup/internal/config"
)
var (
testAppsSignerTest *testcontainers.TestcontainerApps
chainId int
)
func setupEnvSignerTest(t *testing.T) {
glogger := log.NewGlogHandler(log.StreamHandler(os.Stderr, log.LogfmtFormat()))
glogger.Verbosity(log.LvlInfo)
log.Root().SetHandler(glogger)
chainId = 1
testAppsSignerTest = testcontainers.NewTestcontainerApps()
assert.NoError(t, testAppsSignerTest.StartWeb3SignerContainer(chainId))
}
func TestTransactionSigner(t *testing.T) {
setupEnvSignerTest(t)
t.Run("test both signer types", testBothSignerTypes)
}
func testBothSignerTypes(t *testing.T) {
endpoint, err := testAppsSignerTest.GetWeb3SignerEndpoint()
assert.NoError(t, err)
// create remote signer
remoteSignerConf := &config.SignerConfig{
SignerType: RemoteSignerType,
RemoteSignerConfig: &config.RemoteSignerConfig{
SignerAddress: "0x1C5A77d9FA7eF466951B2F01F724BCa3A5820b63",
RemoteSignerUrl: endpoint,
},
}
remoteSigner, err := NewTransactionSigner(remoteSignerConf, big.NewInt(int64(chainId)))
assert.NoError(t, err)
remoteSigner.SetNonce(2)
// create private key signer
privateKeySignerConf := &config.SignerConfig{
SignerType: PrivateKeySignerType,
PrivateKeySignerConfig: &config.PrivateKeySignerConfig{
PrivateKey: "1212121212121212121212121212121212121212121212121212121212121212",
},
}
privateKeySigner, err := NewTransactionSigner(privateKeySignerConf, big.NewInt(int64(chainId)))
assert.NoError(t, err)
privateKeySigner.SetNonce(2)
assert.Equal(t, remoteSigner.GetAddr(), privateKeySigner.GetAddr())
to := common.BytesToAddress([]byte{0, 1, 2, 3})
data := []byte("data")
// check LegacyTx and DynamicFeeTx - transactions supported by web3signer
txDatas := []gethTypes.TxData{
&gethTypes.LegacyTx{
Nonce: remoteSigner.GetNonce(),
GasPrice: big.NewInt(1000),
Gas: 10000,
To: &to,
Data: data,
},
&gethTypes.DynamicFeeTx{
Nonce: remoteSigner.GetNonce(),
Gas: 10000,
To: &to,
Data: data,
ChainID: big.NewInt(int64(chainId)),
GasTipCap: big.NewInt(2000),
GasFeeCap: big.NewInt(3000),
},
}
var signedTx1 *gethTypes.Transaction
var signedTx2 *gethTypes.Transaction
for _, txData := range txDatas {
tx := gethTypes.NewTx(txData)
signedTx1, err = remoteSigner.SignTransaction(context.Background(), tx)
assert.NoError(t, err)
signedTx2, err = privateKeySigner.SignTransaction(context.Background(), tx)
assert.NoError(t, err)
assert.Equal(t, signedTx1.Hash(), signedTx2.Hash())
}
// BlobTx is not supported
txData := &gethTypes.BlobTx{
Nonce: remoteSigner.GetNonce(),
Gas: 10000,
To: to,
Data: data,
ChainID: uint256.NewInt(1),
GasTipCap: uint256.NewInt(2000),
GasFeeCap: uint256.NewInt(3000),
BlobFeeCap: uint256.NewInt(1),
BlobHashes: []common.Hash{},
Sidecar: nil,
}
tx := gethTypes.NewTx(txData)
_, err = remoteSigner.SignTransaction(context.Background(), tx)
assert.Error(t, err)
}
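For completeness, a minimal sketch (not part of the diff) of the remote-signer path exercised by the test above, assuming a web3signer instance is already running and manages the given address; the URL below is a placeholder, whereas the test obtains the real endpoint from the testcontainer.

// Sketch only: RemoteSigner setup mirroring the test above.
remoteSignerConf := &config.SignerConfig{
	SignerType: RemoteSignerType,
	RemoteSignerConfig: &config.RemoteSignerConfig{
		SignerAddress:   "0x1C5A77d9FA7eF466951B2F01F724BCa3A5820b63", // key must be loaded into web3signer
		RemoteSignerUrl: "http://localhost:9000",                      // placeholder endpoint
	},
}
remoteSigner, err := NewTransactionSigner(remoteSignerConf, big.NewInt(1))
if err != nil {
	log.Crit("failed to create remote signer", "err", err)
}
// Signing is delegated to eth_signTransaction on the remote endpoint; blob
// transactions are rejected because web3signer does not support them.
signedTx, err := remoteSigner.SignTransaction(context.Background(), gethTypes.NewTx(&gethTypes.LegacyTx{
	Nonce:    remoteSigner.GetNonce(),
	GasPrice: big.NewInt(1000),
	Gas:      21000,
	To:       &common.Address{},
}))
if err != nil {
	log.Crit("failed to sign transaction remotely", "err", err)
}
log.Info("remote signer produced tx", "hash", signedTx.Hash().Hex())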


@@ -2,6 +2,7 @@ package watcher
import (
"context"
"errors"
"fmt"
"time"
@@ -53,9 +54,6 @@ type BatchProposer struct {
// total number of times that batch proposer stops early due to compressed data compatibility breach
compressedDataCompatibilityBreachTotal prometheus.Counter
batchProposeBlockHeight prometheus.Gauge
batchProposeThroughput prometheus.Counter
}
// NewBatchProposer creates a new BatchProposer instance.
@@ -137,14 +135,6 @@ func NewBatchProposer(ctx context.Context, cfg *config.BatchProposerConfig, chai
Name: "rollup_propose_batch_estimate_blob_size_time",
Help: "Time taken to estimate blob size for the chunk.",
}),
batchProposeBlockHeight: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "rollup_batch_propose_block_height",
Help: "The block height of the latest proposed batch",
}),
batchProposeThroughput: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "rollup_batch_propose_throughput",
Help: "The total gas used in proposed batches",
}),
}
return p
@@ -162,10 +152,6 @@ func (p *BatchProposer) TryProposeBatch() {
func (p *BatchProposer) updateDBBatchInfo(batch *encoding.Batch, codecVersion encoding.CodecVersion, metrics *utils.BatchMetrics) error {
compatibilityBreachOccurred := false
codecConfig := utils.CodecConfig{
Version: codecVersion,
EnableCompress: true, // codecv4 is the only version that supports conditional compression, default to enable compression
}
for {
compatible, err := utils.CheckBatchCompressedDataCompatibility(batch, codecVersion)
@@ -178,15 +164,13 @@ func (p *BatchProposer) updateDBBatchInfo(batch *encoding.Batch, codecVersion en
break
}
compatibilityBreachOccurred = true
if len(batch.Chunks) == 1 {
log.Warn("Disable compression: cannot truncate batch with only 1 chunk for compatibility", "start block number", batch.Chunks[0].Blocks[0].Header.Number.Uint64(),
log.Error("Cannot truncate batch with only 1 chunk for compatibility", "start block number", batch.Chunks[0].Blocks[0].Header.Number.Uint64(),
"end block number", batch.Chunks[0].Blocks[len(batch.Chunks[0].Blocks)-1].Header.Number.Uint64())
codecConfig.EnableCompress = false
break
return errors.New("cannot truncate batch with only 1 chunk for compatibility")
}
compatibilityBreachOccurred = true
batch.Chunks = batch.Chunks[:len(batch.Chunks)-1]
log.Info("Batch not compatible with compressed data, removing last chunk", "batch index", batch.Index, "truncated chunk length", len(batch.Chunks))
@@ -197,7 +181,7 @@ func (p *BatchProposer) updateDBBatchInfo(batch *encoding.Batch, codecVersion en
// recalculate batch metrics after truncation
var calcErr error
metrics, calcErr = utils.CalculateBatchMetrics(batch, codecConfig)
metrics, calcErr = utils.CalculateBatchMetrics(batch, codecVersion)
if calcErr != nil {
return fmt.Errorf("failed to calculate batch metrics, batch index: %v, error: %w", batch.Index, calcErr)
}
@@ -206,23 +190,11 @@ func (p *BatchProposer) updateDBBatchInfo(batch *encoding.Batch, codecVersion en
p.recordAllBatchMetrics(metrics)
}
if len(batch.Chunks) > 0 && len(batch.Chunks[len(batch.Chunks)-1].Blocks) > 0 {
lastChunk := batch.Chunks[len(batch.Chunks)-1]
lastBlock := lastChunk.Blocks[len(lastChunk.Blocks)-1]
p.batchProposeBlockHeight.Set(float64(lastBlock.Header.Number.Uint64()))
}
var totalGasUsed uint64
for _, chunk := range batch.Chunks {
totalGasUsed += chunk.L2GasUsed()
}
p.batchProposeThroughput.Add(float64(totalGasUsed))
p.proposeBatchUpdateInfoTotal.Inc()
err := p.db.Transaction(func(dbTX *gorm.DB) error {
dbBatch, dbErr := p.batchOrm.InsertBatch(p.ctx, batch, codecConfig, *metrics, dbTX)
dbBatch, dbErr := p.batchOrm.InsertBatch(p.ctx, batch, codecVersion, *metrics, dbTX)
if dbErr != nil {
log.Warn("BatchProposer.updateBatchInfoInDB insert batch failure", "index", batch.Index, "parent hash", batch.ParentBatchHash.Hex(), "codec version", codecVersion, "enable compress", codecConfig.EnableCompress, "error", dbErr)
log.Warn("BatchProposer.updateBatchInfoInDB insert batch failure", "index", batch.Index, "parent hash", batch.ParentBatchHash.Hex(), "error", dbErr)
return dbErr
}
if dbErr = p.chunkOrm.UpdateBatchHashInRange(p.ctx, dbBatch.StartChunkIndex, dbBatch.EndChunkIndex, dbBatch.Hash, dbTX); dbErr != nil {
@@ -283,10 +255,7 @@ func (p *BatchProposer) proposeBatch() error {
return err
}
codecConfig := utils.CodecConfig{
Version: forks.GetCodecVersion(p.chainCfg, firstUnbatchedChunk.StartBlockNumber, firstUnbatchedChunk.StartBlockTime),
EnableCompress: true, // codecv4 is the only version that supports conditional compression, default to enable compression
}
codecVersion := forks.GetCodecVersion(p.chainCfg, firstUnbatchedChunk.StartBlockNumber, firstUnbatchedChunk.StartBlockTime)
var batch encoding.Batch
batch.Index = dbParentBatch.Index + 1
@@ -295,7 +264,7 @@ func (p *BatchProposer) proposeBatch() error {
for i, chunk := range daChunks {
batch.Chunks = append(batch.Chunks, chunk)
metrics, calcErr := utils.CalculateBatchMetrics(&batch, codecConfig)
metrics, calcErr := utils.CalculateBatchMetrics(&batch, codecVersion)
if calcErr != nil {
return fmt.Errorf("failed to calculate batch metrics: %w", calcErr)
}
@@ -324,17 +293,17 @@ func (p *BatchProposer) proposeBatch() error {
batch.Chunks = batch.Chunks[:len(batch.Chunks)-1]
metrics, err := utils.CalculateBatchMetrics(&batch, codecConfig)
metrics, err := utils.CalculateBatchMetrics(&batch, codecVersion)
if err != nil {
return fmt.Errorf("failed to calculate batch metrics: %w", err)
}
p.recordAllBatchMetrics(metrics)
return p.updateDBBatchInfo(&batch, codecConfig.Version, metrics)
return p.updateDBBatchInfo(&batch, codecVersion, metrics)
}
}
metrics, calcErr := utils.CalculateBatchMetrics(&batch, codecConfig)
metrics, calcErr := utils.CalculateBatchMetrics(&batch, codecVersion)
if calcErr != nil {
return fmt.Errorf("failed to calculate batch metrics: %w", calcErr)
}
@@ -348,7 +317,7 @@ func (p *BatchProposer) proposeBatch() error {
p.batchFirstBlockTimeoutReached.Inc()
p.recordAllBatchMetrics(metrics)
return p.updateDBBatchInfo(&batch, codecConfig.Version, metrics)
return p.updateDBBatchInfo(&batch, codecVersion, metrics)
}
log.Debug("pending chunks do not reach one of the constraints or contain a timeout block")
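Pulling the interleaved hunks above together, here is a condensed sketch (not the verbatim file) of how the truncation logic in updateDBBatchInfo reads after this change: the batch is shrunk from the tail until its compressed form is compatible, and the proposer now fails outright instead of disabling compression when a single-chunk batch still does not fit. Error message wording outside the shown hunks is assumed.

// Condensed "after" view of the compatibility loop in updateDBBatchInfo.
compatibilityBreachOccurred := false
for {
	compatible, err := utils.CheckBatchCompressedDataCompatibility(batch, codecVersion)
	if err != nil {
		return fmt.Errorf("failed to check batch compressed data compatibility: %w", err)
	}
	if compatible {
		break
	}
	if len(batch.Chunks) == 1 {
		// Nothing more to truncate: fail instead of silently disabling compression.
		return errors.New("cannot truncate batch with only 1 chunk for compatibility")
	}
	compatibilityBreachOccurred = true
	batch.Chunks = batch.Chunks[:len(batch.Chunks)-1]
}
if compatibilityBreachOccurred {
	// Recalculate batch metrics against the bare codec version after truncation.
	var calcErr error
	metrics, calcErr = utils.CalculateBatchMetrics(batch, codecVersion)
	if calcErr != nil {
		return fmt.Errorf("failed to calculate batch metrics, batch index: %v, error: %w", batch.Index, calcErr)
	}
	p.recordAllBatchMetrics(metrics)
}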


@@ -92,7 +92,7 @@ func testBatchProposerCodecv0Limits(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -101,7 +101,7 @@ func testBatchProposerCodecv0Limits(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -229,7 +229,7 @@ func testBatchProposerCodecv1Limits(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -238,7 +238,7 @@ func testBatchProposerCodecv1Limits(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -370,7 +370,7 @@ func testBatchProposerCodecv2Limits(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -379,7 +379,7 @@ func testBatchProposerCodecv2Limits(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -515,7 +515,7 @@ func testBatchProposerCodecv3Limits(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -524,7 +524,7 @@ func testBatchProposerCodecv3Limits(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -605,7 +605,7 @@ func testBatchCommitGasAndCalldataSizeCodecv0Estimation(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -614,7 +614,7 @@ func testBatchCommitGasAndCalldataSizeCodecv0Estimation(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -684,7 +684,7 @@ func testBatchCommitGasAndCalldataSizeCodecv1Estimation(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -693,7 +693,7 @@ func testBatchCommitGasAndCalldataSizeCodecv1Estimation(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -763,7 +763,7 @@ func testBatchCommitGasAndCalldataSizeCodecv2Estimation(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -772,7 +772,7 @@ func testBatchCommitGasAndCalldataSizeCodecv2Estimation(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -844,7 +844,7 @@ func testBatchCommitGasAndCalldataSizeCodecv3Estimation(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -853,7 +853,7 @@ func testBatchCommitGasAndCalldataSizeCodecv3Estimation(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -926,7 +926,7 @@ func testBatchProposerBlobSizeLimit(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -935,7 +935,7 @@ func testBatchProposerBlobSizeLimit(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
var chainConfig *params.ChainConfig
@@ -1029,7 +1029,7 @@ func testBatchProposerMaxChunkNumPerBatchLimit(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -1038,7 +1038,7 @@ func testBatchProposerMaxChunkNumPerBatchLimit(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
var expectedChunkNum uint64
@@ -1117,7 +1117,7 @@ func testBatchProposerRespectHardforks(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -1126,7 +1126,7 @@ func testBatchProposerRespectHardforks(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{


@@ -2,7 +2,6 @@ package watcher
import (
"context"
"errors"
"time"
"github.com/prometheus/client_golang/prometheus"
@@ -147,14 +146,8 @@ func (p *BundleProposer) proposeBundle() error {
if err != nil {
return err
}
if firstChunk == nil {
log.Error("first chunk not found", "start chunk index", batches[0].StartChunkIndex, "start batch index", batches[0].Index, "firstUnbundledBatchIndex", firstUnbundledBatchIndex)
return errors.New("first chunk not found in proposeBundle")
}
hardforkName := forks.GetHardforkName(p.chainCfg, firstChunk.StartBlockNumber, firstChunk.StartBlockTime)
codecVersion := encoding.CodecVersion(batches[0].CodecVersion)
codecVersion := forks.GetCodecVersion(p.chainCfg, firstChunk.StartBlockNumber, firstChunk.StartBlockTime)
for i := 1; i < len(batches); i++ {
chunk, err := p.chunkOrm.GetChunkByIndex(p.ctx, batches[i].StartChunkIndex)
if err != nil {


@@ -72,7 +72,7 @@ func testBundleProposerLimits(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -81,7 +81,7 @@ func testBundleProposerLimits(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
l2BlockOrm := orm.NewL2Block(db)
@@ -156,7 +156,7 @@ func testBundleProposerRespectHardforks(t *testing.T) {
Blocks: []*encoding.Block{block},
}
chunkOrm := orm.NewChunk(db)
_, err := chunkOrm.InsertChunk(context.Background(), chunk, utils.CodecConfig{Version: encoding.CodecV0}, utils.ChunkMetrics{})
_, err := chunkOrm.InsertChunk(context.Background(), chunk, encoding.CodecV0, utils.ChunkMetrics{})
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 0,
@@ -165,7 +165,7 @@ func testBundleProposerRespectHardforks(t *testing.T) {
Chunks: []*encoding.Chunk{chunk},
}
batchOrm := orm.NewBatch(db)
_, err = batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: encoding.CodecV0}, utils.BatchMetrics{})
_, err = batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV0, utils.BatchMetrics{})
assert.NoError(t, err)
cp := NewChunkProposer(context.Background(), &config.ChunkProposerConfig{


@@ -2,6 +2,7 @@ package watcher
import (
"context"
"errors"
"fmt"
"time"
@@ -56,9 +57,6 @@ type ChunkProposer struct {
// total number of times that chunk proposer stops early due to compressed data compatibility breach
compressedDataCompatibilityBreachTotal prometheus.Counter
chunkProposeBlockHeight prometheus.Gauge
chunkProposeThroughput prometheus.Counter
}
// NewChunkProposer creates a new ChunkProposer instance.
@@ -153,14 +151,6 @@ func NewChunkProposer(ctx context.Context, cfg *config.ChunkProposerConfig, chai
Name: "rollup_propose_chunk_estimate_blob_size_time",
Help: "Time taken to estimate blob size for the chunk.",
}),
chunkProposeBlockHeight: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "rollup_chunk_propose_block_height",
Help: "The block height of the latest proposed chunk",
}),
chunkProposeThroughput: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "rollup_chunk_propose_throughput",
Help: "The total gas used in proposed chunks",
}),
}
return p
@@ -182,10 +172,6 @@ func (p *ChunkProposer) updateDBChunkInfo(chunk *encoding.Chunk, codecVersion en
}
compatibilityBreachOccurred := false
codecConfig := utils.CodecConfig{
Version: codecVersion,
EnableCompress: true,
}
for {
compatible, err := utils.CheckChunkCompressedDataCompatibility(chunk, codecVersion)
@@ -198,14 +184,12 @@ func (p *ChunkProposer) updateDBChunkInfo(chunk *encoding.Chunk, codecVersion en
break
}
compatibilityBreachOccurred = true
if len(chunk.Blocks) == 1 {
log.Warn("Disable compression: cannot truncate chunk with only 1 block for compatibility", "block number", chunk.Blocks[0].Header.Number)
codecConfig.EnableCompress = false
break
log.Error("Cannot truncate chunk with only 1 block for compatibility", "block number", chunk.Blocks[0].Header.Number)
return errors.New("cannot truncate chunk with only 1 block for compatibility")
}
compatibilityBreachOccurred = true
chunk.Blocks = chunk.Blocks[:len(chunk.Blocks)-1]
log.Info("Chunk not compatible with compressed data, removing last block", "start block number", chunk.Blocks[0].Header.Number, "truncated block length", len(chunk.Blocks))
@@ -216,7 +200,7 @@ func (p *ChunkProposer) updateDBChunkInfo(chunk *encoding.Chunk, codecVersion en
// recalculate chunk metrics after truncation
var calcErr error
metrics, calcErr = utils.CalculateChunkMetrics(chunk, codecConfig)
metrics, calcErr = utils.CalculateChunkMetrics(chunk, codecVersion)
if calcErr != nil {
return fmt.Errorf("failed to calculate chunk metrics, start block number: %v, error: %w", chunk.Blocks[0].Header.Number, calcErr)
}
@@ -225,16 +209,11 @@ func (p *ChunkProposer) updateDBChunkInfo(chunk *encoding.Chunk, codecVersion en
p.recordAllChunkMetrics(metrics)
}
if len(chunk.Blocks) > 0 {
p.chunkProposeBlockHeight.Set(float64(chunk.Blocks[len(chunk.Blocks)-1].Header.Number.Uint64()))
}
p.chunkProposeThroughput.Add(float64(chunk.L2GasUsed()))
p.proposeChunkUpdateInfoTotal.Inc()
err := p.db.Transaction(func(dbTX *gorm.DB) error {
dbChunk, err := p.chunkOrm.InsertChunk(p.ctx, chunk, codecConfig, *metrics, dbTX)
dbChunk, err := p.chunkOrm.InsertChunk(p.ctx, chunk, codecVersion, *metrics, dbTX)
if err != nil {
log.Warn("ChunkProposer.InsertChunk failed", "codec version", codecVersion, "enable compress", codecConfig.EnableCompress, "err", err)
log.Warn("ChunkProposer.InsertChunk failed", "err", err)
return err
}
if err := p.l2BlockOrm.UpdateChunkHashInRange(p.ctx, dbChunk.StartBlockNumber, dbChunk.EndBlockNumber, dbChunk.Hash, dbTX); err != nil {
@@ -282,27 +261,24 @@ func (p *ChunkProposer) proposeChunk() error {
}
}
codecConfig := utils.CodecConfig{
Version: forks.GetCodecVersion(p.chainCfg, blocks[0].Header.Number.Uint64(), blocks[0].Header.Time),
EnableCompress: true, // codecv4 is the only version that supports conditional compression, default to enable compression
}
codecVersion := forks.GetCodecVersion(p.chainCfg, blocks[0].Header.Number.Uint64(), blocks[0].Header.Time)
// Including Curie block in a sole chunk.
if p.chainCfg.CurieBlock != nil && blocks[0].Header.Number.Cmp(p.chainCfg.CurieBlock) == 0 {
chunk := encoding.Chunk{Blocks: blocks[:1]}
metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecConfig)
metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecVersion)
if calcErr != nil {
return fmt.Errorf("failed to calculate chunk metrics: %w", calcErr)
}
p.recordTimerChunkMetrics(metrics)
return p.updateDBChunkInfo(&chunk, codecConfig.Version, metrics)
return p.updateDBChunkInfo(&chunk, codecVersion, metrics)
}
var chunk encoding.Chunk
for i, block := range blocks {
chunk.Blocks = append(chunk.Blocks, block)
metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecConfig)
metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecVersion)
if calcErr != nil {
return fmt.Errorf("failed to calculate chunk metrics: %w", calcErr)
}
@@ -339,17 +315,17 @@ func (p *ChunkProposer) proposeChunk() error {
chunk.Blocks = chunk.Blocks[:len(chunk.Blocks)-1]
metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecConfig)
metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecVersion)
if calcErr != nil {
return fmt.Errorf("failed to calculate chunk metrics: %w", calcErr)
}
p.recordAllChunkMetrics(metrics)
return p.updateDBChunkInfo(&chunk, codecConfig.Version, metrics)
return p.updateDBChunkInfo(&chunk, codecVersion, metrics)
}
}
metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecConfig)
metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecVersion)
if calcErr != nil {
return fmt.Errorf("failed to calculate chunk metrics: %w", calcErr)
}
@@ -364,7 +340,7 @@ func (p *ChunkProposer) proposeChunk() error {
p.chunkFirstBlockTimeoutReached.Inc()
p.recordAllChunkMetrics(metrics)
return p.updateDBChunkInfo(&chunk, codecConfig.Version, metrics)
return p.updateDBChunkInfo(&chunk, codecVersion, metrics)
}
log.Debug("pending blocks do not reach one of the constraints or contain a timeout block")
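Likewise for the chunk proposer, a condensed sketch (not the verbatim file) of the reworked flow in proposeChunk after this change: the codec version now comes straight from the fork schedule instead of a CodecConfig wrapper, and the Curie transition block is still proposed as a chunk of its own.

// Condensed "after" view: codec selection and the Curie special case in proposeChunk.
codecVersion := forks.GetCodecVersion(p.chainCfg, blocks[0].Header.Number.Uint64(), blocks[0].Header.Time)

// The Curie fork block is isolated in a single-block chunk.
if p.chainCfg.CurieBlock != nil && blocks[0].Header.Number.Cmp(p.chainCfg.CurieBlock) == 0 {
	chunk := encoding.Chunk{Blocks: blocks[:1]}
	metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecVersion)
	if calcErr != nil {
		return fmt.Errorf("failed to calculate chunk metrics: %w", calcErr)
	}
	p.recordTimerChunkMetrics(metrics)
	return p.updateDBChunkInfo(&chunk, codecVersion, metrics)
}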


@@ -127,7 +127,6 @@ func (w *L2WatcherClient) getAndStoreBlocks(ctx context.Context, from, to uint64
return fmt.Errorf("failed to GetBlockByNumberOrHash: %v. number: %v", err, number)
}
if block.RowConsumption == nil {
w.metrics.fetchNilRowConsumptionBlockTotal.Inc()
return fmt.Errorf("fetched block does not contain RowConsumption. number: %v", number)
}
@@ -152,7 +151,6 @@ func (w *L2WatcherClient) getAndStoreBlocks(ctx context.Context, from, to uint64
return fmt.Errorf("failed to estimate block L1 commit calldata size: %v", err)
}
w.metrics.rollupL2BlockL1CommitCalldataSize.Set(float64(blockL1CommitCalldataSize))
w.metrics.rollupL2WatcherSyncThroughput.Add(float64(block.Header.GasUsed))
}
if err := w.l2BlockOrm.InsertL2Blocks(w.ctx, blocks); err != nil {
return fmt.Errorf("failed to batch insert BlockTraces: %v", err)


@@ -12,9 +12,6 @@ type l2WatcherMetrics struct {
fetchRunningMissingBlocksHeight prometheus.Gauge
rollupL2BlocksFetchedGap prometheus.Gauge
rollupL2BlockL1CommitCalldataSize prometheus.Gauge
fetchNilRowConsumptionBlockTotal prometheus.Counter
rollupL2WatcherSyncThroughput prometheus.Counter
}
var (
@@ -41,14 +38,6 @@ func initL2WatcherMetrics(reg prometheus.Registerer) *l2WatcherMetrics {
Name: "rollup_l2_block_l1_commit_calldata_size",
Help: "The l1 commitBatch calldata size of the l2 block",
}),
fetchNilRowConsumptionBlockTotal: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "rollup_l2_watcher_fetch_nil_row_consumption_block_total",
Help: "The total number of occurrences where a fetched block has nil RowConsumption",
}),
rollupL2WatcherSyncThroughput: promauto.With(reg).NewCounter(prometheus.CounterOpts{
Name: "rollup_l2_watcher_sync_throughput",
Help: "The cumulative gas used in blocks that L2 watcher sync",
}),
}
})
return l2WatcherMetric


@@ -35,8 +35,6 @@ type Batch struct {
ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"`
BlobBytes []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
// proof
ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
@@ -250,7 +248,7 @@ func (o *Batch) GetBatchByIndex(ctx context.Context, index uint64) (*Batch, erro
}
// InsertBatch inserts a new batch into the database.
func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, codecConfig rutils.CodecConfig, metrics rutils.BatchMetrics, dbTX ...*gorm.DB) (*Batch, error) {
func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, codecVersion encoding.CodecVersion, metrics rutils.BatchMetrics, dbTX ...*gorm.DB) (*Batch, error) {
if batch == nil {
return nil, errors.New("invalid args: batch is nil")
}
@@ -271,7 +269,7 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, codecCon
startChunkIndex = parentBatch.EndChunkIndex + 1
}
batchMeta, err := rutils.GetBatchMetadata(batch, codecConfig)
batchMeta, err := rutils.GetBatchMetadata(batch, codecVersion)
if err != nil {
log.Error("failed to get batch metadata", "index", batch.Index, "total l1 message popped before", batch.TotalL1MessagePoppedBefore,
"parent hash", batch.ParentBatchHash, "number of chunks", numChunks, "err", err)
@@ -290,9 +288,7 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, codecCon
WithdrawRoot: batch.WithdrawRoot().Hex(),
ParentBatchHash: batch.ParentBatchHash.Hex(),
BatchHeader: batchMeta.BatchBytes,
CodecVersion: int16(codecConfig.Version),
EnableCompress: codecConfig.EnableCompress,
BlobBytes: batchMeta.BlobBytes,
CodecVersion: int16(codecVersion),
ChunkProofsStatus: int16(types.ChunkProofsStatusPending),
ProvingStatus: int16(types.ProvingTaskUnassigned),
RollupStatus: int16(types.RollupPending),
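On the caller side, the BatchProposer hunks earlier in this diff now hand the codec version to InsertBatch directly, with no enable_compress flag left to log or persist. A condensed sketch (not the verbatim file; the body of the second error branch is assumed) of that transaction:

// Condensed "after" view: persisting a proposed batch in one DB transaction.
err := p.db.Transaction(func(dbTX *gorm.DB) error {
	dbBatch, dbErr := p.batchOrm.InsertBatch(p.ctx, batch, codecVersion, *metrics, dbTX)
	if dbErr != nil {
		log.Warn("BatchProposer.updateBatchInfoInDB insert batch failure", "index", batch.Index, "parent hash", batch.ParentBatchHash.Hex(), "error", dbErr)
		return dbErr
	}
	// Point the batch's chunk range at the freshly inserted batch hash.
	if dbErr = p.chunkOrm.UpdateBatchHashInRange(p.ctx, dbBatch.StartChunkIndex, dbBatch.EndChunkIndex, dbBatch.Hash, dbTX); dbErr != nil {
		return dbErr
	}
	return nil
})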


@@ -13,7 +13,6 @@ import (
"scroll-tech/common/types"
"scroll-tech/rollup/internal/utils"
rutils "scroll-tech/rollup/internal/utils"
)
// Chunk represents a chunk of blocks in the database.
@@ -34,8 +33,6 @@ type Chunk struct {
StateRoot string `json:"state_root" gorm:"column:state_root"`
ParentChunkStateRoot string `json:"parent_chunk_state_root" gorm:"column:parent_chunk_state_root"`
WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"` // use for debug
// proof
ProvingStatus int16 `json:"proving_status" gorm:"column:proving_status;default:1"`
@@ -178,7 +175,7 @@ func (o *Chunk) GetChunksByBatchHash(ctx context.Context, batchHash string) ([]*
}
// InsertChunk inserts a new chunk into the database.
func (o *Chunk) InsertChunk(ctx context.Context, chunk *encoding.Chunk, codecConfig rutils.CodecConfig, metrics utils.ChunkMetrics, dbTX ...*gorm.DB) (*Chunk, error) {
func (o *Chunk) InsertChunk(ctx context.Context, chunk *encoding.Chunk, codecVersion encoding.CodecVersion, metrics utils.ChunkMetrics, dbTX ...*gorm.DB) (*Chunk, error) {
if chunk == nil || len(chunk.Blocks) == 0 {
return nil, errors.New("invalid args")
}
@@ -203,7 +200,7 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *encoding.Chunk, codecCon
parentChunkStateRoot = parentChunk.StateRoot
}
chunkHash, err := utils.GetChunkHash(chunk, totalL1MessagePoppedBefore, codecConfig.Version)
chunkHash, err := utils.GetChunkHash(chunk, totalL1MessagePoppedBefore, codecVersion)
if err != nil {
log.Error("failed to get chunk hash", "err", err)
return nil, fmt.Errorf("Chunk.InsertChunk error: %w", err)
@@ -228,8 +225,6 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *encoding.Chunk, codecCon
StateRoot: chunk.Blocks[numBlocks-1].Header.Root.Hex(),
ParentChunkStateRoot: parentChunkStateRoot,
WithdrawRoot: chunk.Blocks[numBlocks-1].WithdrawRoot.Hex(),
CodecVersion: int16(codecConfig.Version),
EnableCompress: codecConfig.EnableCompress,
ProvingStatus: int16(types.ProvingTaskUnassigned),
CrcMax: metrics.CrcMax,
BlobSize: metrics.L1CommitBlobSize,


@@ -221,11 +221,11 @@ func TestChunkOrm(t *testing.T) {
assert.NoError(t, err)
}
dbChunk1, err := chunkOrm.InsertChunk(context.Background(), chunk1, utils.CodecConfig{Version: codecVersion}, utils.ChunkMetrics{})
dbChunk1, err := chunkOrm.InsertChunk(context.Background(), chunk1, codecVersion, utils.ChunkMetrics{})
assert.NoError(t, err)
assert.Equal(t, dbChunk1.Hash, chunkHash1.Hex())
dbChunk2, err := chunkOrm.InsertChunk(context.Background(), chunk2, utils.CodecConfig{Version: codecVersion}, utils.ChunkMetrics{})
dbChunk2, err := chunkOrm.InsertChunk(context.Background(), chunk2, codecVersion, utils.ChunkMetrics{})
assert.NoError(t, err)
assert.Equal(t, dbChunk2.Hash, chunkHash2.Hex())
@@ -275,7 +275,7 @@ func TestBatchOrm(t *testing.T) {
Index: 0,
Chunks: []*encoding.Chunk{chunk1},
}
batch1, err := batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: codecVersion}, utils.BatchMetrics{})
batch1, err := batchOrm.InsertBatch(context.Background(), batch, codecVersion, utils.BatchMetrics{})
assert.NoError(t, err)
hash1 := batch1.Hash
@@ -306,7 +306,7 @@ func TestBatchOrm(t *testing.T) {
Index: 1,
Chunks: []*encoding.Chunk{chunk2},
}
batch2, err := batchOrm.InsertBatch(context.Background(), batch, utils.CodecConfig{Version: codecVersion}, utils.BatchMetrics{})
batch2, err := batchOrm.InsertBatch(context.Background(), batch, codecVersion, utils.BatchMetrics{})
assert.NoError(t, err)
hash2 := batch2.Hash
@@ -432,7 +432,7 @@ func TestBundleOrm(t *testing.T) {
Index: 0,
Chunks: []*encoding.Chunk{chunk1},
}
dbBatch1, err := batchOrm.InsertBatch(context.Background(), batch1, utils.CodecConfig{Version: encoding.CodecV3}, utils.BatchMetrics{})
dbBatch1, err := batchOrm.InsertBatch(context.Background(), batch1, encoding.CodecV3, utils.BatchMetrics{})
assert.NoError(t, err)
chunk2 := &encoding.Chunk{Blocks: []*encoding.Block{block2}}
@@ -440,7 +440,7 @@ func TestBundleOrm(t *testing.T) {
Index: 1,
Chunks: []*encoding.Chunk{chunk2},
}
dbBatch2, err := batchOrm.InsertBatch(context.Background(), batch2, utils.CodecConfig{Version: encoding.CodecV3}, utils.BatchMetrics{})
dbBatch2, err := batchOrm.InsertBatch(context.Background(), batch2, encoding.CodecV3, utils.BatchMetrics{})
assert.NoError(t, err)
var bundle1 *Bundle

Some files were not shown because too many files have changed in this diff.