mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-12 15:38:18 -05:00)

Compare commits: 35 commits
5204ad50e0
f824fb0efc
a55c7bdc77
47b1a037a9
ae34020c34
fa9fab6e98
c4f869a33a
0cee9a51e6
97de988228
a12175dafc
fedfa04c2b
0d8b00c3de
826357ab5d
d26381cba3
0e65686ce4
6dd878eaca
a18fe06440
3ac69bec51
e80f030246
a77f1413ee
5b62692098
4f34e90f00
6b1b822c81
a34c01d90b
0578aab3ae
55dfbf6735
f3ddf43439
a2582dcc3f
0a1868cec1
fcfbc53252
13c8605211
228cba48b7
b7c5ba9046
5f7973528e
69a7339bab
.github/workflows/common.yml (vendored): 10 lines changed
@@ -42,11 +42,11 @@ jobs:
         uses: Swatinem/rust-cache@v2
         with:
           workspaces: "common/libzkp/impl -> target"
-      - name: Lint
-        working-directory: 'common'
-        run: |
-          rm -rf $HOME/.cache/golangci-lint
-          make lint
+      # - name: Lint
+      #   working-directory: 'common'
+      #   run: |
+      #     rm -rf $HOME/.cache/golangci-lint
+      #     make lint
   goimports-lint:
     if: github.event.pull_request.draft == false
     runs-on: ubuntu-latest
.github/workflows/docker.yml (vendored): 42 lines changed
@@ -307,6 +307,48 @@ jobs:
           REPOSITORY: coordinator-api
         run: |
           aws --region ${{ env.AWS_REGION }} ecr describe-repositories --repository-names ${{ env.REPOSITORY }} && : || aws --region ${{ env.AWS_REGION }} ecr create-repository --repository-name ${{ env.REPOSITORY }}
+      - name: Setup SSH for repositories and clone them
+        run: |
+          mkdir -p ~/.ssh
+          chmod 700 ~/.ssh
+
+          # Setup for plonky3-gpu
+          echo "${{ secrets.PLONKY3_GPU_SSH_PRIVATE_KEY }}" > ~/.ssh/plonky3_gpu_key
+          chmod 600 ~/.ssh/plonky3_gpu_key
+          eval "$(ssh-agent -s)" > /dev/null
+          ssh-add ~/.ssh/plonky3_gpu_key 2>/dev/null
+          ssh-keyscan -t rsa github.com >> ~/.ssh/known_hosts 2>/dev/null
+          echo "Loaded plonky3-gpu key"
+
+          # Clone plonky3-gpu repository
+          ./build/dockerfiles/coordinator-api/clone_plonky3_gpu.sh
+
+          # Setup for openvm-stark-gpu
+          echo "${{ secrets.OPENVM_STARK_GPU_SSH_PRIVATE_KEY }}" > ~/.ssh/openvm_stark_gpu_key
+          chmod 600 ~/.ssh/openvm_stark_gpu_key
+          eval "$(ssh-agent -s)" > /dev/null
+          ssh-add ~/.ssh/openvm_stark_gpu_key 2>/dev/null
+          echo "Loaded openvm-stark-gpu key"
+
+          # Clone openvm-stark-gpu repository
+          ./build/dockerfiles/coordinator-api/clone_openvm_stark_gpu.sh
+
+          # Setup for openvm-gpu
+          echo "${{ secrets.OPENVM_GPU_SSH_PRIVATE_KEY }}" > ~/.ssh/openvm_gpu_key
+          chmod 600 ~/.ssh/openvm_gpu_key
+          eval "$(ssh-agent -s)" > /dev/null
+          ssh-add ~/.ssh/openvm_gpu_key 2>/dev/null
+          echo "Loaded openvm-gpu key"
+
+          # Clone openvm-gpu repository
+          ./build/dockerfiles/coordinator-api/clone_openvm_gpu.sh
+
+          # Show number of loaded keys
+          echo "Number of loaded keys: $(ssh-add -l | wc -l)"
+
+      - name: Checkout specific commits
+        run: |
+          ./build/dockerfiles/coordinator-api/checkout_all.sh
       - name: Build and push
         uses: docker/build-push-action@v3
         env:
.github/workflows/intermediate-docker.yml (vendored): 6 lines changed
@@ -9,9 +9,13 @@ on:
         type: choice
         options:
           - "1.20"
           - "1.20.14"
           - "1.21"
           - "1.21.13"
           - "1.22"
           - "1.22.12"
           - "1.23"
           - "1.23.7"
         default: "1.21"
       RUST_VERSION:
         description: "Rust toolchain version"
@@ -20,6 +24,7 @@ on:
         options:
           - nightly-2023-12-03
           - nightly-2022-12-10
+          - 1.86.0
         default: "nightly-2023-12-03"
       PYTHON_VERSION:
         description: "Python version"
@@ -43,6 +48,7 @@ on:
         type: choice
         options:
           - 0.1.41
+          - 0.1.71
       BASE_IMAGE:
         description: "which intermediate image you want to update"
         required: true
.github/workflows/prover.yml (vendored): 99 lines changed
@@ -1,99 +0,0 @@
-name: Prover
-
-on:
-  push:
-    branches:
-      - main
-      - staging
-      - develop
-      - alpha
-    paths:
-      - 'prover/**'
-      - '.github/workflows/prover.yml'
-  pull_request:
-    types:
-      - opened
-      - reopened
-      - synchronize
-      - ready_for_review
-    paths:
-      - 'prover/**'
-      - '.github/workflows/prover.yml'
-
-defaults:
-  run:
-    working-directory: 'prover'
-
-jobs:
-  skip_check:
-    runs-on: ubuntu-latest
-    outputs:
-      should_skip: ${{ steps.skip_check.outputs.should_skip }}
-    steps:
-      - id: skip_check
-        uses: fkirc/skip-duplicate-actions@v5
-        with:
-          cancel_others: 'true'
-          concurrent_skipping: 'same_content_newer'
-          paths_ignore: '["**/README.md"]'
-
-  fmt:
-    needs: [skip_check]
-    if: |
-      github.event.pull_request.draft == false &&
-      (github.event.action == 'ready_for_review' || needs.skip_check.outputs.should_skip != 'true')
-    runs-on: ubuntu-latest
-    timeout-minutes: 15
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@master
-        with:
-          toolchain: nightly-2023-12-03
-          components: rustfmt
-      - name: Cargo cache
-        uses: Swatinem/rust-cache@v2
-        with:
-          workspaces: "prover -> target"
-      - name: Cargo check
-        run: cargo check --all-features
-      - name: Cargo fmt
-        run: cargo fmt --all -- --check
-
-  clippy:
-    needs: [skip_check, fmt]
-    if: |
-      github.event.pull_request.draft == false &&
-      (github.event.action == 'ready_for_review' || needs.skip_check.outputs.should_skip != 'true')
-    runs-on: ubuntu-latest
-    timeout-minutes: 30
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@master
-        with:
-          toolchain: nightly-2023-12-03
-          components: clippy
-      - name: Cargo cache
-        uses: Swatinem/rust-cache@v2
-        with:
-          workspaces: "prover -> target"
-      - name: Run clippy
-        run: cargo clippy --all-features --all-targets -- -D warnings
-
-  compile:
-    needs: [skip_check, clippy]
-    if: |
-      github.event.pull_request.draft == false &&
-      (github.event.action == 'ready_for_review' || needs.skip_check.outputs.should_skip != 'true')
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v4
-      - uses: dtolnay/rust-toolchain@master
-        with:
-          toolchain: nightly-2023-12-03
-      - name: Cache cargo
-        uses: Swatinem/rust-cache@v2
-        with:
-          workspaces: "prover -> target"
-      - name: Test
-        run: |
-          make prover
@@ -11,7 +11,7 @@ Please note that this project is released with a [Contributor Code of Conduct][c

 ## Contribute to Scroll

-Did you know there are many ways of contributing to Scroll? If you are looking to contribute to by adding Scroll to existing Dev Tools or by doing integrations please go to the [Contribute to Scroll](https://github.com/scroll-tech/contribute-to-scroll) repo instead. If you are looking to contribute to Scroll's Halo2 zkEVM circuits please refer to the [zkEVM circuits](https://github.com/scroll-tech/zkevm-circuits) repo. This repository covers the Scroll infrastructure and smart contracts, if you want to contribute to these areas continue reading this document.
+Did you know there are many ways of contributing to Scroll? If you are looking to contribute to by adding Scroll to existing Dev Tools or by doing integrations please go to the [Contribute to Scroll](https://github.com/scroll-tech/contribute-to-scroll) repo instead. This repository covers the Scroll infrastructure and smart contracts, if you want to contribute to these areas continue reading this document.

 ## Issues and PRs
Makefile: 12 lines changed
@@ -31,12 +31,12 @@ fmt: ## Format the code
 	cd $(PWD)/rollup/ && go mod tidy
 	cd $(PWD)/tests/integration-test/ && go mod tidy

-	goimports -local $(PWD)/bridge-history-api/ -w .
-	goimports -local $(PWD)/common/ -w .
-	goimports -local $(PWD)/coordinator/ -w .
-	goimports -local $(PWD)/database/ -w .
-	goimports -local $(PWD)/rollup/ -w .
-	goimports -local $(PWD)/tests/integration-test/ -w .
+	goimports -local scroll-tech/bridge-history-api/ -w .
+	goimports -local scroll-tech/common/ -w .
+	goimports -local scroll-tech/coordinator/ -w .
+	goimports -local scroll-tech/database/ -w .
+	goimports -local scroll-tech/rollup/ -w .
+	goimports -local scroll-tech/tests/integration-test/ -w .

 dev_docker: ## Build docker images for development/testing usages
 	docker pull postgres
@@ -37,6 +37,6 @@ reset-env:
 	go build -o $(PWD)/build/bin/bridgehistoryapi-db-cli ./cmd/db_cli && $(PWD)/build/bin/bridgehistoryapi-db-cli reset

 bridgehistoryapi-docker:
-	DOCKER_BUILDKIT=1 docker build -t scrolltech/bridgehistoryapi-fetcher:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/bridgehistoryapi-fetcher.Dockerfile
-	DOCKER_BUILDKIT=1 docker build -t scrolltech/bridgehistoryapi-api:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/bridgehistoryapi-api.Dockerfile
-	DOCKER_BUILDKIT=1 docker build -t scrolltech/bridgehistoryapi-db-cli:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/bridgehistoryapi-db-cli.Dockerfile
+	DOCKER_BUILDKIT=1 docker build -t scrolltech/bridgehistoryapi-fetcher:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/bridgehistoryapi-fetcher.Dockerfile --platform=linux/amd64
+	DOCKER_BUILDKIT=1 docker build -t scrolltech/bridgehistoryapi-api:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/bridgehistoryapi-api.Dockerfile --platform=linux/amd64
+	DOCKER_BUILDKIT=1 docker build -t scrolltech/bridgehistoryapi-db-cli:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/bridgehistoryapi-db-cli.Dockerfile --platform=linux/amd64
File diff suppressed because one or more lines are too long
bridge-history-api/abi/backend_abi_test.go (new file): 13 lines
@@ -0,0 +1,13 @@
+package backendabi
+
+import (
+	"testing"
+
+	"github.com/scroll-tech/go-ethereum/crypto"
+	"github.com/stretchr/testify/assert"
+)
+
+func TestEventSignatures(t *testing.T) {
+	assert.Equal(t, crypto.Keccak256Hash([]byte("RevertBatch(uint256,bytes32)")), L1RevertBatchV0EventSig)
+	assert.Equal(t, crypto.Keccak256Hash([]byte("RevertBatch(uint256,uint256)")), L1RevertBatchV7EventSig)
+}
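
Note: the two assertions above pin down how topic[0] of a Solidity event is derived: it is the Keccak-256 hash of the event's canonical signature string. A minimal standalone sketch (only the crypto package from the fork above is assumed):

package main

import (
    "fmt"

    "github.com/scroll-tech/go-ethereum/crypto"
)

func main() {
    // Pre-v7 revert event: RevertBatch(batchIndex uint256, batchHash bytes32).
    fmt.Println(crypto.Keccak256Hash([]byte("RevertBatch(uint256,bytes32)")).Hex())
    // v7 revert event: RevertBatch(startBatchIndex uint256, finishBatchIndex uint256).
    fmt.Println(crypto.Keccak256Hash([]byte("RevertBatch(uint256,uint256)")).Hex())
}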
@@ -68,7 +68,10 @@ func action(ctx *cli.Context) error {

 	observability.Server(ctx, db)

-	l1MessageFetcher := fetcher.NewL1MessageFetcher(subCtx, cfg.L1, db, l1Client)
+	l1MessageFetcher, err := fetcher.NewL1MessageFetcher(subCtx, cfg.L1, db, l1Client)
+	if err != nil {
+		log.Crit("failed to create L1MessageFetcher", "err", err)
+	}
 	go l1MessageFetcher.Start()

 	l2MessageFetcher := fetcher.NewL2MessageFetcher(subCtx, cfg.L2, db, l2Client)
@@ -19,9 +19,11 @@
         "ScrollChainAddr": "0xa13BAF47339d63B743e7Da8741db5456DAc1E556",
         "GatewayRouterAddr": "0xF8B1378579659D8F7EE5f3C929c2f3E332E41Fd6",
         "MessageQueueAddr": "0x0d7E906BD9cAFa154b048cFa766Cc1E54E39AF9B",
+        "MessageQueueV2Addr": "0x0000000000000000000000000000000000000000",
         "BatchBridgeGatewayAddr": "0x5Bcfd99c34cf7E06fc756f6f5aE7400504852bc4",
         "GasTokenGatewayAddr": "0x0000000000000000000000000000000000000000",
-        "WrappedTokenGatewayAddr": "0x0000000000000000000000000000000000000000"
+        "WrappedTokenGatewayAddr": "0x0000000000000000000000000000000000000000",
+        "BlobScanAPIEndpoint": "https://api.blobscan.com/blobs/"
     },
     "L2": {
         "confirmation": 0,
@@ -10,13 +10,16 @@ require (
 	github.com/go-redis/redis/v8 v8.11.5
 	github.com/pressly/goose/v3 v3.16.0
 	github.com/prometheus/client_golang v1.19.0
-	github.com/scroll-tech/go-ethereum v1.10.14-0.20250305084331-57148478e950 // It's a hotfix for the header hash incompatibility issue, pls change this with caution
+	github.com/scroll-tech/da-codec v0.1.3-0.20250226072559-f8a8d3898f54
+	github.com/scroll-tech/go-ethereum v1.10.14-0.20250305084331-57148478e950
 	github.com/stretchr/testify v1.9.0
 	github.com/urfave/cli/v2 v2.25.7
 	golang.org/x/sync v0.11.0
 	gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde
 )

+replace github.com/scroll-tech/go-ethereum => github.com/scroll-tech/go-ethereum v1.10.14-0.20250305084331-57148478e950 // It's a hotfix for the header hash incompatibility issue, pls change this with caution
+
 require (
 	dario.cat/mergo v1.0.0 // indirect
 	github.com/VictoriaMetrics/fastcache v1.12.2 // indirect
@@ -90,7 +93,6 @@ require (
 	github.com/rjeczalik/notify v0.9.1 // indirect
 	github.com/rs/cors v1.7.0 // indirect
 	github.com/russross/blackfriday/v2 v2.1.0 // indirect
-	github.com/scroll-tech/da-codec v0.1.3-0.20250226072559-f8a8d3898f54 // indirect
 	github.com/scroll-tech/zktrie v0.8.4 // indirect
 	github.com/sethvargo/go-retry v0.2.4 // indirect
 	github.com/shirou/gopsutil v3.21.11+incompatible // indirect
@@ -30,9 +30,14 @@ type FetcherConfig struct {
 	ScrollChainAddr         string `json:"ScrollChainAddr"`
 	GatewayRouterAddr       string `json:"GatewayRouterAddr"`
 	MessageQueueAddr        string `json:"MessageQueueAddr"`
+	MessageQueueV2Addr      string `json:"MessageQueueV2Addr"`
 	BatchBridgeGatewayAddr  string `json:"BatchBridgeGatewayAddr"`
 	GasTokenGatewayAddr     string `json:"GasTokenGatewayAddr"`
 	WrappedTokenGatewayAddr string `json:"WrappedTokenGatewayAddr"`
+
+	BeaconNodeAPIEndpoint  string `json:"BeaconNodeAPIEndpoint"`
+	BlobScanAPIEndpoint    string `json:"BlobScanAPIEndpoint"`
+	BlockNativeAPIEndpoint string `json:"BlockNativeAPIEndpoint"`
 }

 // RedisConfig redis config
@@ -2,6 +2,7 @@ package fetcher

 import (
 	"context"
+	"fmt"
 	"math/big"
 	"time"

@@ -10,6 +11,7 @@ import (
 	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/ethclient"
 	"github.com/scroll-tech/go-ethereum/log"
+	"github.com/scroll-tech/go-ethereum/rollup/da_syncer/blob_client"
 	"gorm.io/gorm"

 	"scroll-tech/bridge-history-api/internal/config"
@@ -35,13 +37,32 @@ type L1MessageFetcher struct {
 }

 // NewL1MessageFetcher creates a new L1MessageFetcher instance.
-func NewL1MessageFetcher(ctx context.Context, cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L1MessageFetcher {
+func NewL1MessageFetcher(ctx context.Context, cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) (*L1MessageFetcher, error) {
+	blobClient := blob_client.NewBlobClients()
+	if cfg.BeaconNodeAPIEndpoint != "" {
+		beaconNodeClient, err := blob_client.NewBeaconNodeClient(cfg.BeaconNodeAPIEndpoint)
+		if err != nil {
+			log.Warn("failed to create BeaconNodeClient", "err", err)
+		} else {
+			blobClient.AddBlobClient(beaconNodeClient)
+		}
+	}
+	if cfg.BlobScanAPIEndpoint != "" {
+		blobClient.AddBlobClient(blob_client.NewBlobScanClient(cfg.BlobScanAPIEndpoint))
+	}
+	if cfg.BlockNativeAPIEndpoint != "" {
+		blobClient.AddBlobClient(blob_client.NewBlockNativeClient(cfg.BlockNativeAPIEndpoint))
+	}
+	if blobClient.Size() == 0 {
+		return nil, fmt.Errorf("no blob client is configured")
+	}
+
 	c := &L1MessageFetcher{
 		ctx:              ctx,
 		cfg:              cfg,
 		client:           client,
 		eventUpdateLogic: logic.NewEventUpdateLogic(db, true),
-		l1FetcherLogic:   logic.NewL1FetcherLogic(cfg, db, client),
+		l1FetcherLogic:   logic.NewL1FetcherLogic(cfg, db, client, blobClient),
 	}

 	reg := prometheus.DefaultRegisterer
@@ -58,7 +79,7 @@ func NewL1MessageFetcher(ctx context.Context, cfg *config.FetcherConfig, db *gor
 		Help: "Latest blockchain height the L1 message fetcher has synced with.",
 	})

-	return c
+	return c, nil
 }

 // Start starts the L1 message fetching process.
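
Note: NewL1MessageFetcher now fails fast when no blob source is configured, and otherwise registers the beacon node, BlobScan, and BlockNative endpoints in that order. A minimal sketch of the same try-each-source-in-order pattern, with a hypothetical BlobSource interface standing in for the real blob_client API:

package sketch

import (
    "context"
    "errors"
    "fmt"
)

// BlobSource is a hypothetical stand-in for one blob provider
// (beacon node, BlobScan, BlockNative, ...).
type BlobSource interface {
    GetBlob(ctx context.Context, versionedHash string) ([]byte, error)
}

// FallbackBlobClient queries each configured source in order, mirroring the
// "register clients, fail fast if none configured" setup above.
type FallbackBlobClient struct {
    sources []BlobSource
}

func (c *FallbackBlobClient) Add(s BlobSource) { c.sources = append(c.sources, s) }

func (c *FallbackBlobClient) Size() int { return len(c.sources) }

func (c *FallbackBlobClient) GetBlob(ctx context.Context, versionedHash string) ([]byte, error) {
    if len(c.sources) == 0 {
        return nil, errors.New("no blob client is configured")
    }
    var lastErr error
    for _, s := range c.sources {
        blob, err := s.GetBlob(ctx, versionedHash)
        if err == nil {
            return blob, nil
        }
        lastErr = err // remember the failure and fall through to the next source
    }
    return nil, fmt.Errorf("all blob clients failed: %w", lastErr)
}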
@@ -2,13 +2,16 @@ package logic

 import (
 	"context"
+	"fmt"
 	"math/big"

+	"github.com/scroll-tech/da-codec/encoding"
 	"github.com/scroll-tech/go-ethereum/common"
 	"github.com/scroll-tech/go-ethereum/core/types"
 	"github.com/scroll-tech/go-ethereum/crypto"
 	"github.com/scroll-tech/go-ethereum/ethclient"
 	"github.com/scroll-tech/go-ethereum/log"
+	"github.com/scroll-tech/go-ethereum/rollup/da_syncer/blob_client"

 	backendabi "scroll-tech/bridge-history-api/abi"
 	"scroll-tech/bridge-history-api/internal/config"
@@ -19,15 +22,17 @@

 // L1EventParser the l1 event parser
 type L1EventParser struct {
-	cfg    *config.FetcherConfig
-	client *ethclient.Client
+	cfg        *config.FetcherConfig
+	client     *ethclient.Client
+	blobClient blob_client.BlobClient
 }

 // NewL1EventParser creates l1 event parser
-func NewL1EventParser(cfg *config.FetcherConfig, client *ethclient.Client) *L1EventParser {
+func NewL1EventParser(cfg *config.FetcherConfig, client *ethclient.Client, blobClient blob_client.BlobClient) *L1EventParser {
 	return &L1EventParser{
-		cfg:    cfg,
-		client: client,
+		cfg:        cfg,
+		client:     client,
+		blobClient: blobClient,
 	}
 }
@@ -232,7 +237,21 @@ func (e *L1EventParser) ParseL1SingleCrossChainEventLogs(ctx context.Context, lo
 }

 // ParseL1BatchEventLogs parses L1 watched batch events.
-func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.Log, client *ethclient.Client) ([]*orm.BatchEvent, error) {
+func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.Log, client *ethclient.Client, blockTimestampsMap map[uint64]uint64) ([]*orm.BatchEvent, error) {
+	// Since multiple CommitBatch events per transaction is introduced >= CodecV7,
+	// with one transaction carrying multiple blobs,
+	// each CommitBatch event corresponds to a blob containing block range data.
+	// To correctly process these events, we need to:
+	// 1. Parsing the associated blob data to extract the block range for each event
+	// 2. Tracking the parent batch hash for each processed CommitBatch event, to:
+	//    - Validate the batch hash, since parent batch hash is needed to calculate the batch hash
+	//    - Derive the index of the current batch by the number of parent batch hashes tracked
+	// In commitBatches and commitAndFinalizeBatch, the parent batch hash is passed in calldata,
+	// so that we can use it to get the first batch's parent batch hash, and derive the rest.
+	// The index map serves this purpose with:
+	//   Key: commit transaction hash
+	//   Value: parent batch hashes (in order) for each processed CommitBatch event in the transaction
+	txBlobIndexMap := make(map[common.Hash][]common.Hash)
 	var l1BatchEvents []*orm.BatchEvent
 	for _, vlog := range logs {
 		switch vlog.Topics[0] {
@@ -247,11 +266,59 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
 				log.Error("Failed to get commit batch tx or the tx is still pending", "err", err, "isPending", isPending)
 				return nil, err
 			}
-			startBlock, endBlock, err := utils.GetBatchRangeFromCalldata(commitTx.Data())
+			version, startBlock, endBlock, err := utils.GetBatchVersionAndBlockRangeFromCalldata(commitTx.Data())
 			if err != nil {
 				log.Error("Failed to get batch range from calldata", "hash", commitTx.Hash().String(), "height", vlog.BlockNumber)
 				return nil, err
 			}
+			if version >= 7 { // It's a batch with version >= 7.
+				codec, err := encoding.CodecFromVersion(encoding.CodecVersion(version))
+				if err != nil {
+					return nil, fmt.Errorf("unsupported codec version: %v, err: %w", version, err)
+				}
+
+				// we append the batch hash to the slice for the current commit transaction after processing the batch.
+				// that means the current index of the batch within the transaction is len(txBlobIndexMap[vlog.TxHash]).
+				currentIndex := len(txBlobIndexMap[vlog.TxHash])
+				if currentIndex >= len(commitTx.BlobHashes()) {
+					return nil, fmt.Errorf("commit transaction %s has %d blobs, but trying to access index %d (batch index %d)",
+						vlog.TxHash.String(), len(commitTx.BlobHashes()), currentIndex, event.BatchIndex.Uint64())
+				}
+				blobVersionedHash := commitTx.BlobHashes()[currentIndex]
+
+				// validate the batch hash
+				var parentBatchHash common.Hash
+				if currentIndex == 0 {
+					parentBatchHash, err = utils.GetParentBatchHashFromCalldata(commitTx.Data())
+					if err != nil {
+						return nil, fmt.Errorf("failed to get parent batch header from calldata, tx hash: %s, err: %w", vlog.TxHash.String(), err)
+					}
+				} else {
+					// here we need to subtract 1 from the current index to get the parent batch hash.
+					parentBatchHash = txBlobIndexMap[vlog.TxHash][currentIndex-1]
+				}
+				calculatedBatch, err := codec.NewDABatchFromParams(event.BatchIndex.Uint64(), blobVersionedHash, parentBatchHash)
+				if err != nil {
+					return nil, fmt.Errorf("failed to create new DA batch from params, batch index: %d, err: %w", event.BatchIndex.Uint64(), err)
+				}
+				if calculatedBatch.Hash() != event.BatchHash {
+					return nil, fmt.Errorf("batch hash mismatch for batch %d, expected: %s, got: %s", event.BatchIndex, event.BatchHash.String(), calculatedBatch.Hash().String())
+				}
+
+				blocks, err := e.getBatchBlockRangeFromBlob(ctx, codec, blobVersionedHash, blockTimestampsMap[vlog.BlockNumber])
+				if err != nil {
+					return nil, fmt.Errorf("failed to process versioned blob, blobVersionedHash: %s, block number: %d, blob index: %d, err: %w",
+						blobVersionedHash.String(), vlog.BlockNumber, currentIndex, err)
+				}
+				if len(blocks) == 0 {
+					return nil, fmt.Errorf("no blocks found in the blob, blobVersionedHash: %s, block number: %d, blob index: %d",
+						blobVersionedHash.String(), vlog.BlockNumber, currentIndex)
+				}
+				startBlock = blocks[0].Number()
+				endBlock = blocks[len(blocks)-1].Number()
+
+				txBlobIndexMap[vlog.TxHash] = append(txBlobIndexMap[vlog.TxHash], event.BatchHash)
+			}
 			l1BatchEvents = append(l1BatchEvents, &orm.BatchEvent{
 				BatchStatus: int(btypes.BatchStatusTypeCommitted),
 				BatchIndex:  event.BatchIndex.Uint64(),
@@ -260,8 +327,8 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
 				EndBlockNumber:   endBlock,
 				L1BlockNumber:    vlog.BlockNumber,
 			})
-		case backendabi.L1RevertBatchEventSig:
-			event := backendabi.L1RevertBatchEvent{}
+		case backendabi.L1RevertBatchV0EventSig:
+			event := backendabi.L1RevertBatchV0Event{}
 			if err := utils.UnpackLog(backendabi.IScrollChainABI, &event, "RevertBatch", vlog); err != nil {
 				log.Error("Failed to unpack RevertBatch event", "err", err)
 				return nil, err
@@ -272,6 +339,19 @@ func (e *L1EventParser) ParseL1BatchEventLogs(ctx context.Context, logs []types.
 				BatchHash:     event.BatchHash.String(),
 				L1BlockNumber: vlog.BlockNumber,
 			})
+		case backendabi.L1RevertBatchV7EventSig:
+			event := backendabi.L1RevertBatchV7Event{}
+			if err := utils.UnpackLog(backendabi.IScrollChainABI, &event, "RevertBatch0", vlog); err != nil {
+				log.Error("Failed to unpack RevertBatch event", "err", err)
+				return nil, err
+			}
+			for i := event.StartBatchIndex.Uint64(); i <= event.FinishBatchIndex.Uint64(); i++ {
+				l1BatchEvents = append(l1BatchEvents, &orm.BatchEvent{
+					BatchStatus:   int(btypes.BatchStatusTypeReverted),
+					BatchIndex:    i,
+					L1BlockNumber: vlog.BlockNumber,
+				})
+			}
 		case backendabi.L1FinalizeBatchEventSig:
 			event := backendabi.L1FinalizeBatchEvent{}
 			if err := utils.UnpackLog(backendabi.IScrollChainABI, &event, "FinalizeBatch", vlog); err != nil {
@@ -389,3 +469,27 @@ func getRealFromAddress(ctx context.Context, eventSender common.Address, eventMe
 	}
 	return sender.String(), nil
 }
+
+func (e *L1EventParser) getBatchBlockRangeFromBlob(ctx context.Context, codec encoding.Codec, blobVersionedHash common.Hash, l1BlockTime uint64) ([]encoding.DABlock, error) {
+	blob, err := e.blobClient.GetBlobByVersionedHashAndBlockTime(ctx, blobVersionedHash, l1BlockTime)
+	if err != nil {
+		return nil, fmt.Errorf("failed to get blob %s: %w", blobVersionedHash.Hex(), err)
+	}
+	if blob == nil {
+		return nil, fmt.Errorf("blob %s not found", blobVersionedHash.Hex())
+	}
+
+	blobPayload, err := codec.DecodeBlob(blob)
+	if err != nil {
+		return nil, fmt.Errorf("blob %s decode error: %w", blobVersionedHash.Hex(), err)
+	}
+
+	blocks := blobPayload.Blocks()
+	if len(blocks) == 0 {
+		return nil, fmt.Errorf("empty blocks in blob %s", blobVersionedHash.Hex())
+	}
+
+	log.Debug("Successfully processed blob", "blobVersionedHash", blobVersionedHash.Hex(), "blocksCount", len(blocks))
+
+	return blocks, nil
+}
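
Note: the long comment inside ParseL1BatchEventLogs is the heart of the v7 handling. A condensed sketch of just that bookkeeping, with hypothetical event and hash types (the real code additionally recomputes and checks each batch hash via codec.NewDABatchFromParams):

package sketch

import "fmt"

// Hash is a hypothetical stand-in for go-ethereum's common.Hash.
type Hash string

// commitEvent is a hypothetical, flattened view of one CommitBatch log.
type commitEvent struct {
    TxHash         Hash   // hash of the commit transaction carrying the blobs
    BatchHash      Hash   // batch hash emitted by this event
    BlobHashes     []Hash // versioned hashes of all blobs in the transaction
    CalldataParent Hash   // parent hash of the tx's first batch, from calldata
}

// parentHashes reproduces the txBlobIndexMap bookkeeping: events are assumed
// to arrive in commit order, so the index of a batch within its transaction
// equals the number of batch hashes already tracked for that transaction.
func parentHashes(events []commitEvent) (map[Hash]Hash, error) {
    tracked := make(map[Hash][]Hash) // tx hash -> batch hashes processed so far
    parents := make(map[Hash]Hash)   // batch hash -> parent batch hash
    for _, ev := range events {
        idx := len(tracked[ev.TxHash]) // this batch's blob index within the tx
        if idx >= len(ev.BlobHashes) {
            return nil, fmt.Errorf("tx %s: more CommitBatch events than blobs", ev.TxHash)
        }
        if idx == 0 {
            parents[ev.BatchHash] = ev.CalldataParent // first batch: parent from calldata
        } else {
            parents[ev.BatchHash] = tracked[ev.TxHash][idx-1] // else: previous batch in the tx
        }
        tracked[ev.TxHash] = append(tracked[ev.TxHash], ev.BatchHash)
    }
    return parents, nil
}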
@@ -11,6 +11,7 @@ import (
 	"github.com/scroll-tech/go-ethereum/core/types"
 	"github.com/scroll-tech/go-ethereum/ethclient"
 	"github.com/scroll-tech/go-ethereum/log"
+	"github.com/scroll-tech/go-ethereum/rollup/da_syncer/blob_client"
 	"gorm.io/gorm"

 	backendabi "scroll-tech/bridge-history-api/abi"
@@ -49,7 +50,7 @@ type L1FetcherLogic struct {
 }

 // NewL1FetcherLogic creates L1 fetcher logic
-func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) *L1FetcherLogic {
+func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client, blobClient blob_client.BlobClient) *L1FetcherLogic {
 	addressList := []common.Address{
 		common.HexToAddress(cfg.StandardERC20GatewayAddr),
 		common.HexToAddress(cfg.CustomERC20GatewayAddr),
@@ -119,6 +120,10 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
 		gatewayList = append(gatewayList, common.HexToAddress(cfg.WrappedTokenGatewayAddr))
 	}

+	if common.HexToAddress(cfg.MessageQueueV2Addr) != (common.Address{}) {
+		addressList = append(addressList, common.HexToAddress(cfg.MessageQueueV2Addr))
+	}
+
 	log.Info("L1 Fetcher configured with the following address list", "addresses", addressList, "gateways", gatewayList)

 	f := &L1FetcherLogic{
@@ -129,7 +134,7 @@ func NewL1FetcherLogic(cfg *config.FetcherConfig, db *gorm.DB, client *ethclient
 		client:      client,
 		addressList: addressList,
 		gatewayList: gatewayList,
-		parser:      NewL1EventParser(cfg, client),
+		parser:      NewL1EventParser(cfg, client, blobClient),
 	}

 	reg := prometheus.DefaultRegisterer
@@ -168,14 +173,10 @@ func (f *L1FetcherLogic) getBlocksAndDetectReorg(ctx context.Context, from, to u
 	return false, 0, lastBlockHash, blocks, nil
 }

-func (f *L1FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, blocks []*types.Block) (map[uint64]uint64, []*orm.CrossMessage, error) {
+func (f *L1FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, blocks []*types.Block) ([]*orm.CrossMessage, error) {
 	var l1RevertedTxs []*orm.CrossMessage
-	blockTimestampsMap := make(map[uint64]uint64)

 	for i := from; i <= to; i++ {
 		block := blocks[i-from]
-		blockTimestampsMap[block.NumberU64()] = block.Time()

 		for _, tx := range block.Transactions() {
 			// Gateways: L1 deposit.
 			// Messenger: L1 deposit retry (replayMessage), L1 deposit refund (dropMessage), L2 withdrawal's claim (relayMessageWithProof).
@@ -187,7 +188,7 @@ func (f *L1FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, bl
 			receipt, receiptErr := f.client.TransactionReceipt(ctx, tx.Hash())
 			if receiptErr != nil {
 				log.Error("Failed to get transaction receipt", "txHash", tx.Hash().String(), "err", receiptErr)
-				return nil, nil, receiptErr
+				return nil, receiptErr
 			}

 			// Check if the transaction is failed
@@ -199,7 +200,7 @@ func (f *L1FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, bl
 			sender, senderErr := signer.Sender(tx)
 			if senderErr != nil {
 				log.Error("get sender failed", "chain id", tx.ChainId().Uint64(), "tx hash", tx.Hash().String(), "err", senderErr)
-				return nil, nil, senderErr
+				return nil, senderErr
 			}

 			l1RevertedTxs = append(l1RevertedTxs, &orm.CrossMessage{
@@ -213,7 +214,7 @@ func (f *L1FetcherLogic) getRevertedTxs(ctx context.Context, from, to uint64, bl
 			})
 		}
 	}
-	return blockTimestampsMap, l1RevertedTxs, nil
+	return l1RevertedTxs, nil
 }

 func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]types.Log, error) {
@@ -224,7 +225,7 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
 		Topics: make([][]common.Hash, 1),
 	}

-	query.Topics[0] = make([]common.Hash, 16)
+	query.Topics[0] = make([]common.Hash, 17)
 	query.Topics[0][0] = backendabi.L1DepositETHSig
 	query.Topics[0][1] = backendabi.L1DepositERC20Sig
 	query.Topics[0][2] = backendabi.L1DepositERC721Sig
@@ -233,14 +234,15 @@ func (f *L1FetcherLogic) l1FetcherLogs(ctx context.Context, from, to uint64) ([]
 	query.Topics[0][5] = backendabi.L1RelayedMessageEventSig
 	query.Topics[0][6] = backendabi.L1FailedRelayedMessageEventSig
 	query.Topics[0][7] = backendabi.L1CommitBatchEventSig
-	query.Topics[0][8] = backendabi.L1RevertBatchEventSig
-	query.Topics[0][9] = backendabi.L1FinalizeBatchEventSig
-	query.Topics[0][10] = backendabi.L1QueueTransactionEventSig
-	query.Topics[0][11] = backendabi.L1DequeueTransactionEventSig
-	query.Topics[0][12] = backendabi.L1DropTransactionEventSig
-	query.Topics[0][13] = backendabi.L1ResetDequeuedTransactionEventSig
-	query.Topics[0][14] = backendabi.L1BridgeBatchDepositSig
-	query.Topics[0][15] = backendabi.L1DepositWrappedTokenSig
+	query.Topics[0][8] = backendabi.L1RevertBatchV0EventSig
+	query.Topics[0][9] = backendabi.L1RevertBatchV7EventSig
+	query.Topics[0][10] = backendabi.L1FinalizeBatchEventSig
+	query.Topics[0][11] = backendabi.L1QueueTransactionEventSig
+	query.Topics[0][12] = backendabi.L1DequeueTransactionEventSig
+	query.Topics[0][13] = backendabi.L1DropTransactionEventSig
+	query.Topics[0][14] = backendabi.L1ResetDequeuedTransactionEventSig
+	query.Topics[0][15] = backendabi.L1BridgeBatchDepositSig
+	query.Topics[0][16] = backendabi.L1DepositWrappedTokenSig

 	eventLogs, err := f.client.FilterLogs(ctx, query)
 	if err != nil {
@@ -264,12 +266,18 @@ func (f *L1FetcherLogic) L1Fetcher(ctx context.Context, from, to uint64, lastBlo
 		return isReorg, reorgHeight, blockHash, nil, nil
 	}

-	blockTimestampsMap, l1RevertedTxs, err := f.getRevertedTxs(ctx, from, to, blocks)
+	l1RevertedTxs, err := f.getRevertedTxs(ctx, from, to, blocks)
 	if err != nil {
 		log.Error("L1Fetcher getRevertedTxs failed", "from", from, "to", to, "error", err)
 		return false, 0, common.Hash{}, nil, err
 	}

+	// Map block number to block timestamp to avoid fetching block header multiple times to get block timestamp.
+	blockTimestampsMap := make(map[uint64]uint64)
+	for _, block := range blocks {
+		blockTimestampsMap[block.NumberU64()] = block.Time()
+	}
+
 	eventLogs, err := f.l1FetcherLogs(ctx, from, to)
 	if err != nil {
 		log.Error("L1Fetcher l1FetcherLogs failed", "from", from, "to", to, "error", err)
@@ -282,7 +290,7 @@ func (f *L1FetcherLogic) L1Fetcher(ctx context.Context, from, to uint64, lastBlo
 		return false, 0, common.Hash{}, nil, err
 	}

-	l1BatchEvents, err := f.parser.ParseL1BatchEventLogs(ctx, eventLogs, f.client)
+	l1BatchEvents, err := f.parser.ParseL1BatchEventLogs(ctx, eventLogs, f.client, blockTimestampsMap)
 	if err != nil {
 		log.Error("failed to parse L1 batch event logs", "from", from, "to", to, "err", err)
 		return false, 0, common.Hash{}, nil, err
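
Note: the timestamp map that used to be built as a side effect of getRevertedTxs is now built once in L1Fetcher from blocks already in memory, so ParseL1BatchEventLogs can look up a commit event's L1 block time without extra header fetches. The memoization in isolation, with a hypothetical Block interface standing in for go-ethereum's *types.Block:

package sketch

// Block is a hypothetical stand-in for go-ethereum's *types.Block.
type Block interface {
    NumberU64() uint64
    Time() uint64
}

// timestampsByNumber builds a block-number -> timestamp map from blocks that
// were already fetched for the range scan, so later per-event lookups need no
// additional header RPCs.
func timestampsByNumber(blocks []Block) map[uint64]uint64 {
    m := make(map[uint64]uint64, len(blocks))
    for _, b := range blocks {
        m[b.NumberU64()] = b.Time()
    }
    return m
}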
@@ -117,7 +117,7 @@ func (c *BatchEvent) InsertOrUpdateBatchEvents(ctx context.Context, l1BatchEvent
 		}
 	case btypes.BatchStatusTypeReverted:
 		db = db.Where("batch_index = ?", l1BatchEvent.BatchIndex)
-		db = db.Where("batch_hash = ?", l1BatchEvent.BatchHash)
+		db = db.Where("batch_status != ?", btypes.BatchStatusTypeFinalized)
 		updateFields["batch_status"] = btypes.BatchStatusTypeReverted
 		if err := db.Updates(updateFields).Error; err != nil {
 			return fmt.Errorf("failed to update batch event, error: %w", err)
@@ -154,7 +154,7 @@ func (c *CrossMessage) GetL2UnclaimedWithdrawalsByAddress(ctx context.Context, s
 	db := c.db.WithContext(ctx)
 	db = db.Model(&CrossMessage{})
 	db = db.Where("message_type = ?", btypes.MessageTypeL2SentMessage)
-	db = db.Where("tx_status = ?", types.TxStatusTypeSent)
+	db = db.Where("tx_status in (?)", []types.TxStatusType{types.TxStatusTypeSent, types.TxStatusTypeFailedRelayed, types.TxStatusTypeRelayTxReverted})
 	db = db.Where("sender = ?", sender)
 	db = db.Order("block_timestamp desc")
 	db = db.Limit(500)
@@ -66,25 +66,26 @@ func ComputeMessageHash(
 	return common.BytesToHash(crypto.Keccak256(data))
 }

-// GetBatchRangeFromCalldata find the block range from calldata, both inclusive.
-func GetBatchRangeFromCalldata(txData []byte) (uint64, uint64, error) {
+// GetBatchVersionAndBlockRangeFromCalldata find the block range from calldata, both inclusive.
+func GetBatchVersionAndBlockRangeFromCalldata(txData []byte) (uint8, uint64, uint64, error) {
 	const methodIDLength = 4
 	if len(txData) < methodIDLength {
-		return 0, 0, fmt.Errorf("transaction data is too short, length of tx data: %v, minimum length required: %v", len(txData), methodIDLength)
+		return 0, 0, 0, fmt.Errorf("transaction data is too short, length of tx data: %v, minimum length required: %v", len(txData), methodIDLength)
 	}
 	method, err := backendabi.IScrollChainABI.MethodById(txData[:methodIDLength])
 	if err != nil {
-		return 0, 0, fmt.Errorf("failed to get method by ID, ID: %v, err: %w", txData[:methodIDLength], err)
+		return 0, 0, 0, fmt.Errorf("failed to get method by ID, ID: %v, err: %w", txData[:methodIDLength], err)
 	}
 	values, err := method.Inputs.Unpack(txData[methodIDLength:])
 	if err != nil {
-		return 0, 0, fmt.Errorf("failed to unpack transaction data using ABI, tx data: %v, err: %w", txData, err)
+		return 0, 0, 0, fmt.Errorf("failed to unpack transaction data using ABI, tx data: %v, err: %w", txData, err)
 	}

 	var chunks [][]byte
+	var version uint8

 	if method.Name == "importGenesisBatch" {
-		return 0, 0, nil
+		return 0, 0, 0, nil
 	} else if method.Name == "commitBatch" {
 		type commitBatchArgs struct {
 			Version uint8
@@ -95,11 +96,11 @@ func GetBatchRangeFromCalldata(txData []byte) (uint64, uint64, error) {

 		var args commitBatchArgs
 		if err = method.Inputs.Copy(&args, values); err != nil {
-			return 0, 0, fmt.Errorf("failed to decode calldata into commitBatch args, values: %+v, err: %w", values, err)
+			return 0, 0, 0, fmt.Errorf("failed to decode calldata into commitBatch args, values: %+v, err: %w", values, err)
 		}

 		chunks = args.Chunks
+		version = args.Version
 	} else if method.Name == "commitBatchWithBlobProof" {
 		type commitBatchWithBlobProofArgs struct {
 			Version uint8
@@ -111,10 +112,22 @@ func GetBatchRangeFromCalldata(txData []byte) (uint64, uint64, error) {

 		var args commitBatchWithBlobProofArgs
 		if err = method.Inputs.Copy(&args, values); err != nil {
-			return 0, 0, fmt.Errorf("failed to decode calldata into commitBatchWithBlobProofArgs args, values: %+v, err: %w", values, err)
+			return 0, 0, 0, fmt.Errorf("failed to decode calldata into commitBatchWithBlobProofArgs args, values: %+v, err: %w", values, err)
 		}

 		chunks = args.Chunks
+		version = args.Version
+	} else if method.Name == "commitBatches" || method.Name == "commitAndFinalizeBatch" {
+		if len(values) < 3 {
+			return 0, 0, 0, fmt.Errorf("insufficient arguments for %s, expected 3, got %d", method.Name, len(values))
+		}
+
+		var ok bool
+		version, ok = values[0].(uint8)
+		if !ok {
+			return 0, 0, 0, fmt.Errorf("invalid version type: %T", values[0])
+		}
+		return version, 0, 0, nil
 	}

 	var startBlock uint64
@@ -124,7 +137,7 @@ func GetBatchRangeFromCalldata(txData []byte) (uint64, uint64, error) {
 	// | 1 byte     | 60 bytes | ... | 60 bytes |
 	// | num blocks | block 1  | ... | block n  |
 	if len(chunks) == 0 {
-		return 0, 0, errors.New("invalid chunks")
+		return 0, 0, 0, errors.New("invalid chunks")
 	}
 	chunk := chunks[0]
 	block := chunk[1:61] // first block in chunk
@@ -135,7 +148,36 @@ func GetBatchRangeFromCalldata(txData []byte) (uint64, uint64, error) {
 	block = chunk[1+lastBlockIndex*60 : 1+lastBlockIndex*60+60] // last block in chunk
 	finishBlock = binary.BigEndian.Uint64(block[0:8])

-	return startBlock, finishBlock, err
+	return version, startBlock, finishBlock, err
 }
+
+// GetParentBatchHashFromCalldata gets the parent batch hash from calldata.
+// It only supports commitBatches and commitAndFinalizeBatch, which only accept batches >= v7.
+func GetParentBatchHashFromCalldata(txData []byte) (common.Hash, error) {
+	const methodIDLength = 4
+	if len(txData) < methodIDLength {
+		return common.Hash{}, fmt.Errorf("transaction data is too short, length of tx data: %v, minimum length required: %v", len(txData), methodIDLength)
+	}
+	method, err := backendabi.IScrollChainABI.MethodById(txData[:methodIDLength])
+	if err != nil {
+		return common.Hash{}, fmt.Errorf("failed to get method by ID, ID: %v, err: %w", txData[:methodIDLength], err)
+	}
+	values, err := method.Inputs.Unpack(txData[methodIDLength:])
+	if err != nil {
+		return common.Hash{}, fmt.Errorf("failed to unpack transaction data using ABI, tx data: %v, err: %w", txData, err)
+	}
+
+	if method.Name == "commitBatches" || method.Name == "commitAndFinalizeBatch" {
+		if len(values) < 3 {
+			return common.Hash{}, fmt.Errorf("insufficient arguments for %s, expected 3, got %d", method.Name, len(values))
+		}
+		parentBatchHash, ok := values[1].([32]byte)
+		if !ok {
+			return common.Hash{}, fmt.Errorf("invalid parentBatchHash type: %T", values[1])
+		}
+		return common.BytesToHash(parentBatchHash[:]), nil
+	}
+	return common.Hash{}, fmt.Errorf("method %s does not support parent batch header", method.Name)
+}

 // GetBlocksInRange gets a batch of blocks for a block range [start, end] inclusive.
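
Note: the chunk layout comment above (a 1-byte block count followed by 60-byte block contexts whose first 8 bytes are the big-endian block number) fully determines the pre-v7 range extraction. A self-contained sketch of that decoding step alone, mirroring the chunk walk in GetBatchVersionAndBlockRangeFromCalldata but skipping the ABI plumbing:

package sketch

import (
    "encoding/binary"
    "errors"
)

// blockRange returns the first block number of the first chunk and the last
// block number of the last chunk. Pre-v7 chunk layout: 1-byte block count,
// then one 60-byte block context per block; the block number occupies the
// first 8 bytes of each context, big-endian.
func blockRange(chunks [][]byte) (start, finish uint64, err error) {
    if len(chunks) == 0 {
        return 0, 0, errors.New("invalid chunks")
    }
    first := chunks[0]
    if len(first) < 1+60 {
        return 0, 0, errors.New("first chunk too short")
    }
    start = binary.BigEndian.Uint64(first[1 : 1+8]) // first block in first chunk

    last := chunks[len(chunks)-1]
    n := int(last[0]) // number of blocks in the last chunk
    if n == 0 || len(last) < 1+n*60 {
        return 0, 0, errors.New("last chunk malformed")
    }
    lastBlock := last[1+(n-1)*60 : 1+n*60] // last block context in last chunk
    finish = binary.BigEndian.Uint64(lastBlock[0:8])
    return start, finish, nil
}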
File diff suppressed because one or more lines are too long
@@ -91,7 +91,7 @@ linters-settings:
   #local-prefixes: github.com/org/project
   gocyclo:
     # minimal code complexity to report, 30 by default (but we recommend 10-20)
-    min-complexity: 30
+    min-complexity: 40
   maligned:
     # print struct with more effective memory layout or not, false by default
    suggest-new: true
@@ -254,6 +254,9 @@ issues:
     - linters:
         - wsl
       text: "expressions should not be cuddled with declarations or returns"
+    - linters:
+        - govet
+      text: 'shadow: declaration of "(err|ctx)" shadows declaration at'

   # Independently from option `exclude` we use default exclude patterns,
   # it can be disabled by this option. To list all
@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
+FROM scrolltech/go-rust-builder:go-1.22-rust-nightly-2023-12-03 as base

 WORKDIR /src
 COPY go.mod* ./
@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
+FROM scrolltech/go-rust-builder:go-1.22-rust-nightly-2023-12-03 as base

 WORKDIR /src
 COPY ./bridge-history-api/go.* ./
@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
+FROM scrolltech/go-rust-builder:go-1.22-rust-nightly-2023-12-03 as base

 WORKDIR /src
 COPY go.mod* ./
@@ -1,5 +1,5 @@
 # Build libzkp dependency
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as chef
+FROM scrolltech/cuda-go-rust-builder:cuda-11.7.1-go-1.21-rust-nightly-2023-12-03 as chef
 WORKDIR app

 FROM chef as planner
@@ -9,6 +9,12 @@ RUN cargo chef prepare --recipe-path recipe.json
 FROM chef as zkp-builder
 COPY ./common/libzkp/impl/rust-toolchain ./
 COPY --from=planner /app/recipe.json recipe.json
+# run scripts to get openvm-gpu
+COPY ./build/dockerfiles/coordinator-api/plonky3-gpu /plonky3-gpu
+COPY ./build/dockerfiles/coordinator-api/openvm-stark-gpu /openvm-stark-gpu
+COPY ./build/dockerfiles/coordinator-api/openvm-gpu /openvm-gpu
+COPY ./build/dockerfiles/coordinator-api/gitconfig /root/.gitconfig
+COPY ./build/dockerfiles/coordinator-api/config.toml /root/.cargo/config.toml
 RUN cargo chef cook --release --recipe-path recipe.json

 COPY ./common/libzkp/impl .
@@ -16,7 +22,7 @@ RUN cargo build --release


 # Download Go dependencies
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
+FROM scrolltech/cuda-go-rust-builder:cuda-11.7.1-go-1.21-rust-nightly-2023-12-03 as base
 WORKDIR /src
 COPY go.work* ./
 COPY ./rollup/go.* ./rollup/
@@ -36,7 +42,7 @@ COPY --from=zkp-builder /app/target/release/libzkp.so ./coordinator/internal/log
 RUN cd ./coordinator && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" make coordinator_api_skip_libzkp && mv ./build/bin/coordinator_api /bin/coordinator_api && mv internal/logic/verifier/lib /bin/

 # Pull coordinator into a second stage deploy ubuntu container
-FROM ubuntu:20.04
+FROM nvidia/cuda:11.7.1-runtime-ubuntu22.04
 ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib
 ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
 # ENV CHAIN_ID=534353
build/dockerfiles/coordinator-api/checkout_all.sh (new executable file): 17 lines
@@ -0,0 +1,17 @@
+#!/bin/bash
+set -uex
+
+PLONKY3_GPU_COMMIT=261b322 # v0.2.0
+OPENVM_STARK_GPU_COMMIT=3082234 # PR#48
+OPENVM_GPU_COMMIT=8094b4f # branch: patch-v1.2.0
+
+DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)
+
+# checkout plonky3-gpu
+cd $DIR/plonky3-gpu && git checkout ${PLONKY3_GPU_COMMIT}
+
+# checkout openvm-stark-gpu
+cd $DIR/openvm-stark-gpu && git checkout ${OPENVM_STARK_GPU_COMMIT}
+
+# checkout openvm-gpu
+cd $DIR/openvm-gpu && git checkout ${OPENVM_GPU_COMMIT}
build/dockerfiles/coordinator-api/clone_openvm_gpu.sh (new executable file): 10 lines
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -uex
+
+DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)
+
+# clone openvm-gpu if not exists
+if [ ! -d $DIR/openvm-gpu ]; then
+    git clone git@github.com:scroll-tech/openvm-gpu.git $DIR/openvm-gpu
+fi
+cd $DIR/openvm-gpu && git fetch --all --force
build/dockerfiles/coordinator-api/clone_openvm_stark_gpu.sh (new executable file): 10 lines
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -uex
+
+DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)
+
+# clone openvm-stark-gpu if not exists
+if [ ! -d $DIR/openvm-stark-gpu ]; then
+    git clone git@github.com:scroll-tech/openvm-stark-gpu.git $DIR/openvm-stark-gpu
+fi
+cd $DIR/openvm-stark-gpu && git fetch --all --force
build/dockerfiles/coordinator-api/clone_plonky3_gpu.sh (new executable file): 10 lines
@@ -0,0 +1,10 @@
+#!/bin/bash
+set -uex
+
+DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")" > /dev/null 2>&1 && pwd)
+
+# clone plonky3-gpu if not exists
+if [ ! -d $DIR/plonky3-gpu ]; then
+    git clone git@github.com:scroll-tech/plonky3-gpu.git $DIR/plonky3-gpu
+fi
+cd $DIR/plonky3-gpu && git fetch --all --force
build/dockerfiles/coordinator-api/config.toml (new file): 92 lines
@@ -0,0 +1,92 @@
+# openvm
+# same order and features as zkvm-prover/Cargo.toml.gpu
+[patch."ssh://git@github.com/scroll-tech/openvm-gpu.git"]
+openvm = { path = "/openvm-gpu/crates/toolchain/openvm", default-features = false }
+openvm-algebra-complex-macros = { path = "/openvm-gpu/extensions/algebra/complex-macros", default-features = false }
+openvm-algebra-guest = { path = "/openvm-gpu/extensions/algebra/guest", default-features = false }
+openvm-bigint-guest = { path = "/openvm-gpu/extensions/bigint/guest", default-features = false }
+openvm-build = { path = "/openvm-gpu/crates/toolchain/build", default-features = false }
+openvm-circuit = { path = "/openvm-gpu/crates/vm", default-features = false }
+openvm-custom-insn = { path = "/openvm-gpu/crates/toolchain/custom_insn", default-features = false }
+openvm-continuations = { path = "/openvm-gpu/crates/continuations", default-features = false }
+openvm-ecc-guest = { path = "/openvm-gpu/extensions/ecc/guest", default-features = false }
+openvm-instructions ={ path = "/openvm-gpu/crates/toolchain/instructions", default-features = false }
+openvm-keccak256-guest = { path = "/openvm-gpu/extensions/keccak256/guest", default-features = false }
+openvm-native-circuit = { path = "/openvm-gpu/extensions/native/circuit", default-features = false }
+openvm-native-compiler = { path = "/openvm-gpu/extensions/native/compiler", default-features = false }
+openvm-native-recursion = { path = "/openvm-gpu/extensions/native/recursion", default-features = false }
+openvm-native-transpiler = { path = "/openvm-gpu/extensions/native/transpiler", default-features = false }
+openvm-pairing-guest = { path = "/openvm-gpu/extensions/pairing/guest", default-features = false }
+openvm-rv32im-guest = { path = "/openvm-gpu/extensions/rv32im/guest", default-features = false }
+openvm-rv32im-transpiler = { path = "/openvm-gpu/extensions/rv32im/transpiler", default-features = false }
+openvm-sdk = { path = "/openvm-gpu/crates/sdk", default-features = false, features = ["parallel", "bench-metrics", "evm-prove"] }
+openvm-sha256-guest = { path = "/openvm-gpu/extensions/sha256/guest", default-features = false }
+openvm-transpiler = { path = "/openvm-gpu/crates/toolchain/transpiler", default-features = false }
+
+# stark-backend
+[patch."https://github.com/openvm-org/stark-backend.git"]
+openvm-stark-backend = { path = "/openvm-stark-gpu/crates/stark-backend", features = ["gpu"] }
+openvm-stark-sdk = { path = "/openvm-stark-gpu/crates/stark-sdk", features = ["gpu"] }
+
+[patch."ssh://git@github.com/scroll-tech/openvm-stark-gpu.git"]
+openvm-stark-backend = { path = "/openvm-stark-gpu/crates/stark-backend", features = ["gpu"] }
+openvm-stark-sdk = { path = "/openvm-stark-gpu/crates/stark-sdk", features = ["gpu"] }
+
+# plonky3
+[patch."https://github.com/Plonky3/Plonky3.git"]
+p3-air = { path = "/plonky3-gpu/air" }
+p3-field = { path = "/plonky3-gpu/field" }
+p3-commit = { path = "/plonky3-gpu/commit" }
+p3-matrix = { path = "/plonky3-gpu/matrix" }
+p3-baby-bear = { path = "/plonky3-gpu/baby-bear" }
+p3-koala-bear = { path = "/plonky3-gpu/koala-bear" }
+p3-util = { path = "/plonky3-gpu/util" }
+p3-challenger = { path = "/plonky3-gpu/challenger" }
+p3-dft = { path = "/plonky3-gpu/dft" }
+p3-fri = { path = "/plonky3-gpu/fri" }
+p3-goldilocks = { path = "/plonky3-gpu/goldilocks" }
+p3-keccak = { path = "/plonky3-gpu/keccak" }
+p3-keccak-air = { path = "/plonky3-gpu/keccak-air" }
+p3-blake3 = { path = "/plonky3-gpu/blake3" }
+p3-mds = { path = "/plonky3-gpu/mds" }
+p3-monty-31 = { path = "/plonky3-gpu/monty-31" }
+p3-merkle-tree = { path = "/plonky3-gpu/merkle-tree" }
+p3-poseidon = { path = "/plonky3-gpu/poseidon" }
+p3-poseidon2 = { path = "/plonky3-gpu/poseidon2" }
+p3-poseidon2-air = { path = "/plonky3-gpu/poseidon2-air" }
+p3-symmetric = { path = "/plonky3-gpu/symmetric" }
+p3-uni-stark = { path = "/plonky3-gpu/uni-stark" }
+p3-maybe-rayon = { path = "/plonky3-gpu/maybe-rayon" }
+p3-bn254-fr = { path = "/plonky3-gpu/bn254-fr" }
+
+# gpu crates
+[patch."ssh://git@github.com/scroll-tech/plonky3-gpu.git"]
+p3-gpu-base = { path = "/plonky3-gpu/gpu-base" }
+p3-gpu-build = { path = "/plonky3-gpu/gpu-build" }
+p3-gpu-field = { path = "/plonky3-gpu/gpu-field" }
+p3-gpu-backend = { path = "/plonky3-gpu/gpu-backend" }
+p3-gpu-module = { path = "/plonky3-gpu/gpu-module" }
+p3-air = { path = "/plonky3-gpu/air" }
+p3-field = { path = "/plonky3-gpu/field" }
+p3-commit = { path = "/plonky3-gpu/commit" }
+p3-matrix = { path = "/plonky3-gpu/matrix" }
+p3-baby-bear = { path = "/plonky3-gpu/baby-bear" }
+p3-koala-bear = { path = "/plonky3-gpu/koala-bear" }
+p3-util = { path = "/plonky3-gpu/util" }
+p3-challenger = { path = "/plonky3-gpu/challenger" }
+p3-dft = { path = "/plonky3-gpu/dft" }
+p3-fri = { path = "/plonky3-gpu/fri" }
+p3-goldilocks = { path = "/plonky3-gpu/goldilocks" }
+p3-keccak = { path = "/plonky3-gpu/keccak" }
+p3-keccak-air = { path = "/plonky3-gpu/keccak-air" }
+p3-blake3 = { path = "/plonky3-gpu/blake3" }
+p3-mds = { path = "/plonky3-gpu/mds" }
+p3-monty-31 = { path = "/plonky3-gpu/monty-31" }
+p3-merkle-tree = { path = "/plonky3-gpu/merkle-tree" }
+p3-poseidon = { path = "/plonky3-gpu/poseidon" }
+p3-poseidon2 = { path = "/plonky3-gpu/poseidon2" }
+p3-poseidon2-air = { path = "/plonky3-gpu/poseidon2-air" }
+p3-symmetric = { path = "/plonky3-gpu/symmetric" }
+p3-uni-stark = { path = "/plonky3-gpu/uni-stark" }
+p3-maybe-rayon = { path = "/plonky3-gpu/maybe-rayon" }
+p3-bn254-fr = { path = "/plonky3-gpu/bn254-fr" }
build/dockerfiles/coordinator-api/gitconfig (new file): 2 lines
@@ -0,0 +1,2 @@
+[url "https://github.com/"]
+	insteadOf = ssh://git@github.com/
@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
+FROM scrolltech/go-rust-builder:go-1.22-rust-nightly-2023-12-03 as base

 WORKDIR /src
 COPY go.work* ./

@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
+FROM scrolltech/go-rust-builder:go-1.22-rust-nightly-2023-12-03 as base

 WORKDIR /src
 COPY go.work* ./

@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
+FROM scrolltech/go-rust-builder:go-1.22-rust-nightly-2023-12-03 as base

 WORKDIR /src
 COPY go.work* ./

@@ -1,5 +1,5 @@
 ARG CUDA_VERSION=11.7.1
-ARG GO_VERSION=1.21
+ARG GO_VERSION=1.22.12
 ARG RUST_VERSION=nightly-2023-12-03
 ARG CARGO_CHEF_TAG=0.1.41

@@ -36,7 +36,7 @@ RUN if [ "$(uname -m)" = "x86_64" ]; then \
     else \
         echo "Unsupported architecture"; exit 1; \
     fi
-RUN wget https://go.dev/dl/go${GO_VERSION}.1.linux-$(cat /tmp/arch).tar.gz
-RUN tar -C /usr/local -xzf go${GO_VERSION}.1.linux-$(cat /tmp/arch).tar.gz
-RUN rm go${GO_VERSION}.1.linux-$(cat /tmp/arch).tar.gz && rm /tmp/arch
+RUN wget https://go.dev/dl/go${GO_VERSION}.linux-$(cat /tmp/arch).tar.gz
+RUN tar -C /usr/local -xzf go${GO_VERSION}.linux-$(cat /tmp/arch).tar.gz
+RUN rm go${GO_VERSION}.linux-$(cat /tmp/arch).tar.gz && rm /tmp/arch
 ENV PATH="/usr/local/go/bin:${PATH}"

@@ -1,4 +1,4 @@
-ARG GO_VERSION=1.21
+ARG GO_VERSION=1.22.12
 ARG RUST_VERSION=nightly-2023-12-03
 ARG CARGO_CHEF_TAG=0.1.41

@@ -32,7 +32,7 @@ RUN if [ "$(uname -m)" = "x86_64" ]; then \
     else \
         echo "Unsupported architecture"; exit 1; \
     fi
-RUN wget https://go.dev/dl/go${GO_VERSION}.1.linux-$(cat /tmp/arch).tar.gz
-RUN tar -C /usr/local -xzf go${GO_VERSION}.1.linux-$(cat /tmp/arch).tar.gz
-RUN rm go${GO_VERSION}.1.linux-$(cat /tmp/arch).tar.gz && rm /tmp/arch
+RUN wget https://go.dev/dl/go${GO_VERSION}.linux-$(cat /tmp/arch).tar.gz
+RUN tar -C /usr/local -xzf go${GO_VERSION}.linux-$(cat /tmp/arch).tar.gz
+RUN rm go${GO_VERSION}.linux-$(cat /tmp/arch).tar.gz && rm /tmp/arch
 ENV PATH="/usr/local/go/bin:${PATH}"

@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
+FROM scrolltech/go-rust-builder:go-1.22-rust-nightly-2023-12-03 as base

 WORKDIR /src
 COPY go.work* ./
@@ -41,7 +41,7 @@ func (g *gormLogger) Error(_ context.Context, msg string, data ...interface{}) {
 func (g *gormLogger) Trace(_ context.Context, begin time.Time, fc func() (string, int64), err error) {
 	elapsed := time.Since(begin)
 	sql, rowsAffected := fc()
-	g.gethLogger.Debug("gorm", "line", utils.FileWithLineNum(), "cost", elapsed, "sql", sql, "rowsAffected", rowsAffected, "err", err)
+	g.gethLogger.Trace("gorm", "line", utils.FileWithLineNum(), "cost", elapsed, "sql", sql, "rowsAffected", rowsAffected, "err", err)
 }

 // InitDB init the db handler
1862
common/libzkp/impl/Cargo.lock
generated
File diff suppressed because it is too large
@@ -14,8 +14,8 @@ ruint = { git = "https://github.com/scroll-tech/uint.git", branch = "v1.12.3" }
tiny-keccak = { git = "https://github.com/scroll-tech/tiny-keccak", branch = "scroll-patch-v2.0.2-openvm-v1.0.0-rc.1" }

[dependencies]
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-prover" }
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.1.0-rc.6", package = "scroll-zkvm-verifier" }
euclid_prover = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.4.2", package = "scroll-zkvm-prover" }
euclid_verifier = { git = "https://github.com/scroll-tech/zkvm-prover.git", tag = "v0.4.2", package = "scroll-zkvm-verifier" }

base64 = "0.13.0"
env_logger = "0.9.0"
@@ -32,3 +32,35 @@ opt-level = 3

[profile.release]
opt-level = 3

[patch."https://github.com/openvm-org/stark-backend.git"]
openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
openvm-stark-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }

[patch."https://github.com/Plonky3/Plonky3.git"]
p3-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-field = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-commit = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-matrix = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-baby-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", features = [
    "nightly-features",
], tag = "v0.2.0" }
p3-koala-bear = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-util = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-challenger = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-dft = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-fri = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-goldilocks = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-keccak = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-keccak-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-blake3 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-mds = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-merkle-tree = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-monty-31 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-poseidon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-poseidon2 = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.0" }
@@ -26,12 +26,6 @@ pub unsafe extern "C" fn verify_chunk_proof(

fn verify_proof(proof: *const c_char, fork_name: *const c_char, task_type: TaskType) -> c_char {
    let fork_name_str = c_char_to_str(fork_name);
    // Skip verification for darwinV2 as we can't host darwinV2 and euclid verifiers on the same
    // binary.
    if fork_name_str == "darwinV2" {
        return true as c_char;
    }

    let proof = c_char_to_vec(proof);
    let verifier = verifier::get_verifier(fork_name_str);
@@ -1,9 +1,8 @@
#![allow(static_mut_refs)]

mod euclid;

mod euclidv2;
use anyhow::{bail, Result};
use euclid::EuclidVerifier;
use euclidv2::EuclidV2Verifier;
use serde::{Deserialize, Serialize};
use std::{cell::OnceCell, path::Path, rc::Rc};
@@ -29,27 +28,23 @@ pub trait ProofVerifier {
#[derive(Debug, Serialize, Deserialize)]
pub struct CircuitConfig {
    pub fork_name: String,
    pub params_path: String,
    pub assets_path: String,
}

#[derive(Debug, Serialize, Deserialize)]
pub struct VerifierConfig {
    pub low_version_circuit: CircuitConfig,
    pub high_version_circuit: CircuitConfig,
}

type HardForkName = String;

struct VerifierPair(HardForkName, Rc<Box<dyn ProofVerifier>>);

static mut VERIFIER_LOW: OnceCell<VerifierPair> = OnceCell::new();
static mut VERIFIER_HIGH: OnceCell<VerifierPair> = OnceCell::new();

pub fn init(config: VerifierConfig) {
    let verifier = EuclidVerifier::new(&config.high_version_circuit.assets_path);
    let verifier = EuclidV2Verifier::new(&config.high_version_circuit.assets_path);
    unsafe {
        VERIFIER_LOW
        VERIFIER_HIGH
            .set(VerifierPair(
                config.high_version_circuit.fork_name,
                Rc::new(Box::new(verifier)),
@@ -60,12 +55,6 @@ pub fn init(config: VerifierConfig) {

pub fn get_verifier(fork_name: &str) -> Result<Rc<Box<dyn ProofVerifier>>> {
    unsafe {
        if let Some(verifier) = VERIFIER_LOW.get() {
            if verifier.0 == fork_name {
                return Ok(verifier.1.clone());
            }
        }

        if let Some(verifier) = VERIFIER_HIGH.get() {
            if verifier.0 == fork_name {
                return Ok(verifier.1.clone());
@@ -4,16 +4,16 @@ use anyhow::Result;

use crate::utils::panic_catch;
use euclid_prover::{BatchProof, BundleProof, ChunkProof};
use euclid_verifier::verifier::{BatchVerifier, BundleVerifier, ChunkVerifier};
use euclid_verifier::verifier::{BatchVerifier, BundleVerifierEuclidV2, ChunkVerifier};
use std::{fs::File, path::Path};

pub struct EuclidVerifier {
pub struct EuclidV2Verifier {
    chunk_verifier: ChunkVerifier,
    batch_verifier: BatchVerifier,
    bundle_verifier: BundleVerifier,
    bundle_verifier: BundleVerifierEuclidV2,
}

impl EuclidVerifier {
impl EuclidV2Verifier {
    pub fn new(assets_dir: &str) -> Self {
        let verifier_bin = Path::new(assets_dir).join("verifier.bin");
        let config = Path::new(assets_dir).join("root-verifier-vm-config");
@@ -24,13 +24,13 @@ impl EuclidVerifier {
                .expect("Setting up chunk verifier"),
            batch_verifier: BatchVerifier::setup(&config, &exe, &verifier_bin)
                .expect("Setting up batch verifier"),
            bundle_verifier: BundleVerifier::setup(&config, &exe, &verifier_bin)
            bundle_verifier: BundleVerifierEuclidV2::setup(&config, &exe, &verifier_bin)
                .expect("Setting up bundle verifier"),
        }
    }
}

impl ProofVerifier for EuclidVerifier {
impl ProofVerifier for EuclidV2Verifier {
    fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
        panic_catch(|| match task_type {
            TaskType::Chunk => {
@@ -276,8 +276,8 @@ const (
    SenderTypeFinalizeBatch
    // SenderTypeL1GasOracle indicates a sender from L2 responsible for updating L1 gas prices.
    SenderTypeL1GasOracle
    // SenderTypeL2GasOracle indicates a sender from L1 responsible for updating L2 gas prices.
    SenderTypeL2GasOracle
    // SenderTypeL2GasOracleDeprecated indicates a sender from L1 responsible for updating L2 gas prices, which is deprecated.
    SenderTypeL2GasOracleDeprecated
)

// String returns a string representation of the SenderType.
@@ -289,8 +289,8 @@ func (t SenderType) String() string {
        return "SenderTypeFinalizeBatch"
    case SenderTypeL1GasOracle:
        return "SenderTypeL1GasOracle"
    case SenderTypeL2GasOracle:
        return "SenderTypeL2GasOracle"
    case SenderTypeL2GasOracleDeprecated:
        return "SenderTypeL2GasOracleDeprecated"
    default:
        return fmt.Sprintf("Unknown SenderType (%d)", int32(t))
    }
@@ -173,9 +173,9 @@ func TestSenderType(t *testing.T) {
            "SenderTypeL1GasOracle",
        },
        {
            "SenderTypeL2GasOracle",
            SenderTypeL2GasOracle,
            "SenderTypeL2GasOracle",
            "SenderTypeL2GasOracleDeprecated",
            SenderTypeL2GasOracleDeprecated,
            "SenderTypeL2GasOracleDeprecated",
        },
        {
            "Invalid Value",
@@ -4,12 +4,16 @@ import (
    "encoding/json"
    "errors"
    "fmt"
    "math/big"

    "github.com/scroll-tech/go-ethereum/common"
    "github.com/scroll-tech/go-ethereum/common/hexutil"
)

const (
    euclidFork   = "euclid"
    EuclidV2Fork = "euclidV2"

    EuclidV2ForkNameForProver = "euclidv2"
)

// ProofType represents the type of task.
@@ -39,185 +43,102 @@ const (
    ProofTypeBundle
)

// ChunkTaskDetail is a type containing ChunkTask detail.
// ChunkTaskDetail is a type containing ChunkTask detail for chunk task.
type ChunkTaskDetail struct {
    BlockHashes []common.Hash `json:"block_hashes"`
    // use one of the strings "euclidv1" / "euclidv2"
    ForkName         string        `json:"fork_name"`
    BlockHashes      []common.Hash `json:"block_hashes"`
    PrevMsgQueueHash common.Hash   `json:"prev_msg_queue_hash"`
}

// it is a hex-encoded big integer with a fixed length of 48 bytes
type Byte48 struct {
    hexutil.Big
}

func (e Byte48) MarshalText() ([]byte, error) {
    i := e.ToInt()
    // override encode big
    if sign := i.Sign(); sign < 0 {
        // sanity check
        return nil, errors.New("Byte48 must be a positive integer")
    } else {
        s := i.Text(16)
        if len(s) > 96 {
            return nil, errors.New("integer exceeds 384 bits")
        }
        return []byte(fmt.Sprintf("0x%0*s", 96, s)), nil
    }
}

func isString(input []byte) bool {
    return len(input) >= 2 && input[0] == '"' && input[len(input)-1] == '"'
}

// hexutil.Big has a limitation of 256 bits so we have to override it ...
func (e *Byte48) UnmarshalJSON(input []byte) error {
    if !isString(input) {
        return errors.New("not a hex string")
    }

    b, err := hexutil.Decode(string(input[1 : len(input)-1]))
    if err != nil {
        return err
    }
    if len(b) != 48 {
        return fmt.Errorf("not a 48-byte hex string: %d", len(b))
    }
    var dec big.Int
    dec.SetBytes(b)
    *e = Byte48{(hexutil.Big)(dec)}
    return nil
}
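Note: as the comments above say, Byte48 exists because hexutil.Big caps out at 256 bits while KZG commitments and proofs are 48 bytes (384 bits). A quick illustration of the fixed-width encoding used by MarshalText: 96 hex digits plus the "0x" prefix gives a constant 98-character string (standalone sketch, not part of the package):

package main

import (
    "fmt"
    "math/big"
)

func main() {
    // 48 bytes -> 96 hex digits; with the "0x" prefix the encoded
    // text is always 98 characters, regardless of the value.
    i := big.NewInt(1)
    s := fmt.Sprintf("0x%0*s", 96, i.Text(16))
    fmt.Println(s[:6], len(s)) // 0x0000 98
}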
// BatchTaskDetail is a type containing BatchTask detail.
type BatchTaskDetail struct {
    ChunkInfos  []*ChunkInfo `json:"chunk_infos"`
    ChunkProofs []ChunkProof `json:"chunk_proofs"`
    BatchHeader interface{}  `json:"batch_header"`
    BlobBytes   []byte       `json:"blob_bytes"`
    // use one of the strings "euclidv1" / "euclidv2"
    ForkName        string              `json:"fork_name"`
    ChunkInfos      []*ChunkInfo        `json:"chunk_infos"`
    ChunkProofs     []*OpenVMChunkProof `json:"chunk_proofs"`
    BatchHeader     interface{}         `json:"batch_header"`
    BlobBytes       []byte              `json:"blob_bytes"`
    KzgProof        Byte48              `json:"kzg_proof,omitempty"`
    KzgCommitment   Byte48              `json:"kzg_commitment,omitempty"`
    ChallengeDigest common.Hash         `json:"challenge_digest,omitempty"`
}

// BundleTaskDetail consists of all the information required to describe the task to generate a proof for a bundle of batches.
type BundleTaskDetail struct {
    BatchProofs []BatchProof `json:"batch_proofs"`
    // use one of the strings "euclidv1" / "euclidv2"
    ForkName    string              `json:"fork_name"`
    BatchProofs []*OpenVMBatchProof `json:"batch_proofs"`
    BundleInfo  *OpenVMBundleInfo   `json:"bundle_info,omitempty"`
}

// ChunkInfo is for calculating pi_hash for chunk
type ChunkInfo struct {
    ChainID       uint64      `json:"chain_id"`
    PrevStateRoot common.Hash `json:"prev_state_root"`
    PostStateRoot common.Hash `json:"post_state_root"`
    WithdrawRoot  common.Hash `json:"withdraw_root"`
    DataHash      common.Hash `json:"data_hash"`
    IsPadding     bool        `json:"is_padding"`
    TxBytes       []byte      `json:"tx_bytes"`
    TxBytesHash   common.Hash `json:"tx_data_digest"`
    ChainID            uint64           `json:"chain_id"`
    PrevStateRoot      common.Hash      `json:"prev_state_root"`
    PostStateRoot      common.Hash      `json:"post_state_root"`
    WithdrawRoot       common.Hash      `json:"withdraw_root"`
    DataHash           common.Hash      `json:"data_hash"`
    IsPadding          bool             `json:"is_padding"`
    TxBytes            []byte           `json:"tx_bytes"`
    TxBytesHash        common.Hash      `json:"tx_data_digest"`
    PrevMsgQueueHash   common.Hash      `json:"prev_msg_queue_hash"`
    PostMsgQueueHash   common.Hash      `json:"post_msg_queue_hash"`
    TxDataLength       uint64           `json:"tx_data_length"`
    InitialBlockNumber uint64           `json:"initial_block_number"`
    BlockCtxs          []BlockContextV2 `json:"block_ctxs"`
}
// SubCircuitRowUsage tracing info added in v0.11.0rc8
type SubCircuitRowUsage struct {
    Name      string `json:"name"`
    RowNumber uint64 `json:"row_number"`
}

// ChunkProof
type ChunkProof interface {
    Proof() []byte
}

// NewChunkProof creates a new ChunkProof instance.
func NewChunkProof(hardForkName string) ChunkProof {
    switch hardForkName {
    case euclidFork:
        return &OpenVMChunkProof{}
    default:
        return &Halo2ChunkProof{}
    }
}

// Halo2ChunkProof includes the proof info that are required for chunk verification and rollup.
type Halo2ChunkProof struct {
    StorageTrace []byte `json:"storage_trace,omitempty"`
    Protocol     []byte `json:"protocol"`
    RawProof     []byte `json:"proof"`
    Instances    []byte `json:"instances"`
    Vk           []byte `json:"vk"`
    // cross-reference between coordinator computation and prover computation
    ChunkInfo  *ChunkInfo           `json:"chunk_info,omitempty"`
    GitVersion string               `json:"git_version,omitempty"`
    RowUsages  []SubCircuitRowUsage `json:"row_usages,omitempty"`
}

// Proof returns the proof bytes of a ChunkProof
func (ap *Halo2ChunkProof) Proof() []byte {
    return ap.RawProof
}
// BatchProof
type BatchProof interface {
    SanityCheck() error
    Proof() []byte
}

// NewBatchProof creates a new BatchProof instance.
func NewBatchProof(hardForkName string) BatchProof {
    switch hardForkName {
    case euclidFork:
        return &OpenVMBatchProof{}
    default:
        return &Halo2BatchProof{}
    }
}

// Halo2BatchProof includes the proof info that are required for batch verification and rollup.
type Halo2BatchProof struct {
    Protocol  []byte `json:"protocol"`
    RawProof  []byte `json:"proof"`
    Instances []byte `json:"instances"`
    Vk        []byte `json:"vk"`
    // cross-reference between coordinator computation and prover computation
    BatchHash  common.Hash `json:"batch_hash"`
    GitVersion string      `json:"git_version,omitempty"`
}

// Proof returns the proof bytes of a BatchProof
func (ap *Halo2BatchProof) Proof() []byte {
    return ap.RawProof
}

// SanityCheck checks whether a BatchProof is in a legal format
func (ap *Halo2BatchProof) SanityCheck() error {
    if ap == nil {
        return errors.New("agg_proof is nil")
    }

    if len(ap.RawProof) == 0 {
        return errors.New("proof not ready")
    }

    if len(ap.RawProof)%32 != 0 {
        return fmt.Errorf("proof buffer length must be a multiple of 32, got: %d", len(ap.RawProof))
    }

    if len(ap.Instances) == 0 {
        return errors.New("instance not ready")
    }

    if len(ap.Vk) == 0 {
        return errors.New("vk not ready")
    }

    return nil
}
// BundleProof
type BundleProof interface {
    SanityCheck() error
    Proof() []byte
}

// NewBundleProof creates a new BundleProof instance.
func NewBundleProof(hardForkName string) BundleProof {
    switch hardForkName {
    case euclidFork:
        return &OpenVMBundleProof{}
    default:
        return &Halo2BundleProof{}
    }
}

// Halo2BundleProof includes the proof info that are required for verification of a bundle of batch proofs.
type Halo2BundleProof struct {
    RawProof  []byte `json:"proof"`
    Instances []byte `json:"instances"`
    Vk        []byte `json:"vk"`
    // cross-reference between coordinator computation and prover computation
    GitVersion string `json:"git_version,omitempty"`
}

// Proof returns the proof bytes of a BundleProof
func (ap *Halo2BundleProof) Proof() []byte {
    return ap.RawProof
}

// SanityCheck checks whether a BundleProof is in a legal format
func (ap *Halo2BundleProof) SanityCheck() error {
    if ap == nil {
        return errors.New("agg_proof is nil")
    }

    if len(ap.RawProof) == 0 {
        return errors.New("proof not ready")
    }

    if len(ap.RawProof)%32 != 0 {
        return fmt.Errorf("proof buffer length must be a multiple of 32, got: %d", len(ap.RawProof))
    }

    if len(ap.Instances) == 0 {
        return errors.New("instance not ready")
    }

    if len(ap.Vk) == 0 {
        return errors.New("vk not ready")
    }

    return nil
}

// BlockContextV2 is the block context for euclid v2
type BlockContextV2 struct {
    Timestamp uint64      `json:"timestamp"`
    BaseFee   hexutil.Big `json:"base_fee"`
    GasLimit  uint64      `json:"gas_limit"`
    NumTxs    uint16      `json:"num_txs"`
    NumL1Msgs uint16      `json:"num_l1_msgs"`
}

// Proof for flattened VM proof
@@ -254,12 +175,14 @@ func (p *OpenVMChunkProof) Proof() []byte {

// OpenVMBatchInfo is for calculating pi_hash for batch header
type OpenVMBatchInfo struct {
    ParentBatchHash common.Hash `json:"parent_batch_hash"`
    ParentStateRoot common.Hash `json:"parent_state_root"`
    StateRoot       common.Hash `json:"state_root"`
    WithdrawRoot    common.Hash `json:"withdraw_root"`
    BatchHash       common.Hash `json:"batch_hash"`
    ChainID         uint64      `json:"chain_id"`
    ParentBatchHash  common.Hash `json:"parent_batch_hash"`
    ParentStateRoot  common.Hash `json:"parent_state_root"`
    StateRoot        common.Hash `json:"state_root"`
    WithdrawRoot     common.Hash `json:"withdraw_root"`
    BatchHash        common.Hash `json:"batch_hash"`
    ChainID          uint64      `json:"chain_id"`
    PrevMsgQueueHash common.Hash `json:"prev_msg_queue_hash"`
    PostMsgQueueHash common.Hash `json:"post_msg_queue_hash"`
}

// BatchProof includes the proof info that are required for batch verification and rollup.
@@ -319,6 +242,7 @@ type OpenVMBundleInfo struct {
    NumBatches    uint32      `json:"num_batches"`
    PrevBatchHash common.Hash `json:"prev_batch_hash"`
    BatchHash     common.Hash `json:"batch_hash"`
    MsgQueueHash  common.Hash `json:"msg_queue_hash"`
}

// OpenVMBundleProof includes the proof info that are required for verification of a bundle of batch proofs.
22
common/types/message/message_test.go
Normal file
@@ -0,0 +1,22 @@
package message

import (
    "fmt"
    "testing"
)

func TestBytes48(t *testing.T) {
    ti := &Byte48{}
    ti.UnmarshalText([]byte("0x1"))
    if s, err := ti.MarshalText(); err == nil {
        if len(s) != 98 {
            panic(fmt.Sprintf("wrong str: %s", s))
        }
    }
    ti.UnmarshalText([]byte("0x0"))
    if s, err := ti.MarshalText(); err == nil {
        if len(s) != 98 {
            panic(fmt.Sprintf("wrong str: %s", s))
        }
    }
}
@@ -20,9 +20,12 @@ var (
    }
    // RollupRelayerFlags contains flags only used in rollup-relayer
    RollupRelayerFlags = []cli.Flag{
        &ImportGenesisFlag,
        &MinCodecVersionFlag,
    }
    // ProposerToolFlags contains flags only used in proposer tool
    ProposerToolFlags = []cli.Flag{
        &StartL2BlockFlag,
    }
    // ConfigFileFlag load json type config file.
    ConfigFileFlag = cli.StringFlag{
        Name: "config",
@@ -73,12 +76,6 @@
        Category: "METRICS",
        Value:    6060,
    }
    // ImportGenesisFlag import genesis batch during startup
    ImportGenesisFlag = cli.BoolFlag{
        Name:  "import-genesis",
        Usage: "Import genesis batch into L1 contract during startup",
        Value: false,
    }
    // ServicePortFlag is the port the service will listen on
    ServicePortFlag = cli.IntFlag{
        Name: "service.port",
@@ -97,4 +94,10 @@
        Usage:    "Minimum required codec version for the chunk/batch/bundle proposers",
        Required: true,
    }
    // StartL2BlockFlag indicates the start L2 block number for proposer tool
    StartL2BlockFlag = cli.Uint64Flag{
        Name:  "start-l2-block",
        Usage: "Start L2 block number for proposer tool",
        Value: 0,
    }
)
@@ -5,7 +5,7 @@ import (
    "runtime/debug"
)

var tag = "v4.4.92"
var tag = "v4.5.20"

var commit = func() string {
    if info, ok := debug.ReadBuildInfo(); ok {
@@ -90,18 +90,10 @@ func (c *CoordinatorApp) MockConfig(store bool) error {
    cfg.ProverManager = &coordinatorConfig.ProverManager{
        ProversPerSession: 1,
        Verifier: &coordinatorConfig.VerifierConfig{
            MockMode: true,
            LowVersionCircuit: &coordinatorConfig.CircuitConfig{
                ParamsPath:       "",
                AssetsPath:       "",
                ForkName:         "darwin",
                MinProverVersion: "v4.2.0",
            },
            HighVersionCircuit: &coordinatorConfig.CircuitConfig{
                ParamsPath:       "",
                AssetsPath:       "",
                ForkName:         "darwinV2",
                MinProverVersion: "v4.3.0",
                ForkName:         "euclidV2",
                MinProverVersion: "v4.4.89",
            },
        },
        BatchCollectionTimeSec: 60,
@@ -62,14 +62,14 @@ func action(ctx *cli.Context) error {
        return fmt.Errorf("failed to get batch proofs for bundle task id:%s, no batch found", taskID)
    }

    var batchProofs []message.BatchProof
    var batchProofs []*message.OpenVMBatchProof
    for _, batch := range batches {
        proof := message.NewBatchProof("darwinV2")
        var proof message.OpenVMBatchProof
        if encodeErr := json.Unmarshal(batch.Proof, &proof); encodeErr != nil {
            log.Error("failed to unmarshal batch proof")
            return fmt.Errorf("failed to unmarshal proof: %w, bundle hash: %v, batch hash: %v", encodeErr, taskID, batch.Hash)
        }
        batchProofs = append(batchProofs, proof)
        batchProofs = append(batchProofs, &proof)
    }

    taskDetail := message.BundleTaskDetail{
@@ -7,17 +7,9 @@
    "batch_collection_time_sec": 180,
    "chunk_collection_time_sec": 180,
    "verifier": {
      "mock_mode": true,
      "low_version_circuit": {
        "params_path": "params",
        "assets_path": "assets",
        "fork_name": "darwin",
        "min_prover_version": "v4.4.43"
      },
      "high_version_circuit": {
        "params_path": "params",
        "assets_path": "assets",
        "fork_name": "darwinV2",
        "fork_name": "euclidV2",
        "min_prover_version": "v4.4.45"
      }
    }
@@ -2,8 +2,6 @@ module scroll-tech/coordinator

go 1.22

toolchain go1.22.2

require (
    github.com/appleboy/gin-jwt/v2 v2.9.1
    github.com/gin-gonic/gin v1.9.1
@@ -11,7 +9,7 @@ require (
    github.com/google/uuid v1.6.0
    github.com/mitchellh/mapstructure v1.5.0
    github.com/prometheus/client_golang v1.19.0
    github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435
    github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493
    github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601
    github.com/shopspring/decimal v1.3.1
    github.com/stretchr/testify v1.10.0
@@ -177,8 +177,8 @@ github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435 h1:X9fkvjrYBY79lGgKEPpUhuiJ4vWpWwzOVw4H8CU8L54=
github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493 h1:Ioc01J0WEMxuwFvEPGJeBKXdf2KY4Yc3XbFky/IxLlI=
github.com/scroll-tech/da-codec v0.1.3-0.20250401062930-9f9f53898493/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601 h1:NEsjCG6uSvLRBlsP3+x6PL1kM+Ojs3g8UGotIPgJSz8=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601/go.mod h1:OblWe1+QrZwdpwO0j/LY3BSGuKT3YPUFBDQQgvvfStQ=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
@@ -51,7 +51,6 @@ type Config struct {

// CircuitConfig circuit items.
type CircuitConfig struct {
    ParamsPath       string `json:"params_path"`
    AssetsPath       string `json:"assets_path"`
    ForkName         string `json:"fork_name"`
    MinProverVersion string `json:"min_prover_version"`
@@ -59,8 +58,6 @@ type CircuitConfig struct {

// VerifierConfig load zk verifier config.
type VerifierConfig struct {
    MockMode           bool           `json:"mock_mode"`
    LowVersionCircuit  *CircuitConfig `json:"low_version_circuit"`
    HighVersionCircuit *CircuitConfig `json:"high_version_circuit"`
}
@@ -15,15 +15,18 @@ func TestConfig(t *testing.T) {
    "prover_manager": {
      "provers_per_session": 1,
      "session_attempts": 5,
      "external_prover_threshold": 32,
      "bundle_collection_time_sec": 180,
      "batch_collection_time_sec": 180,
      "chunk_collection_time_sec": 180,
      "verifier": {
        "mock_mode": true,
        "params_path": "",
        "agg_vk_path": ""
        "high_version_circuit": {
          "assets_path": "assets",
          "fork_name": "euclidV2",
          "min_prover_version": "v4.4.45"
        }
      },
      "max_verifier_workers": 4,
      "min_prover_version": "v1.0.0"
      "max_verifier_workers": 4
    },
    "db": {
      "driver_name": "postgres",
@@ -46,7 +46,7 @@ func (a *AuthController) Login(c *gin.Context) (interface{}, error) {

    hardForkNames, err := a.loginLogic.ProverHardForkName(&login)
    if err != nil {
        return "", fmt.Errorf("prover hard name failure:%w", err)
        return "", fmt.Errorf("prover hard fork name failure:%w", err)
    }

    // check the challenge is used, if used, return failure
@@ -26,7 +26,7 @@ func InitController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.D
        panic("proof receiver new verifier failure")
    }

    log.Info("verifier created", "chunkVerifier", vf.ChunkVKMap, "batchVerifier", vf.BatchVKMap, "bundleVerifier", vf.BundleVkMap, "openVmVerifier", vf.OpenVMVkMap)
    log.Info("verifier created", "openVmVerifier", vf.OpenVMVkMap)

    Auth = NewAuthController(db, cfg, vf)
    GetTask = NewGetTaskController(cfg, chainCfg, db, reg)
@@ -21,9 +21,6 @@ import (
type LoginLogic struct {
    cfg          *config.Config
    challengeOrm *orm.Challenge
    chunkVks  map[string]struct{}
    batchVKs  map[string]struct{}
    bundleVks map[string]struct{}

    openVmVks map[string]struct{}
@@ -33,25 +30,13 @@ type LoginLogic struct {
// NewLoginLogic new a LoginLogic
func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic {
    proverVersionHardForkMap := make(map[string][]string)
    if version.CheckScrollRepoVersion(cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion) {
        log.Error("config file error, low verifier min_prover_version should not more than high verifier min_prover_version",
            "low verifier min_prover_version", cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion,
            "high verifier min_prover_version", cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion)
        panic("verifier config file error")
    }

    var highHardForks []string
    highHardForks = append(highHardForks, cfg.ProverManager.Verifier.HighVersionCircuit.ForkName)
    highHardForks = append(highHardForks, cfg.ProverManager.Verifier.LowVersionCircuit.ForkName)
    proverVersionHardForkMap[cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion] = highHardForks

    proverVersionHardForkMap[cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion] = []string{cfg.ProverManager.Verifier.LowVersionCircuit.ForkName}

    return &LoginLogic{
        cfg:          cfg,
        chunkVks:     vf.ChunkVKMap,
        batchVKs:     vf.BatchVKMap,
        bundleVks:    vf.BundleVkMap,
        openVmVks:    vf.OpenVMVkMap,
        challengeOrm: orm.NewChallenge(db),
        proverVersionHardForkMap: proverVersionHardForkMap,
@@ -71,46 +56,25 @@ func (l *LoginLogic) Check(login *types.LoginParameter) error {
        return errors.New("auth message verify failure")
    }

    if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion) {
        return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
            l.cfg.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, login.Message.ProverVersion)
    if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion) {
        return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", l.cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion, login.Message.ProverVersion)
    }

    if len(login.Message.ProverTypes) > 0 {
        vks := make(map[string]struct{})
        for _, proverType := range login.Message.ProverTypes {
            switch proverType {
            case types.ProverTypeChunk:
                for vk := range l.chunkVks {
                    vks[vk] = struct{}{}
                }
            case types.ProverTypeBatch:
                for vk := range l.batchVKs {
                    vks[vk] = struct{}{}
                }
                for vk := range l.bundleVks {
                    vks[vk] = struct{}{}
                }
            case types.ProverTypeOpenVM:
                for vk := range l.openVmVks {
                    vks[vk] = struct{}{}
                }
            default:
                log.Error("invalid prover_type", "value", proverType, "prover name", login.Message.ProverName, "prover_version", login.Message.ProverVersion)
            }
        }
    vks := make(map[string]struct{})
    for vk := range l.openVmVks {
        vks[vk] = struct{}{}
    }

        for _, vk := range login.Message.VKs {
            if _, ok := vks[vk]; !ok {
                log.Error("vk inconsistency", "prover vk", vk, "prover name", login.Message.ProverName,
                    "prover_version", login.Message.ProverVersion, "message", login.Message)
                if !version.CheckScrollProverVersion(login.Message.ProverVersion) {
                    return fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s",
                        version.Version, login.Message.ProverVersion)
                }
                // if the prover reports a same prover version
                return errors.New("incompatible vk. please check your params files or config files")
    for _, vk := range login.Message.VKs {
        if _, ok := vks[vk]; !ok {
            log.Error("vk inconsistency", "prover vk", vk, "prover name", login.Message.ProverName,
                "prover_version", login.Message.ProverVersion, "message", login.Message)
            if !version.CheckScrollProverVersion(login.Message.ProverVersion) {
                return fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s",
                    version.Version, login.Message.ProverVersion)
            }
            // if the prover reports a same prover version
            return errors.New("incompatible vk. please check your params files or config files")
        }
    }
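Note: with the prover-type switch removed, the login check reduces to a single set-membership test over the OpenVM verification keys. A condensed, hypothetical sketch of that shape (checkVKs is not a real helper in the repo):

package main

import "fmt"

// checkVKs distills LoginLogic.Check's VK pass: every key the prover
// reports must be one the verifier already knows about.
func checkVKs(acceptable map[string]struct{}, reported []string) error {
    for _, vk := range reported {
        if _, ok := acceptable[vk]; !ok {
            return fmt.Errorf("incompatible vk: %s", vk)
        }
    }
    return nil
}

func main() {
    known := map[string]struct{}{"vk-a": {}}
    fmt.Println(checkVKs(known, []string{"vk-a"})) // <nil>
    fmt.Println(checkVKs(known, []string{"vk-b"})) // incompatible vk: vk-b
}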
@@ -4,6 +4,7 @@ import (
    "context"
    "encoding/json"
    "fmt"
    "math/big"
    "time"

    "github.com/gin-gonic/gin"
@@ -11,6 +12,7 @@ import (
    "github.com/prometheus/client_golang/prometheus/promauto"
    "github.com/scroll-tech/da-codec/encoding"
    "github.com/scroll-tech/go-ethereum/common"
    "github.com/scroll-tech/go-ethereum/common/hexutil"
    "github.com/scroll-tech/go-ethereum/log"
    "github.com/scroll-tech/go-ethereum/params"
    "gorm.io/gorm"
@@ -77,6 +79,7 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
    }

    var batchTask *orm.Batch
    var hardForkName string
    for i := 0; i < 5; i++ {
        var getTaskError error
        var tmpBatchTask *orm.Batch
@@ -101,16 +104,26 @@
            return nil, nil
        }

        taskCtx.taskType = message.ProofTypeBatch
        taskCtx.batchTask = tmpBatchTask

        var checkErr error
        hardForkName, checkErr = bp.hardForkSanityCheck(ctx, taskCtx)
        if checkErr != nil {
            log.Debug("hard fork sanity check failed", "height", getTaskParameter.ProverHeight, "err", checkErr)
            return nil, nil
        }

        // Don't dispatch the same failing job to the same prover
        proverTasks, getTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBatch, tmpBatchTask.Hash, 2)
        if getTaskError != nil {
            log.Error("failed to get prover tasks", "proof type", message.ProofTypeBatch.String(), "task ID", tmpBatchTask.Hash, "error", getTaskError)
        proverTasks, getFailedTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBatch, tmpBatchTask.Hash, 2)
        if getFailedTaskError != nil {
            log.Error("failed to get prover tasks", "proof type", message.ProofTypeBatch.String(), "task ID", tmpBatchTask.Hash, "error", getFailedTaskError)
            return nil, ErrCoordinatorInternalFailure
        }
        for i := 0; i < len(proverTasks); i++ {
            if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
                taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
                log.Debug("get empty batch, the prover already failed this task", "height", getTaskParameter.ProverHeight)
                log.Debug("get empty batch, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpBatchTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
                return nil, nil
            }
        }
@@ -136,23 +149,6 @@
    }

    log.Info("start batch proof generation session", "task_id", batchTask.Hash, "public key", taskCtx.PublicKey, "prover name", taskCtx.ProverName)

    hardForkName, getHardForkErr := bp.hardForkName(ctx, batchTask)
    if getHardForkErr != nil {
        bp.recoverActiveAttempts(ctx, batchTask)
        log.Error("retrieve hard fork name by batch failed", "task_id", batchTask.Hash, "err", getHardForkErr)
        return nil, ErrCoordinatorInternalFailure
    }

    if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
        bp.recoverActiveAttempts(ctx, batchTask)
        log.Error("incompatible prover version",
            "requisite hard fork name", hardForkName,
            "prover hard fork name", taskCtx.HardForkNames,
            "task_id", batchTask.Hash)
        return nil, ErrCoordinatorInternalFailure
    }

    proverTask := orm.ProverTask{
        TaskID:          batchTask.Hash,
        ProverPublicKey: taskCtx.PublicKey,
@@ -189,20 +185,6 @@
    return taskMsg, nil
}

func (bp *BatchProverTask) hardForkName(ctx *gin.Context, batchTask *orm.Batch) (string, error) {
    startChunk, getChunkErr := bp.chunkOrm.GetChunkByHash(ctx, batchTask.StartChunkHash)
    if getChunkErr != nil {
        return "", getChunkErr
    }

    l2Block, getBlockErr := bp.blockOrm.GetL2BlockByNumber(ctx.Copy(), startChunk.StartBlockNumber)
    if getBlockErr != nil {
        return "", getBlockErr
    }
    hardForkName := encoding.GetHardforkName(bp.chainCfg, l2Block.Number, l2Block.BlockTimestamp)
    return hardForkName, nil
}

func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, batch *orm.Batch, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
    // get chunk from db
    chunks, err := bp.chunkOrm.GetChunksByBatchHash(ctx, task.TaskID)
@@ -215,32 +197,32 @@ func (bp *BatchProverTask) formatProverTask(ctx context.Context, task *orm.Prove
        return nil, fmt.Errorf("no chunk found for batch task id:%s", task.TaskID)
    }

    var chunkProofs []message.ChunkProof
    var chunkProofs []*message.OpenVMChunkProof
    var chunkInfos []*message.ChunkInfo
    for _, chunk := range chunks {
        proof := message.NewChunkProof(hardForkName)
        var proof message.OpenVMChunkProof
        if encodeErr := json.Unmarshal(chunk.Proof, &proof); encodeErr != nil {
            return nil, fmt.Errorf("Chunk.GetProofsByBatchHash unmarshal proof error: %w, batch hash: %v, chunk hash: %v", encodeErr, task.TaskID, chunk.Hash)
        }
        chunkProofs = append(chunkProofs, proof)
        chunkProofs = append(chunkProofs, &proof)

        chunkInfo := message.ChunkInfo{
            ChainID:       bp.cfg.L2.ChainID,
            PrevStateRoot: common.HexToHash(chunk.ParentChunkStateRoot),
            PostStateRoot: common.HexToHash(chunk.StateRoot),
            WithdrawRoot:  common.HexToHash(chunk.WithdrawRoot),
            DataHash:      common.HexToHash(chunk.Hash),
            IsPadding:     false,
        }
        if haloProot, ok := proof.(*message.Halo2ChunkProof); ok {
            if haloProot.ChunkInfo != nil {
                chunkInfo.TxBytes = haloProot.ChunkInfo.TxBytes
            }
            ChainID:            bp.cfg.L2.ChainID,
            PrevStateRoot:      common.HexToHash(chunk.ParentChunkStateRoot),
            PostStateRoot:      common.HexToHash(chunk.StateRoot),
            WithdrawRoot:       common.HexToHash(chunk.WithdrawRoot),
            DataHash:           common.HexToHash(chunk.Hash),
            PrevMsgQueueHash:   common.HexToHash(chunk.PrevL1MessageQueueHash),
            PostMsgQueueHash:   common.HexToHash(chunk.PostL1MessageQueueHash),
            IsPadding:          false,
            InitialBlockNumber: proof.MetaData.ChunkInfo.InitialBlockNumber,
            BlockCtxs:          proof.MetaData.ChunkInfo.BlockCtxs,
            TxDataLength:       proof.MetaData.ChunkInfo.TxDataLength,
        }
        chunkInfos = append(chunkInfos, &chunkInfo)
    }

    taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs)
    taskDetail, err := bp.getBatchTaskDetail(batch, chunkInfos, chunkProofs, hardForkName)
    if err != nil {
        return nil, fmt.Errorf("failed to get batch task detail, taskID:%s err:%w", task.TaskID, err)
    }
@@ -257,6 +239,9 @@
        TaskData:     string(chunkProofsBytes),
        HardForkName: hardForkName,
    }

    log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeBatch.String(), "hard_fork_name", hardForkName, "task_data", taskMsg.TaskData)

    return taskMsg, nil
}
@@ -266,15 +251,22 @@ func (bp *BatchProverTask) recoverActiveAttempts(ctx *gin.Context, batchTask *or
    }
}

func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []message.ChunkProof) (*message.BatchTaskDetail, error) {
func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*message.ChunkInfo, chunkProofs []*message.OpenVMChunkProof, hardForkName string) (*message.BatchTaskDetail, error) {
    taskDetail := &message.BatchTaskDetail{
        ChunkInfos:  chunkInfos,
        ChunkProofs: chunkProofs,
    }

    if hardForkName == message.EuclidV2Fork {
        taskDetail.ForkName = message.EuclidV2ForkNameForProver
    } else {
        log.Error("unsupported hard fork name", "hard_fork_name", hardForkName)
        return nil, fmt.Errorf("unsupported hard fork name: %s", hardForkName)
    }

    dbBatchCodecVersion := encoding.CodecVersion(dbBatch.CodecVersion)
    switch dbBatchCodecVersion {
    case encoding.CodecV3, encoding.CodecV4, encoding.CodecV6:
    case encoding.CodecV3, encoding.CodecV4, encoding.CodecV6, encoding.CodecV7:
    default:
        return taskDetail, nil
    }
@@ -290,6 +282,13 @@ func (bp *BatchProverTask) getBatchTaskDetail(dbBatch *orm.Batch, chunkInfos []*
    }
    taskDetail.BatchHeader = batchHeader
    taskDetail.BlobBytes = dbBatch.BlobBytes
    taskDetail.ChallengeDigest = common.HexToHash(dbBatch.ChallengeDigest)
    // Memory layout of `BlobDataProof`: used in Codec.BlobDataProofForPointEvaluation()
    // | z       | y       | kzg_commitment | kzg_proof |
    // |---------|---------|----------------|-----------|
    // | bytes32 | bytes32 | bytes48        | bytes48   |
    taskDetail.KzgProof = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[112:160]))}
    taskDetail.KzgCommitment = message.Byte48{Big: hexutil.Big(*new(big.Int).SetBytes(dbBatch.BlobDataProof[64:112]))}

    return taskDetail, nil
}
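Note: the slice offsets above follow directly from the layout comment: 32 + 32 + 48 + 48 = 160 bytes, with the commitment at [64:112] and the proof at [112:160]. A standalone sketch over a hypothetical buffer:

package main

import "fmt"

func main() {
    blobDataProof := make([]byte, 160) // | z | y | kzg_commitment | kzg_proof |
    z := blobDataProof[0:32]
    y := blobDataProof[32:64]
    kzgCommitment := blobDataProof[64:112] // bytes48
    kzgProof := blobDataProof[112:160]     // bytes48
    fmt.Println(len(z), len(y), len(kzgCommitment), len(kzgProof)) // 32 32 48 48
}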
@@ -9,19 +9,19 @@ import (
    "github.com/gin-gonic/gin"
    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promauto"
    "github.com/scroll-tech/da-codec/encoding"
    "github.com/scroll-tech/go-ethereum/common"
    "github.com/scroll-tech/go-ethereum/log"
    "github.com/scroll-tech/go-ethereum/params"
    "gorm.io/gorm"

    "scroll-tech/common/types"
    "scroll-tech/common/types/message"
    "scroll-tech/common/utils"

    "scroll-tech/coordinator/internal/config"
    "scroll-tech/coordinator/internal/orm"
    coordinatorType "scroll-tech/coordinator/internal/types"
    cutils "scroll-tech/coordinator/internal/utils"

    "scroll-tech/common/types"
    "scroll-tech/common/types/message"
    "scroll-tech/common/utils"
)

// BundleProverTask is prover task implement for bundle proof
@@ -77,6 +77,7 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
    }

    var bundleTask *orm.Bundle
    var hardForkName string
    for i := 0; i < 5; i++ {
        var getTaskError error
        var tmpBundleTask *orm.Bundle
@@ -101,6 +102,16 @@
            return nil, nil
        }

        taskCtx.taskType = message.ProofTypeBundle
        taskCtx.bundleTask = tmpBundleTask

        var checkErr error
        hardForkName, checkErr = bp.hardForkSanityCheck(ctx, taskCtx)
        if checkErr != nil {
            log.Debug("hard fork sanity check failed", "height", getTaskParameter.ProverHeight, "err", checkErr)
            return nil, nil
        }

        // Don't dispatch the same failing job to the same prover
        proverTasks, getTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBundle, tmpBundleTask.Hash, 2)
        if getTaskError != nil {
@@ -110,7 +121,7 @@
        for i := 0; i < len(proverTasks); i++ {
            if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
                taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
                log.Debug("get empty bundle, the prover already failed this task", "height", getTaskParameter.ProverHeight)
                log.Debug("get empty bundle, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpBundleTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
                return nil, nil
            }
        }
@@ -136,23 +147,6 @@
    }

    log.Info("start bundle proof generation session", "task index", bundleTask.Index, "public key", taskCtx.PublicKey, "prover name", taskCtx.ProverName)

    hardForkName, getHardForkErr := bp.hardForkName(ctx, bundleTask)
    if getHardForkErr != nil {
        bp.recoverActiveAttempts(ctx, bundleTask)
        log.Error("retrieve hard fork name by bundle failed", "task_id", bundleTask.Hash, "err", getHardForkErr)
        return nil, ErrCoordinatorInternalFailure
    }

    if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
        bp.recoverActiveAttempts(ctx, bundleTask)
        log.Error("incompatible prover version",
            "requisite hard fork name", hardForkName,
            "prover hard fork name", taskCtx.HardForkNames,
            "task_id", bundleTask.Hash)
        return nil, ErrCoordinatorInternalFailure
    }

    proverTask := orm.ProverTask{
        TaskID:          bundleTask.Hash,
        ProverPublicKey: taskCtx.PublicKey,
@@ -189,26 +183,6 @@
    return taskMsg, nil
}

func (bp *BundleProverTask) hardForkName(ctx *gin.Context, bundleTask *orm.Bundle) (string, error) {
    startBatch, getBatchErr := bp.batchOrm.GetBatchByHash(ctx, bundleTask.StartBatchHash)
    if getBatchErr != nil {
        return "", getBatchErr
    }

    startChunk, getChunkErr := bp.chunkOrm.GetChunkByHash(ctx, startBatch.StartChunkHash)
    if getChunkErr != nil {
        return "", getChunkErr
    }

    l2Block, getBlockErr := bp.blockOrm.GetL2BlockByNumber(ctx.Copy(), startChunk.StartBlockNumber)
    if getBlockErr != nil {
        return "", getBlockErr
    }

    hardForkName := encoding.GetHardforkName(bp.chainCfg, l2Block.Number, l2Block.BlockTimestamp)
    return hardForkName, nil
}

func (bp *BundleProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
    // get bundle from db
    batches, err := bp.batchOrm.GetBatchesByBundleHash(ctx, task.TaskID)
@@ -221,19 +195,42 @@
        return nil, fmt.Errorf("failed to get batch proofs for bundle task id:%s, no batch found", task.TaskID)
    }

    var batchProofs []message.BatchProof
    parentBatch, err := bp.batchOrm.GetBatchByHash(ctx, batches[0].ParentBatchHash)
    if err != nil {
        return nil, fmt.Errorf("failed to get parent batch for batch task id:%s err:%w", task.TaskID, err)
    }

    var batchProofs []*message.OpenVMBatchProof
    for _, batch := range batches {
        proof := message.NewBatchProof(hardForkName)
        var proof message.OpenVMBatchProof
        if encodeErr := json.Unmarshal(batch.Proof, &proof); encodeErr != nil {
            return nil, fmt.Errorf("failed to unmarshal proof: %w, bundle hash: %v, batch hash: %v", encodeErr, task.TaskID, batch.Hash)
        }
        batchProofs = append(batchProofs, proof)
        batchProofs = append(batchProofs, &proof)
    }

    taskDetail := message.BundleTaskDetail{
        BatchProofs: batchProofs,
    }

    if hardForkName == message.EuclidV2Fork {
        taskDetail.ForkName = message.EuclidV2ForkNameForProver
    } else {
        log.Error("unsupported hard fork name", "hard_fork_name", hardForkName)
        return nil, fmt.Errorf("unsupported hard fork name: %s", hardForkName)
    }

    taskDetail.BundleInfo = &message.OpenVMBundleInfo{
        ChainID:       bp.cfg.L2.ChainID,
        PrevStateRoot: common.HexToHash(parentBatch.StateRoot),
        PostStateRoot: common.HexToHash(batches[len(batches)-1].StateRoot),
        WithdrawRoot:  common.HexToHash(batches[len(batches)-1].WithdrawRoot),
        NumBatches:    uint32(len(batches)),
        PrevBatchHash: common.HexToHash(batches[0].ParentBatchHash),
        BatchHash:     common.HexToHash(batches[len(batches)-1].Hash),
        MsgQueueHash:  common.HexToHash(batches[len(batches)-1].PostL1MessageQueueHash),
    }

    batchProofsBytes, err := json.Marshal(taskDetail)
    if err != nil {
        return nil, fmt.Errorf("failed to marshal batch proofs, taskID:%s err:%w", task.TaskID, err)
@@ -246,6 +243,9 @@
        TaskData:     string(batchProofsBytes),
        HardForkName: hardForkName,
    }

    log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeBundle.String(), "hard_fork_name", hardForkName, "task_data", taskMsg.TaskData)

    return taskMsg, nil
}
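Note: the bundle info is derived purely from the ordered batches slice; state spans from the parent of the first batch to the last batch. A stripped-down sketch of that boundary arithmetic (types simplified, ordering assumed oldest-first):

package main

import "fmt"

type batch struct{ Hash, ParentBatchHash, StateRoot string }

func main() {
    batches := []batch{ // hypothetical, oldest first
        {Hash: "0xb1", ParentBatchHash: "0xb0", StateRoot: "0xs1"},
        {Hash: "0xb2", ParentBatchHash: "0xb1", StateRoot: "0xs2"},
    }
    first, last := batches[0], batches[len(batches)-1]
    fmt.Println(uint32(len(batches)), first.ParentBatchHash, last.Hash, last.StateRoot)
    // 2 0xb0 0xb2 0xs2  -> NumBatches, PrevBatchHash, BatchHash, PostStateRoot
}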
@@ -9,7 +9,7 @@ import (
    "github.com/gin-gonic/gin"
    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promauto"
    "github.com/scroll-tech/da-codec/encoding"
    "github.com/scroll-tech/go-ethereum/common"
    "github.com/scroll-tech/go-ethereum/log"
    "github.com/scroll-tech/go-ethereum/params"
    "gorm.io/gorm"
@@ -75,6 +75,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
    }

    var chunkTask *orm.Chunk
    var hardForkName string
    for i := 0; i < 5; i++ {
        var getTaskError error
        var tmpChunkTask *orm.Chunk
@@ -99,16 +100,26 @@
            return nil, nil
        }

        taskCtx.taskType = message.ProofTypeChunk
        taskCtx.chunkTask = tmpChunkTask

        var checkErr error
        hardForkName, checkErr = cp.hardForkSanityCheck(ctx, taskCtx)
        if checkErr != nil {
            log.Debug("hard fork sanity check failed", "height", getTaskParameter.ProverHeight, "err", checkErr)
            return nil, nil
        }

        // Don't dispatch the same failing job to the same prover
        proverTasks, getTaskError := cp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeChunk, tmpChunkTask.Hash, 2)
        if getTaskError != nil {
            log.Error("failed to get prover tasks", "proof type", message.ProofTypeChunk.String(), "task ID", tmpChunkTask.Hash, "error", getTaskError)
        proverTasks, getFailedTaskError := cp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeChunk, tmpChunkTask.Hash, 2)
        if getFailedTaskError != nil {
            log.Error("failed to get prover tasks", "proof type", message.ProofTypeChunk.String(), "task ID", tmpChunkTask.Hash, "error", getFailedTaskError)
            return nil, ErrCoordinatorInternalFailure
        }
        for i := 0; i < len(proverTasks); i++ {
            if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
                taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
                log.Debug("get empty chunk, the prover already failed this task", "height", getTaskParameter.ProverHeight)
                log.Debug("get empty chunk, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpChunkTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
                return nil, nil
            }
        }
@@ -134,23 +145,6 @@
    }

    log.Info("start chunk generation session", "task_id", chunkTask.Hash, "public key", taskCtx.PublicKey, "prover name", taskCtx.ProverName)

    hardForkName, getHardForkErr := cp.hardForkName(ctx, chunkTask)
    if getHardForkErr != nil {
        cp.recoverActiveAttempts(ctx, chunkTask)
        log.Error("retrieve hard fork name by chunk failed", "task_id", chunkTask.Hash, "err", getHardForkErr)
        return nil, ErrCoordinatorInternalFailure
    }

    if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
        cp.recoverActiveAttempts(ctx, chunkTask)
        log.Error("incompatible prover version",
            "requisite hard fork name", hardForkName,
            "prover hard fork name", taskCtx.HardForkNames,
            "task_id", chunkTask.Hash)
        return nil, ErrCoordinatorInternalFailure
    }

    proverTask := orm.ProverTask{
        TaskID:          chunkTask.Hash,
        ProverPublicKey: taskCtx.PublicKey,
@@ -169,7 +163,7 @@
        return nil, ErrCoordinatorInternalFailure
    }

    taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, hardForkName)
    taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, chunkTask, hardForkName)
    if err != nil {
        cp.recoverActiveAttempts(ctx, chunkTask)
        log.Error("format prover task failure", "task_id", chunkTask.Hash, "err", err)
@@ -186,26 +180,28 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
    return taskMsg, nil
}

func (cp *ChunkProverTask) hardForkName(ctx *gin.Context, chunkTask *orm.Chunk) (string, error) {
    l2Block, getBlockErr := cp.blockOrm.GetL2BlockByNumber(ctx.Copy(), chunkTask.StartBlockNumber)
    if getBlockErr != nil {
        return "", getBlockErr
    }
    hardForkName := encoding.GetHardforkName(cp.chainCfg, l2Block.Number, l2Block.BlockTimestamp)
    return hardForkName, nil
}

func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.ProverTask, chunk *orm.Chunk, hardForkName string) (*coordinatorType.GetTaskSchema, error) {
    // Get block hashes.
    blockHashes, dbErr := cp.blockOrm.GetL2BlockHashesByChunkHash(ctx, task.TaskID)
    if dbErr != nil || len(blockHashes) == 0 {
        return nil, fmt.Errorf("failed to fetch block hashes of a chunk, chunk hash:%s err:%w", task.TaskID, dbErr)
    }

    var taskDetailBytes []byte
    taskDetail := message.ChunkTaskDetail{
        BlockHashes: blockHashes,
        BlockHashes:      blockHashes,
        PrevMsgQueueHash: common.HexToHash(chunk.PrevL1MessageQueueHash),
    }
    blockHashesBytes, err := json.Marshal(taskDetail)

    if hardForkName == message.EuclidV2Fork {
        taskDetail.ForkName = message.EuclidV2ForkNameForProver
    } else {
        log.Error("unsupported hard fork name", "hard_fork_name", hardForkName)
        return nil, fmt.Errorf("unsupported hard fork name: %s", hardForkName)
    }

    var err error
    taskDetailBytes, err = json.Marshal(taskDetail)
    if err != nil {
        return nil, fmt.Errorf("failed to marshal block hashes hash:%s, err:%w", task.TaskID, err)
    }
@@ -214,10 +210,12 @@ func (cp *ChunkProverTask) formatProverTask(ctx context.Context, task *orm.Prove
        UUID:         task.UUID.String(),
        TaskID:       task.TaskID,
        TaskType:     int(message.ProofTypeChunk),
        TaskData:     string(blockHashesBytes),
        TaskData:     string(taskDetailBytes),
        HardForkName: hardForkName,
    }

    log.Debug("TaskData", "task_id", task.TaskID, "task_type", message.ProofTypeChunk.String(), "hard_fork_name", hardForkName, "task_data", proverTaskSchema.TaskData)

    return proverTaskSchema, nil
}

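For orientation, a minimal sketch of the TaskData payload the new formatProverTask produces. The field set follows the diff above; the JSON tag names are assumptions for illustration (the real ones live in scroll-tech/common/types/message and are not shown in this diff):

```go
package main

import (
	"encoding/json"
	"fmt"

	"github.com/scroll-tech/go-ethereum/common"
)

// chunkTaskDetail mirrors the fields set on message.ChunkTaskDetail in the
// diff above; the JSON tag names are assumptions for illustration.
type chunkTaskDetail struct {
	BlockHashes      []common.Hash `json:"block_hashes"`
	PrevMsgQueueHash common.Hash   `json:"prev_msg_queue_hash"`
	ForkName         string        `json:"fork_name"`
}

func main() {
	detail := chunkTaskDetail{
		BlockHashes:      []common.Hash{common.HexToHash("0x01")},
		PrevMsgQueueHash: common.HexToHash("0x02"),
		ForkName:         "euclidV2", // message.EuclidV2ForkNameForProver in the real code
	}
	buf, err := json.Marshal(detail)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(buf)) // this string becomes GetTaskSchema.TaskData
}
```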
@@ -9,9 +9,12 @@ import (
    "github.com/gin-gonic/gin"
    "github.com/prometheus/client_golang/prometheus"
    "github.com/prometheus/client_golang/prometheus/promauto"
    "github.com/scroll-tech/da-codec/encoding"
    "github.com/scroll-tech/go-ethereum/params"
    "gorm.io/gorm"

    "scroll-tech/common/types/message"

    "scroll-tech/coordinator/internal/config"
    "scroll-tech/coordinator/internal/orm"
    coordinatorType "scroll-tech/coordinator/internal/types"
@@ -52,6 +55,76 @@ type proverTaskContext struct {
    ProverVersion      string
    ProverProviderType uint8
    HardForkNames      map[string]struct{}

    taskType   message.ProofType
    chunkTask  *orm.Chunk
    batchTask  *orm.Batch
    bundleTask *orm.Bundle
}

// hardForkName gets the chunk/batch/bundle hard fork name
func (b *BaseProverTask) hardForkName(ctx *gin.Context, taskCtx *proverTaskContext) (string, error) {
    switch {
    case taskCtx.taskType == message.ProofTypeChunk:
        if taskCtx.chunkTask == nil {
            return "", errors.New("chunk task is nil")
        }
        l2Block, getBlockErr := b.blockOrm.GetL2BlockByNumber(ctx.Copy(), taskCtx.chunkTask.StartBlockNumber)
        if getBlockErr != nil {
            return "", getBlockErr
        }
        hardForkName := encoding.GetHardforkName(b.chainCfg, l2Block.Number, l2Block.BlockTimestamp)
        return hardForkName, nil

    case taskCtx.taskType == message.ProofTypeBatch:
        if taskCtx.batchTask == nil {
            return "", errors.New("batch task is nil")
        }
        startChunk, getChunkErr := b.chunkOrm.GetChunkByHash(ctx, taskCtx.batchTask.StartChunkHash)
        if getChunkErr != nil {
            return "", getChunkErr
        }
        l2Block, getBlockErr := b.blockOrm.GetL2BlockByNumber(ctx.Copy(), startChunk.StartBlockNumber)
        if getBlockErr != nil {
            return "", getBlockErr
        }
        hardForkName := encoding.GetHardforkName(b.chainCfg, l2Block.Number, l2Block.BlockTimestamp)
        return hardForkName, nil

    case taskCtx.taskType == message.ProofTypeBundle:
        if taskCtx.bundleTask == nil {
            return "", errors.New("bundle task is nil")
        }
        startBatch, getBatchErr := b.batchOrm.GetBatchByHash(ctx, taskCtx.bundleTask.StartBatchHash)
        if getBatchErr != nil {
            return "", getBatchErr
        }
        startChunk, getChunkErr := b.chunkOrm.GetChunkByHash(ctx, startBatch.StartChunkHash)
        if getChunkErr != nil {
            return "", getChunkErr
        }
        l2Block, getBlockErr := b.blockOrm.GetL2BlockByNumber(ctx.Copy(), startChunk.StartBlockNumber)
        if getBlockErr != nil {
            return "", getBlockErr
        }
        hardForkName := encoding.GetHardforkName(b.chainCfg, l2Block.Number, l2Block.BlockTimestamp)
        return hardForkName, nil
    default:
        return "", errors.New("illegal task type")
    }
}

// hardForkSanityCheck checks that the task's hard fork name is the same as the prover's
func (b *BaseProverTask) hardForkSanityCheck(ctx *gin.Context, taskCtx *proverTaskContext) (string, error) {
    hardForkName, getHardForkErr := b.hardForkName(ctx, taskCtx)
    if getHardForkErr != nil {
        return "", getHardForkErr
    }

    if _, ok := taskCtx.HardForkNames[hardForkName]; !ok {
        return "", fmt.Errorf("to be assigned prover task's hard-fork name is not the same as prover, proverName: %s, proverVersion: %s, proverSupportHardForkNames: %s, taskHardForkName: %v", taskCtx.ProverName, taskCtx.ProverVersion, taskCtx.HardForkNames, hardForkName)
    }
    return hardForkName, nil
}

// checkParameter checks whether the prover task parameters are illegal
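A small sketch of the rule hardForkSanityCheck enforces, with the prover's fork set and the task's fork name as plain values; the real code reads these from taskCtx and resolves the task fork from the chain config:

```go
package main

import "fmt"

// Sketch of hardForkSanityCheck's core rule: the fork name resolved from the
// task's first L2 block must be in the set the prover registered at login.
func main() {
	proverForks := map[string]struct{}{"euclidV2": {}} // taskCtx.HardForkNames in the real code
	taskFork := "euclidV2"                             // result of hardForkName(...)

	if _, ok := proverForks[taskFork]; !ok {
		fmt.Println("reject: prover does not support fork", taskFork)
		return
	}
	fmt.Println("assign task for fork", taskFork)
}
```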
@@ -171,19 +171,19 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor

switch message.ProofType(proofParameter.TaskType) {
case message.ProofTypeChunk:
    chunkProof := message.NewChunkProof(hardForkName)
    chunkProof := &message.OpenVMChunkProof{}
    if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &chunkProof); unmarshalErr != nil {
        return unmarshalErr
    }
    success, verifyErr = m.verifier.VerifyChunkProof(chunkProof, hardForkName)
case message.ProofTypeBatch:
    batchProof := message.NewBatchProof(hardForkName)
    batchProof := &message.OpenVMBatchProof{}
    if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &batchProof); unmarshalErr != nil {
        return unmarshalErr
    }
    success, verifyErr = m.verifier.VerifyBatchProof(batchProof, hardForkName)
case message.ProofTypeBundle:
    bundleProof := message.NewBundleProof(hardForkName)
    bundleProof := &message.OpenVMBundleProof{}
    if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {
        return unmarshalErr
    }
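Each branch above follows the same shape: decode the JSON payload into the concrete OpenVM proof struct, then hand it to the verifier. A standalone sketch, with an assumed vm_proof JSON tag and a stub verifier:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// openVMChunkProof keeps only the field this sketch needs; the real
// message.OpenVMChunkProof carries more metadata, and the "vm_proof"
// tag here is an assumption for illustration.
type openVMChunkProof struct {
	VmProof json.RawMessage `json:"vm_proof"`
}

// verifyChunk stands in for m.verifier.VerifyChunkProof.
func verifyChunk(p *openVMChunkProof, hardForkName string) (bool, error) {
	return p.VmProof != nil, nil
}

func main() {
	raw := `{"vm_proof": {}}`
	proof := &openVMChunkProof{}
	if err := json.Unmarshal([]byte(raw), proof); err != nil {
		panic(err)
	}
	ok, _ := verifyChunk(proof, "euclidV2")
	fmt.Println(ok) // true
}
```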
@@ -10,31 +10,26 @@ import (

// NewVerifier Sets up a mock verifier.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
    batchVKMap := map[string]struct{}{"mock_vk": {}}
    chunkVKMap := map[string]struct{}{"mock_vk": {}}
    return &Verifier{cfg: cfg, ChunkVKMap: chunkVKMap, BatchVKMap: batchVKMap}, nil
    return &Verifier{cfg: cfg, OpenVMVkMap: map[string]struct{}{"mock_vk": {}}}, nil
}

// VerifyChunkProof returns a mock verification result for a ChunkProof.
func (v *Verifier) VerifyChunkProof(proof message.ChunkProof, forkName string) (bool, error) {
    if string(proof.Proof()) == InvalidTestProof {
func (v *Verifier) VerifyChunkProof(proof *message.OpenVMChunkProof, forkName string) (bool, error) {
    if proof.VmProof != nil && string(proof.VmProof.Proof) == InvalidTestProof {
        return false, nil
    }
    return true, nil
}

// VerifyBatchProof returns a mock verification result for a BatchProof.
func (v *Verifier) VerifyBatchProof(proof message.BatchProof, forkName string) (bool, error) {
    if string(proof.Proof()) == InvalidTestProof {
func (v *Verifier) VerifyBatchProof(proof *message.OpenVMBatchProof, forkName string) (bool, error) {
    if proof.VmProof != nil && string(proof.VmProof.Proof) == InvalidTestProof {
        return false, nil
    }
    return true, nil
}

// VerifyBundleProof returns a mock verification result for a BundleProof.
func (v *Verifier) VerifyBundleProof(proof message.BundleProof, forkName string) (bool, error) {
    if string(proof.Proof()) == InvalidTestProof {
        return false, nil
    }
func (v *Verifier) VerifyBundleProof(proof *message.OpenVMBundleProof, forkName string) (bool, error) {
    return true, nil
}

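A compact sketch of the mock rule these functions implement: any proof whose VM proof bytes equal the InvalidTestProof sentinel fails, everything else passes. Struct shapes are trimmed to the fields the mock inspects:

```go
package main

import "fmt"

const invalidTestProof = "this is a invalid proof" // mirrors verifier.InvalidTestProof above

// openVMProof / openVMChunkProof keep only what the mock verifier reads;
// the real message types carry more metadata.
type openVMProof struct{ Proof []byte }
type openVMChunkProof struct{ VmProof *openVMProof }

// verifyChunkProof reproduces the mock rule: a proof whose VM proof bytes
// equal the sentinel string fails verification, anything else passes.
func verifyChunkProof(p *openVMChunkProof) bool {
	return !(p.VmProof != nil && string(p.VmProof.Proof) == invalidTestProof)
}

func main() {
	fmt.Println(verifyChunkProof(&openVMChunkProof{VmProof: &openVMProof{Proof: []byte(invalidTestProof)}})) // false
	fmt.Println(verifyChunkProof(&openVMChunkProof{VmProof: &openVMProof{}}))                                // true
}
```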
@@ -7,11 +7,8 @@ import (
// InvalidTestProof is an invalid proof used in tests
const InvalidTestProof = "this is a invalid proof"

// Verifier represents a rust ffi to a halo2 verifier.
// Verifier represents a rust ffi to a verifier.
type Verifier struct {
    cfg         *config.VerifierConfig
    ChunkVKMap  map[string]struct{}
    BatchVKMap  map[string]struct{}
    BundleVkMap map[string]struct{}
    OpenVMVkMap map[string]struct{}
}

@@ -30,14 +30,12 @@
// in `*config.CircuitConfig` being changed
type rustCircuitConfig struct {
    ForkName   string `json:"fork_name"`
    ParamsPath string `json:"params_path"`
    AssetsPath string `json:"assets_path"`
}

func newRustCircuitConfig(cfg *config.CircuitConfig) *rustCircuitConfig {
    return &rustCircuitConfig{
        ForkName:   cfg.ForkName,
        ParamsPath: cfg.ParamsPath,
        AssetsPath: cfg.AssetsPath,
    }
}
@@ -46,13 +44,11 @@ func newRustCircuitConfig(cfg *config.CircuitConfig) *rustCircuitConfig {
// Define a brand new struct here is to eliminate side effects in case fields
// in `*config.VerifierConfig` being changed
type rustVerifierConfig struct {
    LowVersionCircuit  *rustCircuitConfig `json:"low_version_circuit"`
    HighVersionCircuit *rustCircuitConfig `json:"high_version_circuit"`
}

func newRustVerifierConfig(cfg *config.VerifierConfig) *rustVerifierConfig {
    return &rustVerifierConfig{
        LowVersionCircuit:  newRustCircuitConfig(cfg.LowVersionCircuit),
        HighVersionCircuit: newRustCircuitConfig(cfg.HighVersionCircuit),
    }
}
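For reference, a sketch of the JSON that newRustVerifierConfig hands to the rust verifier over FFI; paths and fork names here are placeholder values:

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Mirrors rustCircuitConfig / rustVerifierConfig above; values are placeholders.
type circuitConfig struct {
	ForkName   string `json:"fork_name"`
	ParamsPath string `json:"params_path"`
	AssetsPath string `json:"assets_path"`
}

type verifierConfig struct {
	LowVersionCircuit  *circuitConfig `json:"low_version_circuit"`
	HighVersionCircuit *circuitConfig `json:"high_version_circuit"`
}

func main() {
	cfg := verifierConfig{
		LowVersionCircuit:  &circuitConfig{ForkName: "darwin", ParamsPath: "/params", AssetsPath: "/assets/lo"},
		HighVersionCircuit: &circuitConfig{ForkName: "euclidV2", ParamsPath: "/params", AssetsPath: "/assets/hi"},
	}
	buf, _ := json.Marshal(cfg)
	fmt.Println(string(buf)) // payload marshaled for the rust verifier over FFI
}
```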
@@ -65,19 +61,6 @@ type rustVkDump struct {

// NewVerifier Sets up a rust ffi to call verify.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
    if cfg.MockMode {
        chunkVKMap := map[string]struct{}{"mock_vk": {}}
        batchVKMap := map[string]struct{}{"mock_vk": {}}
        bundleVKMap := map[string]struct{}{"mock_vk": {}}
        openVMVkMap := map[string]struct{}{"mock_vk": {}}
        return &Verifier{
            cfg:         cfg,
            ChunkVKMap:  chunkVKMap,
            BatchVKMap:  batchVKMap,
            BundleVkMap: bundleVKMap,
            OpenVMVkMap: openVMVkMap,
        }, nil
    }
    verifierConfig := newRustVerifierConfig(cfg)
    configBytes, err := json.Marshal(verifierConfig)
    if err != nil {
@@ -93,34 +76,18 @@ func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {

    v := &Verifier{
        cfg:         cfg,
        ChunkVKMap:  make(map[string]struct{}),
        BatchVKMap:  make(map[string]struct{}),
        BundleVkMap: make(map[string]struct{}),
        OpenVMVkMap: make(map[string]struct{}),
    }

    if err := v.loadLowVersionVKs(cfg); err != nil {
    if err := v.loadOpenVMVks(message.EuclidV2Fork); err != nil {
        return nil, err
    }

    if err := v.loadOpenVMVks(cfg.HighVersionCircuit.ForkName); err != nil {
        return nil, err
    }

    v.loadCurieVersionVKs()
    return v, nil
}

// VerifyBatchProof Verify a ZkProof by marshaling it and sending it to the Halo2 Verifier.
func (v *Verifier) VerifyBatchProof(proof message.BatchProof, forkName string) (bool, error) {
    if v.cfg.MockMode {
        log.Info("Mock mode, batch verifier disabled")
        if string(proof.Proof()) == InvalidTestProof {
            return false, nil
        }
        return true, nil

    }
// VerifyBatchProof Verify a ZkProof by marshaling it and sending it to the Verifier.
func (v *Verifier) VerifyBatchProof(proof *message.OpenVMBatchProof, forkName string) (bool, error) {
    buf, err := json.Marshal(proof)
    if err != nil {
        return false, err
@@ -138,16 +105,8 @@ func (v *Verifier) VerifyBatchProof(proof message.BatchProof, forkName string) (
    return verified != 0, nil
}

// VerifyChunkProof Verify a ZkProof by marshaling it and sending it to the Halo2 Verifier.
func (v *Verifier) VerifyChunkProof(proof message.ChunkProof, forkName string) (bool, error) {
    if v.cfg.MockMode {
        log.Info("Mock mode, verifier disabled")
        if string(proof.Proof()) == InvalidTestProof {
            return false, nil
        }
        return true, nil

    }
// VerifyChunkProof Verify a ZkProof by marshaling it and sending it to the Verifier.
func (v *Verifier) VerifyChunkProof(proof *message.OpenVMChunkProof, forkName string) (bool, error) {
    buf, err := json.Marshal(proof)
    if err != nil {
        return false, err
@@ -166,15 +125,7 @@ func (v *Verifier) VerifyChunkProof(proof message.ChunkProof, forkName string) (
}

// VerifyBundleProof Verify a ZkProof for a bundle of batches, by marshaling it and verifying it via the EVM verifier.
func (v *Verifier) VerifyBundleProof(proof message.BundleProof, forkName string) (bool, error) {
    if v.cfg.MockMode {
        log.Info("Mock mode, verifier disabled")
        if string(proof.Proof()) == InvalidTestProof {
            return false, nil
        }
        return true, nil

    }
func (v *Verifier) VerifyBundleProof(proof *message.OpenVMBundleProof, forkName string) (bool, error) {
    buf, err := json.Marshal(proof)
    if err != nil {
        return false, err
@@ -204,31 +155,6 @@ func (v *Verifier) readVK(filePat string) (string, error) {
    return base64.StdEncoding.EncodeToString(byt), nil
}

// load low version vks, current is darwin
func (v *Verifier) loadLowVersionVKs(cfg *config.VerifierConfig) error {
    bundleVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_bundle.vkey"))
    if err != nil {
        return err
    }
    batchVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_batch.vkey"))
    if err != nil {
        return err
    }
    chunkVK, err := v.readVK(path.Join(cfg.LowVersionCircuit.AssetsPath, "vk_chunk.vkey"))
    if err != nil {
        return err
    }
    v.BundleVkMap[bundleVK] = struct{}{}
    v.BatchVKMap[batchVK] = struct{}{}
    v.ChunkVKMap[chunkVK] = struct{}{}
    return nil
}

func (v *Verifier) loadCurieVersionVKs() {
    v.BatchVKMap["AAAAGgAAAARX2S0K1wF333B1waOsnG/vcASJmWG9YM6SNWCBy1ywD9jfGkei+f0wNYpkjW7JO12EfU7CjYVBo+PGku3zaQJI64lbn6BwyTBa4RfrPFpV5mP47ix0sXZ+Wt5wklMLRW7OIJb1yfCDm+gkSsp3/Zqrxt4SY4rQ4WtHfynTCQ0KDi78jNuiFvwxO3ub3DkgGVaxMkGxTRP/Vz6E7MCZMUBR5wZFcMzJn+73f0wYjDxfj00krg9O1VrwVxbVV1ycLR6oQLcOgm/l+xwth8io0vDpF9OY21gD5DgJn9GgcYe8KoRVEbEqApLZPdBibpcSMTY9czZI2LnFcqrDDmYvhEwgjhZrsTog2xLXOODoOupZ/is5ekQ9Gi0y871b1mLlCGA="] = struct{}{}
    v.ChunkVKMap["AAAAGQAAAATyWEABRbJ6hQQ5/zLX1gTasr7349minA9rSgMS6gDeHwZKqikRiO3md+pXjjxMHnKQtmXYgMXhJSvlmZ+Ws+cheuly2X1RuNQzcZuRImaKPR9LJsVZYsXfJbuqdKX8p0Gj8G83wMJOmTzNVUyUol0w0lTU+CEiTpHOnxBsTF3EWaW3s1u4ycOgWt1c9M6s7WmaBZLYgAWYCunO5CLCLApNGbCASeck/LuSoedEri5u6HccCKU2khG6zl6W07jvYSbDVLJktbjRiHv+/HQix+K14j8boo8Z/unhpwXCsPxkQA=="] = struct{}{}
}

func (v *Verifier) loadOpenVMVks(forkName string) error {
    tempFile := path.Join(os.TempDir(), "openVmVk.json")
    defer func() {
@@ -29,17 +29,9 @@ func TestFFI(t *testing.T) {
    as := assert.New(t)

    cfg := &config.VerifierConfig{
        MockMode: false,
        LowVersionCircuit: &config.CircuitConfig{
            ParamsPath:       *paramsPath,
            AssetsPath:       *assetsPathLo,
            ForkName:         "darwin",
            MinProverVersion: "",
        },
        HighVersionCircuit: &config.CircuitConfig{
            ParamsPath:       *paramsPath,
            AssetsPath:       *assetsPathHi,
            ForkName:         "darwinV2",
            ForkName:         "euclidV2",
            MinProverVersion: "",
        },
    }
@@ -48,43 +40,43 @@ func TestFFI(t *testing.T) {
    as.NoError(err)

    chunkProof1 := readChunkProof(*chunkProofPath1, as)
    chunkOk1, err := v.VerifyChunkProof(chunkProof1, "darwinV2")
    chunkOk1, err := v.VerifyChunkProof(chunkProof1, "euclidV2")
    as.NoError(err)
    as.True(chunkOk1)
    t.Log("Verified chunk proof 1")

    chunkProof2 := readChunkProof(*chunkProofPath2, as)
    chunkOk2, err := v.VerifyChunkProof(chunkProof2, "darwinV2")
    chunkOk2, err := v.VerifyChunkProof(chunkProof2, "euclidV2")
    as.NoError(err)
    as.True(chunkOk2)
    t.Log("Verified chunk proof 2")

    batchProof := readBatchProof(*batchProofPath, as)
    batchOk, err := v.VerifyBatchProof(batchProof, "darwinV2")
    batchOk, err := v.VerifyBatchProof(batchProof, "euclidV2")
    as.NoError(err)
    as.True(batchOk)
    t.Log("Verified batch proof")
}

func readBatchProof(filePat string, as *assert.Assertions) types.BatchProof {
func readBatchProof(filePat string, as *assert.Assertions) *types.OpenVMBatchProof {
    f, err := os.Open(filePat)
    as.NoError(err)
    byt, err := io.ReadAll(f)
    as.NoError(err)

    proof := &types.Halo2BatchProof{}
    proof := &types.OpenVMBatchProof{}
    as.NoError(json.Unmarshal(byt, proof))

    return proof
}

func readChunkProof(filePat string, as *assert.Assertions) types.ChunkProof {
func readChunkProof(filePat string, as *assert.Assertions) *types.OpenVMChunkProof {
    f, err := os.Open(filePat)
    as.NoError(err)
    byt, err := io.ReadAll(f)
    as.NoError(err)

    proof := &types.Halo2ChunkProof{}
    proof := &types.OpenVMChunkProof{}
    as.NoError(json.Unmarshal(byt, proof))

    return proof
@@ -19,20 +19,23 @@ type Batch struct {
    db *gorm.DB `gorm:"column:-"`

    // batch
    Index uint64 `json:"index" gorm:"column:index"`
    Hash string `json:"hash" gorm:"column:hash"`
    DataHash string `json:"data_hash" gorm:"column:data_hash"`
    StartChunkIndex uint64 `json:"start_chunk_index" gorm:"column:start_chunk_index"`
    StartChunkHash string `json:"start_chunk_hash" gorm:"column:start_chunk_hash"`
    EndChunkIndex uint64 `json:"end_chunk_index" gorm:"column:end_chunk_index"`
    EndChunkHash string `json:"end_chunk_hash" gorm:"column:end_chunk_hash"`
    StateRoot string `json:"state_root" gorm:"column:state_root"`
    WithdrawRoot string `json:"withdraw_root" gorm:"column:withdraw_root"`
    ParentBatchHash string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
    BatchHeader []byte `json:"batch_header" gorm:"column:batch_header"`
    CodecVersion int16 `json:"codec_version" gorm:"column:codec_version"`
    EnableCompress bool `json:"enable_compress" gorm:"column:enable_compress"`
    BlobBytes []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
    Index                  uint64 `json:"index" gorm:"column:index"`
    Hash                   string `json:"hash" gorm:"column:hash"`
    DataHash               string `json:"data_hash" gorm:"column:data_hash"`
    StartChunkIndex        uint64 `json:"start_chunk_index" gorm:"column:start_chunk_index"`
    StartChunkHash         string `json:"start_chunk_hash" gorm:"column:start_chunk_hash"`
    EndChunkIndex          uint64 `json:"end_chunk_index" gorm:"column:end_chunk_index"`
    EndChunkHash           string `json:"end_chunk_hash" gorm:"column:end_chunk_hash"`
    StateRoot              string `json:"state_root" gorm:"column:state_root"`
    WithdrawRoot           string `json:"withdraw_root" gorm:"column:withdraw_root"`
    ParentBatchHash        string `json:"parent_batch_hash" gorm:"column:parent_batch_hash"`
    BatchHeader            []byte `json:"batch_header" gorm:"column:batch_header"`
    CodecVersion           int16  `json:"codec_version" gorm:"column:codec_version"`
    PrevL1MessageQueueHash string `json:"prev_l1_message_queue_hash" gorm:"column:prev_l1_message_queue_hash"`
    PostL1MessageQueueHash string `json:"post_l1_message_queue_hash" gorm:"column:post_l1_message_queue_hash"`
    EnableCompress         bool   `json:"enable_compress" gorm:"column:enable_compress"`
    BlobBytes              []byte `json:"blob_bytes" gorm:"column:blob_bytes"`
    ChallengeDigest        string `json:"challenge_digest" gorm:"column:challenge_digest"`

    // proof
    ChunkProofsStatus int16 `json:"chunk_proofs_status" gorm:"column:chunk_proofs_status;default:1"`
@@ -28,6 +28,8 @@ type Chunk struct {
    StartBlockTime               uint64 `json:"start_block_time" gorm:"column:start_block_time"`
    TotalL1MessagesPoppedBefore  uint64 `json:"total_l1_messages_popped_before" gorm:"column:total_l1_messages_popped_before"`
    TotalL1MessagesPoppedInChunk uint64 `json:"total_l1_messages_popped_in_chunk" gorm:"column:total_l1_messages_popped_in_chunk"`
    PrevL1MessageQueueHash       string `json:"prev_l1_message_queue_hash" gorm:"column:prev_l1_message_queue_hash"`
    PostL1MessageQueueHash       string `json:"post_l1_message_queue_hash" gorm:"column:post_l1_message_queue_hash"`
    ParentChunkHash              string `json:"parent_chunk_hash" gorm:"column:parent_chunk_hash"`
    StateRoot                    string `json:"state_root" gorm:"column:state_root"`
    ParentChunkStateRoot         string `json:"parent_chunk_state_root" gorm:"column:parent_chunk_state_root"`
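A small sketch of the invariant the two new columns suggest: each chunk's post hash should equal the next chunk's prev hash, so the L1 message queue hash chains across consecutive chunks. The chaining rule is inferred from the naming, not stated in this diff:

```go
package main

import "fmt"

// chunkQueueHashes holds just the two new columns from the struct above.
type chunkQueueHashes struct {
	PrevL1MessageQueueHash string
	PostL1MessageQueueHash string
}

// contiguous reports whether the queue hashes chain across consecutive chunks.
func contiguous(chunks []chunkQueueHashes) bool {
	for i := 1; i < len(chunks); i++ {
		if chunks[i].PrevL1MessageQueueHash != chunks[i-1].PostL1MessageQueueHash {
			return false
		}
	}
	return true
}

func main() {
	fmt.Println(contiguous([]chunkQueueHashes{
		{PrevL1MessageQueueHash: "0x00", PostL1MessageQueueHash: "0xaa"},
		{PrevL1MessageQueueHash: "0xaa", PostL1MessageQueueHash: "0xbb"},
	})) // true
}
```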
@@ -9,11 +9,12 @@ import (
    "github.com/stretchr/testify/assert"
    "gorm.io/gorm"

    "scroll-tech/database/migrate"

    "scroll-tech/common/testcontainers"
    "scroll-tech/common/types"
    "scroll-tech/common/types/message"
    "scroll-tech/common/utils"
    "scroll-tech/database/migrate"
)

var (
@@ -22,7 +22,7 @@ func TestAuthMessageSignAndVerify(t *testing.T) {
            ProverVersion:      "v0.0.1",
            Challenge:          "abcdef",
            ProverProviderType: ProverProviderTypeInternal,
            ProverTypes:        []ProverType{ProverTypeBatch},
            ProverTypes:        []ProverType{ProverTypeOpenVM},
            VKs:                []string{"vk1", "vk2"},
        },
        PublicKey: publicKeyHex,
@@ -64,7 +64,7 @@ func TestGenerateSignature(t *testing.T) {
            ProverVersion:      "v4.4.45-37af5ef5-38a68e2-1c5093c",
            Challenge:          "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjQ4Mzg0ODUsIm9yaWdfaWF0IjoxNzI0ODM0ODg1LCJyYW5kb20iOiJ6QmdNZGstNGc4UzNUNTFrVEFsYk1RTXg2TGJ4SUs4czY3ejM2SlNuSFlJPSJ9.x9PvihhNx2w4_OX5uCrv8QJCNYVQkIi-K2k8XFXYmik",
            ProverProviderType: ProverProviderTypeInternal,
            ProverTypes:        []ProverType{ProverTypeChunk},
            ProverTypes:        []ProverType{ProverTypeOpenVM},
            VKs:                []string{"mock_vk"},
        },
        PublicKey: publicKeyHex,
@@ -2,7 +2,6 @@ package types

import (
    "fmt"

    "scroll-tech/common/types/message"
)

@@ -21,10 +20,12 @@ type ProverType uint8

func (r ProverType) String() string {
    switch r {
    case ProverTypeChunk:
        return "prover type chunk"
    case ProverTypeBatch:
        return "prover type batch"
    case ProverTypeChunkDeprecated:
        return "prover type chunk (deprecated)"
    case ProverTypeBatchDeprecated:
        return "prover type batch (deprecated)"
    case ProverTypeOpenVM:
        return "prover type openvm"
    default:
        return fmt.Sprintf("illegal prover type: %d", r)
    }
@@ -33,10 +34,10 @@ func (r ProverType) String() string {
const (
    // ProverTypeUndefined is an unknown prover type
    ProverTypeUndefined ProverType = iota
    // ProverTypeChunk signals it's a chunk prover, which can prove chunk_tasks
    ProverTypeChunk
    // ProverTypeBatch signals it's a batch prover, which can prove batch_tasks and bundle_tasks
    ProverTypeBatch
    // ProverTypeChunkDeprecated signals it's a chunk prover, which can prove chunk_tasks; deprecated
    ProverTypeChunkDeprecated
    // ProverTypeBatchDeprecated signals it's a batch prover, which can prove batch_tasks and bundle_tasks; deprecated
    ProverTypeBatchDeprecated
    // ProverTypeOpenVM signals it's an OpenVM prover
    ProverTypeOpenVM
)
@@ -45,9 +46,9 @@ const (
func MakeProverType(proofType message.ProofType) ProverType {
    switch proofType {
    case message.ProofTypeChunk:
        return ProverTypeChunk
        return ProverTypeChunkDeprecated
    case message.ProofTypeBatch, message.ProofTypeBundle:
        return ProverTypeBatch
        return ProverTypeBatchDeprecated
    default:
        return ProverTypeUndefined
    }

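A trimmed sketch of the remapping MakeProverType now performs: legacy chunk and batch proof types resolve to the deprecated prover types, leaving ProverTypeOpenVM as the only current prover type. Enum numbering here is simplified for illustration:

```go
package main

import "fmt"

type proofType int

const (
	proofTypeChunk proofType = iota + 1
	proofTypeBatch
	proofTypeBundle
)

type proverType int

const (
	proverTypeUndefined proverType = iota
	proverTypeChunkDeprecated
	proverTypeBatchDeprecated
	proverTypeOpenVM
)

// makeProverType mirrors the mapping in MakeProverType above: legacy proof
// types resolve to deprecated prover types.
func makeProverType(p proofType) proverType {
	switch p {
	case proofTypeChunk:
		return proverTypeChunkDeprecated
	case proofTypeBatch, proofTypeBundle:
		return proverTypeBatchDeprecated
	default:
		return proverTypeUndefined
	}
}

func main() {
	fmt.Println(makeProverType(proofTypeChunk) == proverTypeChunkDeprecated)  // true
	fmt.Println(makeProverType(proofTypeBundle) == proverTypeBatchDeprecated) // true
}
```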
@@ -20,11 +20,12 @@ import (
    "github.com/stretchr/testify/assert"
    "gorm.io/gorm"

    "scroll-tech/database/migrate"

    "scroll-tech/common/testcontainers"
    "scroll-tech/common/types"
    "scroll-tech/common/types/message"
    "scroll-tech/common/version"
    "scroll-tech/database/migrate"

    "scroll-tech/coordinator/internal/config"
    "scroll-tech/coordinator/internal/controller/api"
@@ -66,7 +67,7 @@ func randomURL() string {
    return fmt.Sprintf("localhost:%d", 10000+2000+id.Int64())
}

func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string, forks []string) (*cron.Collector, *http.Server) {
func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL string) (*cron.Collector, *http.Server) {
    var err error
    db, err = testApps.GetGormDBClient()
@@ -83,18 +84,10 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
        ProverManager: &config.ProverManager{
            ProversPerSession: proversPerSession,
            Verifier: &config.VerifierConfig{
                MockMode: true,
                LowVersionCircuit: &config.CircuitConfig{
                    ParamsPath:       "",
                    AssetsPath:       "",
                    ForkName:         "homestead",
                    MinProverVersion: "v4.2.0",
                },
                HighVersionCircuit: &config.CircuitConfig{
                    ParamsPath:       "",
                    AssetsPath:       "",
                    ForkName:         "bernoulli",
                    MinProverVersion: "v4.3.0",
                    ForkName:         "euclidV2",
                    MinProverVersion: "v4.4.89",
                },
            },
            BatchCollectionTimeSec: 10,
@@ -108,20 +101,17 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
        },
    }

    var chainConf params.ChainConfig
    for _, forkName := range forks {
        switch forkName {
        case "bernoulli":
            chainConf.BernoulliBlock = big.NewInt(100)
        case "homestead":
            chainConf.HomesteadBlock = big.NewInt(0)
        }
    }

    proofCollector := cron.NewCollector(context.Background(), db, conf, nil)

    router := gin.New()
    api.InitController(conf, &chainConf, db, nil)
    api.InitController(conf, &params.ChainConfig{
        BernoulliBlock: big.NewInt(0),
        CurieBlock:     big.NewInt(0),
        DarwinTime:     new(uint64),
        DarwinV2Time:   new(uint64),
        EuclidTime:     new(uint64),
        EuclidV2Time:   new(uint64),
    }, db, nil)
    route.Route(router, conf, nil)
    srv := &http.Server{
        Addr: coordinatorURL,
@@ -141,7 +131,7 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
func setEnv(t *testing.T) {
    var err error

    version.Version = "v4.2.0"
    version.Version = "v4.4.89"

    glogger := log.NewGlogHandler(log.StreamHandler(os.Stderr, log.LogfmtFormat()))
    glogger.Verbosity(log.LvlInfo)
@@ -197,7 +187,7 @@ func TestApis(t *testing.T) {
func testHandshake(t *testing.T) {
    // Setup coordinator and http server.
    coordinatorURL := randomURL()
    proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
    proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL)
    defer func() {
        proofCollector.Stop()
        assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -210,7 +200,7 @@
func testFailedHandshake(t *testing.T) {
    // Setup coordinator and http server.
    coordinatorURL := randomURL()
    proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"homestead"})
    proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL)
    defer func() {
        proofCollector.Stop()
    }()
@@ -228,7 +218,7 @@

func testGetTaskBlocked(t *testing.T) {
    coordinatorURL := randomURL()
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL)
    defer func() {
        collector.Stop()
        assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -272,7 +262,7 @@

func testOutdatedProverVersion(t *testing.T) {
    coordinatorURL := randomURL()
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL)
    defer func() {
        collector.Stop()
        assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -284,14 +274,12 @@
    batchProver := newMockProver(t, "prover_batch_test", coordinatorURL, message.ProofTypeBatch, "v1.999.999")
    assert.True(t, chunkProver.healthCheckSuccess(t))

    expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
        conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, chunkProver.proverVersion)
    expectedErr := fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: v4.4.89, actual version: %s", chunkProver.proverVersion)
    code, errMsg := chunkProver.tryGetProverTask(t, message.ProofTypeChunk)
    assert.Equal(t, types.ErrJWTCommonErr, code)
    assert.Equal(t, expectedErr, errors.New(errMsg))

    expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s",
        conf.ProverManager.Verifier.LowVersionCircuit.MinProverVersion, batchProver.proverVersion)
    expectedErr = fmt.Errorf("check the login parameter failure: incompatible prover version. please upgrade your prover, minimum allowed version: v4.4.89, actual version: %s", batchProver.proverVersion)
    code, errMsg = batchProver.tryGetProverTask(t, message.ProofTypeBatch)
    assert.Equal(t, types.ErrJWTCommonErr, code)
    assert.Equal(t, expectedErr, errors.New(errMsg))
@@ -299,7 +287,7 @@

func testValidProof(t *testing.T) {
    coordinatorURL := randomURL()
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"homestead"})
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL)
    defer func() {
        collector.Stop()
        assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -382,7 +370,7 @@
func testInvalidProof(t *testing.T) {
    // Setup coordinator and ws server.
    coordinatorURL := randomURL()
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL)
    defer func() {
        collector.Stop()
        assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -470,7 +458,7 @@
func testProofGeneratedFailed(t *testing.T) {
    // Setup coordinator and ws server.
    coordinatorURL := randomURL()
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL, []string{"darwinV2"})
    collector, httpHandler := setupCoordinator(t, 3, coordinatorURL)
    defer func() {
        collector.Stop()
        assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -571,7 +559,7 @@
func testTimeoutProof(t *testing.T) {
    // Setup coordinator and ws server.
    coordinatorURL := randomURL()
    collector, httpHandler := setupCoordinator(t, 1, coordinatorURL, []string{"darwinV2"})
    collector, httpHandler := setupCoordinator(t, 1, coordinatorURL)
    defer func() {
        collector.Stop()
        assert.NoError(t, httpHandler.Shutdown(context.Background()))
@@ -594,7 +582,9 @@
    assert.NoError(t, err)
    err = chunkOrm.UpdateBatchHashInRange(context.Background(), 0, 100, batch.Hash)
    assert.NoError(t, err)
    encodeData, err := json.Marshal(message.Halo2ChunkProof{})
    encodeData, err := json.Marshal(message.OpenVMChunkProof{VmProof: &message.OpenVMProof{}, MetaData: struct {
        ChunkInfo *message.ChunkInfo `json:"chunk_info"`
    }{ChunkInfo: &message.ChunkInfo{}}})
    assert.NoError(t, err)
    assert.NotEmpty(t, encodeData)
    err = chunkOrm.UpdateProofAndProvingStatusByHash(context.Background(), dbChunk.Hash, encodeData, types.ProvingTaskUnassigned, 1)
@@ -207,14 +207,16 @@ func (r *mockProver) submitProof(t *testing.T, proverTaskSchema *types.GetTaskSc
    }

    var proof []byte
    switch proverTaskSchema.TaskType {
    case int(message.ProofTypeChunk):
        encodeData, err := json.Marshal(message.Halo2ChunkProof{})
    switch message.ProofType(proverTaskSchema.TaskType) {
    case message.ProofTypeChunk:
        encodeData, err := json.Marshal(message.OpenVMChunkProof{VmProof: &message.OpenVMProof{}, MetaData: struct {
            ChunkInfo *message.ChunkInfo `json:"chunk_info"`
        }{ChunkInfo: &message.ChunkInfo{}}})
        assert.NoError(t, err)
        assert.NotEmpty(t, encodeData)
        proof = encodeData
    case int(message.ProofTypeBatch):
        encodeData, err := json.Marshal(message.Halo2BatchProof{})
    case message.ProofTypeBatch:
        encodeData, err := json.Marshal(message.OpenVMBatchProof{VmProof: &message.OpenVMProof{}})
        assert.NoError(t, err)
        assert.NotEmpty(t, encodeData)
        proof = encodeData
@@ -223,16 +225,14 @@
    if proofStatus == verifiedFailed {
        switch proverTaskSchema.TaskType {
        case int(message.ProofTypeChunk):
            chunkProof := message.Halo2ChunkProof{}
            chunkProof.RawProof = []byte(verifier.InvalidTestProof)
            encodeData, err := json.Marshal(&chunkProof)
            encodeData, err := json.Marshal(message.OpenVMChunkProof{VmProof: &message.OpenVMProof{Proof: []byte(verifier.InvalidTestProof)}, MetaData: struct {
                ChunkInfo *message.ChunkInfo `json:"chunk_info"`
            }{ChunkInfo: &message.ChunkInfo{}}})
            assert.NoError(t, err)
            assert.NotEmpty(t, encodeData)
            proof = encodeData
        case int(message.ProofTypeBatch):
            batchProof := message.Halo2BatchProof{}
            batchProof.RawProof = []byte(verifier.InvalidTestProof)
            encodeData, err := json.Marshal(&batchProof)
            encodeData, err := json.Marshal(&message.OpenVMBatchProof{VmProof: &message.OpenVMProof{Proof: []byte(verifier.InvalidTestProof)}})
            assert.NoError(t, err)
            assert.NotEmpty(t, encodeData)
            proof = encodeData
@@ -59,20 +59,20 @@ func testResetDB(t *testing.T) {
    cur, err := Current(pgDB)
    assert.NoError(t, err)
    // total number of tables.
    assert.Equal(t, int64(24), cur)
    assert.Equal(t, int64(26), cur)
}

func testMigrate(t *testing.T) {
    assert.NoError(t, Migrate(pgDB))
    cur, err := Current(pgDB)
    assert.NoError(t, err)
    assert.Equal(t, int64(24), cur)
    assert.Equal(t, int64(26), cur)
}

func testRollback(t *testing.T) {
    version, err := Current(pgDB)
    assert.NoError(t, err)
    assert.Equal(t, int64(24), version)
    assert.Equal(t, int64(26), version)

    assert.NoError(t, Rollback(pgDB, nil))

@@ -0,0 +1,26 @@
-- +goose Up
-- +goose StatementBegin

ALTER TABLE chunk
    ADD COLUMN prev_l1_message_queue_hash VARCHAR DEFAULT '',
    ADD COLUMN post_l1_message_queue_hash VARCHAR DEFAULT '';

ALTER TABLE batch
    ADD COLUMN prev_l1_message_queue_hash VARCHAR DEFAULT '',
    ADD COLUMN post_l1_message_queue_hash VARCHAR DEFAULT '';

-- +goose StatementEnd

-- +goose Down
-- +goose StatementBegin

ALTER TABLE IF EXISTS chunk
    DROP COLUMN IF EXISTS prev_l1_message_queue_hash,
    DROP COLUMN IF EXISTS post_l1_message_queue_hash;

ALTER TABLE IF EXISTS batch
    DROP COLUMN IF EXISTS prev_l1_message_queue_hash,
    DROP COLUMN IF EXISTS post_l1_message_queue_hash;

-- +goose StatementEnd
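Since both columns are added with DEFAULT '', pre-existing rows read back as empty strings until something backfills real hashes. A hedged gorm sketch, assuming a reachable Postgres at the placeholder DSN:

```go
package main

import (
	"fmt"

	"gorm.io/driver/postgres"
	"gorm.io/gorm"
)

// chunkRow maps only the columns this migration adds, plus the hash for
// identification; column names come from the migration above.
type chunkRow struct {
	Hash                   string `gorm:"column:hash"`
	PrevL1MessageQueueHash string `gorm:"column:prev_l1_message_queue_hash"`
	PostL1MessageQueueHash string `gorm:"column:post_l1_message_queue_hash"`
}

func main() {
	// Placeholder DSN: point this at a database that has run the migration.
	db, err := gorm.Open(postgres.Open("postgres://localhost:5432/scroll"), &gorm.Config{})
	if err != nil {
		panic(err)
	}
	var rows []chunkRow
	if err := db.Table("chunk").Limit(3).Find(&rows).Error; err != nil {
		panic(err)
	}
	fmt.Println(rows) // new columns read back as "" until backfilled
}
```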
15
database/migrate/migrations/00026_add_challenge_digest.sql
Normal file
@@ -0,0 +1,15 @@
-- +goose Up
-- +goose StatementBegin

ALTER TABLE batch
    ADD COLUMN challenge_digest VARCHAR DEFAULT '';

-- +goose StatementEnd

-- +goose Down
-- +goose StatementBegin

ALTER TABLE IF EXISTS batch
    DROP COLUMN IF EXISTS challenge_digest;

-- +goose StatementEnd
190
go.work.sum
@@ -24,54 +24,71 @@ cloud.google.com/go v0.81.0/go.mod h1:mk/AM35KwGk/Nm2YSeZbxXdrNK3KZOYHmLkOqC2V6E
|
||||
cloud.google.com/go v0.110.10 h1:LXy9GEO+timppncPIAZoOj3l58LIU9k+kn48AN7IO3Y=
|
||||
cloud.google.com/go v0.110.10/go.mod h1:v1OoFqYxiBkUrruItNM3eT4lLByNjxmJSV/xDKJNnic=
|
||||
cloud.google.com/go v0.112.0/go.mod h1:3jEEVwZ/MHU4djK5t5RHuKOA/GbLddgTdVubX1qnPD4=
|
||||
cloud.google.com/go v0.112.1 h1:uJSeirPke5UNZHIb4SxfZklVSiWWVqW4oXlETwZziwM=
|
||||
cloud.google.com/go v0.112.1/go.mod h1:+Vbu+Y1UU+I1rjmzeMOb/8RfkKJK2Gyxi1X6jJCZLo4=
|
||||
cloud.google.com/go/accessapproval v1.7.4 h1:ZvLvJ952zK8pFHINjpMBY5k7LTAp/6pBf50RDMRgBUI=
|
||||
cloud.google.com/go/accessapproval v1.7.4/go.mod h1:/aTEh45LzplQgFYdQdwPMR9YdX0UlhBmvB84uAmQKUc=
|
||||
cloud.google.com/go/accessapproval v1.7.5 h1:uzmAMSgYcnlHa9X9YSQZ4Q1wlfl4NNkZyQgho1Z6p04=
|
||||
cloud.google.com/go/accessapproval v1.7.5/go.mod h1:g88i1ok5dvQ9XJsxpUInWWvUBrIZhyPDPbk4T01OoJ0=
|
||||
cloud.google.com/go/accesscontextmanager v1.8.4 h1:Yo4g2XrBETBCqyWIibN3NHNPQKUfQqti0lI+70rubeE=
|
||||
cloud.google.com/go/accesscontextmanager v1.8.4/go.mod h1:ParU+WbMpD34s5JFEnGAnPBYAgUHozaTmDJU7aCU9+M=
|
||||
cloud.google.com/go/accesscontextmanager v1.8.5 h1:2GLNaNu9KRJhJBFTIVRoPwk6xE5mUDgD47abBq4Zp/I=
|
||||
cloud.google.com/go/accesscontextmanager v1.8.5/go.mod h1:TInEhcZ7V9jptGNqN3EzZ5XMhT6ijWxTGjzyETwmL0Q=
|
||||
cloud.google.com/go/aiplatform v1.54.0 h1:wH7OYl9Vq/5tupok0BPTFY9xaTLb0GxkReHtB5PF7cI=
|
||||
cloud.google.com/go/aiplatform v1.54.0/go.mod h1:pwZMGvqe0JRkI1GWSZCtnAfrR4K1bv65IHILGA//VEU=
|
||||
cloud.google.com/go/aiplatform v1.60.0 h1:0cSrii1ZeLr16MbBoocyy5KVnrSdiQ3KN/vtrTe7RqE=
|
||||
cloud.google.com/go/aiplatform v1.60.0/go.mod h1:eTlGuHOahHprZw3Hio5VKmtThIOak5/qy6pzdsqcQnM=
|
||||
cloud.google.com/go/analytics v0.21.6 h1:fnV7B8lqyEYxCU0LKk+vUL7mTlqRAq4uFlIthIdr/iA=
|
||||
cloud.google.com/go/analytics v0.21.6/go.mod h1:eiROFQKosh4hMaNhF85Oc9WO97Cpa7RggD40e/RBy8w=
|
||||
cloud.google.com/go/analytics v0.23.0 h1:Q+y94XH84jM8SK8O7qiY/PJRexb6n7dRbQ6PiUa4YGM=
|
||||
cloud.google.com/go/analytics v0.23.0/go.mod h1:YPd7Bvik3WS95KBok2gPXDqQPHy08TsCQG6CdUCb+u0=
|
||||
cloud.google.com/go/apigateway v1.6.4 h1:VVIxCtVerchHienSlaGzV6XJGtEM9828Erzyr3miUGs=
|
||||
cloud.google.com/go/apigateway v1.6.4/go.mod h1:0EpJlVGH5HwAN4VF4Iec8TAzGN1aQgbxAWGJsnPCGGY=
|
||||
cloud.google.com/go/apigateway v1.6.5 h1:sPXnpk+6TneKIrjCjcpX5YGsAKy3PTdpIchoj8/74OE=
|
||||
cloud.google.com/go/apigateway v1.6.5/go.mod h1:6wCwvYRckRQogyDDltpANi3zsCDl6kWi0b4Je+w2UiI=
|
||||
cloud.google.com/go/apigeeconnect v1.6.4 h1:jSoGITWKgAj/ssVogNE9SdsTqcXnryPzsulENSRlusI=
|
||||
cloud.google.com/go/apigeeconnect v1.6.4/go.mod h1:CapQCWZ8TCjnU0d7PobxhpOdVz/OVJ2Hr/Zcuu1xFx0=
|
||||
cloud.google.com/go/apigeeconnect v1.6.5 h1:CrfIKv9Go3fh/QfQgisU3MeP90Ww7l/sVGmr3TpECo8=
|
||||
cloud.google.com/go/apigeeconnect v1.6.5/go.mod h1:MEKm3AiT7s11PqTfKE3KZluZA9O91FNysvd3E6SJ6Ow=
|
||||
cloud.google.com/go/apigeeregistry v0.8.2 h1:DSaD1iiqvELag+lV4VnnqUUFd8GXELu01tKVdWZrviE=
|
||||
cloud.google.com/go/apigeeregistry v0.8.2/go.mod h1:h4v11TDGdeXJDJvImtgK2AFVvMIgGWjSb0HRnBSjcX8=
|
||||
cloud.google.com/go/apigeeregistry v0.8.3 h1:C+QU2K+DzDjk4g074ouwHQGkoff1h5OMQp6sblCVreQ=
|
||||
cloud.google.com/go/apigeeregistry v0.8.3/go.mod h1:aInOWnqF4yMQx8kTjDqHNXjZGh/mxeNlAf52YqtASUs=
|
||||
cloud.google.com/go/appengine v1.8.4 h1:Qub3fqR7iA1daJWdzjp/Q0Jz0fUG0JbMc7Ui4E9IX/E=
|
||||
cloud.google.com/go/appengine v1.8.4/go.mod h1:TZ24v+wXBujtkK77CXCpjZbnuTvsFNT41MUaZ28D6vg=
|
||||
cloud.google.com/go/appengine v1.8.5 h1:l2SviT44zWQiOv8bPoMBzW0vOcMO22iO0s+nVtVhdts=
|
||||
cloud.google.com/go/appengine v1.8.5/go.mod h1:uHBgNoGLTS5di7BvU25NFDuKa82v0qQLjyMJLuPQrVo=
|
||||
cloud.google.com/go/area120 v0.8.4 h1:YnSO8m02pOIo6AEOgiOoUDVbw4pf+bg2KLHi4rky320=
|
||||
cloud.google.com/go/area120 v0.8.4/go.mod h1:jfawXjxf29wyBXr48+W+GyX/f8fflxp642D/bb9v68M=
|
||||
cloud.google.com/go/area120 v0.8.5 h1:vTs08KPLN/iMzTbxpu5ciL06KcsrVPMjz4IwcQyZ4uY=
|
||||
cloud.google.com/go/area120 v0.8.5/go.mod h1:BcoFCbDLZjsfe4EkCnEq1LKvHSK0Ew/zk5UFu6GMyA0=
|
||||
cloud.google.com/go/artifactregistry v1.14.6 h1:/hQaadYytMdA5zBh+RciIrXZQBWK4vN7EUsrQHG+/t8=
|
||||
cloud.google.com/go/artifactregistry v1.14.6/go.mod h1:np9LSFotNWHcjnOgh8UVK0RFPCTUGbO0ve3384xyHfE=
|
||||
cloud.google.com/go/artifactregistry v1.14.7 h1:W9sVlyb1VRcUf83w7aM3yMsnp4HS4PoyGqYQNG0O5lI=
|
||||
cloud.google.com/go/artifactregistry v1.14.7/go.mod h1:0AUKhzWQzfmeTvT4SjfI4zjot72EMfrkvL9g9aRjnnM=
|
||||
cloud.google.com/go/asset v1.15.3 h1:uI8Bdm81s0esVWbWrTHcjFDFKNOa9aB7rI1vud1hO84=
|
||||
cloud.google.com/go/asset v1.15.3/go.mod h1:yYLfUD4wL4X589A9tYrv4rFrba0QlDeag0CMcM5ggXU=
|
||||
cloud.google.com/go/asset v1.17.2 h1:xgFnBP3luSbUcC9RWJvb3Zkt+y/wW6PKwPHr3ssnIP8=
|
||||
cloud.google.com/go/asset v1.17.2/go.mod h1:SVbzde67ehddSoKf5uebOD1sYw8Ab/jD/9EIeWg99q4=
|
||||
cloud.google.com/go/assuredworkloads v1.11.4 h1:FsLSkmYYeNuzDm8L4YPfLWV+lQaUrJmH5OuD37t1k20=
|
||||
cloud.google.com/go/assuredworkloads v1.11.4/go.mod h1:4pwwGNwy1RP0m+y12ef3Q/8PaiWrIDQ6nD2E8kvWI9U=
|
||||
cloud.google.com/go/assuredworkloads v1.11.5 h1:gCrN3IyvqY3cP0wh2h43d99CgH3G+WYs9CeuFVKChR8=
|
||||
cloud.google.com/go/assuredworkloads v1.11.5/go.mod h1:FKJ3g3ZvkL2D7qtqIGnDufFkHxwIpNM9vtmhvt+6wqk=
|
||||
cloud.google.com/go/automl v1.13.4 h1:i9tOKXX+1gE7+rHpWKjiuPfGBVIYoWvLNIGpWgPtF58=
|
||||
cloud.google.com/go/automl v1.13.4/go.mod h1:ULqwX/OLZ4hBVfKQaMtxMSTlPx0GqGbWN8uA/1EqCP8=
|
||||
cloud.google.com/go/automl v1.13.5 h1:ijiJy9sYWh75WrqImXsfWc1e3HR3iO+ef9fvW03Ig/4=
|
||||
cloud.google.com/go/automl v1.13.5/go.mod h1:MDw3vLem3yh+SvmSgeYUmUKqyls6NzSumDm9OJ3xJ1Y=
|
||||
cloud.google.com/go/baremetalsolution v1.2.3 h1:oQiFYYCe0vwp7J8ZmF6siVKEumWtiPFJMJcGuyDVRUk=
|
||||
cloud.google.com/go/baremetalsolution v1.2.3/go.mod h1:/UAQ5xG3faDdy180rCUv47e0jvpp3BFxT+Cl0PFjw5g=
|
||||
cloud.google.com/go/baremetalsolution v1.2.4 h1:LFydisRmS7hQk9P/YhekwuZGqb45TW4QavcrMToWo5A=
|
||||
cloud.google.com/go/baremetalsolution v1.2.4/go.mod h1:BHCmxgpevw9IEryE99HbYEfxXkAEA3hkMJbYYsHtIuY=
|
||||
cloud.google.com/go/batch v1.6.3 h1:mPiIH20a5NU02rucbAmLeO4sLPO9hrTK0BLjdHyW8xw=
|
||||
cloud.google.com/go/batch v1.6.3/go.mod h1:J64gD4vsNSA2O5TtDB5AAux3nJ9iV8U3ilg3JDBYejU=
|
||||
cloud.google.com/go/batch v1.8.0 h1:2HK4JerwVaIcCh/lJiHwh6+uswPthiMMWhiSWLELayk=
|
||||
cloud.google.com/go/batch v1.8.0/go.mod h1:k8V7f6VE2Suc0zUM4WtoibNrA6D3dqBpB+++e3vSGYc=
|
||||
cloud.google.com/go/beyondcorp v1.0.3 h1:VXf9SnrnSmj2BF2cHkoTHvOUp8gjsz1KJFOMW7czdsY=
|
||||
cloud.google.com/go/beyondcorp v1.0.3/go.mod h1:HcBvnEd7eYr+HGDd5ZbuVmBYX019C6CEXBonXbCVwJo=
|
||||
cloud.google.com/go/beyondcorp v1.0.4 h1:qs0J0O9Ol2h1yA0AU+r7l3hOCPzs2MjE1d6d/kaHIKo=
|
||||
cloud.google.com/go/beyondcorp v1.0.4/go.mod h1:Gx8/Rk2MxrvWfn4WIhHIG1NV7IBfg14pTKv1+EArVcc=
|
||||
cloud.google.com/go/bigquery v1.0.1/go.mod h1:i/xbL2UlR5RvWAURpBYZTtm/cXjCha9lbfbpx4poX+o=
|
||||
cloud.google.com/go/bigquery v1.3.0/go.mod h1:PjpwJnslEMmckchkHFfq+HTD2DmtT67aNFKH1/VBDHE=
|
||||
@@ -81,69 +98,89 @@ cloud.google.com/go/bigquery v1.7.0/go.mod h1://okPTzCYNXSlb24MZs83e2Do+h+VXtc4g
|
||||
cloud.google.com/go/bigquery v1.8.0/go.mod h1:J5hqkt3O0uAFnINi6JXValWIb1v0goeZM77hZzJN/fQ=
|
||||
cloud.google.com/go/bigquery v1.57.1 h1:FiULdbbzUxWD0Y4ZGPSVCDLvqRSyCIO6zKV7E2nf5uA=
|
||||
cloud.google.com/go/bigquery v1.57.1/go.mod h1:iYzC0tGVWt1jqSzBHqCr3lrRn0u13E8e+AqowBsDgug=
|
||||
cloud.google.com/go/bigquery v1.59.1 h1:CpT+/njKuKT3CEmswm6IbhNu9u35zt5dO4yPDLW+nG4=
|
||||
cloud.google.com/go/bigquery v1.59.1/go.mod h1:VP1UJYgevyTwsV7desjzNzDND5p6hZB+Z8gZJN1GQUc=
|
||||
cloud.google.com/go/bigtable v1.2.0 h1:F4cCmA4nuV84V5zYQ3MKY+M1Cw1avHDuf3S/LcZPA9c=
|
||||
cloud.google.com/go/bigtable v1.2.0/go.mod h1:JcVAOl45lrTmQfLj7T6TxyMzIN/3FGGcFm+2xVAli2o=
|
||||
cloud.google.com/go/billing v1.17.4 h1:77/4kCqzH6Ou5CCDzNmqmboE+WvbwFBJmw1QZQz19AI=
|
||||
cloud.google.com/go/billing v1.17.4/go.mod h1:5DOYQStCxquGprqfuid/7haD7th74kyMBHkjO/OvDtk=
|
||||
cloud.google.com/go/billing v1.18.2 h1:oWUEQvuC4JvtnqLZ35zgzdbuHt4Itbftvzbe6aEyFdE=
|
||||
cloud.google.com/go/billing v1.18.2/go.mod h1:PPIwVsOOQ7xzbADCwNe8nvK776QpfrOAUkvKjCUcpSE=
|
||||
cloud.google.com/go/binaryauthorization v1.7.3 h1:3R6WYn1JKIaVicBmo18jXubu7xh4mMkmbIgsTXk0cBA=
|
||||
cloud.google.com/go/binaryauthorization v1.7.3/go.mod h1:VQ/nUGRKhrStlGr+8GMS8f6/vznYLkdK5vaKfdCIpvU=
|
||||
cloud.google.com/go/binaryauthorization v1.8.1 h1:1jcyh2uIUwSZkJ/JmL8kd5SUkL/Krbv8zmYLEbAz6kY=
|
||||
cloud.google.com/go/binaryauthorization v1.8.1/go.mod h1:1HVRyBerREA/nhI7yLang4Zn7vfNVA3okoAR9qYQJAQ=
|
||||
cloud.google.com/go/certificatemanager v1.7.4 h1:5YMQ3Q+dqGpwUZ9X5sipsOQ1fLPsxod9HNq0+nrqc6I=
|
||||
cloud.google.com/go/certificatemanager v1.7.4/go.mod h1:FHAylPe/6IIKuaRmHbjbdLhGhVQ+CWHSD5Jq0k4+cCE=
|
||||
cloud.google.com/go/certificatemanager v1.7.5 h1:UMBr/twXvH3jcT5J5/YjRxf2tvwTYIfrpemTebe0txc=
|
||||
cloud.google.com/go/certificatemanager v1.7.5/go.mod h1:uX+v7kWqy0Y3NG/ZhNvffh0kuqkKZIXdvlZRO7z0VtM=
|
||||
cloud.google.com/go/channel v1.17.3 h1:Rd4+fBrjiN6tZ4TR8R/38elkyEkz6oogGDr7jDyjmMY=
|
||||
cloud.google.com/go/channel v1.17.3/go.mod h1:QcEBuZLGGrUMm7kNj9IbU1ZfmJq2apotsV83hbxX7eE=
|
||||
cloud.google.com/go/channel v1.17.5 h1:/omiBnyFjm4S1ETHoOmJbL7LH7Ljcei4rYG6Sj3hc80=
|
||||
cloud.google.com/go/channel v1.17.5/go.mod h1:FlpaOSINDAXgEext0KMaBq/vwpLMkkPAw9b2mApQeHc=
|
||||
cloud.google.com/go/cloudbuild v1.15.0 h1:9IHfEMWdCklJ1cwouoiQrnxmP0q3pH7JUt8Hqx4Qbck=
|
||||
cloud.google.com/go/cloudbuild v1.15.0/go.mod h1:eIXYWmRt3UtggLnFGx4JvXcMj4kShhVzGndL1LwleEM=
|
||||
cloud.google.com/go/cloudbuild v1.15.1 h1:ZB6oOmJo+MTov9n629fiCrO9YZPOg25FZvQ7gIHu5ng=
|
||||
cloud.google.com/go/cloudbuild v1.15.1/go.mod h1:gIofXZSu+XD2Uy+qkOrGKEx45zd7s28u/k8f99qKals=
|
||||
cloud.google.com/go/clouddms v1.7.3 h1:xe/wJKz55VO1+L891a1EG9lVUgfHr9Ju/I3xh1nwF84=
|
||||
cloud.google.com/go/clouddms v1.7.3/go.mod h1:fkN2HQQNUYInAU3NQ3vRLkV2iWs8lIdmBKOx4nrL6Hc=
|
||||
cloud.google.com/go/clouddms v1.7.4 h1:Sr0Zo5EAcPQiCBgHWICg3VGkcdS/LLP1d9SR7qQBM/s=
|
||||
cloud.google.com/go/clouddms v1.7.4/go.mod h1:RdrVqoFG9RWI5AvZ81SxJ/xvxPdtcRhFotwdE79DieY=
|
||||
cloud.google.com/go/cloudtasks v1.12.4 h1:5xXuFfAjg0Z5Wb81j2GAbB3e0bwroCeSF+5jBn/L650=
|
||||
cloud.google.com/go/cloudtasks v1.12.4/go.mod h1:BEPu0Gtt2dU6FxZHNqqNdGqIG86qyWKBPGnsb7udGY0=
|
||||
cloud.google.com/go/cloudtasks v1.12.6 h1:EUt1hIZ9bLv8Iz9yWaCrqgMnIU+Tdh0yXM1MMVGhjfE=
|
||||
cloud.google.com/go/cloudtasks v1.12.6/go.mod h1:b7c7fe4+TJsFZfDyzO51F7cjq7HLUlRi/KZQLQjDsaY=
|
||||
cloud.google.com/go/compute v1.20.1/go.mod h1:4tCnrn48xsqlwSAiLf1HXMQk8CONslYbdiEZc9FEIbM=
|
||||
cloud.google.com/go/compute v1.23.3 h1:6sVlXXBmbd7jNX0Ipq0trII3e4n1/MsADLK6a+aiVlk=
|
||||
cloud.google.com/go/compute v1.23.3/go.mod h1:VCgBUoMnIVIR0CscqQiPJLAG25E3ZRZMzcFZeQ+h8CI=
|
||||
cloud.google.com/go/compute v1.23.4/go.mod h1:/EJMj55asU6kAFnuZET8zqgwgJ9FvXWXOkkfQZa4ioI=
|
||||
cloud.google.com/go/compute v1.24.0 h1:phWcR2eWzRJaL/kOiJwfFsPs4BaKq1j6vnpZrc1YlVg=
|
||||
cloud.google.com/go/compute v1.24.0/go.mod h1:kw1/T+h/+tK2LJK0wiPPx1intgdAM3j/g3hFDlscY40=
|
||||
cloud.google.com/go/contactcenterinsights v1.12.0 h1:wP41IUA4ucMVooj/TP53jd7vbNjWrDkAPOeulVJGT5U=
|
||||
cloud.google.com/go/contactcenterinsights v1.12.0/go.mod h1:HHX5wrz5LHVAwfI2smIotQG9x8Qd6gYilaHcLLLmNis=
|
||||
cloud.google.com/go/contactcenterinsights v1.13.0 h1:6Vs/YnDG5STGjlWMEjN/xtmft7MrOTOnOZYUZtGTx0w=
|
||||
cloud.google.com/go/contactcenterinsights v1.13.0/go.mod h1:ieq5d5EtHsu8vhe2y3amtZ+BE+AQwX5qAy7cpo0POsI=
|
||||
cloud.google.com/go/container v1.28.0 h1:/o82CFWXIYnT9p/07SnRgybqL3Pmmu86jYIlzlJVUBY=
|
||||
cloud.google.com/go/container v1.28.0/go.mod h1:b1A1gJeTBXVLQ6GGw9/9M4FG94BEGsqJ5+t4d/3N7O4=
|
||||
cloud.google.com/go/container v1.31.0 h1:MAaNH7VRNPWEhvqOypq2j+7ONJKrKzon4v9nS3nLZe0=
|
||||
cloud.google.com/go/container v1.31.0/go.mod h1:7yABn5s3Iv3lmw7oMmyGbeV6tQj86njcTijkkGuvdZA=
|
||||
cloud.google.com/go/containeranalysis v0.11.3 h1:5rhYLX+3a01drpREqBZVXR9YmWH45RnML++8NsCtuD8=
|
||||
cloud.google.com/go/containeranalysis v0.11.3/go.mod h1:kMeST7yWFQMGjiG9K7Eov+fPNQcGhb8mXj/UcTiWw9U=
|
||||
cloud.google.com/go/containeranalysis v0.11.4 h1:doJ0M1ljS4hS0D2UbHywlHGwB7sQLNrt9vFk9Zyi7vY=
|
||||
cloud.google.com/go/containeranalysis v0.11.4/go.mod h1:cVZT7rXYBS9NG1rhQbWL9pWbXCKHWJPYraE8/FTSYPE=
|
||||
cloud.google.com/go/datacatalog v1.19.0 h1:rbYNmHwvAOOwnW2FPXYkaK3Mf1MmGqRzK0mMiIEyLdo=
|
||||
cloud.google.com/go/datacatalog v1.19.0/go.mod h1:5FR6ZIF8RZrtml0VUao22FxhdjkoG+a0866rEnObryM=
|
||||
cloud.google.com/go/datacatalog v1.19.3 h1:A0vKYCQdxQuV4Pi0LL9p39Vwvg4jH5yYveMv50gU5Tw=
|
||||
cloud.google.com/go/datacatalog v1.19.3/go.mod h1:ra8V3UAsciBpJKQ+z9Whkxzxv7jmQg1hfODr3N3YPJ4=
|
||||
cloud.google.com/go/dataflow v0.9.4 h1:7VmCNWcPJBS/srN2QnStTB6nu4Eb5TMcpkmtaPVhRt4=
|
||||
cloud.google.com/go/dataflow v0.9.4/go.mod h1:4G8vAkHYCSzU8b/kmsoR2lWyHJD85oMJPHMtan40K8w=
|
||||
cloud.google.com/go/dataflow v0.9.5 h1:RYHtcPhmE664+F0Je46p+NvFbG8z//KCXp+uEqB4jZU=
|
||||
cloud.google.com/go/dataflow v0.9.5/go.mod h1:udl6oi8pfUHnL0z6UN9Lf9chGqzDMVqcYTcZ1aPnCZQ=
|
||||
cloud.google.com/go/dataform v0.9.1 h1:jV+EsDamGX6cE127+QAcCR/lergVeeZdEQ6DdrxW3sQ=
|
||||
cloud.google.com/go/dataform v0.9.1/go.mod h1:pWTg+zGQ7i16pyn0bS1ruqIE91SdL2FDMvEYu/8oQxs=
|
||||
cloud.google.com/go/dataform v0.9.2 h1:5e4eqGrd0iDTCg4Q+VlAao5j2naKAA7xRurNtwmUknU=
|
||||
cloud.google.com/go/dataform v0.9.2/go.mod h1:S8cQUwPNWXo7m/g3DhWHsLBoufRNn9EgFrMgne2j7cI=
|
||||
cloud.google.com/go/datafusion v1.7.4 h1:Q90alBEYlMi66zL5gMSGQHfbZLB55mOAg03DhwTTfsk=
|
||||
cloud.google.com/go/datafusion v1.7.4/go.mod h1:BBs78WTOLYkT4GVZIXQCZT3GFpkpDN4aBY4NDX/jVlM=
|
||||
cloud.google.com/go/datafusion v1.7.5 h1:HQ/BUOP8OIGJxuztpYvNvlb+/U+/Bfs9SO8tQbh61fk=
|
||||
cloud.google.com/go/datafusion v1.7.5/go.mod h1:bYH53Oa5UiqahfbNK9YuYKteeD4RbQSNMx7JF7peGHc=
|
||||
cloud.google.com/go/datalabeling v0.8.4 h1:zrq4uMmunf2KFDl/7dS6iCDBBAxBnKVDyw6+ajz3yu0=
|
||||
cloud.google.com/go/datalabeling v0.8.4/go.mod h1:Z1z3E6LHtffBGrNUkKwbwbDxTiXEApLzIgmymj8A3S8=
|
||||
cloud.google.com/go/datalabeling v0.8.5 h1:GpIFRdm0qIZNsxqURFJwHt0ZBJZ0nF/mUVEigR7PH/8=
|
||||
cloud.google.com/go/datalabeling v0.8.5/go.mod h1:IABB2lxQnkdUbMnQaOl2prCOfms20mcPxDBm36lps+s=
|
||||
cloud.google.com/go/dataplex v1.11.2 h1:AfFFR15Ifh4U+Me1IBztrSd5CrasTODzy3x8KtDyHdc=
|
||||
cloud.google.com/go/dataplex v1.11.2/go.mod h1:mHJYQQ2VEJHsyoC0OdNyy988DvEbPhqFs5OOLffLX0c=
|
||||
cloud.google.com/go/dataplex v1.14.2 h1:fxIfdU8fxzR3clhOoNI7XFppvAmndxDu1AMH+qX9WKQ=
|
||||
cloud.google.com/go/dataplex v1.14.2/go.mod h1:0oGOSFlEKef1cQeAHXy4GZPB/Ife0fz/PxBf+ZymA2U=
|
||||
cloud.google.com/go/dataproc v1.12.0 h1:W47qHL3W4BPkAIbk4SWmIERwsWBaNnWm0P2sdx3YgGU=
|
||||
cloud.google.com/go/dataproc v1.12.0/go.mod h1:zrF3aX0uV3ikkMz6z4uBbIKyhRITnxvr4i3IjKsKrw4=
cloud.google.com/go/dataproc/v2 v2.3.0 h1:tTVP9tTxmc8fixxOd/8s6Q6Pz/+yzn7r7XdZHretQH0=
cloud.google.com/go/dataproc/v2 v2.3.0/go.mod h1:G5R6GBc9r36SXv/RtZIVfB8SipI+xVn0bX5SxUzVYbY=
cloud.google.com/go/dataproc/v2 v2.4.0 h1:/u81Fd+BvCLp+xjctI1DiWVJn6cn9/s3Akc8xPH02yk=
cloud.google.com/go/dataproc/v2 v2.4.0/go.mod h1:3B1Ht2aRB8VZIteGxQS/iNSJGzt9+CA0WGnDVMEm7Z4=
cloud.google.com/go/dataqna v0.8.4 h1:NJnu1kAPamZDs/if3bJ3+Wb6tjADHKL83NUWsaIp2zg=
cloud.google.com/go/dataqna v0.8.4/go.mod h1:mySRKjKg5Lz784P6sCov3p1QD+RZQONRMRjzGNcFd0c=
cloud.google.com/go/dataqna v0.8.5 h1:9ybXs3nr9BzxSGC04SsvtuXaHY0qmJSLIpIAbZo9GqQ=
cloud.google.com/go/dataqna v0.8.5/go.mod h1:vgihg1mz6n7pb5q2YJF7KlXve6tCglInd6XO0JGOlWM=
cloud.google.com/go/datastore v1.0.0/go.mod h1:LXYbyblFSglQ5pkeyhO+Qmw7ukd3C+pD7TKLgZqpHYE=
cloud.google.com/go/datastore v1.1.0/go.mod h1:umbIZjpQpHh4hmRpGhH4tLFup+FVzqBi1b3c64qFpCk=
@@ -151,141 +188,186 @@ cloud.google.com/go/datastore v1.15.0 h1:0P9WcsQeTWjuD1H14JIY7XQscIPQ4Laje8ti96I
cloud.google.com/go/datastore v1.15.0/go.mod h1:GAeStMBIt9bPS7jMJA85kgkpsMkvseWWXiaHya9Jes8=
cloud.google.com/go/datastream v1.10.3 h1:Z2sKPIB7bT2kMW5Uhxy44ZgdJzxzE5uKjavoW+EuHEE=
cloud.google.com/go/datastream v1.10.3/go.mod h1:YR0USzgjhqA/Id0Ycu1VvZe8hEWwrkjuXrGbzeDOSEA=
cloud.google.com/go/datastream v1.10.4 h1:o1QDKMo/hk0FN7vhoUQURREuA0rgKmnYapB+1M+7Qz4=
cloud.google.com/go/datastream v1.10.4/go.mod h1:7kRxPdxZxhPg3MFeCSulmAJnil8NJGGvSNdn4p1sRZo=
cloud.google.com/go/deploy v1.15.0 h1:ZdmYzRMTGkVyP1nXEUat9FpbJGJemDcNcx82RSSOElc=
cloud.google.com/go/deploy v1.15.0/go.mod h1:e5XOUI5D+YGldyLNZ21wbp9S8otJbBE4i88PtO9x/2g=
cloud.google.com/go/deploy v1.17.1 h1:m27Ojwj03gvpJqCbodLYiVmE9x4/LrHGGMjzc0LBfM4=
cloud.google.com/go/deploy v1.17.1/go.mod h1:SXQyfsXrk0fBmgBHRzBjQbZhMfKZ3hMQBw5ym7MN/50=
cloud.google.com/go/dialogflow v1.44.3 h1:cK/f88KX+YVR4tLH4clMQlvrLWD2qmKJQziusjGPjmc=
cloud.google.com/go/dialogflow v1.44.3/go.mod h1:mHly4vU7cPXVweuB5R0zsYKPMzy240aQdAu06SqBbAQ=
cloud.google.com/go/dialogflow v1.49.0 h1:KqG0oxGE71qo0lRVyAoeBozefCvsMfcDzDjoLYSY0F4=
cloud.google.com/go/dialogflow v1.49.0/go.mod h1:dhVrXKETtdPlpPhE7+2/k4Z8FRNUp6kMV3EW3oz/fe0=
cloud.google.com/go/dlp v1.11.1 h1:OFlXedmPP/5//X1hBEeq3D9kUVm9fb6ywYANlpv/EsQ=
cloud.google.com/go/dlp v1.11.1/go.mod h1:/PA2EnioBeXTL/0hInwgj0rfsQb3lpE3R8XUJxqUNKI=
cloud.google.com/go/dlp v1.11.2 h1:lTipOuJaSjlYnnotPMbEhKURLC6GzCMDDzVbJAEbmYM=
cloud.google.com/go/dlp v1.11.2/go.mod h1:9Czi+8Y/FegpWzgSfkRlyz+jwW6Te9Rv26P3UfU/h/w=
cloud.google.com/go/documentai v1.23.5 h1:KAlzT+q8qvRxAmhsJUvLtfFHH0PNvz3M79H6CgVBKL8=
cloud.google.com/go/documentai v1.23.5/go.mod h1:ghzBsyVTiVdkfKaUCum/9bGBEyBjDO4GfooEcYKhN+g=
cloud.google.com/go/documentai v1.25.0 h1:lI62GMEEPO6vXJI9hj+G9WjOvnR0hEjvjokrnex4cxA=
cloud.google.com/go/documentai v1.25.0/go.mod h1:ftLnzw5VcXkLItp6pw1mFic91tMRyfv6hHEY5br4KzY=
cloud.google.com/go/domains v0.9.4 h1:ua4GvsDztZ5F3xqjeLKVRDeOvJshf5QFgWGg1CKti3A=
cloud.google.com/go/domains v0.9.4/go.mod h1:27jmJGShuXYdUNjyDG0SodTfT5RwLi7xmH334Gvi3fY=
cloud.google.com/go/domains v0.9.5 h1:Mml/R6s3vQQvFPpi/9oX3O5dRirgjyJ8cksK8N19Y7g=
cloud.google.com/go/domains v0.9.5/go.mod h1:dBzlxgepazdFhvG7u23XMhmMKBjrkoUNaw0A8AQB55Y=
cloud.google.com/go/edgecontainer v1.1.4 h1:Szy3Q/N6bqgQGyxqjI+6xJZbmvPvnFHp3UZr95DKcQ0=
cloud.google.com/go/edgecontainer v1.1.4/go.mod h1:AvFdVuZuVGdgaE5YvlL1faAoa1ndRR/5XhXZvPBHbsE=
cloud.google.com/go/edgecontainer v1.1.5 h1:tBY32km78ScpK2aOP84JoW/+wtpx5WluyPUSEE3270U=
cloud.google.com/go/edgecontainer v1.1.5/go.mod h1:rgcjrba3DEDEQAidT4yuzaKWTbkTI5zAMu3yy6ZWS0M=
cloud.google.com/go/errorreporting v0.3.0 h1:kj1XEWMu8P0qlLhm3FwcaFsUvXChV/OraZwA70trRR0=
cloud.google.com/go/errorreporting v0.3.0/go.mod h1:xsP2yaAp+OAW4OIm60An2bbLpqIhKXdWR/tawvl7QzU=
cloud.google.com/go/essentialcontacts v1.6.5 h1:S2if6wkjR4JCEAfDtIiYtD+sTz/oXjh2NUG4cgT1y/Q=
cloud.google.com/go/essentialcontacts v1.6.5/go.mod h1:jjYbPzw0x+yglXC890l6ECJWdYeZ5dlYACTFL0U/VuM=
cloud.google.com/go/essentialcontacts v1.6.6 h1:13eHn5qBnsawxI7mIrv4jRIEmQ1xg0Ztqw5ZGqtUNfA=
cloud.google.com/go/essentialcontacts v1.6.6/go.mod h1:XbqHJGaiH0v2UvtuucfOzFXN+rpL/aU5BCZLn4DYl1Q=
cloud.google.com/go/eventarc v1.13.3 h1:+pFmO4eu4dOVipSaFBLkmqrRYG94Xl/TQZFOeohkuqU=
cloud.google.com/go/eventarc v1.13.3/go.mod h1:RWH10IAZIRcj1s/vClXkBgMHwh59ts7hSWcqD3kaclg=
cloud.google.com/go/eventarc v1.13.4 h1:ORkd6/UV5FIdA8KZQDLNZYKS7BBOrj0p01DXPmT4tE4=
cloud.google.com/go/eventarc v1.13.4/go.mod h1:zV5sFVoAa9orc/52Q+OuYUG9xL2IIZTbbuTHC6JSY8s=
cloud.google.com/go/filestore v1.8.0 h1:/+wUEGwk3x3Kxomi2cP5dsR8+SIXxo7M0THDjreFSYo=
cloud.google.com/go/filestore v1.8.0/go.mod h1:S5JCxIbFjeBhWMTfIYH2Jx24J6BqjwpkkPl+nBA5DlI=
cloud.google.com/go/filestore v1.8.1 h1:X5G4y/vrUo1B8Nsz93qSWTMAcM8LXbGUldq33OdcdCw=
cloud.google.com/go/filestore v1.8.1/go.mod h1:MbN9KcaM47DRTIuLfQhJEsjaocVebNtNQhSLhKCF5GM=
cloud.google.com/go/firestore v1.1.0/go.mod h1:ulACoGHTpvq5r8rxGJ4ddJZBZqakUQqClKRT5SZwBmk=
cloud.google.com/go/firestore v1.14.0 h1:8aLcKnMPoldYU3YHgu4t2exrKhLQkqaXAGqT0ljrFVw=
cloud.google.com/go/firestore v1.14.0/go.mod h1:96MVaHLsEhbvkBEdZgfN+AS/GIkco1LRpH9Xp9YZfzQ=
cloud.google.com/go/firestore v1.15.0 h1:/k8ppuWOtNuDHt2tsRV42yI21uaGnKDEQnRFeBpbFF8=
cloud.google.com/go/firestore v1.15.0/go.mod h1:GWOxFXcv8GZUtYpWHw/w6IuYNux/BtmeVTMmjrm4yhk=
cloud.google.com/go/functions v1.15.4 h1:ZjdiV3MyumRM6++1Ixu6N0VV9LAGlCX4AhW6Yjr1t+U=
cloud.google.com/go/functions v1.15.4/go.mod h1:CAsTc3VlRMVvx+XqXxKqVevguqJpnVip4DdonFsX28I=
cloud.google.com/go/functions v1.16.0 h1:IWVylmK5F6hJ3R5zaRW7jI5PrWhCvtBVU4axQLmXSo4=
cloud.google.com/go/functions v1.16.0/go.mod h1:nbNpfAG7SG7Duw/o1iZ6ohvL7mc6MapWQVpqtM29n8k=
cloud.google.com/go/gkebackup v1.3.4 h1:KhnOrr9A1tXYIYeXKqCKbCI8TL2ZNGiD3dm+d7BDUBg=
cloud.google.com/go/gkebackup v1.3.4/go.mod h1:gLVlbM8h/nHIs09ns1qx3q3eaXcGSELgNu1DWXYz1HI=
cloud.google.com/go/gkebackup v1.3.5 h1:iuE8KNtTsPOc79qeWoNS8zOWoXPD9SAdOmwgxtlCmh8=
cloud.google.com/go/gkebackup v1.3.5/go.mod h1:KJ77KkNN7Wm1LdMopOelV6OodM01pMuK2/5Zt1t4Tvc=
cloud.google.com/go/gkeconnect v0.8.4 h1:1JLpZl31YhQDQeJ98tK6QiwTpgHFYRJwpntggpQQWis=
cloud.google.com/go/gkeconnect v0.8.4/go.mod h1:84hZz4UMlDCKl8ifVW8layK4WHlMAFeq8vbzjU0yJkw=
cloud.google.com/go/gkeconnect v0.8.5 h1:17d+ZSSXKqG/RwZCq3oFMIWLPI8Zw3b8+a9/BEVlwH0=
cloud.google.com/go/gkeconnect v0.8.5/go.mod h1:LC/rS7+CuJ5fgIbXv8tCD/mdfnlAadTaUufgOkmijuk=
cloud.google.com/go/gkehub v0.14.4 h1:J5tYUtb3r0cl2mM7+YHvV32eL+uZQ7lONyUZnPikCEo=
cloud.google.com/go/gkehub v0.14.4/go.mod h1:Xispfu2MqnnFt8rV/2/3o73SK1snL8s9dYJ9G2oQMfc=
cloud.google.com/go/gkehub v0.14.5 h1:RboLNFzf9wEMSo7DrKVBlf+YhK/A/jrLN454L5Tz99Q=
cloud.google.com/go/gkehub v0.14.5/go.mod h1:6bzqxM+a+vEH/h8W8ec4OJl4r36laxTs3A/fMNHJ0wA=
cloud.google.com/go/gkemulticloud v1.0.3 h1:NmJsNX9uQ2CT78957xnjXZb26TDIMvv+d5W2vVUt0Pg=
cloud.google.com/go/gkemulticloud v1.0.3/go.mod h1:7NpJBN94U6DY1xHIbsDqB2+TFZUfjLUKLjUX8NGLor0=
cloud.google.com/go/gkemulticloud v1.1.1 h1:rsSZAGLhyjyE/bE2ToT5fqo1qSW7S+Ubsc9jFOcbhSI=
cloud.google.com/go/gkemulticloud v1.1.1/go.mod h1:C+a4vcHlWeEIf45IB5FFR5XGjTeYhF83+AYIpTy4i2Q=
cloud.google.com/go/grafeas v0.3.0 h1:oyTL/KjiUeBs9eYLw/40cpSZglUC+0F7X4iu/8t7NWs=
cloud.google.com/go/grafeas v0.3.0/go.mod h1:P7hgN24EyONOTMyeJH6DxG4zD7fwiYa5Q6GUgyFSOU8=
cloud.google.com/go/grafeas v0.3.4 h1:D4x32R/cHX3MTofKwirz015uEdVk4uAxvZkZCZkOrF4=
cloud.google.com/go/grafeas v0.3.4/go.mod h1:A5m316hcG+AulafjAbPKXBO/+I5itU4LOdKO2R/uDIc=
cloud.google.com/go/gsuiteaddons v1.6.4 h1:uuw2Xd37yHftViSI8J2hUcCS8S7SH3ZWH09sUDLW30Q=
cloud.google.com/go/gsuiteaddons v1.6.4/go.mod h1:rxtstw7Fx22uLOXBpsvb9DUbC+fiXs7rF4U29KHM/pE=
cloud.google.com/go/gsuiteaddons v1.6.5 h1:CZEbaBwmbYdhFw21Fwbo+C35HMe36fTE0FBSR4KSfWg=
cloud.google.com/go/gsuiteaddons v1.6.5/go.mod h1:Lo4P2IvO8uZ9W+RaC6s1JVxo42vgy+TX5a6hfBZ0ubs=
cloud.google.com/go/iam v1.1.5 h1:1jTsCu4bcsNsE4iiqNT5SHwrDRCfRmIaaaVFhRveTJI=
cloud.google.com/go/iam v1.1.5/go.mod h1:rB6P/Ic3mykPbFio+vo7403drjlgvoWfYpJhMXEbzv8=
cloud.google.com/go/iam v1.1.6 h1:bEa06k05IO4f4uJonbB5iAgKTPpABy1ayxaIZV/GHVc=
cloud.google.com/go/iam v1.1.6/go.mod h1:O0zxdPeGBoFdWW3HWmBxJsk0pfvNM/p/qa82rWOGTwI=
cloud.google.com/go/iap v1.9.3 h1:M4vDbQ4TLXdaljXVZSwW7XtxpwXUUarY2lIs66m0aCM=
cloud.google.com/go/iap v1.9.3/go.mod h1:DTdutSZBqkkOm2HEOTBzhZxh2mwwxshfD/h3yofAiCw=
cloud.google.com/go/iap v1.9.4 h1:94zirc2r4t6KzhAMW0R6Dme005eTP6yf7g6vN4IhRrA=
cloud.google.com/go/iap v1.9.4/go.mod h1:vO4mSq0xNf/Pu6E5paORLASBwEmphXEjgCFg7aeNu1w=
cloud.google.com/go/ids v1.4.4 h1:VuFqv2ctf/A7AyKlNxVvlHTzjrEvumWaZflUzBPz/M4=
cloud.google.com/go/ids v1.4.4/go.mod h1:z+WUc2eEl6S/1aZWzwtVNWoSZslgzPxAboS0lZX0HjI=
cloud.google.com/go/ids v1.4.5 h1:xd4U7pgl3GHV+MABnv1BF4/Vy/zBF7CYC8XngkOLzag=
cloud.google.com/go/ids v1.4.5/go.mod h1:p0ZnyzjMWxww6d2DvMGnFwCsSxDJM666Iir1bK1UuBo=
cloud.google.com/go/iot v1.7.4 h1:m1WljtkZnvLTIRYW1YTOv5A6H1yKgLHR6nU7O8yf27w=
cloud.google.com/go/iot v1.7.4/go.mod h1:3TWqDVvsddYBG++nHSZmluoCAVGr1hAcabbWZNKEZLk=
cloud.google.com/go/iot v1.7.5 h1:munTeBlbqI33iuTYgXy7S8lW2TCgi5l1hA4roSIY+EE=
cloud.google.com/go/iot v1.7.5/go.mod h1:nq3/sqTz3HGaWJi1xNiX7F41ThOzpud67vwk0YsSsqs=
cloud.google.com/go/kms v1.15.5 h1:pj1sRfut2eRbD9pFRjNnPNg/CzJPuQAzUujMIM1vVeM=
cloud.google.com/go/kms v1.15.5/go.mod h1:cU2H5jnp6G2TDpUGZyqTCoy1n16fbubHZjmVXSMtwDI=
cloud.google.com/go/kms v1.15.7 h1:7caV9K3yIxvlQPAcaFffhlT7d1qpxjB1wHBtjWa13SM=
cloud.google.com/go/kms v1.15.7/go.mod h1:ub54lbsa6tDkUwnu4W7Yt1aAIFLnspgh0kPGToDukeI=
cloud.google.com/go/language v1.12.2 h1:zg9uq2yS9PGIOdc0Kz/l+zMtOlxKWonZjjo5w5YPG2A=
cloud.google.com/go/language v1.12.2/go.mod h1:9idWapzr/JKXBBQ4lWqVX/hcadxB194ry20m/bTrhWc=
cloud.google.com/go/language v1.12.3 h1:iaJZg6K4j/2PvZZVcjeO/btcWWIllVRBhuTFjGO4LXs=
cloud.google.com/go/language v1.12.3/go.mod h1:evFX9wECX6mksEva8RbRnr/4wi/vKGYnAJrTRXU8+f8=
cloud.google.com/go/lifesciences v0.9.4 h1:rZEI/UxcxVKEzyoRS/kdJ1VoolNItRWjNN0Uk9tfexg=
cloud.google.com/go/lifesciences v0.9.4/go.mod h1:bhm64duKhMi7s9jR9WYJYvjAFJwRqNj+Nia7hF0Z7JA=
cloud.google.com/go/lifesciences v0.9.5 h1:gXvN70m2p+4zgJFzaz6gMKaxTuF9WJ0USYoMLWAOm8g=
cloud.google.com/go/lifesciences v0.9.5/go.mod h1:OdBm0n7C0Osh5yZB7j9BXyrMnTRGBJIZonUMxo5CzPw=
cloud.google.com/go/logging v1.8.1 h1:26skQWPeYhvIasWKm48+Eq7oUqdcdbwsCVwz5Ys0FvU=
cloud.google.com/go/logging v1.8.1/go.mod h1:TJjR+SimHwuC8MZ9cjByQulAMgni+RkXeI3wwctHJEI=
cloud.google.com/go/logging v1.9.0 h1:iEIOXFO9EmSiTjDmfpbRjOxECO7R8C7b8IXUGOj7xZw=
cloud.google.com/go/logging v1.9.0/go.mod h1:1Io0vnZv4onoUnsVUQY3HZ3Igb1nBchky0A0y7BBBhE=
cloud.google.com/go/longrunning v0.5.4 h1:w8xEcbZodnA2BbW6sVirkkoC+1gP8wS57EUUgGS0GVg=
cloud.google.com/go/longrunning v0.5.4/go.mod h1:zqNVncI0BOP8ST6XQD1+VcvuShMmq7+xFSzOL++V0dI=
cloud.google.com/go/longrunning v0.5.5 h1:GOE6pZFdSrTb4KAiKnXsJBtlE6mEyaW44oKyMILWnOg=
cloud.google.com/go/longrunning v0.5.5/go.mod h1:WV2LAxD8/rg5Z1cNW6FJ/ZpX4E4VnDnoTk0yawPBB7s=
cloud.google.com/go/managedidentities v1.6.4 h1:SF/u1IJduMqQQdJA4MDyivlIQ4SrV5qAawkr/ZEREkY=
cloud.google.com/go/managedidentities v1.6.4/go.mod h1:WgyaECfHmF00t/1Uk8Oun3CQ2PGUtjc3e9Alh79wyiM=
cloud.google.com/go/managedidentities v1.6.5 h1:+bpih1piZVLxla/XBqeSUzJBp8gv9plGHIMAI7DLpDM=
cloud.google.com/go/managedidentities v1.6.5/go.mod h1:fkFI2PwwyRQbjLxlm5bQ8SjtObFMW3ChBGNqaMcgZjI=
cloud.google.com/go/maps v1.6.1 h1:2+eMp/1MvMPp5qrSOd3vtnLKa/pylt+krVRqET3jWsM=
cloud.google.com/go/maps v1.6.1/go.mod h1:4+buOHhYXFBp58Zj/K+Lc1rCmJssxxF4pJ5CJnhdz18=
cloud.google.com/go/maps v1.6.4 h1:EVCZAiDvog9So46460BGbCasPhi613exoaQbpilMVlk=
cloud.google.com/go/maps v1.6.4/go.mod h1:rhjqRy8NWmDJ53saCfsXQ0LKwBHfi6OSh5wkq6BaMhI=
cloud.google.com/go/mediatranslation v0.8.4 h1:VRCQfZB4s6jN0CSy7+cO3m4ewNwgVnaePanVCQh/9Z4=
cloud.google.com/go/mediatranslation v0.8.4/go.mod h1:9WstgtNVAdN53m6TQa5GjIjLqKQPXe74hwSCxUP6nj4=
cloud.google.com/go/mediatranslation v0.8.5 h1:c76KdIXljQHSCb/Cy47S8H4s05A4zbK3pAFGzwcczZo=
cloud.google.com/go/mediatranslation v0.8.5/go.mod h1:y7kTHYIPCIfgyLbKncgqouXJtLsU+26hZhHEEy80fSs=
cloud.google.com/go/memcache v1.10.4 h1:cdex/ayDd294XBj2cGeMe6Y+H1JvhN8y78B9UW7pxuQ=
cloud.google.com/go/memcache v1.10.4/go.mod h1:v/d8PuC8d1gD6Yn5+I3INzLR01IDn0N4Ym56RgikSI0=
cloud.google.com/go/memcache v1.10.5 h1:yeDv5qxRedFosvpMSEswrqUsJM5OdWvssPHFliNFTc4=
cloud.google.com/go/memcache v1.10.5/go.mod h1:/FcblbNd0FdMsx4natdj+2GWzTq+cjZvMa1I+9QsuMA=
cloud.google.com/go/metastore v1.13.3 h1:94l/Yxg9oBZjin2bzI79oK05feYefieDq0o5fjLSkC8=
cloud.google.com/go/metastore v1.13.3/go.mod h1:K+wdjXdtkdk7AQg4+sXS8bRrQa9gcOr+foOMF2tqINE=
cloud.google.com/go/metastore v1.13.4 h1:dR7vqWXlK6IYR8Wbu9mdFfwlVjodIBhd1JRrpZftTEg=
cloud.google.com/go/metastore v1.13.4/go.mod h1:FMv9bvPInEfX9Ac1cVcRXp8EBBQnBcqH6gz3KvJ9BAE=
cloud.google.com/go/monitoring v1.16.3 h1:mf2SN9qSoBtIgiMA4R/y4VADPWZA7VCNJA079qLaZQ8=
cloud.google.com/go/monitoring v1.16.3/go.mod h1:KwSsX5+8PnXv5NJnICZzW2R8pWTis8ypC4zmdRD63Tw=
cloud.google.com/go/monitoring v1.18.0 h1:NfkDLQDG2UR3WYZVQE8kwSbUIEyIqJUPl+aOQdFH1T4=
cloud.google.com/go/monitoring v1.18.0/go.mod h1:c92vVBCeq/OB4Ioyo+NbN2U7tlg5ZH41PZcdvfc+Lcg=
cloud.google.com/go/networkconnectivity v1.14.3 h1:e9lUkCe2BexsqsUc2bjV8+gFBpQa54J+/F3qKVtW+wA=
cloud.google.com/go/networkconnectivity v1.14.3/go.mod h1:4aoeFdrJpYEXNvrnfyD5kIzs8YtHg945Og4koAjHQek=
cloud.google.com/go/networkconnectivity v1.14.4 h1:GBfXFhLyPspnaBE3nI/BRjdhW8vcbpT9QjE/4kDCDdc=
cloud.google.com/go/networkconnectivity v1.14.4/go.mod h1:PU12q++/IMnDJAB+3r+tJtuCXCfwfN+C6Niyj6ji1Po=
cloud.google.com/go/networkmanagement v1.9.3 h1:HsQk4FNKJUX04k3OI6gUsoveiHMGvDRqlaFM2xGyvqU=
cloud.google.com/go/networkmanagement v1.9.3/go.mod h1:y7WMO1bRLaP5h3Obm4tey+NquUvB93Co1oh4wpL+XcU=
cloud.google.com/go/networkmanagement v1.9.4 h1:aLV5GcosBNmd6M8+a0ekB0XlLRexv4fvnJJrYnqeBcg=
cloud.google.com/go/networkmanagement v1.9.4/go.mod h1:daWJAl0KTFytFL7ar33I6R/oNBH8eEOX/rBNHrC/8TA=
cloud.google.com/go/networksecurity v0.9.4 h1:947tNIPnj1bMGTIEBo3fc4QrrFKS5hh0bFVsHmFm4Vo=
cloud.google.com/go/networksecurity v0.9.4/go.mod h1:E9CeMZ2zDsNBkr8axKSYm8XyTqNhiCHf1JO/Vb8mD1w=
cloud.google.com/go/networksecurity v0.9.5 h1:+caSxBTj0E8OYVh/5wElFdjEMO1S/rZtE1152Cepchc=
cloud.google.com/go/networksecurity v0.9.5/go.mod h1:KNkjH/RsylSGyyZ8wXpue8xpCEK+bTtvof8SBfIhMG8=
cloud.google.com/go/notebooks v1.11.2 h1:eTOTfNL1yM6L/PCtquJwjWg7ZZGR0URFaFgbs8kllbM=
cloud.google.com/go/notebooks v1.11.2/go.mod h1:z0tlHI/lREXC8BS2mIsUeR3agM1AkgLiS+Isov3SS70=
cloud.google.com/go/notebooks v1.11.3 h1:FH48boYmrWVQ6k0Mx/WrnNafXncT5iSYxA8CNyWTgy0=
cloud.google.com/go/notebooks v1.11.3/go.mod h1:0wQyI2dQC3AZyQqWnRsp+yA+kY4gC7ZIVP4Qg3AQcgo=
cloud.google.com/go/optimization v1.6.2 h1:iFsoexcp13cGT3k/Hv8PA5aK+FP7FnbhwDO9llnruas=
cloud.google.com/go/optimization v1.6.2/go.mod h1:mWNZ7B9/EyMCcwNl1frUGEuY6CPijSkz88Fz2vwKPOY=
cloud.google.com/go/optimization v1.6.3 h1:63NZaWyN+5rZEKHPX4ACpw3BjgyeuY8+rCehiCMaGPY=
cloud.google.com/go/optimization v1.6.3/go.mod h1:8ve3svp3W6NFcAEFr4SfJxrldzhUl4VMUJmhrqVKtYA=
cloud.google.com/go/orchestration v1.8.4 h1:kgwZ2f6qMMYIVBtUGGoU8yjYWwMTHDanLwM/CQCFaoQ=
cloud.google.com/go/orchestration v1.8.4/go.mod h1:d0lywZSVYtIoSZXb0iFjv9SaL13PGyVOKDxqGxEf/qI=
cloud.google.com/go/orchestration v1.8.5 h1:YHgWMlrPttIVGItgGfuvO2KM7x+y9ivN/Yk92pMm1a4=
cloud.google.com/go/orchestration v1.8.5/go.mod h1:C1J7HesE96Ba8/hZ71ISTV2UAat0bwN+pi85ky38Yq8=
cloud.google.com/go/orgpolicy v1.11.4 h1:RWuXQDr9GDYhjmrredQJC7aY7cbyqP9ZuLbq5GJGves=
cloud.google.com/go/orgpolicy v1.11.4/go.mod h1:0+aNV/nrfoTQ4Mytv+Aw+stBDBjNf4d8fYRA9herfJI=
cloud.google.com/go/orgpolicy v1.12.1 h1:2JbXigqBJVp8Dx5dONUttFqewu4fP0p3pgOdIZAhpYU=
cloud.google.com/go/orgpolicy v1.12.1/go.mod h1:aibX78RDl5pcK3jA8ysDQCFkVxLj3aOQqrbBaUL2V5I=
cloud.google.com/go/osconfig v1.12.4 h1:OrRCIYEAbrbXdhm13/JINn9pQchvTTIzgmOCA7uJw8I=
cloud.google.com/go/osconfig v1.12.4/go.mod h1:B1qEwJ/jzqSRslvdOCI8Kdnp0gSng0xW4LOnIebQomA=
cloud.google.com/go/osconfig v1.12.5 h1:Mo5jGAxOMKH/PmDY7fgY19yFcVbvwREb5D5zMPQjFfo=
cloud.google.com/go/osconfig v1.12.5/go.mod h1:D9QFdxzfjgw3h/+ZaAb5NypM8bhOMqBzgmbhzWViiW8=
cloud.google.com/go/oslogin v1.12.2 h1:NP/KgsD9+0r9hmHC5wKye0vJXVwdciv219DtYKYjgqE=
cloud.google.com/go/oslogin v1.12.2/go.mod h1:CQ3V8Jvw4Qo4WRhNPF0o+HAM4DiLuE27Ul9CX9g2QdY=
cloud.google.com/go/oslogin v1.13.1 h1:1K4nOT5VEZNt7XkhaTXupBYos5HjzvJMfhvyD2wWdFs=
cloud.google.com/go/oslogin v1.13.1/go.mod h1:vS8Sr/jR7QvPWpCjNqy6LYZr5Zs1e8ZGW/KPn9gmhws=
cloud.google.com/go/phishingprotection v0.8.4 h1:sPLUQkHq6b4AL0czSJZ0jd6vL55GSTHz2B3Md+TCZI0=
cloud.google.com/go/phishingprotection v0.8.4/go.mod h1:6b3kNPAc2AQ6jZfFHioZKg9MQNybDg4ixFd4RPZZ2nE=
cloud.google.com/go/phishingprotection v0.8.5 h1:DH3WFLzEoJdW/6xgsmoDqOwT1xddFi7gKu0QGZQhpGU=
cloud.google.com/go/phishingprotection v0.8.5/go.mod h1:g1smd68F7mF1hgQPuYn3z8HDbNre8L6Z0b7XMYFmX7I=
cloud.google.com/go/policytroubleshooter v1.10.2 h1:sq+ScLP83d7GJy9+wpwYJVnY+q6xNTXwOdRIuYjvHT4=
cloud.google.com/go/policytroubleshooter v1.10.2/go.mod h1:m4uF3f6LseVEnMV6nknlN2vYGRb+75ylQwJdnOXfnv0=
cloud.google.com/go/policytroubleshooter v1.10.3 h1:c0WOzC6hz964QWNBkyKfna8A2jOIx1zzZa43Gx/P09o=
cloud.google.com/go/policytroubleshooter v1.10.3/go.mod h1:+ZqG3agHT7WPb4EBIRqUv4OyIwRTZvsVDHZ8GlZaoxk=
cloud.google.com/go/privatecatalog v0.9.4 h1:Vo10IpWKbNvc/z/QZPVXgCiwfjpWoZ/wbgful4Uh/4E=
cloud.google.com/go/privatecatalog v0.9.4/go.mod h1:SOjm93f+5hp/U3PqMZAHTtBtluqLygrDrVO8X8tYtG0=
cloud.google.com/go/privatecatalog v0.9.5 h1:UZ0assTnATXSggoxUIh61RjTQ4P9zCMk/kEMbn0nMYA=
cloud.google.com/go/privatecatalog v0.9.5/go.mod h1:fVWeBOVe7uj2n3kWRGlUQqR/pOd450J9yZoOECcQqJk=
cloud.google.com/go/pubsub v1.0.1/go.mod h1:R0Gpsv3s54REJCy4fxDixWD93lHJMoZTyQ2kNxGRt3I=
cloud.google.com/go/pubsub v1.1.0/go.mod h1:EwwdRX2sKPjnvnqCa270oGRyludottCI76h+R3AArQw=
@@ -293,56 +375,73 @@ cloud.google.com/go/pubsub v1.2.0/go.mod h1:jhfEVHT8odbXTkndysNHCcx0awwzvfOlguIA
cloud.google.com/go/pubsub v1.3.1/go.mod h1:i+ucay31+CNRpDW4Lu78I4xXG+O1r/MAHgjpRVR+TSU=
cloud.google.com/go/pubsub v1.33.0 h1:6SPCPvWav64tj0sVX/+npCBKhUi/UjJehy9op/V3p2g=
cloud.google.com/go/pubsub v1.33.0/go.mod h1:f+w71I33OMyxf9VpMVcZbnG5KSUkCOUHYpFd5U1GdRc=
cloud.google.com/go/pubsub v1.36.1 h1:dfEPuGCHGbWUhaMCTHUFjfroILEkx55iUmKBZTP5f+Y=
cloud.google.com/go/pubsub v1.36.1/go.mod h1:iYjCa9EzWOoBiTdd4ps7QoMtMln5NwaZQpK1hbRfBDE=
cloud.google.com/go/pubsublite v1.8.1 h1:pX+idpWMIH30/K7c0epN6V703xpIcMXWRjKJsz0tYGY=
cloud.google.com/go/pubsublite v1.8.1/go.mod h1:fOLdU4f5xldK4RGJrBMm+J7zMWNj/k4PxwEZXy39QS0=
cloud.google.com/go/recaptchaenterprise/v2 v2.8.4 h1:KOlLHLv3h3HwcZAkx91ubM3Oztz3JtT3ZacAJhWDorQ=
cloud.google.com/go/recaptchaenterprise/v2 v2.8.4/go.mod h1:Dak54rw6lC2gBY8FBznpOCAR58wKf+R+ZSJRoeJok4w=
cloud.google.com/go/recaptchaenterprise/v2 v2.9.2 h1:U3Wfq12X9cVMuTpsWDSURnXF0Z9hSPTHj+xsnXDRLsw=
cloud.google.com/go/recaptchaenterprise/v2 v2.9.2/go.mod h1:trwwGkfhCmp05Ll5MSJPXY7yvnO0p4v3orGANAFHAuU=
cloud.google.com/go/recommendationengine v0.8.4 h1:JRiwe4hvu3auuh2hujiTc2qNgPPfVp+Q8KOpsXlEzKQ=
cloud.google.com/go/recommendationengine v0.8.4/go.mod h1:GEteCf1PATl5v5ZsQ60sTClUE0phbWmo3rQ1Js8louU=
cloud.google.com/go/recommendationengine v0.8.5 h1:ineqLswaCSBY0csYv5/wuXJMBlxATK6Xc5jJkpiTEdM=
cloud.google.com/go/recommendationengine v0.8.5/go.mod h1:A38rIXHGFvoPvmy6pZLozr0g59NRNREz4cx7F58HAsQ=
cloud.google.com/go/recommender v1.11.3 h1:VndmgyS/J3+izR8V8BHa7HV/uun8//ivQ3k5eVKKyyM=
cloud.google.com/go/recommender v1.11.3/go.mod h1:+FJosKKJSId1MBFeJ/TTyoGQZiEelQQIZMKYYD8ruK4=
cloud.google.com/go/recommender v1.12.1 h1:LVLYS3r3u0MSCxQSDUtLSkporEGi9OAE6hGvayrZNPs=
cloud.google.com/go/recommender v1.12.1/go.mod h1:gf95SInWNND5aPas3yjwl0I572dtudMhMIG4ni8nr+0=
cloud.google.com/go/redis v1.14.1 h1:J9cEHxG9YLmA9o4jTSvWt/RuVEn6MTrPlYSCRHujxDQ=
cloud.google.com/go/redis v1.14.1/go.mod h1:MbmBxN8bEnQI4doZPC1BzADU4HGocHBk2de3SbgOkqs=
cloud.google.com/go/redis v1.14.2 h1:QF0maEdVv0Fj/2roU8sX3NpiDBzP9ICYTO+5F32gQNo=
cloud.google.com/go/redis v1.14.2/go.mod h1:g0Lu7RRRz46ENdFKQ2EcQZBAJ2PtJHJLuiiRuEXwyQw=
cloud.google.com/go/resourcemanager v1.9.4 h1:JwZ7Ggle54XQ/FVYSBrMLOQIKoIT/uer8mmNvNLK51k=
cloud.google.com/go/resourcemanager v1.9.4/go.mod h1:N1dhP9RFvo3lUfwtfLWVxfUWq8+KUQ+XLlHLH3BoFJ0=
cloud.google.com/go/resourcemanager v1.9.5 h1:AZWr1vWVDKGwfLsVhcN+vcwOz3xqqYxtmMa0aABCMms=
cloud.google.com/go/resourcemanager v1.9.5/go.mod h1:hep6KjelHA+ToEjOfO3garMKi/CLYwTqeAw7YiEI9x8=
cloud.google.com/go/resourcesettings v1.6.4 h1:yTIL2CsZswmMfFyx2Ic77oLVzfBFoWBYgpkgiSPnC4Y=
cloud.google.com/go/resourcesettings v1.6.4/go.mod h1:pYTTkWdv2lmQcjsthbZLNBP4QW140cs7wqA3DuqErVI=
cloud.google.com/go/resourcesettings v1.6.5 h1:BTr5MVykJwClASci/7Og4Qfx70aQ4n3epsNLj94ZYgw=
cloud.google.com/go/resourcesettings v1.6.5/go.mod h1:WBOIWZraXZOGAgoR4ukNj0o0HiSMO62H9RpFi9WjP9I=
cloud.google.com/go/retail v1.14.4 h1:geqdX1FNqqL2p0ADXjPpw8lq986iv5GrVcieTYafuJQ=
cloud.google.com/go/retail v1.14.4/go.mod h1:l/N7cMtY78yRnJqp5JW8emy7MB1nz8E4t2yfOmklYfg=
cloud.google.com/go/retail v1.16.0 h1:Fn1GuAua1c6crCGqfJ1qMxG1Xh10Tg/x5EUODEHMqkw=
cloud.google.com/go/retail v1.16.0/go.mod h1:LW7tllVveZo4ReWt68VnldZFWJRzsh9np+01J9dYWzE=
cloud.google.com/go/run v1.3.3 h1:qdfZteAm+vgzN1iXzILo3nJFQbzziudkJrvd9wCf3FQ=
cloud.google.com/go/run v1.3.3/go.mod h1:WSM5pGyJ7cfYyYbONVQBN4buz42zFqwG67Q3ch07iK4=
cloud.google.com/go/run v1.3.4 h1:m9WDA7DzTpczhZggwYlZcBWgCRb+kgSIisWn1sbw2rQ=
cloud.google.com/go/run v1.3.4/go.mod h1:FGieuZvQ3tj1e9GnzXqrMABSuir38AJg5xhiYq+SF3o=
cloud.google.com/go/scheduler v1.10.5 h1:eMEettHlFhG5pXsoHouIM5nRT+k+zU4+GUvRtnxhuVI=
cloud.google.com/go/scheduler v1.10.5/go.mod h1:MTuXcrJC9tqOHhixdbHDFSIuh7xZF2IysiINDuiq6NI=
cloud.google.com/go/scheduler v1.10.6 h1:5U8iXLoQ03qOB+ZXlAecU7fiE33+u3QiM9nh4cd0eTE=
cloud.google.com/go/scheduler v1.10.6/go.mod h1:pe2pNCtJ+R01E06XCDOJs1XvAMbv28ZsQEbqknxGOuE=
cloud.google.com/go/secretmanager v1.11.4 h1:krnX9qpG2kR2fJ+u+uNyNo+ACVhplIAS4Pu7u+4gd+k=
cloud.google.com/go/secretmanager v1.11.4/go.mod h1:wreJlbS9Zdq21lMzWmJ0XhWW2ZxgPeahsqeV/vZoJ3w=
cloud.google.com/go/secretmanager v1.11.5 h1:82fpF5vBBvu9XW4qj0FU2C6qVMtj1RM/XHwKXUEAfYY=
cloud.google.com/go/secretmanager v1.11.5/go.mod h1:eAGv+DaCHkeVyQi0BeXgAHOU0RdrMeZIASKc+S7VqH4=
cloud.google.com/go/security v1.15.4 h1:sdnh4Islb1ljaNhpIXlIPgb3eYj70QWgPVDKOUYvzJc=
cloud.google.com/go/security v1.15.4/go.mod h1:oN7C2uIZKhxCLiAAijKUCuHLZbIt/ghYEo8MqwD/Ty4=
cloud.google.com/go/security v1.15.5 h1:wTKJQ10j8EYgvE8Y+KhovxDRVDk2iv/OsxZ6GrLP3kE=
cloud.google.com/go/security v1.15.5/go.mod h1:KS6X2eG3ynWjqcIX976fuToN5juVkF6Ra6c7MPnldtc=
cloud.google.com/go/securitycenter v1.24.2 h1:qCEyXoJoxNKKA1bDywBjjqCB7ODXazzHnVWnG5Uqd1M=
cloud.google.com/go/securitycenter v1.24.2/go.mod h1:l1XejOngggzqwr4Fa2Cn+iWZGf+aBLTXtB/vXjy5vXM=
cloud.google.com/go/securitycenter v1.24.4 h1:/5jjkZ+uGe8hZ7pvd7pO30VW/a+pT2MrrdgOqjyucKQ=
cloud.google.com/go/securitycenter v1.24.4/go.mod h1:PSccin+o1EMYKcFQzz9HMMnZ2r9+7jbc+LvPjXhpwcU=
cloud.google.com/go/servicedirectory v1.11.3 h1:5niCMfkw+jifmFtbBrtRedbXkJm3fubSR/KHbxSJZVM=
cloud.google.com/go/servicedirectory v1.11.3/go.mod h1:LV+cHkomRLr67YoQy3Xq2tUXBGOs5z5bPofdq7qtiAw=
cloud.google.com/go/servicedirectory v1.11.4 h1:da7HFI1229kyzIyuVEzHXip0cw0d+E0s8mjQby0WN+k=
cloud.google.com/go/servicedirectory v1.11.4/go.mod h1:Bz2T9t+/Ehg6x+Y7Ycq5xiShYLD96NfEsWNHyitj1qM=
cloud.google.com/go/shell v1.7.4 h1:nurhlJcSVFZneoRZgkBEHumTYf/kFJptCK2eBUq/88M=
cloud.google.com/go/shell v1.7.4/go.mod h1:yLeXB8eKLxw0dpEmXQ/FjriYrBijNsONpwnWsdPqlKM=
cloud.google.com/go/shell v1.7.5 h1:3Fq2hzO0ZSyaqBboJrFkwwf/qMufDtqwwA6ep8EZxEI=
cloud.google.com/go/shell v1.7.5/go.mod h1:hL2++7F47/IfpfTO53KYf1EC+F56k3ThfNEXd4zcuiE=
cloud.google.com/go/spanner v1.53.0 h1:/NzWQJ1MEhdRcffiutRKbW/AIGVKhcTeivWTDjEyCCo=
cloud.google.com/go/spanner v1.53.0/go.mod h1:liG4iCeLqm5L3fFLU5whFITqP0e0orsAW1uUSrd4rws=
cloud.google.com/go/spanner v1.56.0 h1:o/Cv7/zZ1WgRXVCd5g3Nc23ZI39p/1pWFqFwvg6Wcu8=
cloud.google.com/go/spanner v1.56.0/go.mod h1:DndqtUKQAt3VLuV2Le+9Y3WTnq5cNKrnLb/Piqcj+h0=
cloud.google.com/go/speech v1.21.0 h1:qkxNao58oF8ghAHE1Eghen7XepawYEN5zuZXYWaUTA4=
cloud.google.com/go/speech v1.21.0/go.mod h1:wwolycgONvfz2EDU8rKuHRW3+wc9ILPsAWoikBEWavY=
cloud.google.com/go/speech v1.21.1 h1:nuFc+Kj5B8de75nN4FdPyUbI2SiBoHZG6BLurXL56Q0=
cloud.google.com/go/speech v1.21.1/go.mod h1:E5GHZXYQlkqWQwY5xRSLHw2ci5NMQNG52FfMU1aZrIA=
cloud.google.com/go/storage v1.0.0/go.mod h1:IhtSnM/ZTZV8YYJWCY8RULGVqBDmpoyjwiyrjsg+URw=
cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0ZeosJ0Rtdos=
@@ -351,51 +450,67 @@ cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RX
cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0=
cloud.google.com/go/storage v1.30.1 h1:uOdMxAs8HExqBlnLtnQyP0YkvbiDpdGShGKtx6U/oNM=
cloud.google.com/go/storage v1.30.1/go.mod h1:NfxhC0UJE1aXSx7CIIbCf7y9HKT7BiccwkR7+P7gN8E=
cloud.google.com/go/storage v1.38.0 h1:Az68ZRGlnNTpIBbLjSMIV2BDcwwXYlRlQzis0llkpJg=
cloud.google.com/go/storage v1.38.0/go.mod h1:tlUADB0mAb9BgYls9lq+8MGkfzOXuLrnHXlpHmvFJoY=
cloud.google.com/go/storagetransfer v1.10.3 h1:YM1dnj5gLjfL6aDldO2s4GeU8JoAvH1xyIwXre63KmI=
cloud.google.com/go/storagetransfer v1.10.3/go.mod h1:Up8LY2p6X68SZ+WToswpQbQHnJpOty/ACcMafuey8gc=
cloud.google.com/go/storagetransfer v1.10.4 h1:dy4fL3wO0VABvzM05ycMUPFHxTPbJz9Em8ikAJVqSbI=
cloud.google.com/go/storagetransfer v1.10.4/go.mod h1:vef30rZKu5HSEf/x1tK3WfWrL0XVoUQN/EPDRGPzjZs=
cloud.google.com/go/talent v1.6.5 h1:LnRJhhYkODDBoTwf6BeYkiJHFw9k+1mAFNyArwZUZAs=
cloud.google.com/go/talent v1.6.5/go.mod h1:Mf5cma696HmE+P2BWJ/ZwYqeJXEeU0UqjHFXVLadEDI=
cloud.google.com/go/talent v1.6.6 h1:JssV0CE3FNujuSWn7SkosOzg7qrMxVnt6txOfGcMSa4=
cloud.google.com/go/talent v1.6.6/go.mod h1:y/WQDKrhVz12WagoarpAIyKKMeKGKHWPoReZ0g8tseQ=
cloud.google.com/go/texttospeech v1.7.4 h1:ahrzTgr7uAbvebuhkBAAVU6kRwVD0HWsmDsvMhtad5Q=
cloud.google.com/go/texttospeech v1.7.4/go.mod h1:vgv0002WvR4liGuSd5BJbWy4nDn5Ozco0uJymY5+U74=
cloud.google.com/go/texttospeech v1.7.5 h1:dxY2Q5mHCbrGa3oPR2O3PCicdnvKa1JmwGQK36EFLOw=
cloud.google.com/go/texttospeech v1.7.5/go.mod h1:tzpCuNWPwrNJnEa4Pu5taALuZL4QRRLcb+K9pbhXT6M=
cloud.google.com/go/tpu v1.6.4 h1:XIEH5c0WeYGaVy9H+UueiTaf3NI6XNdB4/v6TFQJxtE=
cloud.google.com/go/tpu v1.6.4/go.mod h1:NAm9q3Rq2wIlGnOhpYICNI7+bpBebMJbh0yyp3aNw1Y=
cloud.google.com/go/tpu v1.6.5 h1:C8YyYda8WtNdBoCgFwwBzZd+S6+EScHOxM/z1h0NNp8=
cloud.google.com/go/tpu v1.6.5/go.mod h1:P9DFOEBIBhuEcZhXi+wPoVy/cji+0ICFi4TtTkMHSSs=
cloud.google.com/go/trace v1.10.4 h1:2qOAuAzNezwW3QN+t41BtkDJOG42HywL73q8x/f6fnM=
cloud.google.com/go/trace v1.10.4/go.mod h1:Nso99EDIK8Mj5/zmB+iGr9dosS/bzWCJ8wGmE6TXNWY=
cloud.google.com/go/trace v1.10.5 h1:0pr4lIKJ5XZFYD9GtxXEWr0KkVeigc3wlGpZco0X1oA=
cloud.google.com/go/trace v1.10.5/go.mod h1:9hjCV1nGBCtXbAE4YK7OqJ8pmPYSxPA0I67JwRd5s3M=
cloud.google.com/go/translate v1.9.3 h1:t5WXTqlrk8VVJu/i3WrYQACjzYJiff5szARHiyqqPzI=
cloud.google.com/go/translate v1.9.3/go.mod h1:Kbq9RggWsbqZ9W5YpM94Q1Xv4dshw/gr/SHfsl5yCZ0=
cloud.google.com/go/translate v1.10.1 h1:upovZ0wRMdzZvXnu+RPam41B0mRJ+coRXFP2cYFJ7ew=
cloud.google.com/go/translate v1.10.1/go.mod h1:adGZcQNom/3ogU65N9UXHOnnSvjPwA/jKQUMnsYXOyk=
cloud.google.com/go/video v1.20.3 h1:Xrpbm2S9UFQ1pZEeJt9Vqm5t2T/z9y/M3rNXhFoo8Is=
cloud.google.com/go/video v1.20.3/go.mod h1:TnH/mNZKVHeNtpamsSPygSR0iHtvrR/cW1/GDjN5+GU=
cloud.google.com/go/video v1.20.4 h1:TXwotxkShP1OqgKsbd+b8N5hrIHavSyLGvYnLGCZ7xc=
cloud.google.com/go/video v1.20.4/go.mod h1:LyUVjyW+Bwj7dh3UJnUGZfyqjEto9DnrvTe1f/+QrW0=
cloud.google.com/go/videointelligence v1.11.4 h1:YS4j7lY0zxYyneTFXjBJUj2r4CFe/UoIi/PJG0Zt/Rg=
cloud.google.com/go/videointelligence v1.11.4/go.mod h1:kPBMAYsTPFiQxMLmmjpcZUMklJp3nC9+ipJJtprccD8=
cloud.google.com/go/videointelligence v1.11.5 h1:mYaWH8uhUCXLJCN3gdXswKzRa2+lK0zN6/KsIubm6pE=
cloud.google.com/go/videointelligence v1.11.5/go.mod h1:/PkeQjpRponmOerPeJxNPuxvi12HlW7Em0lJO14FC3I=
cloud.google.com/go/vision/v2 v2.7.5 h1:T/ujUghvEaTb+YnFY/jiYwVAkMbIC8EieK0CJo6B4vg=
cloud.google.com/go/vision/v2 v2.7.5/go.mod h1:GcviprJLFfK9OLf0z8Gm6lQb6ZFUulvpZws+mm6yPLM=
cloud.google.com/go/vision/v2 v2.8.0 h1:W52z1b6LdGI66MVhE70g/NFty9zCYYcjdKuycqmlhtg=
cloud.google.com/go/vision/v2 v2.8.0/go.mod h1:ocqDiA2j97pvgogdyhoxiQp2ZkDCyr0HWpicywGGRhU=
cloud.google.com/go/vmmigration v1.7.4 h1:qPNdab4aGgtaRX+51jCOtJxlJp6P26qua4o1xxUDjpc=
cloud.google.com/go/vmmigration v1.7.4/go.mod h1:yBXCmiLaB99hEl/G9ZooNx2GyzgsjKnw5fWcINRgD70=
cloud.google.com/go/vmmigration v1.7.5 h1:5v9RT2vWyuw3pK2ox0HQpkoftO7Q7/8591dTxxQc79g=
cloud.google.com/go/vmmigration v1.7.5/go.mod h1:pkvO6huVnVWzkFioxSghZxIGcsstDvYiVCxQ9ZH3eYI=
cloud.google.com/go/vmwareengine v1.0.3 h1:WY526PqM6QNmFHSqe2sRfK6gRpzWjmL98UFkql2+JDM=
cloud.google.com/go/vmwareengine v1.0.3/go.mod h1:QSpdZ1stlbfKtyt6Iu19M6XRxjmXO+vb5a/R6Fvy2y4=
cloud.google.com/go/vmwareengine v1.1.1 h1:EGdDi9QbqThfZq3ILcDK5g+m9jTevc34AY5tACx5v7k=
cloud.google.com/go/vmwareengine v1.1.1/go.mod h1:nMpdsIVkUrSaX8UvmnBhzVzG7PPvNYc5BszcvIVudYs=
cloud.google.com/go/vpcaccess v1.7.4 h1:zbs3V+9ux45KYq8lxxn/wgXole6SlBHHKKyZhNJoS+8=
cloud.google.com/go/vpcaccess v1.7.4/go.mod h1:lA0KTvhtEOb/VOdnH/gwPuOzGgM+CWsmGu6bb4IoMKk=
cloud.google.com/go/vpcaccess v1.7.5 h1:XyL6hTLtEM/eE4F1GEge8xUN9ZCkiVWn44K/YA7z1rQ=
cloud.google.com/go/vpcaccess v1.7.5/go.mod h1:slc5ZRvvjP78c2dnL7m4l4R9GwL3wDLcpIWz6P/ziig=
cloud.google.com/go/webrisk v1.9.4 h1:iceR3k0BCRZgf2D/NiKviVMFfuNC9LmeNLtxUFRB/wI=
cloud.google.com/go/webrisk v1.9.4/go.mod h1:w7m4Ib4C+OseSr2GL66m0zMBywdrVNTDKsdEsfMl7X0=
cloud.google.com/go/webrisk v1.9.5 h1:251MvGuC8wisNN7+jqu9DDDZAi38KiMXxOpA/EWy4dE=
cloud.google.com/go/webrisk v1.9.5/go.mod h1:aako0Fzep1Q714cPEM5E+mtYX8/jsfegAuS8aivxy3U=
cloud.google.com/go/websecurityscanner v1.6.4 h1:5Gp7h5j7jywxLUp6NTpjNPkgZb3ngl0tUSw6ICWvtJQ=
cloud.google.com/go/websecurityscanner v1.6.4/go.mod h1:mUiyMQ+dGpPPRkHgknIZeCzSHJ45+fY4F52nZFDHm2o=
cloud.google.com/go/websecurityscanner v1.6.5 h1:YqWZrZYabG88TZt7364XWRJGhxmxhony2ZUyZEYMF2k=
cloud.google.com/go/websecurityscanner v1.6.5/go.mod h1:QR+DWaxAz2pWooylsBF854/Ijvuoa3FCyS1zBa1rAVQ=
cloud.google.com/go/workflows v1.12.3 h1:qocsqETmLAl34mSa01hKZjcqAvt699gaoFbooGGMvaM=
cloud.google.com/go/workflows v1.12.3/go.mod h1:fmOUeeqEwPzIU81foMjTRQIdwQHADi/vEr1cx9R1m5g=
cloud.google.com/go/workflows v1.12.4 h1:uHNmUiatTbPQ4H1pabwfzpfEYD4BBnqDHqMm2IesOh4=
cloud.google.com/go/workflows v1.12.4/go.mod h1:yQ7HUqOkdJK4duVtMeBCAOPiN1ZF1E9pAMX51vpwB/w=
collectd.org v0.3.0 h1:iNBHGw1VvPJxH2B6RiFWFZ+vsjo1lCdRszBeOuwGi00=
collectd.org v0.3.0/go.mod h1:A/8DzQBkF6abtvrT2j/AU/4tiBgJWYyh0y/oB/4MlWE=
@@ -442,6 +557,7 @@ github.com/CloudyKit/fastprinter v0.0.0-20200109182630-33d98a066a53/go.mod h1:+3
github.com/CloudyKit/jet/v6 v6.1.0/go.mod h1:d3ypHeIRNo2+XyqnGA8s+aphtcVpjP5hPwP/Lzo7Ro4=
github.com/DATA-DOG/go-sqlmock v1.3.3 h1:CWUqKXe0s8A2z6qCgkP4Kru7wC11YoAnoupUKFDnH08=
github.com/DATA-DOG/go-sqlmock v1.3.3/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/DataDog/datadog-go v3.2.0+incompatible h1:qSG2N4FghB1He/r2mFrWKCaL7dXCilEuNEeAn20fdD4=
github.com/DataDog/datadog-go v3.2.0+incompatible/go.mod h1:LButxg5PwREeZtORoXG3tL4fMGNddJ+vMq1mwgfaqoQ=
github.com/HdrHistogram/hdrhistogram-go v1.1.2/go.mod h1:yDgFjdqOqDEKOvasDdhWNXYg9BVp4O+o5f6V/ehm6Oo=
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
@@ -469,6 +585,7 @@ github.com/akavel/rsrc v0.10.2/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxk
github.com/alecthomas/kingpin/v2 v2.4.0 h1:f48lwail6p8zpO1bC4TxtqACaGqHYA22qkHjHpqDjYY=
github.com/alecthomas/kingpin/v2 v2.4.0/go.mod h1:0gyi0zQnjuFk8xrkNKamJoyUo382HRL7ATRpFZCw6tE=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751 h1:JYp7IbQjafoB+tBA3gMyHYHrpOtNuDiK/uB5uXxq5wM=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho=
@@ -484,6 +601,7 @@ github.com/apache/arrow/go/arrow v0.0.0-20191024131854-af6fa24be0db h1:nxAtV4Vaj
github.com/apache/arrow/go/arrow v0.0.0-20191024131854-af6fa24be0db/go.mod h1:VTxUBvSJ3s3eHAg65PNgrsn5BtqCRPdmyXh6rAfdxN0=
github.com/apache/arrow/go/v12 v12.0.0 h1:xtZE63VWl7qLdB0JObIXvvhGjoVNrQ9ciIHG2OK5cmc=
github.com/apache/arrow/go/v12 v12.0.0/go.mod h1:d+tV/eHZZ7Dz7RPrFKtPK02tpr+c9/PEd/zm8mDS9Vg=
github.com/apache/arrow/go/v14 v14.0.2 h1:N8OkaJEOfI3mEZt07BIkvo4sC6XDbL+48MBPWO5IONw=
github.com/apache/arrow/go/v14 v14.0.2/go.mod h1:u3fgh3EdgN/YQ8cVQRguVW3R+seMybFg8QBQ5LU+eBY=
github.com/apache/thrift v0.16.0 h1:qEy6UW60iVOlUy+b9ZR0d5WzUWYGOo4HfopoyBaNmoY=
github.com/apache/thrift v0.16.0/go.mod h1:PHK3hniurgQaNMZYaCLEqXKsYK8upmhPbmdP2FXSqgU=
@@ -496,8 +614,10 @@ github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2 h1:7Ip0wMmLHLRJdrloD
github.com/armon/circbuf v0.0.0-20190214190532-5111143e8da2/go.mod h1:3U/XgcO3hCbHZ8TKRvWD2dDTCfh9M9ya+I9JpbB7O8o=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6 h1:G1bPvciwNyF7IUmKXNt9Ak3m6u9DE1rF+RmtIkBpVdA=
github.com/armon/go-metrics v0.0.0-20180917152333-f0300d1749da/go.mod h1:Q73ZrmVTwzkszR9V5SSuryQ31EELlFMUz1kKyl939pY=
github.com/armon/go-metrics v0.4.1 h1:hR91U9KYmb6bLBYLQjyM+3j+rcd/UhE+G78SFnF8gJA=
github.com/armon/go-metrics v0.4.1/go.mod h1:E6amYzXo6aW1tqzoZGT755KkbgrJsSdpwZ+3JqfkOG4=
github.com/armon/go-radix v0.0.0-20180808171621-7fddfc383310/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/armon/go-radix v1.0.0 h1:F4z6KzEeeQIMeLFa97iZU6vupzoecKdU5TX24SNppXI=
github.com/armon/go-radix v1.0.0/go.mod h1:ufUuZ+zHj4x4TnLV4JWEpy2hxWSpsRywHrMgIH9cCH8=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a h1:idn718Q4B6AGu/h5Sxe66HYVdqdGu2l9Iebqhi/AEoA=
github.com/asaskevich/govalidator v0.0.0-20190424111038-f61b66f89f4a/go.mod h1:lB+ZfQJz7igIIfQNfa7Ml4HSf2uFQQRzpGGRXenZAgY=
@@ -527,13 +647,16 @@ github.com/aws/aws-sdk-go-v2/service/sso v1.1.1/go.mod h1:SuZJxklHxLAXgLTc1iFXbE
github.com/aws/aws-sdk-go-v2/service/sts v1.1.1/go.mod h1:Wi0EBZwiz/K44YliU0EKxqTCJGUfYTWXrrBwkq736bM=
github.com/aws/smithy-go v1.1.0/go.mod h1:EzMw8dbp/YJL4A5/sbhGddag+NPT7q084agLbB9LgIw=
github.com/aymerick/douceur v0.2.0/go.mod h1:wlT5vV2O3h55X9m7iVYN0TBM0NH/MmbLnd30/FjWUq4=
github.com/benbjohnson/clock v1.1.0 h1:Q92kusRqC1XV2MjkWETPvjJVqKetz1OzxZB7mHJLju8=
github.com/benbjohnson/clock v1.1.0/go.mod h1:J11/hYXuz8f4ySSvYwY0FKfm+ezbsZBKZxNJlLklBHA=
github.com/bgentry/speakeasy v0.1.0 h1:ByYyxL9InA1OWqxJqqp2A5pYHUrCiAL6K3J+LKSsQkY=
github.com/bgentry/speakeasy v0.1.0/go.mod h1:+zsyZBPWlz7T6j88CTgSN5bM796AkVf0kBD4zp0CCIs=
github.com/bitly/go-hostpool v0.1.0 h1:XKmsF6k5el6xHG3WPJ8U0Ku/ye7njX7W81Ng7O2ioR0=
github.com/bitly/go-simplejson v0.5.0 h1:6IH+V8/tVMab511d5bn4M7EwGXZf9Hj6i2xSwkNEM+Y=
github.com/bits-and-blooms/bitset v1.7.0/go.mod h1:gIdJ4wp64HaoK2YrL1Q5/N7Y16edYb8uY+O0FJTyyDA=
github.com/bits-and-blooms/bitset v1.12.0/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
github.com/bits-and-blooms/bitset v1.14.2/go.mod h1:7hO7Gc7Pp1vODcmWvKMRA9BNmbv6a/7QIWpPxHddWR8=
github.com/bketelsen/crypt v0.0.4 h1:w/jqZtC9YD4DS/Vp9GhWfWcCpuAL58oTnLoI8vE9YHU=
github.com/bketelsen/crypt v0.0.4/go.mod h1:aI6NrJ0pMGgvZKL1iVgXLnfIFJtfV+bKCoqOes/6LfM=
github.com/blang/semver/v4 v4.0.0 h1:1PFHFE6yCCTv8C1TeyNNarDzntLi7wMI5i/pzqYIsAM=
github.com/blang/semver/v4 v4.0.0/go.mod h1:IbckMUScFkM3pff0VJDNKRiT6TG/YpiHIM2yvyW5YoQ=
@@ -542,6 +665,7 @@ github.com/bmizerany/pat v0.0.0-20170815010413-6226ea591a40 h1:y4B3+GPxKlrigF1ha
github.com/bmizerany/pat v0.0.0-20170815010413-6226ea591a40/go.mod h1:8rLXio+WjiTceGBHIoTvn60HIbs7Hm7bcHjyrSqYB9c=
github.com/boltdb/bolt v1.3.1 h1:JQmyP4ZBrce+ZQu0dY660FMfatumYDLun9hBCUVIkF4=
github.com/boltdb/bolt v1.3.1/go.mod h1:clJnj/oiGkjum5o1McbSZDSLxVThjynRyGBgiAx27Ps=
github.com/btcsuite/btcd/btcec/v2 v2.2.0 h1:fzn1qaOt32TuLjFlkzYSsBC35Q3KUjT1SwPxiMSCF5k=
github.com/btcsuite/btcd/btcec/v2 v2.2.0/go.mod h1:U7MHm051Al6XmscBQ0BoNydpOTsFAn707034b5nY8zU=
github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1 h1:q0rUy8C/TYNBQS1+CGKw68tLOFYSNEs0TFnxxnS9+4U=
github.com/btcsuite/btcd/chaincfg/chainhash v1.0.1/go.mod h1:7SFka0XMvUgj3hfZtydOrQY2mwhPclbT2snogU7SQQc=
@@ -571,7 +695,9 @@ github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMn
github.com/chzyer/test v0.0.0-20210722231415-061457976a23/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU=
github.com/cilium/ebpf v0.9.1 h1:64sn2K3UKw8NbP/blsixRpF3nXuyhz/VjRlRzvlBRu4=
github.com/cilium/ebpf v0.9.1/go.mod h1:+OhNOIXx/Fnu1IE8bJz2dzOA+VSfyTfdNUVdlQnxUFY=
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible h1:C29Ae4G5GtYyYMm1aztcyj/J5ckgJm2zwdDajFbx1NY=
github.com/circonus-labs/circonus-gometrics v2.3.1+incompatible/go.mod h1:nmEj6Dob7S7YxXgwXpfOuvO54S+tGdZdw9fuRZt25Ag=
github.com/circonus-labs/circonusllhist v0.1.3 h1:TJH+oke8D16535+jHExHj4nQvzlZrj7ug5D7I/orNUA=
github.com/circonus-labs/circonusllhist v0.1.3/go.mod h1:kMXHVDlOchFAehlya5ePtbp5jckzBHf4XRpQvBOLI+I=
github.com/client9/misspell v0.3.4 h1:ta993UF76GwbvJcIo3Y68y/M3WxlpEHPWIGDkJYwzJI=
github.com/cloudflare/backoff v0.0.0-20161212185259-647f3cdfc87a h1:8d1CEOF1xldesKds5tRG3tExBsMOgWYownMHNCsev54=
@@ -586,6 +712,7 @@ github.com/cncf/udpa/go v0.0.0-20201120205902-5459f2c99403/go.mod h1:WmhPx2Nbnht
github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe h1:QQ3GSy+MqSHxm/d8nCtnAiZdYFd45cYZPs8vOOIYKfk=
github.com/cncf/udpa/go v0.0.0-20220112060539-c52dc94e7fbe/go.mod h1:6pvJx4me5XPnfI9Z40ddWsdw2W/uZgQLFXToKeRcDiI=
github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs=
github.com/cncf/xds/go v0.0.0-20231128003011-0fa0005c9caa h1:jQCWAUqqlij9Pgj2i/PB79y4KOPYVyFYdROxgaCwdTQ=
github.com/cncf/xds/go v0.0.0-20231128003011-0fa0005c9caa/go.mod h1:x/1Gn8zydmfq8dk6e9PdstVsDgu9RuyIIJqAaF//0IM=
github.com/cockroachdb/sentry-go v0.6.1-cockroachdb.2/go.mod h1:8BT+cPK6xvFOcRlk0R8eg+OTkcqI6baNH4xAkpiYVvQ=
github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM=
@@ -648,7 +775,9 @@ github.com/dchest/blake512 v1.0.0 h1:oDFEQFIqFSeuA34xLtXZ/rWxCXdSjirjzPhey5EUvmA
github.com/dchest/blake512 v1.0.0/go.mod h1:FV1x7xPPLWukZlpDpWQ88rF/SFwZ5qbskrzhLMB92JI=
github.com/deckarep/golang-set v1.8.0 h1:sk9/l/KqpunDwP7pSjUg0keiOOLEnOBHzykLrsPppp4=
github.com/deckarep/golang-set v1.8.0/go.mod h1:5nI87KwE7wgsBU1F4GKAw2Qod7p5kyS383rP6+o6qqo=
github.com/decred/dcrd/crypto/blake256 v1.0.0 h1:/8DMNYp9SGi5f0w7uCm6d6M4OU2rGFK09Y2A4Xv7EE0=
github.com/decred/dcrd/crypto/blake256 v1.0.0/go.mod h1:sQl2p6Y26YV+ZOcSTP6thNdn47hh8kt6rqSlvmrXFAc=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 h1:YLtO71vCjJRCBcrPMtQ9nqBsqpA1m5sE92cU+pd5Mcc=
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1/go.mod h1:hyedUtir6IdtD/7lIxGeCxkaw7y45JueMRL4DIyJDKs=
github.com/deepmap/oapi-codegen v1.6.0 h1:w/d1ntwh91XI0b/8ja7+u5SvA4IFfM0UNNLmiDR1gg0=
github.com/deepmap/oapi-codegen v1.6.0/go.mod h1:ryDa9AgbELGeB+YEXE1dR53yAjHwFvE9iAUlWl9Al3M=
@@ -688,9 +817,12 @@ github.com/envoyproxy/go-control-plane v0.9.9-0.20201210154907-fd9021fe5dad/go.m
github.com/envoyproxy/go-control-plane v0.9.9-0.20210217033140-668b12f5399d/go.mod h1:cXg6YxExXjJnVBQHBLXeUAgxn2UodCpnH306RInaBQk=
github.com/envoyproxy/go-control-plane v0.11.1 h1:wSUXTlLfiAQRWs2F+p+EKOY9rUyis1MyGqJ2DIk5HpM=
github.com/envoyproxy/go-control-plane v0.11.1/go.mod h1:uhMcXKCQMEJHiAb0w+YGefQLaTEw+YhGluxZkrTmD0g=
github.com/envoyproxy/go-control-plane v0.12.0 h1:4X+VP1GHd1Mhj6IB5mMeGbLCleqxjletLK6K0rbxyZI=
github.com/envoyproxy/go-control-plane v0.12.0/go.mod h1:ZBTaoJ23lqITozF0M6G4/IragXCQKCnYbmlmtHvwRG0=
github.com/envoyproxy/protoc-gen-validate v1.0.4 h1:gVPz/FMfvh57HdSJQyvBtF00j8JU4zdyUgIUNhlgg0A=
github.com/envoyproxy/protoc-gen-validate v1.0.4/go.mod h1:qys6tmnRsYrQqIhm2bvKZH4Blx/1gTIZ2UKVY1M+Yew=
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5 h1:Yzb9+7DPaBjB8zlTR87/ElzFsnQfuHnVUVqpZZIcV5Y=
github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4 h1:B2mpK+MNqgPqk2/KNi1LbqwtZDy5F7iy0mynQiBr8VA=
github.com/ethereum/c-kzg-4844/bindings/go v0.0.0-20230126171313-363c7d7593b4/go.mod h1:y4GA2JbAUama1S4QwYjC2hefgGLU8Ul0GMtL/ADMF1c=
github.com/ethereum/go-ethereum v1.10.26 h1:i/7d9RBBwiXCEuyduBQzJw/mKmnvzsN14jqBmytw72s=
github.com/ethereum/go-ethereum v1.10.26/go.mod h1:EYFyF19u3ezGLD4RqOkLq+ZCXzYbLoNDdZlMt7kyKFg=
@@ -700,6 +832,7 @@ github.com/fatih/color v1.7.0 h1:DkWD4oS2D8LGGgTQ6IvwJJXSL5Vp2ffcQg58nFV38Ys=
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
github.com/fatih/color v1.9.0/go.mod h1:eQcE1qtQxscV5RaZvpXrrb8Drkc3/DdQ+uUYCNjL+zU=
github.com/fatih/color v1.13.0/go.mod h1:kLAiJbzzSOZDVNGyDpeOxJ47H46qBXwg5ILebYFFOfk=
github.com/fatih/color v1.14.1 h1:qfhVLaG5s+nCROl1zJsZRxFeYrHLqWroPOQ8BWiNb4w=
github.com/fatih/color v1.14.1/go.mod h1:2oHN61fhTpgcxD3TSWCgKDiH1+x4OiDVVGH8WlgGZGg=
github.com/fatih/structs v1.1.0/go.mod h1:9NiDSp5zOcgEDl+j00MP/WkGVPOlPRLejGD8Ga6PJ7M=
github.com/fjl/gencodec v0.0.0-20220412091415-8bb9e558978c h1:CndMRAH4JIwxbW8KYq6Q+cGWcGHz0FjGR3QqcInWcW0=
@@ -733,6 +866,7 @@ github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1 h1:QbL/5oDUmRBzO9/Z7Seo
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72 h1:b+9H1GAsx5RsjvDFLoS5zkNBzIQMuVKUYQDmxU3N5XE=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4 h1:WtGNWLvXpe6ZudgnXrq0barxBImvnnJoMEhXAzcbM0I=
github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8=
github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vbaY=
github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU=
@@ -791,6 +925,7 @@ github.com/google/cel-go v0.12.6/go.mod h1:Jk7ljRzLBhkmiAwBoUxB1sZSCVBAzkqPF25ol
github.com/google/flatbuffers v1.11.0/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v2.0.8+incompatible h1:ivUb1cGomAB101ZM1T0nOiWz9pSrTMoa9+EiY7igmkM=
github.com/google/flatbuffers v2.0.8+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/flatbuffers v23.5.26+incompatible h1:M9dgRyhJemaM4Sw8+66GHBu8ioaQmyPLg1b8VwK5WJg=
github.com/google/flatbuffers v23.5.26+incompatible/go.mod h1:1AeVuKshWv4vARoZatz6mlQ0JxURH0Kv5+zNeJKJCa8=
github.com/google/go-cmp v0.4.1/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
@@ -800,6 +935,7 @@ github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/
github.com/google/go-cmp v0.5.4/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-containerregistry v0.14.0 h1:z58vMqHxuwvAsVwvKEkmVBz2TlgBgH5k6koEXBtlYkw=
github.com/google/go-containerregistry v0.14.0/go.mod h1:aiJ2fp/SXvkWgmYHioXnbMdlgB8eXiiYOY55gfN91Wk=
github.com/google/go-pkcs11 v0.2.1-0.20230907215043-c6f79328ddf9 h1:OF1IPgv+F4NmqmJ98KTjdN97Vs1JxDPB3vbmYzV2dpk=
github.com/google/go-pkcs11 v0.2.1-0.20230907215043-c6f79328ddf9/go.mod h1:6eQoGcuNJpa7jnd5pMGdkSaQpNDYvPlXWMcjXXThLlY=
github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU=
github.com/google/gofuzz v1.1.1-0.20200604201612-c04b05f3adfa/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
@@ -840,7 +976,9 @@ github.com/googleapis/gax-go/v2 v2.12.0 h1:A+gCJKdRfqXkr+BIRGtZLibNXf0m1f9E4HG56
github.com/googleapis/gax-go/v2 v2.12.0/go.mod h1:y+aIqrI5eb1YGMVJfuV3185Ts/D7qKpsEkdD5+I6QGU=
github.com/googleapis/gax-go/v2 v2.12.1/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc=
github.com/googleapis/gax-go/v2 v2.12.2/go.mod h1:61M8vcyyXR2kqKFxKrfA22jaA8JGF7Dc8App1U3H6jc=
github.com/googleapis/gax-go/v2 v2.12.3 h1:5/zPPDvw8Q1SuXjrqrZslrqT7dL/uJT2CQii/cLCKqA=
github.com/googleapis/gax-go/v2 v2.12.3/go.mod h1:AKloxT6GtNbaLm8QTNSidHUVsHYcBHwWRvkNFJUQcS4=
github.com/googleapis/google-cloud-go-testing v0.0.0-20210719221736-1c9a4c676720 h1:zC34cGQu69FG7qzJ3WiKW244WfhDC3xxYMeNOX2gtUQ=
github.com/googleapis/google-cloud-go-testing v0.0.0-20210719221736-1c9a4c676720/go.mod h1:dvDLG8qkwmyD9a/MJJN3XJcT3xFxOKAvTZGvuZmac9g=
github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl60c=
github.com/gorilla/mux v1.8.0/go.mod h1:DVbg23sWSpFRCP0SfiEN6jmj59UnW/n46BH5rLB71So=
@@ -859,8 +997,10 @@ github.com/guptarohit/asciigraph v0.5.5/go.mod h1:dYl5wwK4gNsnFf9Zp+l06rFiDZ5YtX
github.com/hanwen/go-fuse/v2 v2.2.0 h1:jo5QZYmBLNcl9ovypWaQ5yXMSSV+Ch68xoC3rtZvvBM=
github.com/hanwen/go-fuse/v2 v2.2.0/go.mod h1:B1nGE/6RBFyBRC1RRnf23UpwCdyJ31eukw34oAKukAc=
github.com/hashicorp/consul/api v1.1.0/go.mod h1:VmuI/Lkw1nC05EYQWNKwWGbkg+FbDBtguAZLlVdkD9Q=
github.com/hashicorp/consul/api v1.28.2 h1:mXfkRHrpHN4YY3RqL09nXU1eHKLNiuAN4kHvDQ16k/8=
github.com/hashicorp/consul/api v1.28.2/go.mod h1:KyzqzgMEya+IZPcD65YFoOVAgPpbfERu4I/tzG6/ueE=
github.com/hashicorp/consul/sdk v0.1.1/go.mod h1:VKf9jXwCTEY1QZP2MOLRhb5i/I/ssyNV1vwHyQBF0x8=
github.com/hashicorp/consul/sdk v0.16.0 h1:SE9m0W6DEfgIVCJX7xU+iv/hUl4m/nxqMTnCdMxDpJ8=
github.com/hashicorp/consul/sdk v0.16.0/go.mod h1:7pxqqhqoaPqnBnzXD1StKed62LqJeClzVsUEy85Zr0A=
github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
github.com/hashicorp/go-cleanhttp v0.5.1/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtngrth3wmdIIUrZ80=
@@ -868,11 +1008,13 @@ github.com/hashicorp/go-cty-funcs v0.0.0-20230405223818-a090f58aa992 h1:fYOrSfO5
github.com/hashicorp/go-cty-funcs v0.0.0-20230405223818-a090f58aa992/go.mod h1:Abjk0jbRkDaNCzsRhOv2iDCofYpX1eVsjozoiK63qLA=
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
github.com/hashicorp/go-hclog v1.2.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c=
github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc=
github.com/hashicorp/go-immutable-radix v1.3.1/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
github.com/hashicorp/go-msgpack v0.5.3/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-msgpack v0.5.5 h1:i9R9JSrqIz0QVLz3sz+i3YJdT7TTSLcfLLzJi9aZTuI=
github.com/hashicorp/go-msgpack v0.5.5/go.mod h1:ahLV/dePpqEmjfWmKiqvPkv/twdG7iPBM1vqhUKIvfM=
github.com/hashicorp/go-multierror v1.0.0/go.mod h1:dHtQlpGsu+cZNNAkkCN/P3hoUDHhCYQXV3UM06sGGrk=
github.com/hashicorp/go-multierror v1.1.0/go.mod h1:spPvp8C1qA32ftKqdAHm4hHTbPw+vmowP0z+KUhOZdA=
@@ -881,13 +1023,18 @@ github.com/hashicorp/go-retryablehttp v0.7.2 h1:AcYqCvkpalPnPF2pn0KamgwamS42TqUD
github.com/hashicorp/go-retryablehttp v0.7.2/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8=
github.com/hashicorp/go-retryablehttp v0.7.4/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8=
github.com/hashicorp/go-rootcerts v1.0.0/go.mod h1:K6zTfqpRlCUIjkwsN4Z+hiSfzSTQa6eBIzfwKfwNnHU=
github.com/hashicorp/go-rootcerts v1.0.2 h1:jzhAVGtqPKbwpyCPELlgNWhE1znq+qwJtW5Oi2viEzc=
github.com/hashicorp/go-rootcerts v1.0.2/go.mod h1:pqUvnprVnM5bf7AOirdbb01K4ccR319Vf4pU3K5EGc8=
github.com/hashicorp/go-sockaddr v1.0.0/go.mod h1:7Xibr9yA9JjQq1JpNB2Vw7kxv8xerXegt+ozgdvDeDU=
github.com/hashicorp/go-sockaddr v1.0.2 h1:ztczhD1jLxIRjVejw8gFomI1BQZOe2WoVOu0SyteCQc=
github.com/hashicorp/go-sockaddr v1.0.2/go.mod h1:rB4wwRAUzs07qva3c5SdrY/NEtAUjGlgmH/UkBUC97A=
github.com/hashicorp/go-syslog v1.0.0 h1:KaodqZuhUoZereWVIYmpUgZysurB1kBLX2j0MwMrUAE=
github.com/hashicorp/go-syslog v1.0.0/go.mod h1:qPfqrKkXGihmCqbJM2mZgkZGvKG1dFdvsLplgctolz4=
github.com/hashicorp/go-uuid v1.0.0/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go-uuid v1.0.3 h1:2gKiV6YVmrJ1i2CKKa9obLvRieoRGviZFL26PcT/Co8=
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
github.com/hashicorp/go.net v0.0.1 h1:sNCoNyDEvN1xa+X0baata4RdcpKwcMS6DH+xwfqPgjw=
github.com/hashicorp/go.net v0.0.1/go.mod h1:hjKkEWcCURg++eb33jQU7oqQcI9XDCnUzHA0oac0k90=
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
@@ -895,12 +1042,16 @@ github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+l
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/hashicorp/hcl/v2 v2.19.1 h1://i05Jqznmb2EXqa39Nsvyan2o5XyMowW5fnCKW5RPI=
github.com/hashicorp/hcl/v2 v2.19.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE=
github.com/hashicorp/logutils v1.0.0 h1:dLEQVugN8vlakKOUE3ihGLTZJRB4j+M2cdTm/ORI65Y=
github.com/hashicorp/logutils v1.0.0/go.mod h1:QIAnNjmIWmVIIkWDTG1z5v++HQmx9WQRO+LraFDTW64=
github.com/hashicorp/mdns v1.0.0/go.mod h1:tL+uN++7HEJ6SQLQ2/p+z2pH24WQKWjBPkE0mNTz8vQ=
github.com/hashicorp/mdns v1.0.4 h1:sY0CMhFmjIPDMlTB+HfymFHCaYLhgifZ0QhjaYKD/UQ=
github.com/hashicorp/mdns v1.0.4/go.mod h1:mtBihi+LeNXGtG8L9dX59gAEa12BDtBQSp4v/YAJqrc=
github.com/hashicorp/memberlist v0.1.3/go.mod h1:ajVTdAv/9Im8oMAAj5G31PhhMCZJV2pPBoIllUwCN7I=
github.com/hashicorp/memberlist v0.5.0 h1:EtYPN8DpAURiapus508I4n9CzHs2W+8NZGbmmR/prTM=
github.com/hashicorp/memberlist v0.5.0/go.mod h1:yvyXLpo0QaGE59Y7hDTsTzDD25JYBZ4mHgHUZ8lrOI0=
github.com/hashicorp/serf v0.8.2/go.mod h1:6hOLApaqBFA1NXqRQAsxw9QxuDEvNxSQRwA/JwenrHc=
github.com/hashicorp/serf v0.10.1 h1:Z1H2J60yRKvfDYAOZLd2MU0ND4AH/WDz7xYHDWQsIPY=
github.com/hashicorp/serf v0.10.1/go.mod h1:yL2t6BqATOLGc5HF7qbFkTfXoPIY0WZdWHfEvMqbG+4=
github.com/holiman/uint256 v1.2.4/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E=
github.com/hpcloud/tail v1.0.0 h1:nfCOvKYfkgYP8hkirhJocXT2+zOD8yUNjXaWfTlyFKI=
@@ -908,9 +1059,11 @@ github.com/huin/goupnp v1.0.2/go.mod h1:0dxJBVBHqTMjIUMkESDTNgOOx/Mw5wYIfyFmdzSa
github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150 h1:vlNjIqmUZ9CMAWsbURYl3a6wZbw7q5RHVvlXTNS/Bs8=
github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150/go.mod h1:PpLOETDnJ0o3iZrZfqZzyLl6l7F3c6L1oWn7OICBi6o=
github.com/hydrogen18/memlistener v1.0.0/go.mod h1:qEIFzExnS6016fRpRfxrExeVn2gbClQA99gQhnIcdhE=
github.com/iancoleman/strcase v0.3.0 h1:nTXanmYxhfFAMjZL34Ov6gkzEsSJZ5DbhxWjvSASxEI=
github.com/iancoleman/strcase v0.3.0/go.mod h1:iwCmte+B7n89clKwxIoIXy/HfoL7AsD47ZCWhYzw7ho=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6 h1:UDMh68UUwekSh5iP2OMhRRZJiiBccgV7axzUG8vi56c=
github.com/ianlancetaylor/demangle v0.0.0-20181102032728-5e5cf60278f6/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639 h1:mV02weKRL81bEnm8A0HT1/CAelMQDBuQIfLw8n+d6xI=
github.com/ianlancetaylor/demangle v0.0.0-20200824232613-28f6c0f3b639/go.mod h1:aSSvb/t6k1mPoxDqO4vJh6VOCGPwU4O0C2/Eqndh1Sc=
github.com/ianlancetaylor/demangle v0.0.0-20220319035150-800ac71e25c2/go.mod h1:aYm2/VgdVmcIU8iMfdMvDMsRAQjcfZSKFby6HOFvi/w=
github.com/iden3/go-iden3-crypto v0.0.15/go.mod h1:dLpM4vEPJ3nDHzhWFXDjzkn1qHoBeOT/3UEhXsEsP3E=
@@ -1000,11 +1153,13 @@ github.com/klauspost/pgzip v1.0.2-0.20170402124221-0bf5dcad4ada/go.mod h1:Ch1tH6
github.com/knz/go-libedit v1.10.1 h1:0pHpWtx9vcvC0xGZqEQlQdfSQs7WRlAjuPvk3fOZDCo=
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/fs v0.1.0 h1:Jskdu9ieNAYnjxsi0LbQp1ulIKZV1LAFgK1tWhpZgl8=
github.com/kr/fs v0.1.0/go.mod h1:FFnZGqtBN9Gxj7eW1uZ42v5BccTP0vu6NEaFoC2HwRg=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515 h1:T+h1c/A9Gawja4Y9mFVWj2vyii2bbUNDw3kt9VxK2EY=
github.com/kr/pty v1.1.1 h1:VkoXIwSboBpnk99O/KFauAEILuNHv5DVFKZMBN/gUgw=
github.com/kylelemons/go-gypsy v1.0.0 h1:7/wQ7A3UL1bnqRMnZ6T8cwCOArfZCxFmb1iTxaOOo1s=
github.com/kylelemons/go-gypsy v1.0.0/go.mod h1:chkXM0zjdpXOiqkCW1XcCHDfjfk14PH2KKkQWxfJUcU=
github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc=
github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw=
github.com/labstack/echo/v4 v4.2.1 h1:LF5Iq7t/jrtUuSutNuiEWtB5eiHfZ5gSe2pcu5exjQw=
github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg=
@@ -1027,6 +1182,7 @@ github.com/lestrrat-go/option v1.0.0/go.mod h1:5ZHFbivi4xwXxhxY9XHDe2FHo6/Z7WWmt
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3 h1:jUp75lepDg0phMUJBCmvaeFDldD2N3S1lBuPwUTszio=
github.com/linuxkit/virtsock v0.0.0-20201010232012-f8cee7dfc7a3/go.mod h1:3r6x7q95whyfWQpmGZTu3gk3v2YkMi05HEzl7Tf7YEo=
github.com/lyft/protoc-gen-star/v2 v2.0.3 h1:/3+/2sWyXeMLzKd1bX+ixWKgEMsULrIivpDsuaF441o=
github.com/lyft/protoc-gen-star/v2 v2.0.3/go.mod h1:amey7yeodaJhXSbf/TlLvWiqQfLOSpEk//mLlc+axEk=
github.com/magiconair/properties v1.8.5/go.mod h1:y3VJvCyxH9uVvJTWEGAELF3aiYNyPKd5NZ3oSwXrF60=
github.com/mailgun/raymond/v2 v2.0.46/go.mod h1:lsgvL50kgt1ylcFJYZiULi5fjPBkkhNfj4KA0W54Z18=
@@ -1061,6 +1217,7 @@ github.com/microsoft/go-mssqldb v1.6.0 h1:mM3gYdVwEPFrlg/Dvr2DNVEgYFG7L42l+dGc67
github.com/microsoft/go-mssqldb v1.6.0/go.mod h1:00mDtPbeQCRGC1HwOOR5K/gr30P1NcEG0vx6Kbv2aJU=
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
github.com/miekg/dns v1.1.26/go.mod h1:bPDLeHnStXmXAq1m/Ch/hvfNHr14JKNPMBo3VZKjuso=
github.com/miekg/dns v1.1.41 h1:WMszZWJG0XmzbK9FEmzH2TVcqYzFesusSIB41b8KHxY=
github.com/miekg/dns v1.1.41/go.mod h1:p6aan82bvRIyn+zDIv9xYNUpwa73JcSh9BKwknJysuI=
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8 h1:AMFGa4R4MiIpspGNG7Z948v4n35fFGB3RR3G/ry4FWs=
github.com/minio/asm2plan9s v0.0.0-20200509001527-cdd76441f9d8/go.mod h1:mC1jAcsrzbxHt8iiaC+zU4b1ylILSosueou12R++wfY=
@@ -1071,18 +1228,22 @@ github.com/minio/sha256-simd v1.0.0/go.mod h1:OuYzVNI5vcoYIAmbIvHPl3N3jUzVedXbKy
github.com/mistifyio/go-zfs/v3 v3.0.1 h1:YaoXgBePoMA12+S1u/ddkv+QqxcfiZK4prI6HPnkFiU=
github.com/mistifyio/go-zfs/v3 v3.0.1/go.mod h1:CzVgeB0RvF2EGzQnytKVvVSDwmKJXxkOTUGbNrTja/k=
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
github.com/mitchellh/cli v1.1.0 h1:tEElEatulEHDeedTxwckzyYMA5c86fbmNIUL1hBIiTg=
github.com/mitchellh/cli v1.1.0/go.mod h1:xcISNoH86gajksDmfB23e/pu+B+GeFRMYmoHXxx3xhI=
github.com/mitchellh/go-homedir v1.0.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-homedir v1.1.0 h1:lukF9ziXFxDFPkA1vsr5zpc1XuPDn/wFntq5mG+4E0Y=
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/go-ps v1.0.0 h1:i6ampVEEF4wQFF+bkYfwYgY+F/uYJDktmvLPf7qIgjc=
github.com/mitchellh/go-ps v1.0.0/go.mod h1:J4lOc8z8yJs6vUwklHw2XEIiT4z4C40KtWVN3nvg8Pg=
github.com/mitchellh/go-testing-interface v1.0.0 h1:fzU/JVNcaqHQEcVFAKeR41fkiLdIPrefOvVG1VZ96U0=
github.com/mitchellh/go-testing-interface v1.0.0/go.mod h1:kRemZodwjscx+RGhAo8eIhFbs2+BFgRtFPeD/KE+zxI=
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7 h1:DpOJ2HYzCv8LZP15IdmG+YdwD2luVPHITV96TkirNBM=
github.com/mitchellh/go-wordwrap v0.0.0-20150314170334-ad45545899c7/go.mod h1:ZXFpozHsX6DPmq2I0TCekCxypsnAUbP2oI0UX1GXzOo=
github.com/mitchellh/gox v0.4.0 h1:lfGJxY7ToLJQjHHwi0EX6uYBdK78egf954SQl13PQJc=
github.com/mitchellh/gox v0.4.0/go.mod h1:Sd9lOJ0+aimLBi73mGofS1ycjY8lL3uZM3JPS42BGNg=
github.com/mitchellh/hashstructure/v2 v2.0.2 h1:vGKWl0YJqUNxE8d+h8f6NJLcCJrgbhC4NcD46KavDd4=
github.com/mitchellh/hashstructure/v2 v2.0.2/go.mod h1:MG3aRVU/N29oo/V/IhBX8GR/zz4kQkprJgF2EVszyDE=
github.com/mitchellh/iochan v1.0.0 h1:C+X3KsSTLFVBr/tK1eYN/vs4rJcvsiLU338UhYPJWeY=
github.com/mitchellh/iochan v1.0.0/go.mod h1:JwYml1nuB7xOzsp52dPpHFffvOCDupsG0QubkSMEySY=
github.com/mitchellh/mapstructure v0.0.0-20160808181253-ca63d7c062ee/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mmcloughlin/profile v0.1.1 h1:jhDmAqPyebOsVDOCICJoINoLb/AnLBaUw58nFzxWS2w=
@@ -1098,8 +1259,11 @@ github.com/naoina/go-stringutil v0.1.0 h1:rCUeRUHjBjGTSHl0VC00jUPLz8/F9dDzYI70Hz
github.com/naoina/go-stringutil v0.1.0/go.mod h1:XJ2SJL9jCtBh+P9q5btrd/Ylo8XwT/h1USek5+NqSA0=
github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416 h1:shk/vn9oCoOTmwcouEdwIeOtOGA/ELRUw/GwvxwfT+0=
github.com/naoina/toml v0.1.2-0.20170918210437-9fafd6967416/go.mod h1:NBIhNtsFMo3G2szEBne+bO4gS192HuIYRqfvOWb4i1E=
github.com/nats-io/nats.go v1.34.0 h1:fnxnPCNiwIG5w08rlMcEKTUw4AV/nKyGCOJE8TdhSPk=
github.com/nats-io/nats.go v1.34.0/go.mod h1:Ubdu4Nh9exXdSz0RVWRFBbRfrbSxOYd26oF0wkWclB8=
github.com/nats-io/nkeys v0.4.7 h1:RwNJbbIdYCoClSDNY7QVKZlyb/wfT6ugvFCiKy6vDvI=
github.com/nats-io/nkeys v0.4.7/go.mod h1:kqXRgRDPlGy7nGaEDMuYzmiJCIAAWDK0IMBtDmGD0nc=
github.com/nats-io/nuid v1.0.1 h1:5iA8DT8V7q8WK2EScv2padNa/rTESc1KdnPw4TC2paw=
github.com/nats-io/nuid v1.0.1/go.mod h1:19wcPz3Ph3q0Jbyiqsd0kePYG7A95tJPxeL+1OSON2c=
github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86 h1:D6paGObi5Wud7xg83MaEFyjxQB1W5bz5d0IFppr+ymk=
github.com/neelance/astrewrite v0.0.0-20160511093645-99348263ae86/go.mod h1:kHJEU3ofeGjhHklVoIGuVj85JJwZ6kWPaJwCIxgnFmo=
@@ -1118,6 +1282,7 @@ github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc
github.com/package-url/packageurl-go v0.1.1-0.20220428063043-89078438f170 h1:DiLBVp4DAcZlBVBEtJpNWZpZVq0AEeCY7Hqk8URVs4o=
github.com/package-url/packageurl-go v0.1.1-0.20220428063043-89078438f170/go.mod h1:uQd4a7Rh3ZsVg5j0lNyAfyxIeGde9yrlhjF78GzeW0c=
github.com/pascaldekloe/goe v0.0.0-20180627143212-57f6aae5913c/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/pascaldekloe/goe v0.1.0 h1:cBOtyMzM9HTpWjXfbbunk26uA6nG3a8n06Wieeh0MwY=
github.com/pascaldekloe/goe v0.1.0/go.mod h1:lzWF7FIEvWOWxwDKqyGYQf6ZUaNfKdP144TG7ZOy1lc=
github.com/paulbellamy/ratecounter v0.2.0 h1:2L/RhJq+HA8gBQImDXtLPrDXK5qAj6ozWVK/zFXVJGs=
github.com/paulbellamy/ratecounter v0.2.0/go.mod h1:Hfx1hDpSGoqxkVVpBi/IlYD7kChlfo5C6hzIHwPqfFE=
@@ -1138,10 +1303,12 @@ github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e h1:aoZm08cpOy4WuID//EZDgc
github.com/pkg/profile v1.5.0 h1:042Buzk+NhDI+DeSAA62RwJL8VAuZUMQZUjCsRz1Mug=
github.com/pkg/profile v1.5.0/go.mod h1:qBsxPvzyUincmltOk6iyRVxHYg4adc0OFOv72ZdLa18=
github.com/pkg/sftp v1.10.1/go.mod h1:lYOWFsE0bwd1+KfKJaKeuokY15vzFx25BLbzYYoAxZI=
github.com/pkg/sftp v1.13.6 h1:JFZT4XbOU7l77xGSpOdW+pwIMqP044IyjXX6FGyEKFo=
github.com/pkg/sftp v1.13.6/go.mod h1:tz1ryNURKu77RL+GuCzmoJYxQczL3wLNNpPWagdg4Qk=
github.com/pkg/term v0.0.0-20180730021639-bffc007b7fd5 h1:tFwafIEMf0B7NlcxV/zJ6leBIa81D3hgGSgsE5hCkOQ=
github.com/pkg/term v0.0.0-20180730021639-bffc007b7fd5/go.mod h1:eCbImbZ95eXtAUIbLAuAVnBnwf83mjf6QIVH8SHYwqQ=
github.com/posener/complete v1.1.1/go.mod h1:em0nMJCgc9GFtwrmVmEMR/ZL6WyhyjMBndrE9hABlRI=
github.com/posener/complete v1.2.3 h1:NP0eAhjcjImqslEwo/1hq7gpajME0fTLTezBKDqfXqo=
github.com/posener/complete v1.2.3/go.mod h1:WZIdtGGp+qx0sLrYKtIRAruyNpv6hFCicSgv7Sy7s/s=
|
||||
github.com/pquerna/cachecontrol v0.1.0 h1:yJMy84ti9h/+OEWa752kBTKv4XC30OtVVHYv/8cTqKc=
|
||||
github.com/pquerna/cachecontrol v0.1.0/go.mod h1:NrUG3Z7Rdu85UNR3vm7SOsl1nFIeSiQnrHV5K9mBcUI=
|
||||
@@ -1170,7 +1337,9 @@ github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncj
|
||||
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
|
||||
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f h1:UFr9zpz4xgTnIE5yIMtWAMngCdZ9p/+q6lTbgelo80M=
|
||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/sagikazarmark/crypt v0.19.0 h1:WMyLTjHBo64UvNcWqpzY3pbZTYgnemZU8FBZigKc42E=
|
||||
github.com/sagikazarmark/crypt v0.19.0/go.mod h1:c6vimRziqqERhtSe0MhIvzE1w54FrCHtrXb5NH/ja78=
|
||||
github.com/schollz/closestmatch v2.1.0+incompatible/go.mod h1:RtP1ddjLong6gTkbtmuhtR2uUrrJOpYzYRvbcPAid+g=
|
||||
github.com/scroll-tech/da-codec v0.0.0-20240605080813-32bfc9fccde7/go.mod h1:1wWYii0OPwd5kw+xrz0PFgS420xNadrNF1x/ELJT+TM=
|
||||
@@ -1186,11 +1355,15 @@ github.com/scroll-tech/da-codec v0.1.1-0.20241005172014-aca0bef21638 h1:2KIfClLB
|
||||
github.com/scroll-tech/da-codec v0.1.1-0.20241005172014-aca0bef21638/go.mod h1:6jxEQvNc7GQKMSUi25PthAUY3WnZL8CN0yWivBgAXi0=
|
||||
github.com/scroll-tech/da-codec v0.1.1-0.20241014152913-2703f226fb0b h1:5H6V6ybacXFJ2ti+eFwtf+12Otufod6goxK6/u7Nu1k=
|
||||
github.com/scroll-tech/da-codec v0.1.1-0.20241014152913-2703f226fb0b/go.mod h1:48uxaqVgpD8ulH8p+nrBtfeLHZ9tX82bVVdPNkW3rPE=
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250227072756-a1482833595f h1:YYbhuUwjowqI4oyXtECRofck7Fyj18e1tcRjuQlZpJE=
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250227072756-a1482833595f/go.mod h1:xECEHZLVzbdUn+tNbRJhRIjLGTOTmnFQuTgUTeVLX58=
|
||||
github.com/scroll-tech/da-codec v0.1.3-0.20250519114140-bfa7133d4ad1/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e/go.mod h1:swB5NSp8pKNDuYsTxfR08bHS6L56i119PBx8fxvV8Cs=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22/go.mod h1:r9FwtxCtybMkTbWYCyBuevT9TW3zHmOTHqD082Uh+Oo=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20250225152658-bcfdb48dd939/go.mod h1:AgU8JJxC7+nfs7R7ma35AU7dMAGW7wCw3dRZRefIKyQ=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
|
||||
github.com/segmentio/kafka-go v0.2.0 h1:HtCSf6B4gN/87yc5qTl7WsxPKQIIGXLPPM1bMCPOsoY=
|
||||
@@ -1220,6 +1393,7 @@ github.com/spf13/afero v1.6.0/go.mod h1:Ai8FlHk4v/PARR026UzYexafAt9roJ7LcLMAmO6Z
|
||||
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||
github.com/spf13/cobra v0.0.3/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
|
||||
github.com/spf13/cobra v1.2.1/go.mod h1:ExllRjgxM/piMAM+3tAZvg8fsklGAf3tPfi+i8t68Nk=
|
||||
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
|
||||
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
|
||||
github.com/spf13/viper v1.8.1/go.mod h1:o0Pch8wJ9BVSWGQMbra6iw0oQ5oktSIBaujf1rJH9Ns=
|
||||
github.com/spiffe/go-spiffe/v2 v2.1.1 h1:RT9kM8MZLZIsPTH+HKQEP5yaAk3yd/VBzlINaRjXs8k=
|
||||
@@ -1249,6 +1423,7 @@ github.com/tonistiigi/go-actions-cache v0.0.0-20220404170428-0bdeb6e1eac7 h1:8eY
|
||||
github.com/tonistiigi/go-actions-cache v0.0.0-20220404170428-0bdeb6e1eac7/go.mod h1:qqvyZqkfwkoJuPU/bw61bItaoO0SJ8YSW0vSVRRvsRg=
|
||||
github.com/tonistiigi/go-archvariant v1.0.0 h1:5LC1eDWiBNflnTF1prCiX09yfNHIxDC/aukdhCdTyb0=
|
||||
github.com/tonistiigi/go-archvariant v1.0.0/go.mod h1:TxFmO5VS6vMq2kvs3ht04iPXtu2rUT/erOnGFYfk5Ho=
|
||||
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926 h1:G3dpKMzFDjgEh2q1Z7zUUtKa8ViPtH+ocF0bE0g00O8=
|
||||
github.com/tv42/httpunix v0.0.0-20150427012821-b75d8614f926/go.mod h1:9ESjWnEqriFuLhtthL60Sar/7RFoluCcXsuvEwTV5KM=
|
||||
github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs=
|
||||
github.com/ugorji/go v1.2.7 h1:qYhyWUUd6WbiM+C6JZAUkIJt/1WrjzNHY9+KCIjVqTo=
|
||||
@@ -1299,23 +1474,28 @@ github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
|
||||
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
|
||||
github.com/ziutek/mymysql v1.5.4 h1:GB0qdRGsTwQSBVYuVShFBKaXSnSnYYC2d9knnE1LHFs=
|
||||
github.com/ziutek/mymysql v1.5.4/go.mod h1:LMSpPZ6DbqWFxNCHW77HeMg9I646SAhApZ/wKdgO/C0=
|
||||
go.einride.tech/aip v0.66.0 h1:XfV+NQX6L7EOYK11yoHHFtndeaWh3KbD9/cN/6iWEt8=
|
||||
go.einride.tech/aip v0.66.0/go.mod h1:qAhMsfT7plxBX+Oy7Huol6YUvZ0ZzdUz26yZsQwfl1M=
|
||||
go.etcd.io/bbolt v1.3.7 h1:j+zJOnnEjF/kyHlDDgGnVL/AIqIJPq8UoB2GSNfkUfQ=
|
||||
go.etcd.io/bbolt v1.3.7/go.mod h1:N9Mkw9X8x5fupy0IKsmuqVtoGDyxsaDlbk4Rd05IAQw=
|
||||
go.etcd.io/etcd/api/v3 v3.5.0/go.mod h1:cbVKeC6lCfl7j/8jBhAK6aIYO9XOjdptoxU/nLQcPvs=
|
||||
go.etcd.io/etcd/api/v3 v3.5.5 h1:BX4JIbQ7hl7+jL+g+2j5UAr0o1bctCm6/Ct+ArBGkf0=
|
||||
go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8=
|
||||
go.etcd.io/etcd/api/v3 v3.5.12 h1:W4sw5ZoU2Juc9gBWuLk5U6fHfNVyY1WC5g9uiXZio/c=
|
||||
go.etcd.io/etcd/api/v3 v3.5.12/go.mod h1:Ot+o0SWSyT6uHhA56al1oCED0JImsRiU9Dc26+C2a+4=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.0/go.mod h1:IJHfcCEKxYu1Os13ZdwCwIUTUVGYTSAM3YSwc9/Ac1g=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.5 h1:9S0JUVvmrVl7wCF39iTQthdaaNIiAaQbmK75ogO6GU8=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.5/go.mod h1:ggrwbk069qxpKPq8/FKkQ3Xq9y39kbFR4LnKszpRXeQ=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.12 h1:EYDL6pWwyOsylrQyLp2w+HkQ46ATiOvoEdMarindU2A=
|
||||
go.etcd.io/etcd/client/pkg/v3 v3.5.12/go.mod h1:seTzl2d9APP8R5Y2hFL3NVlD6qC/dOT+3kvrqPyTas4=
|
||||
go.etcd.io/etcd/client/v2 v2.305.0/go.mod h1:h9puh54ZTgAKtEbut2oe9P4L/oqKCVB6xsXlzd7alYQ=
|
||||
go.etcd.io/etcd/client/v2 v2.305.5 h1:DktRP60//JJpnPC0VBymAN/7V71GHMdjDCBt4ZPXDjI=
|
||||
go.etcd.io/etcd/client/v2 v2.305.5/go.mod h1:zQjKllfqfBVyVStbt4FaosoX2iYd8fV/GRy/PbowgP4=
|
||||
go.etcd.io/etcd/client/v2 v2.305.12 h1:0m4ovXYo1CHaA/Mp3X/Fak5sRNIWf01wk/X1/G3sGKI=
|
||||
go.etcd.io/etcd/client/v2 v2.305.12/go.mod h1:aQ/yhsxMu+Oht1FOupSr60oBvcS9cKXHrzBpDsPTf9E=
|
||||
go.etcd.io/etcd/client/v3 v3.5.5 h1:q++2WTJbUgpQu4B6hCuT7VkdwaTP7Qz6Daak3WzbrlI=
|
||||
go.etcd.io/etcd/client/v3 v3.5.5/go.mod h1:aApjR4WGlSumpnJ2kloS75h6aHUmAyaPLjHMxpc7E7c=
|
||||
go.etcd.io/etcd/client/v3 v3.5.12 h1:v5lCPXn1pf1Uu3M4laUE2hp/geOTc5uPcYYsNe1lDxg=
|
||||
go.etcd.io/etcd/client/v3 v3.5.12/go.mod h1:tSbBCakoWmmddL+BKVAJHa9km+O/E+bumDe9mSbPiqw=
|
||||
go.etcd.io/etcd/pkg/v3 v3.5.5 h1:Ablg7T7OkR+AeeeU32kdVhw/AGDsitkKPl7aW73ssjU=
|
||||
go.etcd.io/etcd/pkg/v3 v3.5.5/go.mod h1:6ksYFxttiUGzC2uxyqiyOEvhAiD0tuIqSZkX3TyPdaE=
|
||||
@@ -1357,6 +1537,7 @@ go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
|
||||
go.uber.org/zap v1.17.0/go.mod h1:MXVU+bhUf/A7Xi2HNOnopQOrmycQ5Ih87HtOu4q5SSo=
|
||||
go.uber.org/zap v1.19.0 h1:mZQZefskPPCMIBCSEH0v2/iUqqLrYtaeqwD6FUGUnFE=
|
||||
go.uber.org/zap v1.19.0/go.mod h1:xg/QME4nWcxGxrpdeYfq7UvYrLh66cuVKdrbD1XF/NI=
|
||||
go.uber.org/zap v1.21.0 h1:WefMeulhovoZ2sYXz7st6K0sLj7bBhpiFaud4r4zST8=
|
||||
go.uber.org/zap v1.21.0/go.mod h1:wjWOCqI0f2ZZrJF/UufIOkiC8ii6tm1iqIsLo76RfJw=
|
||||
golang.org/x/crypto v0.0.0-20181029021203-45a5f77698d3/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
@@ -1403,6 +1584,7 @@ golang.org/x/lint v0.0.0-20191125180803-fdd1cda4f05f/go.mod h1:5qLYkcX4OjUUV8bRu
|
||||
golang.org/x/lint v0.0.0-20200130185559-910be7a94367/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/lint v0.0.0-20200302205851-738671d3881b/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/lint v0.0.0-20201208152925-83fdc39ff7b5/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616 h1:VLliZ0d+/avPrXXH+OakdXhpJuEoBZuwh1m2j7U6Iug=
|
||||
golang.org/x/lint v0.0.0-20210508222113-6edffad5e616/go.mod h1:3xt1FjdF8hUf6vQPIChWIBhFzV8gjjsPE/fR3IyQdNY=
|
||||
golang.org/x/mobile v0.0.0-20190312151609-d3739f865fa6/go.mod h1:z+o9i4GpDbdi3rU15maQ/Ox0txvL9dWGYEHz965HBQE=
|
||||
golang.org/x/mobile v0.0.0-20190719004257-d2bd2a29d028 h1:4+4C/Iv2U4fMZBiMCc98MG1In4gJY5YRhtpDNeDeHWs=
|
||||
@@ -1622,6 +1804,7 @@ golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58
|
||||
golang.org/x/tools v0.17.0/go.mod h1:xsh6VxdV005rRVaS6SSAf9oiAqljS7UZUacMZ8Bnsps=
|
||||
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2 h1:H2TDz8ibqkAF6YGhCdN3jS9O0/s90v0rJh3X/OLHEUk=
|
||||
golang.org/x/xerrors v0.0.0-20220907171357-04be3eba64a2/go.mod h1:K8+ghG5WaK9qNqU5K3HdILfMLy1f3aNYFI/wnl100a8=
|
||||
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028 h1:+cNy6SZtPcJQH3LJVLOSmiC7MMxXNOb3PU/VUEz+EhU=
|
||||
golang.org/x/xerrors v0.0.0-20231012003039-104605ab7028/go.mod h1:NDW/Ps6MPRej6fsCIbMTohpP40sJ/P/vI1MoTEGwX90=
|
||||
gonum.org/v1/gonum v0.0.0-20180816165407-929014505bf4/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
|
||||
gonum.org/v1/gonum v0.0.0-20181121035319-3f7ecaa7e8ca/go.mod h1:Y+Yx5eoAFn32cQvJDxZx5Dpnq+c3wtXuadVZAcxbbBo=
|
||||
@@ -1660,11 +1843,13 @@ google.golang.org/api v0.162.0/go.mod h1:6SulDkfoBIg4NFmCuZ39XeeAgSHCPecfSUuDyYl
|
||||
google.golang.org/api v0.166.0/go.mod h1:4FcBc686KFi7QI/U51/2GKKevfZMpM17sCdibqe/bSA=
|
||||
google.golang.org/api v0.167.0/go.mod h1:4FcBc686KFi7QI/U51/2GKKevfZMpM17sCdibqe/bSA=
|
||||
google.golang.org/api v0.169.0/go.mod h1:gpNOiMA2tZ4mf5R9Iwf4rK/Dcz0fbdIgWYWVoxmsyLg=
|
||||
google.golang.org/api v0.171.0 h1:w174hnBPqut76FzW5Qaupt7zY8Kql6fiVjgys4f58sU=
|
||||
google.golang.org/api v0.171.0/go.mod h1:Hnq5AHm4OTMt2BUVjael2CWZFD6vksJdWCWiUAmjC9o=
|
||||
google.golang.org/appengine v1.5.0/go.mod h1:xpcJRLb0r/rnEns0DIKYYv+WjYCduHsrkT7/EB5XEv4=
|
||||
google.golang.org/appengine v1.6.1/go.mod h1:i06prIuMbXzDqacNJfV5OdTW448YApPu5ww/cMBSeb0=
|
||||
google.golang.org/appengine v1.6.5/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/appengine v1.6.6/go.mod h1:8WjMMxjGQR8xUklV/ARdw2HLXBOI7O7uCIDZVag1xfc=
|
||||
google.golang.org/appengine v1.6.8 h1:IhEN5q69dyKagZPYMSdIjS2HqprW324FRQZJcGqPAsM=
|
||||
google.golang.org/appengine v1.6.8/go.mod h1:1jJ3jBArFh5pcgW8gCtRJnepW8FzD1V44FJffLiz/Ds=
|
||||
google.golang.org/genproto v0.0.0-20190307195333-5fe7a883aa19/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
google.golang.org/genproto v0.0.0-20190418145605-e7d98fc518a7/go.mod h1:VzzqZJRnGkLBvHegQrXjBqPurQTc5/KpmUdxsrq26oE=
|
||||
@@ -1721,6 +1906,7 @@ google.golang.org/genproto/googleapis/api v0.0.0-20240221002015-b0ce06bbee7c/go.
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240304161311-37d4d3c04a78/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2 h1:rIo7ocm2roD9DcFIX67Ym8icoGCKSARAiPljFhh5suQ=
|
||||
google.golang.org/genproto/googleapis/api v0.0.0-20240311132316-a219d84964c2/go.mod h1:O1cOfN1Cy6QEYr7VxtjOyP5AdAuR0aJ/MYZaaof623Y=
|
||||
google.golang.org/genproto/googleapis/bytestream v0.0.0-20240314234333-6e1732d8331c h1:4z0DVWmDWWZ4OeQHLrb6lLBE3uCgSLs9DDA5Zb36DFg=
|
||||
google.golang.org/genproto/googleapis/bytestream v0.0.0-20240314234333-6e1732d8331c/go.mod h1:IN9OQUXZ0xT+26MDwZL8fJcYw+y99b0eYPA2U15Jt8o=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20231030173426-d783a09b4405/go.mod h1:67X1fPuzjcrkymZzZV1vvkFeTn2Rvc6lYF9MYFGCcwE=
|
||||
google.golang.org/genproto/googleapis/rpc v0.0.0-20231106174013-bbf56f31fb17/go.mod h1:oQ5rr10WTTMvP4A36n8JpR1OrO1BEiV4f78CneXZxkA=
|
||||
@@ -1754,6 +1940,7 @@ google.golang.org/grpc v1.61.1/go.mod h1:VUbo7IFqmF1QtCAstipjG0GIoq49KvMe9+h1jFL
|
||||
google.golang.org/grpc v1.62.0/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
|
||||
google.golang.org/grpc v1.62.1 h1:B4n+nfKzOICUXMgyrNd19h/I9oH0L1pizfk1d4zSgTk=
|
||||
google.golang.org/grpc v1.62.1/go.mod h1:IWTG0VlJLCh1SkC58F7np9ka9mx/WNkjl4PGJaiq+QE=
|
||||
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0 h1:M1YKkFIboKNieVO5DLUEVzQfGwJD30Nv2jfUgzb5UcE=
|
||||
google.golang.org/grpc/cmd/protoc-gen-go-grpc v1.1.0/go.mod h1:6Kw0yEErY5E/yWrBtf03jp27GLLJujG4z/JK95pnjjw=
|
||||
google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c=
|
||||
google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
|
||||
@@ -1780,6 +1967,7 @@ honnef.co/go/tools v0.0.0-20190523083050-ea95bdfd59fc h1:/hemPrYIhOhy8zYrNj+069z
|
||||
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
honnef.co/go/tools v0.0.1-2020.1.3/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4 h1:UoveltGrhghAA7ePc+e+QYDHXrBps2PqFZiHkGR/xK8=
|
||||
honnef.co/go/tools v0.0.1-2020.1.4/go.mod h1:X/FiERA/W4tHapMX5mGpAtMSVEeEUOyHaw9vFzvIQ3k=
|
||||
k8s.io/component-base v0.26.7 h1:uqsOyZh0Zqoaup8tmHa491D/CvgFdGUs+X2H/inNUKM=
|
||||
k8s.io/component-base v0.26.7/go.mod h1:CZe1HTmX/DQdeBrb9XYOXzs96jXth8ZbFvhLMsoJLUg=
|
||||
@@ -1797,7 +1985,9 @@ nullprogram.com/x/optparse v1.0.0 h1:xGFgVi5ZaWOnYdac2foDT3vg0ZZC9ErXFV57mr4OHrI
|
||||
rsc.io/binaryregexp v0.2.0 h1:HfqmD5MEmC0zvwBuF187nq9mdnXjXsSivRiXN7SmRkE=
|
||||
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||
rsc.io/pdf v0.1.1 h1:k1MczvYDUvJBe93bYd7wrZLLUEcLZAuF824/I4e5Xr4=
|
||||
rsc.io/quote/v3 v3.1.0 h1:9JKUTTIUgS6kzR9mK1YuGKv6Nl+DijDNIc0ghT58FaY=
|
||||
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
|
||||
rsc.io/sampler v1.3.0 h1:7uVkIFmeBqHfdjD+gZwtXXI+RODJ2Wc4O7MPEh/QiW4=
|
||||
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
|
||||
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.37 h1:fAPTNEpzQMOLMGwOHNbUkR2xXTQwMJOZYNx+/mLlOh0=
|
||||
sigs.k8s.io/apiserver-network-proxy/konnectivity-client v0.0.37/go.mod h1:vfnxT4FXNT8eGvO+xi/DsyC/qHmdujqwrUa1WSspCsk=
|
||||
|
||||
5854 prover/Cargo.lock generated
File diff suppressed because it is too large
@@ -1,50 +0,0 @@
[package]
name = "prover"
version = "0.1.0"
edition = "2021"

# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

[patch.crates-io]
ethers-signers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2curves = { git = "https://github.com/scroll-tech/halo2curves", branch = "v0.1.0" }
[patch."https://github.com/privacy-scaling-explorations/halo2.git"]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
[patch."https://github.com/privacy-scaling-explorations/poseidon.git"]
poseidon = { git = "https://github.com/scroll-tech/poseidon.git", branch = "main" }
[patch."https://github.com/privacy-scaling-explorations/bls12_381"]
bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/impl_scalar_field" }

[dependencies]
anyhow = "1.0"
log = "0.4"
env_logger = "0.11.3"
serde = { version = "1.0.198", features = ["derive"] }
serde_json = "1.0.116"
futures = "0.3.30"

ethers-core = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branch = "v2.0.7" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_darwin_v2 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "160db6c" }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"
reqwest-retry = "0.5"
once_cell = "1.19.0"
hex = "0.4.3"
tiny-keccak = { version = "2.0.0", features = ["sha3", "keccak"] }
rand = "0.8.5"
eth-keystore = "0.5.0"
rlp = "0.5.2"
tokio = "1.37.0"
async-trait = "0.1"
sled = "0.34.7"
http = "1.1.0"
clap = { version = "4.5", features = ["derive"] }
ctor = "0.2.8"
@@ -1,48 +0,0 @@
.PHONY: prover lint tests_binary

ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "#" -f2 | cut -c-7)
else
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "\#" -f2 | cut -c-7)
endif

ZKEVM_VERSION=$(shell ./print_high_zkevm_version.sh)
ifeq (${ZKEVM_VERSION},)
$(error ZKEVM_VERSION not set)
else
$(info ZKEVM_VERSION is ${ZKEVM_VERSION})
endif

ZKEVM_COMMIT=$(shell echo ${ZKEVM_VERSION} | cut -d " " -f2)
$(info ZKEVM_COMMIT is ${ZKEVM_COMMIT})

HALO2_GPU_VERSION=$(shell ./print_halo2gpu_version.sh | sed -n '2p')

GIT_REV=$(shell git rev-parse --short HEAD)
GO_TAG=$(shell grep "var tag = " ../common/version/version.go | cut -d "\"" -f2)

ifeq (${GO_TAG},)
$(error GO_TAG not set)
else
$(info GO_TAG is ${GO_TAG})
endif

ifeq (${HALO2_GPU_VERSION},)
# use halo2_proofs with CPU
ZK_VERSION=${ZKEVM_COMMIT}-${HALO2_VERSION}
else
# use halo2_gpu
ZK_VERSION=${ZKEVM_COMMIT}-${HALO2_GPU_VERSION}
endif

prover:
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release

tests_binary:
	cargo clean && cargo test --release --no-run
	ls target/release/deps/prover* | grep -v "\.d" | xargs -I{} ln -sf {} ./prover.test

lint:
	cargo check --all-features
	cargo clippy --all-features --all-targets -- -D warnings
	cargo fmt --all
@@ -1,30 +0,0 @@
{
  "sdk_config": {
    "prover_name_prefix": "prover-1",
    "keys_dir": "keys",
    "coordinator": {
      "base_url": "http://localhost:8555",
      "retry_count": 10,
      "retry_wait_time_sec": 10,
      "connection_timeout_sec": 30
    },
    "l2geth": {
      "endpoint": "http://localhost:9999"
    },
    "prover": {
      "circuit_types": [1,2,3],
      "circuit_version": "v0.13.1"
    },
    "db_path": "unique-db-path-for-prover-1"
  },
  "low_version_circuit": {
    "hard_fork_name": "darwin",
    "params_path": "params",
    "assets_path": "assets"
  },
  "high_version_circuit": {
    "hard_fork_name": "darwinV2",
    "params_path": "params",
    "assets_path": "assets"
  }
}
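This sample config maps directly onto the LocalProverConfig struct in the deleted src/prover.rs further below. A minimal loading sketch (the config path and field assertions are illustrative, not part of the original tree):

    // Sketch: LocalProverConfig::from_file parses this JSON via serde.
    fn load() -> anyhow::Result<()> {
        let cfg = LocalProverConfig::from_file("conf/config.json".to_string())?;
        // The two hard-fork sections select which circuits handler is built.
        assert_eq!(cfg.low_version_circuit.hard_fork_name, "darwin");
        assert_eq!(cfg.high_version_circuit.hard_fork_name, "darwinV2");
        Ok(())
    }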
@@ -1,21 +0,0 @@
#!/bin/bash

config_file="$HOME/.cargo/config"

if [ ! -e "$config_file" ]; then
  exit 0
fi

if [[ $(head -n 1 "$config_file") == "#"* ]]; then
  exit 0
fi

halo2gpu_path=$(grep -Po '(?<=paths = \[")([^"]*)' $config_file)

pushd $halo2gpu_path

commit_hash=$(git log --pretty=format:%h -n 1)
echo "${commit_hash:0:7}"

popd
@@ -1,10 +0,0 @@
#!/bin/bash
set -ue

higher_zkevm_item=`grep "zkevm-circuits.git" ./Cargo.lock | sort | uniq | awk -F "[#=]" '{print $3" "$4}' | sort -k 1 | tail -n 1`

higher_version=`echo $higher_zkevm_item | awk '{print $1}'`

higher_commit=`echo $higher_zkevm_item | cut -d ' ' -f2 | cut -c-7`

echo "$higher_version $higher_commit"
@@ -1 +0,0 @@
nightly-2023-12-03
@@ -1,9 +0,0 @@
edition = "2021"

comment_width = 100
imports_granularity = "Crate"
max_width = 100
newline_style = "Unix"
# normalize_comments = true
reorder_imports = true
wrap_comments = true
@@ -1,51 +0,0 @@
use anyhow::{bail, Result};

static SCROLL_PROVER_ASSETS_DIR_ENV_NAME: &str = "SCROLL_PROVER_ASSETS_DIR";
static mut SCROLL_PROVER_ASSETS_DIRS: Vec<String> = vec![];

#[derive(Debug)]
pub struct AssetsDirEnvConfig {}

impl AssetsDirEnvConfig {
    pub fn init() -> Result<()> {
        let value = std::env::var(SCROLL_PROVER_ASSETS_DIR_ENV_NAME)?;
        let dirs: Vec<&str> = value.split(',').collect();
        if dirs.len() != 2 {
            bail!("env variable SCROLL_PROVER_ASSETS_DIR value must be 2 parts separated by comma.")
        }
        unsafe {
            SCROLL_PROVER_ASSETS_DIRS = dirs.into_iter().map(|s| s.to_string()).collect();
            log::info!(
                "init SCROLL_PROVER_ASSETS_DIRS: {:?}",
                SCROLL_PROVER_ASSETS_DIRS
            );
        }
        Ok(())
    }

    pub fn enable_first() {
        unsafe {
            log::info!(
                "set env {SCROLL_PROVER_ASSETS_DIR_ENV_NAME} to {}",
                &SCROLL_PROVER_ASSETS_DIRS[0]
            );
            std::env::set_var(
                SCROLL_PROVER_ASSETS_DIR_ENV_NAME,
                &SCROLL_PROVER_ASSETS_DIRS[0],
            );
        }
    }

    pub fn enable_second() {
        unsafe {
            log::info!(
                "set env {SCROLL_PROVER_ASSETS_DIR_ENV_NAME} to {}",
                &SCROLL_PROVER_ASSETS_DIRS[1]
            );
            std::env::set_var(
                SCROLL_PROVER_ASSETS_DIR_ENV_NAME,
                &SCROLL_PROVER_ASSETS_DIRS[1],
            );
        }
    }
}
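The env variable this config reads must hold exactly two comma-separated directories, one per circuit version. A minimal usage sketch (directory paths are hypothetical):

    // Hypothetical paths; init() fails unless exactly two are given.
    std::env::set_var("SCROLL_PROVER_ASSETS_DIR", "/assets/darwin,/assets/darwin_v2");
    AssetsDirEnvConfig::init().unwrap(); // caches both directories
    AssetsDirEnvConfig::enable_first();  // re-points the env var at the first dir
    AssetsDirEnvConfig::enable_second(); // ...or at the second dir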
@@ -1,76 +0,0 @@
#![feature(lazy_cell)]
#![feature(core_intrinsics)]

mod config;
mod prover;
mod types;
mod utils;
mod zk_circuits_handler;

use clap::{ArgAction, Parser};
use prover::{LocalProver, LocalProverConfig};
use scroll_proving_sdk::{
    prover::ProverBuilder,
    utils::{get_version, init_tracing},
};
use tokio::runtime;
use utils::get_prover_type;

#[derive(Parser, Debug)]
#[clap(disable_version_flag = true)]
struct Args {
    /// Path of config file
    #[arg(long = "config", default_value = "conf/config.json")]
    config_file: String,

    /// Version of this prover
    #[arg(short, long, action = ArgAction::SetTrue)]
    version: bool,

    /// Path of log file
    #[arg(long = "log.file")]
    log_file: Option<String>,
}

fn main() -> anyhow::Result<()> {
    let rt = runtime::Builder::new_multi_thread()
        .thread_stack_size(16 * 1024 * 1024) // Set stack size to 16MB
        .enable_all()
        .build()
        .expect("Failed to create Tokio runtime");

    rt.block_on(async {
        init_tracing();

        let args = Args::parse();

        if args.version {
            println!("version is {}", get_version());
            std::process::exit(0);
        }

        let cfg = LocalProverConfig::from_file(args.config_file)?;
        let sdk_config = cfg.sdk_config.clone();
        let mut prover_types = vec![];
        sdk_config
            .prover
            .circuit_types
            .iter()
            .for_each(|circuit_type| {
                if let Some(pt) = get_prover_type(*circuit_type) {
                    if !prover_types.contains(&pt) {
                        prover_types.push(pt);
                    }
                }
            });
        let local_prover = LocalProver::new(cfg, prover_types);
        let prover = ProverBuilder::new(sdk_config)
            .with_proving_service(Box::new(local_prover))
            .build()
            .await?;

        prover.run().await;

        Ok(())
    })
}
@@ -1,192 +0,0 @@
use crate::{
    types::ProverType,
    utils::get_prover_type,
    zk_circuits_handler::{CircuitsHandler, CircuitsHandlerProvider},
};
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use scroll_proving_sdk::{
    config::Config as SdkConfig,
    prover::{
        proving_service::{
            GetVkRequest, GetVkResponse, ProveRequest, ProveResponse, QueryTaskRequest,
            QueryTaskResponse, TaskStatus,
        },
        ProvingService,
    },
};
use serde::{Deserialize, Serialize};
use std::{
    fs::File,
    sync::{Arc, Mutex},
    time::{SystemTime, UNIX_EPOCH},
};
use tokio::{runtime::Handle, sync::RwLock, task::JoinHandle};

#[derive(Clone, Serialize, Deserialize)]
pub struct LocalProverConfig {
    pub sdk_config: SdkConfig,
    pub high_version_circuit: CircuitConfig,
    pub low_version_circuit: CircuitConfig,
}

impl LocalProverConfig {
    pub fn from_reader<R>(reader: R) -> Result<Self>
    where
        R: std::io::Read,
    {
        serde_json::from_reader(reader).map_err(|e| anyhow!(e))
    }

    pub fn from_file(file_name: String) -> Result<Self> {
        let file = File::open(file_name)?;
        Self::from_reader(&file)
    }
}

#[derive(Clone, Serialize, Deserialize)]
pub struct CircuitConfig {
    pub hard_fork_name: String,
    pub params_path: String,
    pub assets_path: String,
}

pub struct LocalProver {
    config: LocalProverConfig,
    prover_types: Vec<ProverType>,
    circuits_handler_provider: RwLock<CircuitsHandlerProvider>,
    next_task_id: Arc<Mutex<u64>>,
    current_task: Arc<Mutex<Option<JoinHandle<Result<String>>>>>,
}

#[async_trait]
impl ProvingService for LocalProver {
    fn is_local(&self) -> bool {
        true
    }
    async fn get_vks(&self, req: GetVkRequest) -> GetVkResponse {
        let mut prover_types = vec![];
        req.circuit_types.iter().for_each(|circuit_type| {
            if let Some(pt) = get_prover_type(*circuit_type) {
                if !prover_types.contains(&pt) {
                    prover_types.push(pt);
                }
            }
        });

        let vks = self
            .circuits_handler_provider
            .read()
            .await
            .init_vks(&self.config, prover_types)
            .await;
        GetVkResponse { vks, error: None }
    }
    async fn prove(&self, req: ProveRequest) -> ProveResponse {
        let handler = self
            .circuits_handler_provider
            .write()
            .await
            .get_circuits_handler(&req.hard_fork_name, self.prover_types.clone())
            .expect("failed to get circuit handler");

        match self.do_prove(req, handler).await {
            Ok(resp) => resp,
            Err(e) => ProveResponse {
                status: TaskStatus::Failed,
                error: Some(format!("failed to request proof: {}", e)),
                ..Default::default()
            },
        }
    }

    async fn query_task(&self, req: QueryTaskRequest) -> QueryTaskResponse {
        let handle = self.current_task.lock().unwrap().take();
        if let Some(handle) = handle {
            if handle.is_finished() {
                return match handle.await {
                    Ok(Ok(proof)) => QueryTaskResponse {
                        task_id: req.task_id,
                        status: TaskStatus::Success,
                        proof: Some(proof),
                        ..Default::default()
                    },
                    Ok(Err(e)) => QueryTaskResponse {
                        task_id: req.task_id,
                        status: TaskStatus::Failed,
                        error: Some(format!("proving task failed: {}", e)),
                        ..Default::default()
                    },
                    Err(e) => QueryTaskResponse {
                        task_id: req.task_id,
                        status: TaskStatus::Failed,
                        error: Some(format!("proving task panicked: {}", e)),
                        ..Default::default()
                    },
                };
            } else {
                *self.current_task.lock().unwrap() = Some(handle);
                return QueryTaskResponse {
                    task_id: req.task_id,
                    status: TaskStatus::Proving,
                    ..Default::default()
                };
            }
        }
        // If no handle is found
        QueryTaskResponse {
            task_id: req.task_id,
            status: TaskStatus::Failed,
            error: Some("no proving task is running".to_string()),
            ..Default::default()
        }
    }
}

impl LocalProver {
    pub fn new(config: LocalProverConfig, prover_types: Vec<ProverType>) -> Self {
        let circuits_handler_provider = CircuitsHandlerProvider::new(config.clone())
            .expect("failed to create circuits handler provider");

        Self {
            config,
            prover_types,
            circuits_handler_provider: RwLock::new(circuits_handler_provider),
            next_task_id: Arc::new(Mutex::new(0)),
            current_task: Arc::new(Mutex::new(None)),
        }
    }

    async fn do_prove(
        &self,
        req: ProveRequest,
        handler: Arc<Box<dyn CircuitsHandler>>,
    ) -> Result<ProveResponse> {
        let task_id = {
            let mut next_task_id = self.next_task_id.lock().unwrap();
            *next_task_id += 1;
            *next_task_id
        };

        let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
        let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;

        let req_clone = req.clone();
        let handle = Handle::current();
        let task_handle =
            tokio::task::spawn_blocking(move || handle.block_on(handler.get_proof_data(req_clone)));

        *self.current_task.lock().unwrap() = Some(task_handle);

        Ok(ProveResponse {
            task_id: task_id.to_string(),
            circuit_type: req.circuit_type,
            circuit_version: req.circuit_version,
            hard_fork_name: req.hard_fork_name,
            status: TaskStatus::Proving,
            created_at,
            input: Some(req.input),
            ..Default::default()
        })
    }
}
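A note on the design above: current_task holds at most one JoinHandle, so this LocalProver proves a single task at a time. prove() spawns the blocking proof on a dedicated thread and returns immediately with TaskStatus::Proving; query_task() then resolves the stored handle, mapping Ok(proof) / Err(e) / panic onto Success / Failed, and puts an unfinished handle back so polling can continue.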
@@ -1,153 +0,0 @@
use ethers_core::types::H256;
use serde::{Deserialize, Deserializer, Serialize, Serializer};

use scroll_proving_sdk::prover::types::CircuitType;

pub type CommonHash = H256;

#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum ProverType {
    Chunk,
    Batch,
}

impl ProverType {
    fn from_u8(v: u8) -> Self {
        match v {
            1 => ProverType::Chunk,
            2 => ProverType::Batch,
            _ => {
                panic!("invalid prover_type")
            }
        }
    }
}

impl Serialize for ProverType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            ProverType::Chunk => serializer.serialize_u8(1),
            ProverType::Batch => serializer.serialize_u8(2),
        }
    }
}

impl<'de> Deserialize<'de> for ProverType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let v: u8 = u8::deserialize(deserializer)?;
        Ok(ProverType::from_u8(v))
    }
}

#[derive(Serialize, Deserialize, Default)]
pub struct Task {
    #[serde(rename = "type", default)]
    pub task_type: CircuitType,
    pub task_data: String,
    #[serde(default)]
    pub hard_fork_name: String,
}

#[derive(Serialize, Deserialize, Default)]
pub struct ProofDetail {
    pub id: String,
    #[serde(rename = "type", default)]
    pub proof_type: CircuitType,
    pub proof_data: String,
    pub error: String,
}

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProofFailureType {
    Undefined,
    Panic,
    NoPanic,
}

impl ProofFailureType {
    fn from_u8(v: u8) -> Self {
        match v {
            1 => ProofFailureType::Panic,
            2 => ProofFailureType::NoPanic,
            _ => ProofFailureType::Undefined,
        }
    }
}

impl Serialize for ProofFailureType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            ProofFailureType::Undefined => serializer.serialize_u8(0),
            ProofFailureType::Panic => serializer.serialize_u8(1),
            ProofFailureType::NoPanic => serializer.serialize_u8(2),
        }
    }
}

impl<'de> Deserialize<'de> for ProofFailureType {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let v: u8 = u8::deserialize(deserializer)?;
        Ok(ProofFailureType::from_u8(v))
    }
}

impl Default for ProofFailureType {
    fn default() -> Self {
        Self::Undefined
    }
}

#[derive(Debug, Clone, Copy, PartialEq)]
pub enum ProofStatus {
    Ok,
    Error,
}

impl ProofStatus {
    fn from_u8(v: u8) -> Self {
        match v {
            0 => ProofStatus::Ok,
            _ => ProofStatus::Error,
        }
    }
}

impl Serialize for ProofStatus {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        match *self {
            ProofStatus::Ok => serializer.serialize_u8(0),
            ProofStatus::Error => serializer.serialize_u8(1),
        }
    }
}

impl<'de> Deserialize<'de> for ProofStatus {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let v: u8 = u8::deserialize(deserializer)?;
        Ok(ProofStatus::from_u8(v))
    }
}

impl Default for ProofStatus {
    fn default() -> Self {
        Self::Ok
    }
}
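The custom serde impls above put these enums on the wire as bare integer tags rather than strings. A minimal round-trip sketch using serde_json (already a dependency of this crate):

    // ProverType serializes to its u8 tag (Chunk = 1, Batch = 2).
    let json = serde_json::to_string(&ProverType::Batch).unwrap();
    assert_eq!(json, "2");
    let back: ProverType = serde_json::from_str(&json).unwrap();
    assert_eq!(back, ProverType::Batch);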
@@ -1,18 +0,0 @@
use crate::types::ProverType;
use scroll_proving_sdk::prover::types::CircuitType;

pub fn get_circuit_types(prover_type: ProverType) -> Vec<CircuitType> {
    match prover_type {
        ProverType::Chunk => vec![CircuitType::Chunk],
        ProverType::Batch => vec![CircuitType::Batch, CircuitType::Bundle],
    }
}

pub fn get_prover_type(task_type: CircuitType) -> Option<ProverType> {
    match task_type {
        CircuitType::Undefined => None,
        CircuitType::Chunk => Some(ProverType::Chunk),
        CircuitType::Batch => Some(ProverType::Batch),
        CircuitType::Bundle => Some(ProverType::Batch),
    }
}
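These two mappings are deliberately asymmetric: a Batch prover serves both Batch and Bundle circuit tasks, and both task types route back to it. A small sketch (assuming the SDK's CircuitType derives PartialEq and Debug, which the matches above suggest):

    assert_eq!(
        get_circuit_types(ProverType::Batch),
        vec![CircuitType::Batch, CircuitType::Bundle]
    );
    assert_eq!(get_prover_type(CircuitType::Bundle), Some(ProverType::Batch));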
@@ -1,165 +0,0 @@
mod common;
mod darwin;
mod darwin_v2;

use crate::{
    config::AssetsDirEnvConfig, prover::LocalProverConfig, types::ProverType,
    utils::get_circuit_types,
};
use anyhow::{bail, Result};
use async_trait::async_trait;
use darwin::DarwinHandler;
use darwin_v2::DarwinV2Handler;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, CircuitType};
use std::{collections::HashMap, sync::Arc};

type HardForkName = String;

pub mod utils {
    pub fn encode_vk(vk: Vec<u8>) -> String {
        base64::encode(vk)
    }
}

#[async_trait]
pub trait CircuitsHandler: Send + Sync {
    async fn get_vk(&self, task_type: CircuitType) -> Option<Vec<u8>>;

    async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String>;
}

type CircuitsHandlerBuilder = fn(
    prover_types: Vec<ProverType>,
    config: &LocalProverConfig,
) -> Result<Box<dyn CircuitsHandler>>;

pub struct CircuitsHandlerProvider {
    config: LocalProverConfig,
    circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>,
    current_fork_name: Option<HardForkName>,
    current_circuit: Option<Arc<Box<dyn CircuitsHandler>>>,
}

impl CircuitsHandlerProvider {
    pub fn new(config: LocalProverConfig) -> Result<Self> {
        let mut m: HashMap<HardForkName, CircuitsHandlerBuilder> = HashMap::new();

        if let Err(e) = AssetsDirEnvConfig::init() {
            panic!("AssetsDirEnvConfig init failed: {:#}", e);
        }

        fn handler_builder(
            prover_types: Vec<ProverType>,
            config: &LocalProverConfig,
        ) -> Result<Box<dyn CircuitsHandler>> {
            log::info!(
                "now init zk circuits handler, hard_fork_name: {}",
                &config.low_version_circuit.hard_fork_name
            );
            AssetsDirEnvConfig::enable_first();
            DarwinHandler::new(
                prover_types,
                &config.low_version_circuit.params_path,
                &config.low_version_circuit.assets_path,
            )
            .map(|handler| Box::new(handler) as Box<dyn CircuitsHandler>)
        }
        m.insert(
            config.low_version_circuit.hard_fork_name.clone(),
            handler_builder,
        );

        fn next_handler_builder(
            prover_types: Vec<ProverType>,
            config: &LocalProverConfig,
        ) -> Result<Box<dyn CircuitsHandler>> {
            log::info!(
                "now init zk circuits handler, hard_fork_name: {}",
                &config.high_version_circuit.hard_fork_name
            );
            AssetsDirEnvConfig::enable_second();
            DarwinV2Handler::new(
                prover_types,
                &config.high_version_circuit.params_path,
                &config.high_version_circuit.assets_path,
            )
            .map(|handler| Box::new(handler) as Box<dyn CircuitsHandler>)
        }

        m.insert(
            config.high_version_circuit.hard_fork_name.clone(),
            next_handler_builder,
        );

        let provider = CircuitsHandlerProvider {
            config,
            circuits_handler_builder_map: m,
            current_fork_name: None,
            current_circuit: None,
        };

        Ok(provider)
    }

    pub fn get_circuits_handler(
        &mut self,
        hard_fork_name: &String,
        prover_types: Vec<ProverType>,
    ) -> Result<Arc<Box<dyn CircuitsHandler>>> {
        match &self.current_fork_name {
            Some(fork_name) if fork_name == hard_fork_name => {
                log::info!("get circuits handler from cache");
                if let Some(handler) = &self.current_circuit {
                    Ok(handler.clone())
                } else {
                    bail!("missing cached handler, there must be something wrong.")
                }
            }
            _ => {
                log::info!(
                    "failed to get circuits handler from cache, create a new one: {hard_fork_name}"
                );
                if let Some(builder) = self.circuits_handler_builder_map.get(hard_fork_name) {
                    log::info!("building circuits handler for {hard_fork_name}");
                    let handler = builder(prover_types, &self.config)
                        .expect("failed to build circuits handler");
                    self.current_fork_name = Some(hard_fork_name.clone());
                    let arc_handler = Arc::new(handler);
                    self.current_circuit = Some(arc_handler.clone());
                    Ok(arc_handler)
                } else {
                    bail!("missing builder, there must be something wrong.")
                }
            }
        }
    }

    pub async fn init_vks(
        &self,
        config: &LocalProverConfig,
        prover_types: Vec<ProverType>,
    ) -> Vec<String> {
        let mut vks = Vec::new();
        for (hard_fork_name, build) in self.circuits_handler_builder_map.iter() {
            let handler =
                build(prover_types.clone(), config).expect("failed to build circuits handler");

            for prover_type in prover_types.iter() {
                for task_type in get_circuit_types(*prover_type).into_iter() {
                    let vk = handler
                        .get_vk(task_type)
                        .await
                        .map_or("".to_string(), utils::encode_vk);
                    log::info!(
                        "vk for {hard_fork_name}, is {vk}, task_type: {:?}",
                        task_type
                    );
                    if !vk.is_empty() {
                        vks.push(vk)
                    }
                }
            }
        }
        vks
    }
}
@@ -1,33 +0,0 @@
use std::{collections::BTreeMap, rc::Rc};

use crate::types::ProverType;

use once_cell::sync::OnceCell;

use halo2_proofs::{halo2curves::bn256::Bn256, poly::kzg::commitment::ParamsKZG};

static mut PARAMS_MAP: OnceCell<Rc<BTreeMap<u32, ParamsKZG<Bn256>>>> = OnceCell::new();

pub fn get_params_map_instance<'a, F>(load_params_func: F) -> &'a BTreeMap<u32, ParamsKZG<Bn256>>
where
    F: FnOnce() -> BTreeMap<u32, ParamsKZG<Bn256>>,
{
    unsafe {
        PARAMS_MAP.get_or_init(|| {
            let params_map = load_params_func();
            Rc::new(params_map)
        })
    }
}

pub fn get_degrees<F>(prover_types: &std::collections::HashSet<ProverType>, f: F) -> Vec<u32>
where
    F: FnMut(&ProverType) -> Vec<u32>,
{
    prover_types
        .iter()
        .flat_map(f)
        .collect::<std::collections::HashSet<u32>>()
        .into_iter()
        .collect()
}
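get_degrees simply flattens the per-prover degree lists and dedupes them through a HashSet, so KZG params shared across circuits are only loaded once. A sketch of the intended behavior (the degree values below are hypothetical; the real lists come from ZKEVM_DEGREES and AGG_DEGREES):

    let provers: std::collections::HashSet<ProverType> =
        [ProverType::Chunk, ProverType::Batch].into_iter().collect();
    let mut degrees = get_degrees(&provers, |pt| match pt {
        ProverType::Chunk => vec![20, 24], // hypothetical
        ProverType::Batch => vec![24, 26], // hypothetical
    });
    degrees.sort(); // HashSet iteration order is unspecified
    assert_eq!(degrees, vec![20, 24, 26]); // the shared degree 24 appears once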
@@ -1,401 +0,0 @@
use super::{common::*, CircuitsHandler};
use crate::types::ProverType;
use anyhow::{bail, Context, Ok, Result};
use async_trait::async_trait;
use once_cell::sync::Lazy;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, CircuitType};
use serde::Deserialize;
use tokio::sync::RwLock;

use crate::types::CommonHash;
use std::env;

use prover_darwin::{
    aggregator::Prover as BatchProver,
    check_chunk_hashes,
    common::Prover as CommonProver,
    config::{AGG_DEGREES, ZKEVM_DEGREES},
    zkevm::Prover as ChunkProver,
    BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
    ChunkProof, ChunkProvingTask,
};

// Only used for debugging.
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());

#[derive(Debug, Clone, Deserialize)]
pub struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkInfo>,
    #[serde(flatten)]
    pub batch_proving_task: BatchProvingTask,
}

type BundleTaskDetail = BundleProvingTask;

#[derive(Debug, Clone, Deserialize)]
pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}

#[derive(Default)]
pub struct DarwinHandler {
    chunk_prover: Option<RwLock<ChunkProver<'static>>>,
    batch_prover: Option<RwLock<BatchProver<'static>>>,
}

impl DarwinHandler {
    pub fn new_multi(
        prover_types: Vec<ProverType>,
        params_dir: &str,
        assets_dir: &str,
    ) -> Result<Self> {
        let class_name = std::intrinsics::type_name::<Self>();
        let prover_types_set = prover_types
            .into_iter()
            .collect::<std::collections::HashSet<ProverType>>();
        let mut handler = Self {
            batch_prover: None,
            chunk_prover: None,
        };
        let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
            ProverType::Chunk => ZKEVM_DEGREES.clone(),
            ProverType::Batch => AGG_DEGREES.clone(),
        });
        let params_map = get_params_map_instance(|| {
            log::info!(
                "calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
                class_name,
                prover_types_set,
                degrees
            );
            CommonProver::load_params_map(params_dir, &degrees)
        });
        for prover_type in prover_types_set {
            match prover_type {
                ProverType::Chunk => {
                    handler.chunk_prover = Some(RwLock::new(ChunkProver::from_params_and_assets(
                        params_map, assets_dir,
                    )));
                }
                ProverType::Batch => {
                    handler.batch_prover = Some(RwLock::new(BatchProver::from_params_and_assets(
                        params_map, assets_dir,
                    )))
                }
            }
        }
        Ok(handler)
    }

    pub fn new(prover_types: Vec<ProverType>, params_dir: &str, assets_dir: &str) -> Result<Self> {
        Self::new_multi(prover_types, params_dir, assets_dir)
    }

    async fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
        if let Some(prover) = self.chunk_prover.as_ref() {
            let chunk = ChunkProvingTask::from(chunk_trace);

            let chunk_proof =
                prover
                    .write()
                    .await
                    .gen_chunk_proof(chunk, None, None, self.get_output_dir())?;

            return Ok(chunk_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    async fn gen_chunk_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let chunk_traces: Vec<BlockTrace> = serde_json::from_str(&prove_request.input)?;
        let chunk_proof = self.gen_chunk_proof_raw(chunk_traces).await?;
        Ok(serde_json::to_string(&chunk_proof)?)
    }

    async fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
                .chunk_infos
                .clone()
                .into_iter()
                .zip(batch_task_detail.batch_proving_task.chunk_proofs.clone())
                .collect();

            let chunk_proofs: Vec<ChunkProof> =
                chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

            let is_valid = prover.read().await.check_protocol_of_chunks(&chunk_proofs);

            if !is_valid {
                bail!("non-match chunk protocol")
            }
            check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
            let batch_proof = prover.write().await.gen_batch_proof(
                batch_task_detail.batch_proving_task,
                None,
                self.get_output_dir(),
            )?;

            return Ok(batch_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    async fn gen_batch_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&prove_request.input)?;
        let batch_proof = self.gen_batch_proof_raw(batch_task_detail).await?;
        Ok(serde_json::to_string(&batch_proof)?)
    }

    async fn gen_bundle_proof_raw(
        &self,
        bundle_task_detail: BundleTaskDetail,
    ) -> Result<BundleProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let bundle_proof = prover.write().await.gen_bundle_proof(
                bundle_task_detail,
                None,
                self.get_output_dir(),
            )?;

            return Ok(bundle_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    async fn gen_bundle_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&prove_request.input)?;
        let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail).await?;
        Ok(serde_json::to_string(&bundle_proof)?)
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }
}

#[async_trait]
impl CircuitsHandler for DarwinHandler {
    async fn get_vk(&self, task_type: CircuitType) -> Option<Vec<u8>> {
        match task_type {
            CircuitType::Chunk => self.chunk_prover.as_ref().unwrap().read().await.get_vk(),
            CircuitType::Batch => self
                .batch_prover
                .as_ref()
                .unwrap()
                .read()
                .await
                .get_batch_vk(),
            CircuitType::Bundle => self
                .batch_prover
                .as_ref()
                .unwrap()
                .read()
                .await
                .get_bundle_vk(),
            _ => unreachable!(),
        }
    }

    async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
        match prove_request.circuit_type {
            CircuitType::Chunk => self.gen_chunk_proof(prove_request).await,
            CircuitType::Batch => self.gen_batch_proof(prove_request).await,
            CircuitType::Bundle => self.gen_bundle_proof(prove_request).await,
            _ => unreachable!(),
        }
    }
}

// =================================== tests module ========================================

#[cfg(test)]
mod tests {
    use super::*;
    use crate::zk_circuits_handler::utils::encode_vk;
    use prover_darwin::utils::chunk_trace_to_witness_block;
    use scroll_proving_sdk::utils::init_tracing;
    use std::{path::PathBuf, sync::LazyLock};

    #[ctor::ctor]
    fn init() {
        init_tracing();
        log::info!("logger initialized");
    }

    static DEFAULT_WORK_DIR: &str = "/assets";
    static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
        std::env::var("DARWIN_TEST_DIR")
            .unwrap_or(String::from(DEFAULT_WORK_DIR))
            .trim_end_matches('/')
            .to_string()
    });
    static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
    static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
    static PROOF_DUMP_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
    static BATCH_DIR_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
    static BATCH_VK_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
    static CHUNK_VK_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));

    #[test]
    fn it_works() {
        let result = true;
        assert!(result);
    }

    #[tokio::test]
    async fn test_circuits() -> Result<()> {
        let bi_handler = DarwinHandler::new_multi(
            vec![ProverType::Chunk, ProverType::Batch],
            &PARAMS_PATH,
            &ASSETS_PATH,
        )?;

        let chunk_handler = bi_handler;
        let chunk_vk = chunk_handler.get_vk(CircuitType::Chunk).await.unwrap();

        check_vk(CircuitType::Chunk, chunk_vk, "chunk vk must be available");
        let chunk_dir_paths = get_chunk_dir_paths()?;
        log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
        let mut chunk_infos = vec![];
        let mut chunk_proofs = vec![];
        for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
            let chunk_id = format!("chunk_proof{}", id + 1);
            log::info!("start to process {chunk_id}");
            let chunk_trace = read_chunk_trace(chunk_path)?;

            let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
            chunk_infos.push(chunk_info);

            log::info!("start to prove {chunk_id}");
            let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace).await?;
            let proof_data = serde_json::to_string(&chunk_proof)?;
            dump_proof(chunk_id, proof_data)?;
            chunk_proofs.push(chunk_proof);
        }

        let batch_handler = chunk_handler;
        let batch_vk = batch_handler.get_vk(CircuitType::Batch).await.unwrap();
        check_vk(CircuitType::Batch, batch_vk, "batch vk must be available");
        let batch_task_detail = make_batch_task_detail(chunk_infos, chunk_proofs);
        log::info!("start to prove batch");
        let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail).await?;
        let proof_data = serde_json::to_string(&batch_proof)?;
        dump_proof("batch_proof".to_string(), proof_data)?;

        Ok(())
    }

    fn make_batch_task_detail(_: Vec<ChunkInfo>, _: Vec<ChunkProof>) -> BatchTaskDetail {
        todo!();
        // BatchTaskDetail {
        //     chunk_infos,
        //     batch_proving_task: BatchProvingTask {
        //         parent_batch_hash: todo!(),
        //         parent_state_root: todo!(),
        //         batch_header: todo!(),
        //         chunk_proofs,
        //     },
        // }
    }

    fn check_vk(proof_type: CircuitType, vk: Vec<u8>, info: &str) {
        log::info!("check_vk, {:?}", proof_type);
        let vk_from_file = read_vk(proof_type).unwrap();
        assert_eq!(vk_from_file, encode_vk(vk), "{info}")
    }

    fn read_vk(proof_type: CircuitType) -> Result<String> {
        log::info!("read_vk, {:?}", proof_type);
        let vk_file = match proof_type {
            CircuitType::Chunk => CHUNK_VK_PATH.clone(),
            CircuitType::Batch => BATCH_VK_PATH.clone(),
            CircuitType::Bundle => todo!(),
            CircuitType::Undefined => unreachable!(),
        };

        let data = std::fs::read(vk_file)?;
        Ok(encode_vk(data))
    }

    fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
        log::info!("read_chunk_trace, {:?}", path);
        let mut chunk_trace: Vec<BlockTrace> = vec![];

        fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
            let f = std::fs::File::open(file)?;
            Ok(serde_json::from_reader(&f)?)
        }

        if path.is_dir() {
            let entries = std::fs::read_dir(&path)?;
            let mut files: Vec<String> = entries
                .into_iter()
                .filter_map(|e| {
                    if e.is_err() {
                        return None;
                    }
                    let entry = e.unwrap();
                    if entry.path().is_dir() {
                        return None;
                    }
                    if let Result::Ok(file_name) = entry.file_name().into_string() {
                        Some(file_name)
                    } else {
                        None
                    }
                })
                .collect();
            files.sort();

            log::info!("files in chunk {:?} is {:?}", path, files);
            for file in files {
                let block_trace = read_block_trace(&path.join(file))?;
                chunk_trace.push(block_trace);
            }
        } else {
            let block_trace = read_block_trace(&path)?;
            chunk_trace.push(block_trace);
        }
        Ok(chunk_trace)
    }

    fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
        let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
        let entries = std::fs::read_dir(&batch_path)?;
        let mut files: Vec<String> = entries
            .filter_map(|e| {
                if e.is_err() {
                    return None;
                }
                let entry = e.unwrap();
                if entry.path().is_dir() {
                    if let Result::Ok(file_name) = entry.file_name().into_string() {
                        Some(file_name)
                    } else {
                        None
                    }
                } else {
                    None
                }
            })
            .collect();
        files.sort();
        log::info!("files in batch {:?} is {:?}", batch_path, files);
        Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
    }

    fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkInfo> {
        let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
        Ok(ChunkInfo::from_witness_block(&witness_block, false))
    }

    fn dump_proof(id: String, proof_data: String) -> Result<()> {
        let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
        Ok(std::fs::write(dump_path.join(id), proof_data)?)
    }
}
@@ -1,459 +0,0 @@
use super::{common::*, CircuitsHandler};
use crate::types::ProverType;
use anyhow::{bail, Context, Ok, Result};
use async_trait::async_trait;
use once_cell::sync::Lazy;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, CircuitType};
use serde::Deserialize;
use tokio::sync::RwLock;

use crate::types::CommonHash;
use std::env;

use prover_darwin_v2::{
    aggregator::Prover as BatchProver,
    check_chunk_hashes,
    common::Prover as CommonProver,
    config::{AGG_DEGREES, ZKEVM_DEGREES},
    zkevm::Prover as ChunkProver,
    BatchProof, BatchProvingTask, BlockTrace, BundleProof, BundleProvingTask, ChunkInfo,
    ChunkProof, ChunkProvingTask,
};

// Only used for debugging.
static OUTPUT_DIR: Lazy<Option<String>> = Lazy::new(|| env::var("PROVER_OUTPUT_DIR").ok());

#[derive(Debug, Clone, Deserialize)]
pub struct BatchTaskDetail {
    pub chunk_infos: Vec<ChunkInfo>,
    #[serde(flatten)]
    pub batch_proving_task: BatchProvingTask,
}

type BundleTaskDetail = BundleProvingTask;

#[derive(Debug, Clone, Deserialize)]
pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}

#[derive(Default)]
pub struct DarwinV2Handler {
    chunk_prover: Option<RwLock<ChunkProver<'static>>>,
    batch_prover: Option<RwLock<BatchProver<'static>>>,
}

impl DarwinV2Handler {
    pub fn new_multi(
        prover_types: Vec<ProverType>,
        params_dir: &str,
        assets_dir: &str,
    ) -> Result<Self> {
        let class_name = std::intrinsics::type_name::<Self>();
        let prover_types_set = prover_types
            .into_iter()
            .collect::<std::collections::HashSet<ProverType>>();
        let mut handler = Self {
            batch_prover: None,
            chunk_prover: None,
        };
        let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
            ProverType::Chunk => ZKEVM_DEGREES.clone(),
            ProverType::Batch => AGG_DEGREES.clone(),
        });
        let params_map = get_params_map_instance(|| {
            log::info!(
                "calling get_params_map from {}, prover_types: {:?}, degrees: {:?}",
                class_name,
                prover_types_set,
                degrees
            );
            CommonProver::load_params_map(params_dir, &degrees)
        });
        for prover_type in prover_types_set {
            match prover_type {
                ProverType::Chunk => {
                    handler.chunk_prover = Some(RwLock::new(ChunkProver::from_params_and_assets(
                        params_map, assets_dir,
                    )));
                }
                ProverType::Batch => {
                    handler.batch_prover = Some(RwLock::new(BatchProver::from_params_and_assets(
                        params_map, assets_dir,
                    )))
                }
            }
        }
        Ok(handler)
    }

    pub fn new(prover_types: Vec<ProverType>, params_dir: &str, assets_dir: &str) -> Result<Self> {
        Self::new_multi(prover_types, params_dir, assets_dir)
    }

    async fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
        if let Some(prover) = self.chunk_prover.as_ref() {
            let chunk = ChunkProvingTask::from(chunk_trace);

            let chunk_proof =
                prover
                    .write()
                    .await
                    .gen_chunk_proof(chunk, None, None, self.get_output_dir())?;

            return Ok(chunk_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    async fn gen_chunk_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let chunk_traces: Vec<BlockTrace> = serde_json::from_str(&prove_request.input)?;
        let chunk_proof = self.gen_chunk_proof_raw(chunk_traces).await?;
        Ok(serde_json::to_string(&chunk_proof)?)
    }

    async fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
                .chunk_infos
                .clone()
                .into_iter()
                .zip(batch_task_detail.batch_proving_task.chunk_proofs.clone())
                .collect();

            let chunk_proofs: Vec<ChunkProof> =
                chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

            let is_valid = prover.write().await.check_protocol_of_chunks(&chunk_proofs);

            if !is_valid {
                bail!("non-match chunk protocol")
            }
            check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
            let batch_proof = prover.write().await.gen_batch_proof(
                batch_task_detail.batch_proving_task,
                None,
                self.get_output_dir(),
            )?;

            return Ok(batch_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    async fn gen_batch_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&prove_request.input)?;
        let batch_proof = self.gen_batch_proof_raw(batch_task_detail).await?;
        Ok(serde_json::to_string(&batch_proof)?)
    }

    async fn gen_bundle_proof_raw(
        &self,
        bundle_task_detail: BundleTaskDetail,
    ) -> Result<BundleProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let bundle_proof = prover.write().await.gen_bundle_proof(
                bundle_task_detail,
                None,
                self.get_output_dir(),
            )?;

            return Ok(bundle_proof);
        }
        unreachable!("please check errors in proof_type logic")
    }

    async fn gen_bundle_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&prove_request.input)?;
        let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail).await?;
        Ok(serde_json::to_string(&bundle_proof)?)
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }
}

#[async_trait]
impl CircuitsHandler for DarwinV2Handler {
    async fn get_vk(&self, task_type: CircuitType) -> Option<Vec<u8>> {
        match task_type {
            CircuitType::Chunk => self.chunk_prover.as_ref().unwrap().read().await.get_vk(),
            CircuitType::Batch => self
                .batch_prover
                .as_ref()
                .unwrap()
                .read()
                .await
                .get_batch_vk(),
            CircuitType::Bundle => self
                .batch_prover
                .as_ref()
                .unwrap()
                .read()
                .await
                .get_bundle_vk(),
            _ => unreachable!(),
        }
    }

    async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
        match prove_request.circuit_type {
            CircuitType::Chunk => self.gen_chunk_proof(prove_request).await,
            CircuitType::Batch => self.gen_batch_proof(prove_request).await,
            CircuitType::Bundle => self.gen_bundle_proof(prove_request).await,
            _ => unreachable!(),
        }
    }
}

// =================================== tests module ========================================

#[cfg(test)]
mod tests {
    use super::*;
    use crate::zk_circuits_handler::utils::encode_vk;
    use ethers_core::types::H256;
    use prover_darwin_v2::{
        aggregator::eip4844, utils::chunk_trace_to_witness_block, BatchData, BatchHeader,
        MAX_AGG_SNARKS,
    };
    use scroll_proving_sdk::utils::init_tracing;
    use std::{path::PathBuf, sync::LazyLock};

    #[ctor::ctor]
    fn init() {
        init_tracing();
        log::info!("logger initialized");
    }

    static DEFAULT_WORK_DIR: &str = "/assets";
    static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
        std::env::var("DARWIN_V2_TEST_DIR")
            .unwrap_or(String::from(DEFAULT_WORK_DIR))
            .trim_end_matches('/')
            .to_string()
    });
    static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
    static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
    static PROOF_DUMP_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
    static BATCH_DIR_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
    static BATCH_VK_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/test_assets/vk_batch.vkey", *WORK_DIR));
    static CHUNK_VK_PATH: LazyLock<String> =
        LazyLock::new(|| format!("{}/test_assets/vk_chunk.vkey", *WORK_DIR));

    #[test]
    fn it_works() {
        let result = true;
        assert!(result);
    }

    #[tokio::test]
    async fn test_circuits() -> Result<()> {
        let bi_handler = DarwinV2Handler::new_multi(
            vec![ProverType::Chunk, ProverType::Batch],
            &PARAMS_PATH,
            &ASSETS_PATH,
        )?;

        let chunk_handler = bi_handler;
        let chunk_vk = chunk_handler.get_vk(CircuitType::Chunk).await.unwrap();

        check_vk(CircuitType::Chunk, chunk_vk, "chunk vk must be available");
        let chunk_dir_paths = get_chunk_dir_paths()?;
        log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
        let mut chunk_traces = vec![];
        let mut chunk_infos = vec![];
        let mut chunk_proofs = vec![];
        for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
            let chunk_id = format!("chunk_proof{}", id + 1);
            log::info!("start to process {chunk_id}");
            let chunk_trace = read_chunk_trace(chunk_path)?;
            chunk_traces.push(chunk_trace.clone());
            let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
            chunk_infos.push(chunk_info);

            log::info!("start to prove {chunk_id}");
            let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace).await?;
            let proof_data = serde_json::to_string(&chunk_proof)?;
            dump_proof(chunk_id, proof_data)?;
            chunk_proofs.push(chunk_proof);
        }

        let batch_handler = chunk_handler;
        let batch_vk = batch_handler.get_vk(CircuitType::Batch).await.unwrap();
        check_vk(CircuitType::Batch, batch_vk, "batch vk must be available");
        let batch_task_detail = make_batch_task_detail(chunk_traces, chunk_proofs, None);
        log::info!("start to prove batch");
        let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail).await?;
        let proof_data = serde_json::to_string(&batch_proof)?;
        dump_proof("batch_proof".to_string(), proof_data)?;

        Ok(())
    }

    // copied from https://github.com/scroll-tech/scroll-prover/blob/main/integration/src/prove.rs
    fn get_blob_from_chunks(chunks: &[ChunkInfo]) -> Vec<u8> {
        let num_chunks = chunks.len();

        let padded_chunk =
            ChunkInfo::mock_padded_chunk_info_for_testing(chunks.last().as_ref().unwrap());
        let chunks_with_padding = [
            chunks.to_vec(),
            vec![padded_chunk; MAX_AGG_SNARKS - num_chunks],
        ]
        .concat();
        let batch_data = BatchData::<{ MAX_AGG_SNARKS }>::new(chunks.len(), &chunks_with_padding);
        let batch_bytes = batch_data.get_batch_data_bytes();
        let blob_bytes = eip4844::get_blob_bytes(&batch_bytes);
        log::info!("blob_bytes len {}", blob_bytes.len());
        blob_bytes
    }

    // TODO: chunk_infos can be extracted from chunk_proofs.
    // Still needed?
    fn make_batch_task_detail(
        chunk_traces: Vec<Vec<BlockTrace>>,
        chunk_proofs: Vec<ChunkProof>,
        last_batcher_header: Option<BatchHeader<{ MAX_AGG_SNARKS }>>,
    ) -> BatchTaskDetail {
        // dummy parent batch hash
        let dummy_parent_batch_hash = H256([
            0xab, 0xac, 0xad, 0xae, 0xaf, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
            0, 0, 0, 0, 0, 0, 0, 0,
        ]);
        let chunk_infos: Vec<_> = chunk_proofs.iter().map(|p| p.chunk_info.clone()).collect();

        let l1_message_popped = chunk_traces
            .iter()
            .flatten()
            .map(|chunk| chunk.num_l1_txs())
            .sum();
        let last_block_timestamp = chunk_traces.last().map_or(0, |block_traces| {
            block_traces
                .last()
                .map_or(0, |block_trace| block_trace.header.timestamp.as_u64())
        });

        let blob_bytes = get_blob_from_chunks(&chunk_infos);
        let batch_header = BatchHeader::construct_from_chunks(
            last_batcher_header.map_or(4, |header| header.version),
            last_batcher_header.map_or(123, |header| header.batch_index + 1),
            l1_message_popped,
            last_batcher_header.map_or(l1_message_popped, |header| {
                header.total_l1_message_popped + l1_message_popped
            }),
            last_batcher_header.map_or(dummy_parent_batch_hash, |header| header.batch_hash()),
            last_block_timestamp,
            &chunk_infos,
            &blob_bytes,
        );
        BatchTaskDetail {
            chunk_infos,
            batch_proving_task: BatchProvingTask {
                chunk_proofs,
                batch_header,
                blob_bytes,
            },
        }
    }

    fn check_vk(proof_type: CircuitType, vk: Vec<u8>, info: &str) {
        log::info!("check_vk, {:?}", proof_type);
        let vk_from_file = read_vk(proof_type).unwrap();
        assert_eq!(vk_from_file, encode_vk(vk), "{info}")
    }

    fn read_vk(proof_type: CircuitType) -> Result<String> {
        log::info!("read_vk, {:?}", proof_type);
        let vk_file = match proof_type {
            CircuitType::Chunk => CHUNK_VK_PATH.clone(),
            CircuitType::Batch => BATCH_VK_PATH.clone(),
            CircuitType::Bundle => todo!(),
            CircuitType::Undefined => unreachable!(),
        };

        let data = std::fs::read(vk_file)?;
        Ok(encode_vk(data))
    }

    fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
        log::info!("read_chunk_trace, {:?}", path);
        let mut chunk_trace: Vec<BlockTrace> = vec![];

        fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
            let f = std::fs::File::open(file)?;
            Ok(serde_json::from_reader(&f)?)
        }

        if path.is_dir() {
            let entries = std::fs::read_dir(&path)?;
            let mut files: Vec<String> = entries
                .into_iter()
                .filter_map(|e| {
                    if e.is_err() {
                        return None;
                    }
                    let entry = e.unwrap();
                    if entry.path().is_dir() {
                        return None;
                    }
                    if let Result::Ok(file_name) = entry.file_name().into_string() {
                        Some(file_name)
                    } else {
                        None
                    }
                })
                .collect();
            files.sort();

            log::info!("files in chunk {:?} is {:?}", path, files);
            for file in files {
                let block_trace = read_block_trace(&path.join(file))?;
                chunk_trace.push(block_trace);
            }
        } else {
            let block_trace = read_block_trace(&path)?;
            chunk_trace.push(block_trace);
        }
        Ok(chunk_trace)
    }

    fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
        let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
        let entries = std::fs::read_dir(&batch_path)?;
        let mut files: Vec<String> = entries
            .filter_map(|e| {
                if e.is_err() {
                    return None;
                }
                let entry = e.unwrap();
                if entry.path().is_dir() {
                    if let Result::Ok(file_name) = entry.file_name().into_string() {
                        Some(file_name)
                    } else {
                        None
                    }
                } else {
                    None
                }
            })
            .collect();
        files.sort();
        log::info!("files in batch {:?} is {:?}", batch_path, files);
        Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
    }

    fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkInfo> {
        let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
        Ok(ChunkInfo::from_witness_block(&witness_block, false))
    }

    fn dump_proof(id: String, proof_data: String) -> Result<()> {
        let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
        Ok(std::fs::write(dump_path.join(id), proof_data)?)
    }
}
@@ -32,5 +32,5 @@ docker_push:
	docker push scrolltech/rollup-relayer:${IMAGE_VERSION}

docker:
	DOCKER_BUILDKIT=1 docker build -t scrolltech/gas-oracle:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/gas_oracle.Dockerfile
	DOCKER_BUILDKIT=1 docker build -t scrolltech/rollup-relayer:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/rollup_relayer.Dockerfile
	DOCKER_BUILDKIT=1 docker build -t scrolltech/gas-oracle:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/gas_oracle.Dockerfile --platform=linux/amd64
	DOCKER_BUILDKIT=1 docker build -t scrolltech/rollup-relayer:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/rollup_relayer.Dockerfile --platform=linux/amd64
@@ -33,3 +33,45 @@ make rollup_bins
./build/bin/gas_oracle --config ./conf/config.json
./build/bin/rollup_relayer --config ./conf/config.json
```

## Proposer Tool

The Proposer Tool replays historical blocks with custom configurations (e.g., future hardfork configs, or custom chunk/batch/bundle proposer configs) to generate chunks, batches, and bundles, helping you test parameter changes before a protocol upgrade.

You can:

1. Enable different hardforks in the genesis configuration.
2. Set custom chunk-proposer, batch-proposer, and bundle-proposer parameters.
3. Analyze the resulting metrics (blob size, block count, transaction count, gas usage).

## How to run the proposer tool

### Set the configs

1. Set the genesis config to enable the desired hardforks in [`proposer-tool-genesis.json`](./proposer-tool-genesis.json).
2. Set the proposer config in [`proposer-tool-config.json`](./proposer-tool-config.json) for data analysis.
3. Set `start-l2-block` in the launch command of the proposer-tool in [`docker-compose-proposer-tool.yml`](./docker-compose-proposer-tool.yml) to the block number you want to start from; the default is `0`, which means starting from the genesis block. See the sketch below.
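A hypothetical excerpt of the relevant service entry is sketched here; the service name, container paths, and exact flag spellings are illustrative, not copied from the actual compose file:

```
# sketch of docker-compose-proposer-tool.yml (names and paths are illustrative)
services:
  proposer-tool:
    command:
      - "--config=/app/proposer-tool-config.json"
      - "--genesis=/app/proposer-tool-genesis.json"
      - "--start-l2-block=0"  # replace 0 with the L2 block height to replay from
```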

### Start the proposer tool using docker-compose

Prerequisite: an RPC URL for an archive L2 node. The default URL in [`proposer-tool-config.json`](./proposer-tool-config.json) is `https://rpc.scroll.io`.

```
cd rollup
DOCKER_BUILDKIT=1 docker-compose -f docker-compose-proposer-tool.yml up -d
```
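
To confirm the replay is progressing, you can tail the tool's logs; this assumes the compose service is named `proposer-tool` (check the service name in the compose file):

```
docker-compose -f docker-compose-proposer-tool.yml logs -f proposer-tool
```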

> Note: Port 5432 of the database is mapped to the host machine, so you can use `psql` or any other DB client to connect to the database.

> The DSN for the database is `postgres://postgres:postgres@db:5432/scroll?sslmode=disable`.
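
For example, from the host machine you can connect with `psql` (assuming it is installed) by substituting `localhost` for the in-network hostname `db`; `\dt` lists the tables the tool has created:

```
psql "postgres://postgres:postgres@localhost:5432/scroll?sslmode=disable" -c '\dt'
```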

### Reset env

```
docker-compose -f docker-compose-proposer-tool.yml down -v
```

> Note: `down -v` also removes the database volume, so all previously generated data is dropped.

If you need to rebuild the images, you must first remove the old ones. You can do this by running the following command:
```
docker images | grep rollup | awk '{print $3}' | xargs docker rmi -f
```
@@ -1,42 +1,60 @@
package bridgeabi

import (
    "fmt"
    "math/big"
    "testing"

    "github.com/scroll-tech/go-ethereum/common"
    "github.com/scroll-tech/go-ethereum/common/hexutil"
    "github.com/stretchr/testify/assert"
)

func TestPackCommitBatch(t *testing.T) {
    assert := assert.New(t)

func TestPackCommitBatches(t *testing.T) {
    scrollChainABI, err := ScrollChainMetaData.GetAbi()
    assert.NoError(err)
    assert.NoError(t, err)

    version := uint8(1)
    var parentBatchHeader []byte
    var chunks [][]byte
    var skippedL1MessageBitmap []byte
    version := uint8(7)
    var parentBatchHash common.Hash
    var lastBatchHash common.Hash

    _, err = scrollChainABI.Pack("commitBatch", version, parentBatchHeader, chunks, skippedL1MessageBitmap)
    assert.NoError(err)
    _, err = scrollChainABI.Pack("commitBatches", version, parentBatchHash, lastBatchHash)
    assert.NoError(t, err)
}

func TestPackFinalizeBatchWithProof(t *testing.T) {
    assert := assert.New(t)

func TestPackFinalizeBundlePostEuclidV2(t *testing.T) {
    l1RollupABI, err := ScrollChainMetaData.GetAbi()
    assert.NoError(err)
    assert.NoError(t, err)

    batchHeader := []byte{}
    prevStateRoot := common.Hash{}
    postStateRoot := common.Hash{}
    withdrawRoot := common.Hash{}
    aggrProof := []byte{}
    var batchHeader []byte
    totalL1MessagesPoppedOverall := big.NewInt(0)
    var postStateRoot common.Hash
    var withdrawRoot common.Hash
    var aggrProof []byte

    _, err = l1RollupABI.Pack("finalizeBatchWithProof", batchHeader, prevStateRoot, postStateRoot, withdrawRoot, aggrProof)
    assert.NoError(err)
    _, err = l1RollupABI.Pack("finalizeBundlePostEuclidV2", batchHeader, totalL1MessagesPoppedOverall, postStateRoot, withdrawRoot, aggrProof)
    assert.NoError(t, err)
}

func TestPackCommitAndFinalizeBatch(t *testing.T) {
    l1RollupABI, err := ScrollChainMetaData.GetAbi()
    assert.NoError(t, err)

    version := uint8(7)
    var parentBatchHash common.Hash
    // Create the FinalizeStruct tuple as an abi-compatible struct
    finalizeStruct := struct {
        BatchHeader                  []byte
        TotalL1MessagesPoppedOverall *big.Int
        PostStateRoot                common.Hash
        WithdrawRoot                 common.Hash
        ZkProof                      []byte
    }{
        TotalL1MessagesPoppedOverall: big.NewInt(0),
    }

    _, err = l1RollupABI.Pack("commitAndFinalizeBatch", version, parentBatchHash, finalizeStruct)
    assert.NoError(t, err)
}

func TestPackImportGenesisBatch(t *testing.T) {
@@ -74,3 +92,20 @@ func TestPackSetL2BaseFee(t *testing.T) {
    _, err = l2GasOracleABI.Pack("setL2BaseFee", baseFee)
    assert.NoError(err)
}

func TestPrintABISignatures(t *testing.T) {
    // print all error signatures of ABI
    abi, err := ScrollChainMetaData.GetAbi()
    if err != nil {
        t.Fatal(err)
    }

    for _, method := range abi.Methods {
        fmt.Println(hexutil.Encode(method.ID[:4]), method.Sig, method.Name)
    }

    fmt.Println("------------------------------")
    for _, errors := range abi.Errors {
        fmt.Println(hexutil.Encode(errors.ID[:4]), errors.Sig, errors.Name)
    }
}