Refactor/zkvm 3 (#1684)

Ho
2025-07-01 07:39:27 +09:00
committed by GitHub
parent 9dc57c6126
commit ea38ae7e96
60 changed files with 1495 additions and 592 deletions

View File

@@ -29,7 +29,7 @@ jobs:
steps:
- uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2024-12-06
toolchain: nightly-2025-02-14
override: true
components: rustfmt, clippy
- name: Install Go

View File

@@ -33,7 +33,7 @@ jobs:
steps:
- uses: actions-rs/toolchain@v1
with:
toolchain: nightly-2023-12-03
toolchain: nightly-2025-02-14
override: true
components: rustfmt, clippy
- name: Install Go

1
.gitignore vendored
View File

@@ -24,3 +24,4 @@ sftp-config.json
*~
target
zkvm-prover/config.json

173
Cargo.lock generated
View File

@@ -3996,13 +3996,36 @@ dependencies = [
[[package]]
name = "openvm-algebra-complex-macros"
version = "0.1.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed13b0741d3a052d434a9c995c6d5d"
source = "git+https://github.com/openvm-org/openvm.git?rev=a0ae88f#a0ae88f2c4d98b651c653a1e9b7598d3ad15455e"
dependencies = [
"openvm-macros-common",
"openvm-macros-common 1.1.1",
"quote",
"syn 2.0.101",
]
[[package]]
name = "openvm-algebra-complex-macros"
version = "0.1.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed13b0741d3a052d434a9c995c6d5d"
dependencies = [
"openvm-macros-common 1.2.0",
"quote",
"syn 2.0.101",
]
[[package]]
name = "openvm-algebra-guest"
version = "1.1.1"
source = "git+https://github.com/openvm-org/openvm.git?rev=a0ae88f#a0ae88f2c4d98b651c653a1e9b7598d3ad15455e"
dependencies = [
"halo2curves-axiom",
"num-bigint 0.4.6",
"openvm-algebra-complex-macros 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=a0ae88f)",
"openvm-algebra-moduli-macros 1.1.1",
"serde-big-array",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-algebra-guest"
version = "1.2.0"
@@ -4010,18 +4033,28 @@ source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed1
dependencies = [
"halo2curves-axiom",
"num-bigint 0.4.6",
"openvm-algebra-complex-macros",
"openvm-algebra-moduli-macros",
"openvm-algebra-complex-macros 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=bdb4831)",
"openvm-algebra-moduli-macros 1.2.0",
"serde-big-array",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-algebra-moduli-macros"
version = "1.1.1"
source = "git+https://github.com/openvm-org/openvm.git?rev=a0ae88f#a0ae88f2c4d98b651c653a1e9b7598d3ad15455e"
dependencies = [
"openvm-macros-common 1.1.1",
"quote",
"syn 2.0.101",
]
[[package]]
name = "openvm-algebra-moduli-macros"
version = "1.2.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed13b0741d3a052d434a9c995c6d5d"
dependencies = [
"openvm-macros-common",
"openvm-macros-common 1.2.0",
"quote",
"syn 2.0.101",
]
@@ -4031,7 +4064,7 @@ name = "openvm-algebra-transpiler"
version = "1.2.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed13b0741d3a052d434a9c995c6d5d"
dependencies = [
"openvm-algebra-guest",
"openvm-algebra-guest 1.2.0",
"openvm-instructions",
"openvm-instructions-derive",
"openvm-stark-backend",
@@ -4217,12 +4250,12 @@ dependencies = [
"num-traits",
"once_cell",
"openvm-algebra-circuit",
"openvm-algebra-guest",
"openvm-algebra-guest 1.2.0",
"openvm-circuit",
"openvm-circuit-derive",
"openvm-circuit-primitives",
"openvm-circuit-primitives-derive",
"openvm-ecc-guest",
"openvm-ecc-guest 1.2.0",
"openvm-ecc-transpiler",
"openvm-instructions",
"openvm-mod-circuit-builder",
@@ -4235,6 +4268,29 @@ dependencies = [
"strum 0.26.3",
]
[[package]]
name = "openvm-ecc-guest"
version = "1.1.1"
source = "git+https://github.com/openvm-org/openvm.git?rev=a0ae88f#a0ae88f2c4d98b651c653a1e9b7598d3ad15455e"
dependencies = [
"ecdsa",
"elliptic-curve",
"group 0.13.0",
"halo2curves-axiom",
"hex-literal",
"lazy_static",
"num-bigint 0.4.6",
"once_cell",
"openvm 1.1.1",
"openvm-algebra-guest 1.1.1",
"openvm-algebra-moduli-macros 1.1.1",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=a0ae88f)",
"openvm-ecc-sw-macros 1.1.1",
"openvm-rv32im-guest 1.1.1",
"serde",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-ecc-guest"
version = "1.2.0"
@@ -4250,22 +4306,32 @@ dependencies = [
"num-bigint 0.4.6",
"once_cell",
"openvm 1.2.0",
"openvm-algebra-guest",
"openvm-algebra-moduli-macros",
"openvm-algebra-guest 1.2.0",
"openvm-algebra-moduli-macros 1.2.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=bdb4831)",
"openvm-ecc-sw-macros",
"openvm-ecc-sw-macros 1.2.0",
"openvm-rv32im-guest 1.2.0",
"p256",
"serde",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-ecc-sw-macros"
version = "1.1.1"
source = "git+https://github.com/openvm-org/openvm.git?rev=a0ae88f#a0ae88f2c4d98b651c653a1e9b7598d3ad15455e"
dependencies = [
"openvm-macros-common 1.1.1",
"quote",
"syn 2.0.101",
]
[[package]]
name = "openvm-ecc-sw-macros"
version = "1.2.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed13b0741d3a052d434a9c995c6d5d"
dependencies = [
"openvm-macros-common",
"openvm-macros-common 1.2.0",
"quote",
"syn 2.0.101",
]
@@ -4275,7 +4341,7 @@ name = "openvm-ecc-transpiler"
version = "1.2.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed13b0741d3a052d434a9c995c6d5d"
dependencies = [
"openvm-ecc-guest",
"openvm-ecc-guest 1.2.0",
"openvm-instructions",
"openvm-instructions-derive",
"openvm-stark-backend",
@@ -4359,6 +4425,14 @@ dependencies = [
"strum 0.26.3",
]
[[package]]
name = "openvm-macros-common"
version = "1.1.1"
source = "git+https://github.com/openvm-org/openvm.git?rev=a0ae88f#a0ae88f2c4d98b651c653a1e9b7598d3ad15455e"
dependencies = [
"syn 2.0.101",
]
[[package]]
name = "openvm-macros-common"
version = "1.2.0"
@@ -4491,10 +4565,10 @@ dependencies = [
"openvm-circuit-primitives",
"openvm-circuit-primitives-derive",
"openvm-ecc-circuit",
"openvm-ecc-guest",
"openvm-ecc-guest 1.2.0",
"openvm-instructions",
"openvm-mod-circuit-builder",
"openvm-pairing-guest",
"openvm-pairing-guest 1.2.0",
"openvm-pairing-transpiler",
"openvm-rv32-adapters",
"openvm-rv32im-circuit",
@@ -4504,6 +4578,32 @@ dependencies = [
"strum 0.26.3",
]
[[package]]
name = "openvm-pairing-guest"
version = "1.1.1"
source = "git+https://github.com/openvm-org/openvm.git?rev=a0ae88f#a0ae88f2c4d98b651c653a1e9b7598d3ad15455e"
dependencies = [
"group 0.13.0",
"halo2curves-axiom",
"hex-literal",
"itertools 0.14.0",
"lazy_static",
"num-bigint 0.4.6",
"num-traits",
"openvm 1.1.1",
"openvm-algebra-complex-macros 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=a0ae88f)",
"openvm-algebra-guest 1.1.1",
"openvm-algebra-moduli-macros 1.1.1",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=a0ae88f)",
"openvm-ecc-guest 1.1.1",
"openvm-ecc-sw-macros 1.1.1",
"openvm-platform 1.1.1",
"openvm-rv32im-guest 1.1.1",
"rand 0.8.5",
"serde",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-pairing-guest"
version = "1.2.0"
@@ -4517,12 +4617,12 @@ dependencies = [
"num-bigint 0.4.6",
"num-traits",
"openvm 1.2.0",
"openvm-algebra-complex-macros",
"openvm-algebra-guest",
"openvm-algebra-moduli-macros",
"openvm-algebra-complex-macros 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=bdb4831)",
"openvm-algebra-guest 1.2.0",
"openvm-algebra-moduli-macros 1.2.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=bdb4831)",
"openvm-ecc-guest",
"openvm-ecc-sw-macros",
"openvm-ecc-guest 1.2.0",
"openvm-ecc-sw-macros 1.2.0",
"openvm-platform 1.2.0",
"openvm-rv32im-guest 1.2.0",
"rand 0.8.5",
@@ -4537,7 +4637,7 @@ source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed1
dependencies = [
"openvm-instructions",
"openvm-instructions-derive",
"openvm-pairing-guest",
"openvm-pairing-guest 1.2.0",
"openvm-stark-backend",
"openvm-transpiler",
"rrs-lib",
@@ -4745,6 +4845,15 @@ dependencies = [
"strum 0.26.3",
]
[[package]]
name = "openvm-sha256-guest"
version = "1.1.1"
source = "git+https://github.com/openvm-org/openvm.git?rev=a0ae88f#a0ae88f2c4d98b651c653a1e9b7598d3ad15455e"
dependencies = [
"openvm-platform 1.1.1",
"sha2",
]
[[package]]
name = "openvm-sha256-guest"
version = "1.2.0"
@@ -4761,7 +4870,7 @@ source = "git+https://github.com/openvm-org/openvm.git?rev=bdb4831#bdb4831fefed1
dependencies = [
"openvm-instructions",
"openvm-instructions-derive",
"openvm-sha256-guest",
"openvm-sha256-guest 1.2.0",
"openvm-stark-backend",
"openvm-transpiler",
"rrs-lib",
@@ -5568,7 +5677,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.1.0"
version = "4.5.8"
dependencies = [
"async-trait",
"base64 0.22.1",
@@ -7351,7 +7460,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-prover"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=29c99de#29c99def6f496bf70a590823cda4b74f67625bc2"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=6078604#607860401682165d5822fba7223ca3419db36b22"
dependencies = [
"alloy-primitives",
"base64 0.22.1",
@@ -7387,7 +7496,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=29c99de#29c99def6f496bf70a590823cda4b74f67625bc2"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=6078604#607860401682165d5822fba7223ca3419db36b22"
dependencies = [
"base64 0.22.1",
"bincode",
@@ -7407,7 +7516,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types-base"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=29c99de#29c99def6f496bf70a590823cda4b74f67625bc2"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=6078604#607860401682165d5822fba7223ca3419db36b22"
dependencies = [
"alloy-primitives",
"alloy-serde 0.8.3",
@@ -7422,10 +7531,14 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types-batch"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=29c99de#29c99def6f496bf70a590823cda4b74f67625bc2"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=6078604#607860401682165d5822fba7223ca3419db36b22"
dependencies = [
"alloy-primitives",
"itertools 0.14.0",
"openvm 1.1.1",
"openvm-ecc-guest 1.1.1",
"openvm-pairing-guest 1.1.1",
"openvm-sha256-guest 1.1.1",
"rkyv",
"scroll-zkvm-types-base",
"serde",
@@ -7435,7 +7548,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types-bundle"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=29c99de#29c99def6f496bf70a590823cda4b74f67625bc2"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=6078604#607860401682165d5822fba7223ca3419db36b22"
dependencies = [
"alloy-primitives",
"itertools 0.14.0",
@@ -7448,7 +7561,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types-chunk"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=29c99de#29c99def6f496bf70a590823cda4b74f67625bc2"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=6078604#607860401682165d5822fba7223ca3419db36b22"
dependencies = [
"alloy-primitives",
"itertools 0.14.0",
@@ -7467,7 +7580,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-verifier"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=29c99de#29c99def6f496bf70a590823cda4b74f67625bc2"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=6078604#607860401682165d5822fba7223ca3419db36b22"
dependencies = [
"bincode",
"eyre",

View File

@@ -17,9 +17,9 @@ repository = "https://github.com/scroll-tech/scroll"
version = "4.5.8"
[workspace.dependencies]
scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "29c99de", package = "scroll-zkvm-prover" }
scroll-zkvm-verifier-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "29c99de", package = "scroll-zkvm-verifier" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "29c99de" }
scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "6078604", package = "scroll-zkvm-prover" }
scroll-zkvm-verifier-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "6078604", package = "scroll-zkvm-verifier" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "6078604" }
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade", features = ["scroll"] }
sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "zkvm/euclid-upgrade" }

3
common/.gitignore vendored
View File

@@ -1,4 +1,3 @@
/build/bin
.idea
libzkp/impl/target
libzkp/interface/*.a
libzkp

View File

@@ -4,5 +4,4 @@ test:
go test -v -race -coverprofile=coverage.txt -covermode=atomic -p 1 $(PWD)/...
lint: ## Lint the files - used for CI
GOBIN=$(PWD)/build/bin go run ../build/lint.go
cd libzkp/impl && cargo fmt --all -- --check && cargo clippy --release -- -D warnings
GOBIN=$(PWD)/build/bin go run ../build/lint.go

View File

@@ -51,6 +51,7 @@ test-gpu-verifier: $(LIBZKP_PATH)
lint: ## Lint the files - used for CI
GOBIN=$(PWD)/build/bin go run ../build/lint.go
cd ../ && cargo fmt --all -- --check && cargo clippy --release -- -D warnings
clean: ## Empty out the bin folder
@rm -rf build/bin

View File

@@ -90,12 +90,12 @@ func (c *CoordinatorApp) MockConfig(store bool) error {
cfg.ProverManager = &coordinatorConfig.ProverManager{
ProversPerSession: 1,
Verifier: &coordinatorConfig.VerifierConfig{
HighVersionCircuit: &coordinatorConfig.CircuitConfig{
AssetsPath: "",
ForkName: "euclidV2",
MinProverVersion: "v4.4.89",
MinProverVersion: "v4.4.89",
Verifiers: []coordinatorConfig.AssetConfig{{
AssetsPath: "",
ForkName: "euclidV2",
},
},
}},
BatchCollectionTimeSec: 60,
ChunkCollectionTimeSec: 60,
SessionAttempts: 10,

View File

@@ -19,6 +19,7 @@ import (
)
var app *cli.App
var cfg *config.Config
func init() {
// Set up coordinator app info.
@@ -29,16 +30,29 @@ func init() {
app.Version = version.Version
app.Flags = append(app.Flags, utils.CommonFlags...)
app.Before = func(ctx *cli.Context) error {
return utils.LogSetup(ctx)
if err := utils.LogSetup(ctx); err != nil {
return err
}
cfgFile := ctx.String(utils.ConfigFileFlag.Name)
var err error
cfg, err = config.NewConfig(cfgFile)
if err != nil {
log.Crit("failed to load config file", "config file", cfgFile, "error", err)
}
return nil
}
// sub commands
app.Commands = []*cli.Command{
{
Name: "verify",
Usage: "verify a proof, specified by [forkname] <type> <proof path>",
Action: verify,
},
}
}
func action(ctx *cli.Context) error {
cfgFile := ctx.String(utils.ConfigFileFlag.Name)
cfg, err := config.NewConfig(cfgFile)
if err != nil {
log.Crit("failed to load config file", "config file", cfgFile, "error", err)
}
db, err := database.InitDB(cfg.DB)
if err != nil {
log.Crit("failed to init db connection", "err", err)

View File

@@ -0,0 +1,118 @@
package main
import (
"bytes"
"encoding/base64"
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"scroll-tech/coordinator/internal/logic/verifier"
"scroll-tech/common/types/message"
"github.com/scroll-tech/go-ethereum/log"
"github.com/urfave/cli/v2"
)
func verify(cCtx *cli.Context) error {
var forkName, proofType, proofPath string
if cCtx.Args().Len() <= 2 {
forkName = cfg.ProverManager.Verifier.Verifiers[0].ForkName
proofType = cCtx.Args().First()
proofPath = cCtx.Args().Get(1)
} else {
forkName = cCtx.Args().First()
proofType = cCtx.Args().Get(1)
proofPath = cCtx.Args().Get(2)
}
log.Info("verify proof in: ", proofPath, "type", proofType, "forkName", forkName)
// Load the content of the proof file
data, err := os.ReadFile(filepath.Clean(proofPath))
if err != nil {
return fmt.Errorf("error reading file: %w", err)
}
vf, err := verifier.NewVerifier(cfg.ProverManager.Verifier)
if err != nil {
return err
}
var ret bool
switch strings.ToLower(proofType) {
case "chunk":
proof := &message.OpenVMChunkProof{}
if err := json.Unmarshal(data, proof); err != nil {
return err
}
vk, ok := vf.ChunkVk[forkName]
if !ok {
return fmt.Errorf("no vk loaded for fork %s", forkName)
}
if len(proof.Vk) != 0 {
if !bytes.Equal(proof.Vk, vk) {
return fmt.Errorf("vk mismatch: expected %s, got %s",
base64.StdEncoding.EncodeToString(vk),
base64.StdEncoding.EncodeToString(proof.Vk),
)
}
} else {
proof.Vk = vk
}
ret, err = vf.VerifyChunkProof(proof, forkName)
case "batch":
proof := &message.OpenVMBatchProof{}
if err := json.Unmarshal(data, proof); err != nil {
return err
}
vk, ok := vf.BatchVk[forkName]
if !ok {
return fmt.Errorf("no vk loaded for fork %s", forkName)
}
if len(proof.Vk) != 0 {
if !bytes.Equal(proof.Vk, vk) {
return fmt.Errorf("vk mismatch: expected %s, got %s",
base64.StdEncoding.EncodeToString(vk),
base64.StdEncoding.EncodeToString(proof.Vk),
)
}
} else {
proof.Vk = vk
}
ret, err = vf.VerifyBatchProof(proof, forkName)
case "bundle":
proof := &message.OpenVMBundleProof{}
if err := json.Unmarshal(data, proof); err != nil {
return err
}
vk, ok := vf.BundleVk[forkName]
if !ok {
return fmt.Errorf("no vk loaded for fork %s", forkName)
}
if len(proof.Vk) != 0 {
if !bytes.Equal(proof.Vk, vk) {
return fmt.Errorf("vk mismatch: expected %s, got %s",
base64.StdEncoding.EncodeToString(vk),
base64.StdEncoding.EncodeToString(proof.Vk),
)
}
} else {
proof.Vk = vk
}
ret, err = vf.VerifyBundleProof(proof, forkName)
default:
return fmt.Errorf("unsupport proof type %s", proofType)
}
if err != nil {
return err
}
log.Info("verified:", "ret", ret)
return nil
}
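Note: the per-fork vk check above is repeated verbatim for the chunk, batch and bundle branches. A helper along the following lines could factor it out — a sketch only, the checkVk function is hypothetical and not part of this commit; it assumes the bytes, base64 and fmt imports already present in this file.
// checkVk validates the proof's embedded vk against the verifier's expected vk
// for the given fork, or back-fills it when the proof carries no vk.
func checkVk(proofVk *[]byte, vkMap map[string][]byte, forkName string) error {
	vk, ok := vkMap[forkName]
	if !ok {
		return fmt.Errorf("no vk loaded for fork %s", forkName)
	}
	if len(*proofVk) == 0 {
		*proofVk = vk
		return nil
	}
	if !bytes.Equal(*proofVk, vk) {
		return fmt.Errorf("vk mismatch: expected %s, got %s",
			base64.StdEncoding.EncodeToString(vk),
			base64.StdEncoding.EncodeToString(*proofVk))
	}
	return nil
}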

View File

@@ -7,11 +7,11 @@
"batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180,
"verifier": {
"high_version_circuit": {
"min_prover_version": "v4.4.45",
"verifiers": [{
"assets_path": "assets",
"fork_name": "euclidV2",
"min_prover_version": "v4.4.45"
}
"fork_name": "euclidV2"
}]
}
},
"db": {
@@ -21,7 +21,10 @@
"maxIdleNum": 20
},
"l2": {
"chain_id": 111
"chain_id": 111,
"l2geth": {
"endpoint": "not need to specified for mocking"
}
},
"auth": {
"secret": "prover secret key",

View File

@@ -46,6 +46,7 @@ require (
)
require (
github.com/VictoriaMetrics/fastcache v1.12.2 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.20.0 // indirect
github.com/btcsuite/btcd v0.20.1-beta // indirect
@@ -55,28 +56,57 @@ require (
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
github.com/crate-crypto/go-kzg-4844 v1.1.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/deckarep/golang-set v0.0.0-20180603214616-504e848d77ea // indirect
github.com/edsrzf/mmap-go v1.0.0 // indirect
github.com/ethereum/c-kzg-4844 v1.0.3 // indirect
github.com/fjl/memsize v0.0.2 // indirect
github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-stack/stack v1.8.1 // indirect
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
github.com/gorilla/websocket v1.4.2 // indirect
github.com/hashicorp/go-bexpr v0.1.10 // indirect
github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d // indirect
github.com/holiman/bloomfilter/v2 v2.0.3 // indirect
github.com/holiman/uint256 v1.3.2 // indirect
github.com/huin/goupnp v1.0.2 // indirect
github.com/iden3/go-iden3-crypto v0.0.17 // indirect
github.com/jackpal/go-nat-pmp v1.0.2-0.20160603034137-1fa385a6f458 // indirect
github.com/klauspost/compress v1.17.9 // indirect
github.com/mattn/go-colorable v0.1.8 // indirect
github.com/mattn/go-runewidth v0.0.15 // indirect
github.com/mitchellh/pointerstructure v1.2.0 // indirect
github.com/mmcloughlin/addchain v0.4.0 // indirect
github.com/olekukonko/tablewriter v0.0.5 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.5.0 // indirect
github.com/prometheus/common v0.48.0 // indirect
github.com/prometheus/procfs v0.12.0 // indirect
github.com/prometheus/tsdb v0.7.1 // indirect
github.com/rivo/uniseg v0.4.4 // indirect
github.com/rjeczalik/notify v0.9.1 // indirect
github.com/rs/cors v1.7.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/scroll-tech/zktrie v0.8.4 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect
github.com/sourcegraph/conc v0.3.0 // indirect
github.com/status-im/keycard-go v0.0.0-20190316090335-8537d3370df4 // indirect
github.com/supranational/blst v0.3.13 // indirect
github.com/syndtr/goleveldb v1.0.1-0.20210819022825-2ae1ddf74ef7 // indirect
github.com/tklauser/go-sysconf v0.3.14 // indirect
github.com/tklauser/numcpus v0.9.0 // indirect
github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef // indirect
github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
go.uber.org/atomic v1.7.0 // indirect
go.uber.org/multierr v1.9.0 // indirect
golang.org/x/crypto v0.32.0 // indirect
golang.org/x/sync v0.11.0 // indirect
golang.org/x/sys v0.30.0 // indirect
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba // indirect
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
gopkg.in/urfave/cli.v1 v1.20.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
rsc.io/tmplfunc v0.0.3 // indirect
)

View File

@@ -1,12 +1,18 @@
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
github.com/VictoriaMetrics/fastcache v1.12.2 h1:N0y9ASrJ0F6h0QaC3o6uJb3NIZ9VKLjCM7NQbSmF7WI=
github.com/VictoriaMetrics/fastcache v1.12.2/go.mod h1:AmC+Nzz1+3G2eCPapF6UcsnkThDcMsQicp4xDukwJYI=
github.com/aead/siphash v1.0.1/go.mod h1:Nywa3cDsYNNK3gaciGTWPwHt0wlpNV15vwmswBAUSII=
github.com/agiledragon/gomonkey/v2 v2.12.0 h1:ek0dYu9K1rSV+TgkW5LvNNPRWyDZVIxGMCFI6Pz9o38=
github.com/agiledragon/gomonkey/v2 v2.12.0/go.mod h1:ap1AmDzcVOAz1YpeJ3TCzIgstoaWLA6jbbgxfB4w2iY=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156 h1:eMwmnE/GDgah4HI848JfFxHt+iPb26b4zyfspmqY0/8=
github.com/allegro/bigcache v1.2.1-0.20190218064605-e24eb225f156/go.mod h1:Cb/ax3seSYIx7SuZdm2G2xzfwmv3TPSk2ucNfQESPXM=
github.com/appleboy/gin-jwt/v2 v2.9.1 h1:l29et8iLW6omcHltsOP6LLk4s3v4g2FbFs0koxGWVZs=
github.com/appleboy/gin-jwt/v2 v2.9.1/go.mod h1:jwcPZJ92uoC9nOUTOKWoN/f6JZOgMSKlFSHw5/FrRUk=
github.com/appleboy/gofight/v2 v2.1.2 h1:VOy3jow4vIK8BRQJoC/I9muxyYlJ2yb9ht2hZoS3rf4=
github.com/appleboy/gofight/v2 v2.1.2/go.mod h1:frW+U1QZEdDgixycTj4CygQ48yLTUhplt43+Wczp3rw=
github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM=
github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw=
github.com/bits-and-blooms/bitset v1.20.0 h1:2F+rfL86jE2d/bmw7OhqUg2Sj/1rURkBn3MdfoPyRVU=
@@ -24,6 +30,9 @@ github.com/bytedance/sonic v1.5.0/go.mod h1:ED5hyg4y6t3/9Ku1R6dU/4KyJ48DZ4jPhfY1
github.com/bytedance/sonic v1.10.0-rc/go.mod h1:ElCzW+ufi8qKqNW0FY314xriJhyJhuoJ3gFZdAHF7NM=
github.com/bytedance/sonic v1.10.1 h1:7a1wuFXL1cMy7a3f7/VFcEtriuXQnUBhtoVfOZiaysc=
github.com/bytedance/sonic v1.10.1/go.mod h1:iZcSUejdk5aukTND/Eu/ivjQuEL0Cu9/rf50Hi0u/g4=
github.com/cespare/cp v0.1.0 h1:SE+dxFebS7Iik5LK0tsi1k9ZCxEaFX4AjQmoyA+1dJk=
github.com/cespare/cp v0.1.0/go.mod h1:SOGHArjBr4JWaSDEVpWpo/hNg6RoKrls6Oh40hiwW+s=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.2.0 h1:DC2CZ1Ep5Y4k3ZQ899DldepgrayRUGE6BBZ/cd9Cj44=
github.com/cespare/xxhash/v2 v2.2.0/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chenzhuoyu/base64x v0.0.0-20211019084208-fb5309c8db06/go.mod h1:DH46F32mSOjUmXrMHnKwZdA8wcEefY7UVqBKYGjpdQY=
@@ -45,16 +54,32 @@ github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c=
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/deckarep/golang-set v0.0.0-20180603214616-504e848d77ea h1:j4317fAZh7X6GqbFowYdYdI0L9bwxL07jyPZIdepyZ0=
github.com/deckarep/golang-set v0.0.0-20180603214616-504e848d77ea/go.mod h1:93vsz/8Wt4joVM7c2AVqh+YRMiUSc14yDtF28KmMOgQ=
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/edsrzf/mmap-go v1.0.0 h1:CEBF7HpRnUCSJgGUb5h1Gm7e3VkmVDrR8lvWVLtrOFw=
github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
github.com/ethereum/c-kzg-4844 v1.0.3 h1:IEnbOHwjixW2cTvKRUlAAUOeleV7nNM/umJR+qy4WDs=
github.com/ethereum/c-kzg-4844 v1.0.3/go.mod h1:VewdlzQmpT5QSrVhbBuGoCdFJkpaJlO1aQputP83wc0=
github.com/fjl/memsize v0.0.2 h1:27txuSD9or+NZlnOWdKUxeBzTAUkWCVh+4Gf2dWFOzA=
github.com/fjl/memsize v0.0.2/go.mod h1:VvhXpOYNQvB+uIk2RvXzuaQtkQJzzIx6lSBe1xv7hi0=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/gabriel-vasile/mimetype v1.4.2 h1:w5qFW6JKBz9Y393Y4q372O9A7cUSequkh1Q7OhCmWKU=
github.com/gabriel-vasile/mimetype v1.4.2/go.mod h1:zApsH/mKG4w07erKIaJPFiX0Tsq9BFQgN3qGY5GnNgA=
github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff h1:tY80oXqGNY4FhTFhk+o9oFHGINQ/+vhlm8HFzi6znCI=
github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff/go.mod h1:x7DCsMOv1taUwEWCzT4cmDeAkigA5/QCwUodaVOe8Ww=
github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE=
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
github.com/gin-gonic/gin v1.8.1/go.mod h1:ji8BvRH1azfM+SYow9zQ6SZMvR8qOMZHmsCuWR9tTTk=
github.com/gin-gonic/gin v1.9.1 h1:4idEAncQnU5cB7BeOkPtxjfCSye0AAm1R0RVIqJ+Jmg=
github.com/gin-gonic/gin v1.9.1/go.mod h1:hPrL7YrpYKXt5YId3A/Tnip5kqbEAP+KLuI3SUcPTeU=
github.com/go-kit/kit v0.8.0 h1:Wz+5lgoB0kkuqLEc6NVmwRknTKP6dTGbSqvhZtBI/j0=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNVA=
github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
github.com/go-ole/go-ole v1.2.6/go.mod h1:pprOEPIfldk/42T2oK7lQ4v4JSDwmV0As9GaiUsvbm0=
github.com/go-ole/go-ole v1.3.0 h1:Dt6ye7+vXGIKZ7Xtk4s6/xVdGDQynvom7xCFEdWr6uE=
github.com/go-ole/go-ole v1.3.0/go.mod h1:5LS6F96DhAwUc7C+1HLexzMXY1xGRSryjyPPKW6zv78=
@@ -73,19 +98,31 @@ github.com/go-playground/validator/v10 v10.15.5 h1:LEBecTWb/1j5TNY1YYG2RcOUN3R7N
github.com/go-playground/validator/v10 v10.15.5/go.mod h1:9iXMNT7sEkjXb0I+enO7QXmzG6QCsPWY4zveKFVRSyU=
github.com/go-resty/resty/v2 v2.7.0 h1:me+K9p3uhSmXtrBZ4k9jcEAfJmuC8IivWHwaLZwPrFY=
github.com/go-resty/resty/v2 v2.7.0/go.mod h1:9PWDzw47qPphMRFfhsyk0NnSgvluHcljSMVIq3w7q0I=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/go-stack/stack v1.8.1 h1:ntEHSVwIt7PNXNpgPmVfMrNhLtgjlmnZha2kOpuRiDw=
github.com/go-stack/stack v1.8.1/go.mod h1:dcoOX6HbPZSZptuspn9bctJ+N/CnF5gGygcUP3XYfe4=
github.com/goccy/go-json v0.9.7/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.0/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/goccy/go-json v0.10.2 h1:CrxCmQqYDkv1z7lO7Wbh2HN93uovUHgrECaO5ZrCXAU=
github.com/goccy/go-json v0.10.2/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/golang-jwt/jwt/v4 v4.4.3/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang-jwt/jwt/v4 v4.5.0 h1:7cYmW1XlMY7h7ii7UhUyChSgS5wUJEnm9uZVTGqOWzg=
github.com/golang-jwt/jwt/v4 v4.5.0/go.mod h1:m21LjoU+eqJr34lmDMbreY2eSTRJ1cv77w39/MY0Ch0=
github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/golang/protobuf v1.4.0-rc.1/go.mod h1:ceaxUfeHdC40wWswd/P6IGgMaK3YpKi5j83Wpe3EHw8=
github.com/golang/protobuf v1.4.0-rc.1.0.20200221234624-67d41d38c208/go.mod h1:xKAWHe0F5eneWXFV3EuXVDTCmh+JuBKY0li0aMyXATA=
github.com/golang/protobuf v1.4.0-rc.2/go.mod h1:LlEzMj4AhA7rCAGe4KMBDvJI+AwstrUpVNzEA03Pprs=
github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:WU3c8KckQ9AFe+yFwt9sWVRKCVIyN9cPHBJSNnbL67w=
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
github.com/golang/protobuf v1.5.0/go.mod h1:FsONVRAS9T7sI+LIUmWTfcYkHO4aIWwzhcaSAoJOfIk=
github.com/golang/snappy v0.0.4/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb h1:PBC98N2aIaM3XXiurYmW7fx4GZkL8feAMVq7nEjURHk=
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI=
github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY=
@@ -93,13 +130,24 @@ github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/
github.com/google/subcommands v1.2.0/go.mod h1:ZjhPrFU+Olkh9WazFPsl27BQ4UPiG37m3yTrtFlrHVk=
github.com/google/uuid v1.6.0 h1:NIvaJDMOsjHA8n1jAhLSgzrAzy1Hgr+hNrb57e+94F0=
github.com/google/uuid v1.6.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
github.com/gorilla/websocket v1.4.2 h1:+/TMaTYc4QFitKJxsQ7Yye35DkWvkdLcvGKqM+x0Ufc=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/hashicorp/go-bexpr v0.1.10 h1:9kuI5PFotCboP3dkDYFr/wi0gg0QVbSNz5oFRpxn4uE=
github.com/hashicorp/go-bexpr v0.1.10/go.mod h1:oxlubA2vC/gFVfX1A6JGp7ls7uCDlfJn732ehYYg+g0=
github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d h1:dg1dEPuWpEqDnvIw251EVy4zlP8gWbsGj4BsUKCRpYs=
github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
github.com/holiman/bloomfilter/v2 v2.0.3 h1:73e0e/V0tCydx14a0SCYS/EWCxgwLZ18CZcZKVu0fao=
github.com/holiman/bloomfilter/v2 v2.0.3/go.mod h1:zpoh+gs7qcpqrHr3dB55AMiJwo0iURXE7ZOP9L9hSkA=
github.com/holiman/uint256 v1.3.2 h1:a9EgMPSC1AAaj1SZL5zIQD3WbwTuHrMGOerLjGmM/TA=
github.com/holiman/uint256 v1.3.2/go.mod h1:EOMSn4q6Nyt9P6efbI3bueV4e1b3dGlUCXeiRV4ng7E=
github.com/hpcloud/tail v1.0.0/go.mod h1:ab1qPbhIpdTxEkNHXyeSf5vhxWSCs/tWer42PpOxQnU=
github.com/huin/goupnp v1.0.2 h1:RfGLP+h3mvisuWEyybxNq5Eft3NWhHLPeUN72kpKZoI=
github.com/huin/goupnp v1.0.2/go.mod h1:0dxJBVBHqTMjIUMkESDTNgOOx/Mw5wYIfyFmdzSamkM=
github.com/huin/goutil v0.0.0-20170803182201-1ca381bf3150/go.mod h1:PpLOETDnJ0o3iZrZfqZzyLl6l7F3c6L1oWn7OICBi6o=
github.com/iden3/go-iden3-crypto v0.0.17 h1:NdkceRLJo/pI4UpcjVah4lN/a3yzxRUGXqxbWcYh9mY=
github.com/iden3/go-iden3-crypto v0.0.17/go.mod h1:dLpM4vEPJ3nDHzhWFXDjzkn1qHoBeOT/3UEhXsEsP3E=
github.com/jackpal/go-nat-pmp v1.0.2-0.20160603034137-1fa385a6f458 h1:6OvNmYgJyexcZ3pYbTI9jWx5tHo1Dee/tWbLMfPe2TA=
github.com/jackpal/go-nat-pmp v1.0.2-0.20160603034137-1fa385a6f458/go.mod h1:QPH045xvCAeXUZOxsnwmrtiCoxIr9eob+4orBN1SBKc=
github.com/jessevdk/go-flags v0.0.0-20141203071132-1679536dcc89/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
@@ -115,6 +163,7 @@ github.com/klauspost/cpuid/v2 v2.0.9/go.mod h1:FInQzS24/EEf25PyTYn52gqo7WaD8xa02
github.com/klauspost/cpuid/v2 v2.2.5 h1:0E5MSMDEoAulmXNFquVs//DdoomxaoTY1kUhbc/qbZg=
github.com/klauspost/cpuid/v2 v2.2.5/go.mod h1:Lcz8mBdAVJIBVzewtcLocK12l3Y+JytZYpaMropDUws=
github.com/knz/go-libedit v1.10.1/go.mod h1:MZTVkCWyz0oBc7JOWP3wNAzd002ZbM/5hgShxwh4x8M=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
@@ -129,14 +178,22 @@ github.com/leanovate/gopter v0.2.11/go.mod h1:aK3tzZP/C+p1m3SPRE4SYZFGP7jjkuSI4f
github.com/leodido/go-urn v1.2.1/go.mod h1:zt4jvISO2HfUBqxjfIshjdMTYS56ZS/qv49ictyFfxY=
github.com/leodido/go-urn v1.2.4 h1:XlAE/cm/ms7TE/VMVoduSpNBoyc2dOxHs5MZSwAN63Q=
github.com/leodido/go-urn v1.2.4/go.mod h1:7ZrI8mTSeBSHl/UaRyKQW1qZeMgak41ANeCNaVckg+4=
github.com/mattn/go-colorable v0.1.8 h1:c1ghPdyEDarC70ftn0y+A/Ee++9zz8ljHG1b13eJ0s8=
github.com/mattn/go-colorable v0.1.8/go.mod h1:u6P/XSegPjTcexA+o6vUJrdnUu04hMope9wVRipJSqc=
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY=
github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y=
github.com/mattn/go-runewidth v0.0.9/go.mod h1:H031xJmbD/WCDINGzjvQ9THkh0rPKHF+m2gUSrubnMI=
github.com/mattn/go-runewidth v0.0.15 h1:UNAjwbU9l54TA3KzvqLGxwWjHmMgBUVhBiTjelZgg3U=
github.com/mattn/go-runewidth v0.0.15/go.mod h1:Jdepj2loyihRzMpdS35Xk/zdY8IAYHsh153qUoGf23w=
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
github.com/mitchellh/mapstructure v1.4.1/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/mapstructure v1.5.0 h1:jeMsZIYE/09sWLaz43PL7Gy6RuMjD2eJVyuac5Z2hdY=
github.com/mitchellh/mapstructure v1.5.0/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/mitchellh/pointerstructure v1.2.0 h1:O+i9nHnXS3l/9Wu7r4NrEdwA2VFTicjUEN1uBnDo34A=
github.com/mitchellh/pointerstructure v1.2.0/go.mod h1:BRAsLI5zgXmw97Lf6s25bs8ohIXc3tViBH44KcwB2g4=
github.com/mmcloughlin/addchain v0.4.0 h1:SobOdjm2xLj1KkXN5/n0xTIWyZA2+s99UCY1iPfkHRY=
github.com/mmcloughlin/addchain v0.4.0/go.mod h1:A86O+tHqZLMNO4w6ZZ4FlVQEadcoqkyU72HC5wJ4RlU=
github.com/mmcloughlin/profile v0.1.1/go.mod h1:IhHD7q1ooxgwTgjxQYkACGA77oFTDdFVejUS1/tS/qU=
@@ -145,36 +202,55 @@ github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd h1:TRLaZ9cD/w
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v1.0.2 h1:xBagoLtFs94CBntxluKeaWgTMpvLxC4ur3nMaC9Gz0M=
github.com/modern-go/reflect2 v1.0.2/go.mod h1:yWuevngMOJpCy52FWWMvUC8ws7m/LJsjYzDa0/r8luk=
github.com/nxadm/tail v1.4.4 h1:DQuhQpB1tVlglWS2hLQ5OV6B5r8aGxSrPc5Qo6uTN78=
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/olekukonko/tablewriter v0.0.5 h1:P2Ga83D34wi1o9J6Wh1mRuqd4mF/x/lgBS7N7AbDhec=
github.com/olekukonko/tablewriter v0.0.5/go.mod h1:hPp6KlRPjbx+hW8ykQs1w3UBbZlj6HuIJcUGPhkA7kY=
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.7.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
github.com/onsi/ginkgo v1.14.0 h1:2mOpI4JVVPBN+WQRa0WKH2eXR+Ey+uK4n7Zj0aYpIQA=
github.com/onsi/ginkgo v1.14.0/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY=
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE=
github.com/onsi/gomega v1.10.1/go.mod h1:iN09h71vgCQne3DLsj+A5owkum+a2tYe+TOCB1ybHNo=
github.com/pelletier/go-toml/v2 v2.0.1/go.mod h1:r9LEWfGN8R5k0VXJ+0BkIe7MYkRdwZOjgMj2KwnJFUo=
github.com/pelletier/go-toml/v2 v2.0.6/go.mod h1:eumQOmlWiOPt5WriQQqoM5y18pDHwha2N+QD+EUNTek=
github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6/MHO4=
github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc=
github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
github.com/prometheus/client_golang v1.19.0 h1:ygXvpU1AoN1MhdzckN+PyD9QJOSD4x7kmXYlnfbA6JU=
github.com/prometheus/client_golang v1.19.0/go.mod h1:ZRM9uEAypZakd+q/x7+gmsvXdURP+DABIEIjnmDdp+k=
github.com/prometheus/client_model v0.0.0-20180712105110-5c3871d89910/go.mod h1:MbSGuTsp3dbXC40dX6PRTWyKYBIrTGTE9sqQNg2J8bo=
github.com/prometheus/client_model v0.5.0 h1:VQw1hfvPvk3Uv6Qf29VrPF32JB6rtbgI6cYPYQjL0Qw=
github.com/prometheus/client_model v0.5.0/go.mod h1:dTiFglRmd66nLR9Pv9f0mZi7B7fk5Pm3gvsjB5tr+kI=
github.com/prometheus/common v0.0.0-20181113130724-41aa239b4cce/go.mod h1:daVV7qP5qjZbuso7PdcryaAu0sAZbrN9i7WWcTMWvro=
github.com/prometheus/common v0.48.0 h1:QO8U2CdOzSn1BBsmXJXduaaW+dY/5QLjfB8svtSzKKE=
github.com/prometheus/common v0.48.0/go.mod h1:0/KsvlIEfPQCQ5I2iNSAWKPZziNCvRs5EC6ILDTlAPc=
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.12.0 h1:jluTpSng7V9hY0O2R9DzzJHYb2xULk9VTR1V1R/k6Bo=
github.com/prometheus/procfs v0.12.0/go.mod h1:pcuDEFsWDnvcgNzo4EEweacyhjeA9Zk3cnaOZAZEfOo=
github.com/prometheus/tsdb v0.7.1 h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/rivo/uniseg v0.2.0/go.mod h1:J6wj4VEh+S6ZtnVlnTBMWIodfgj8LQOQFoIToxlJtxc=
github.com/rivo/uniseg v0.4.4 h1:8TfxU8dW6PdqD27gjM8MVNuicgxIjxpm4K7x4jp8sis=
github.com/rivo/uniseg v0.4.4/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUcx88=
github.com/rjeczalik/notify v0.9.1 h1:CLCKso/QK1snAlnhNR/CNvNiFU2saUtjV0bx3EwNeCE=
github.com/rjeczalik/notify v0.9.1/go.mod h1:rKwnCoCGeuQnwBtTSPL9Dad03Vh2n40ePRrjvIXnJho=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
github.com/rogpeppe/go-internal v1.8.0/go.mod h1:WmiCO8CzOY8rg0OYDC4/i/2WRWAB6poM+XZ2dLUbcbE=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6 h1:vb2XLvQwCf+F/ifP6P/lfeiQrHY6+Yb/E3R4KHXLqSE=
@@ -187,9 +263,15 @@ github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKl
github.com/shirou/gopsutil v3.21.11+incompatible/go.mod h1:5b4v6he4MtMOwMlS0TUMTu2PcXUg8+E1lC7eC3UO/RA=
github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8=
github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o=
github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo=
github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/status-im/keycard-go v0.0.0-20190316090335-8537d3370df4 h1:Gb2Tyox57NRNuZ2d3rmvB3pcmbu7O1RS3m8WRx7ilrg=
github.com/status-im/keycard-go v0.0.0-20190316090335-8537d3370df4/go.mod h1:RZLeN1LMWmRsyYjvAu+I6Dm9QmlDaIIt+Y+4Kd7Tp+Q=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.4.0/go.mod h1:YvHI0jy2hoMjB+UWwv71VJQ9isScKT/TqJzVSSt89Yw=
github.com/stretchr/objx v0.5.0/go.mod h1:Yh+to48EsGEfYuaHDzXPcE3xhTkx73EhmCGUpEOglKo=
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.6.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
github.com/stretchr/testify v1.7.0/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
@@ -216,6 +298,8 @@ github.com/tklauser/numcpus v0.9.0 h1:lmyCHtANi8aRUgkckBgoDk1nHCux3n2cgkJLXdQGPD
github.com/tklauser/numcpus v0.9.0/go.mod h1:SN6Nq1O3VychhC1npsWostA+oW+VOQTxZrS604NSRyI=
github.com/twitchyliquid64/golang-asm v0.15.1 h1:SU5vSMR7hnwNxj24w34ZyCi/FmDZTkS4MhqMhdFk5YI=
github.com/twitchyliquid64/golang-asm v0.15.1/go.mod h1:a1lVb/DtPvCB8fslRZhAngC2+aY1QWCk3Cedj/Gdt08=
github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef h1:wHSqTBrZW24CsNJDfeh9Ex6Pm0Rcpc7qrgKBiL44vF4=
github.com/tyler-smith/go-bip39 v1.0.1-0.20181017060643-dbb3b84ba2ef/go.mod h1:sJ5fKU0s6JVwZjjcUEX2zFOnvq0ASQ2K9Zr6cf67kNs=
github.com/ugorji/go v1.2.7/go.mod h1:nF9osbDWLy6bDVv/Rtoh6QgnvNDpmCalQV5urGCCS6M=
github.com/ugorji/go/codec v1.2.7/go.mod h1:WGN1fab3R1fzQlVQTkfxVtIBhWDRqOviHU95kRgeqEY=
github.com/ugorji/go/codec v1.2.11 h1:BMaWp1Bb6fHwEtbplGBGJ498wD+LKlNSl25MjdZY4dU=
@@ -227,11 +311,16 @@ github.com/xrash/smetrics v0.0.0-20201216005158-039620a65673/go.mod h1:N3UwUGtsr
github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY=
github.com/yusufpapurcu/wmi v1.2.4 h1:zFUKzehAFReQwLys1b/iSMl+JQGSCSjtVqQn9bBrPo0=
github.com/yusufpapurcu/wmi v1.2.4/go.mod h1:SBZ9tNy3G9/m5Oi98Zks0QjeHVDvuK0qfxQmPyzfmi0=
go.uber.org/atomic v1.7.0 h1:ADUqmZGgLDDfbSL9ZmPxKTybcoEYHgpYfELNoN+7hsw=
go.uber.org/atomic v1.7.0/go.mod h1:fEN4uk6kAWBTFdckzkM89CLk9XfWZrxpCo0nPH17wJc=
go.uber.org/multierr v1.9.0 h1:7fIwc/ZtS0q++VgcfqFDxSBZVv/Xo49/SYnDFupUwlI=
go.uber.org/multierr v1.9.0/go.mod h1:X2jQV1h+kxSjClGpnseKVIxpmcjrj7MNnI0bnlfKTVQ=
golang.org/x/arch v0.0.0-20210923205945-b76863e36670/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/arch v0.5.0 h1:jpGode6huXQxcskEIpOCvrU+tzo81b6+oFLUYXWtH/Y=
golang.org/x/arch v0.5.0/go.mod h1:5om86z9Hs0C8fWVUuoMHwpExlXzs5Tkyp9hOrfG7pp8=
golang.org/x/crypto v0.0.0-20170930174604-9419663f5a44/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20211215153901-e495a2d5b3d3/go.mod h1:IxCIyHEi3zRg3s0A5j5BB6A9Jmi73HwBIUl50j+osU4=
@@ -240,7 +329,10 @@ golang.org/x/crypto v0.32.0 h1:euUpcYgM8WcP71gNpTqQCn6rC2t6ULUPiOzfWaXVVfc=
golang.org/x/crypto v0.32.0/go.mod h1:ZnnJkOaASj8g0AjIduWNlq2NRxL0PlBrbKVyZ6V/Ugc=
golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4=
golang.org/x/net v0.0.0-20180906233101-161cd47e91fd/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20190404232315-eb5bcb51f2a3/go.mod h1:t9HGtf8HONx5eT2rtn7q6eTqICYqUVnKs3thJo3Qplg=
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200520004742-59133d7f0dd7/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/net v0.0.0-20200813134508-3edf25e44fcc/go.mod h1:/O7V0waA8r7cgGh81Ro3o1hOxt32SMVPicZroKQ2sZA=
golang.org/x/net v0.0.0-20210226172049-e18ecbb05110/go.mod h1:m0MpNAwzfU5UDzcl9v0D8zg8gWTRqZa9RBIspLL5mdg=
golang.org/x/net v0.0.0-20211029224645-99673261e6eb/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
golang.org/x/net v0.0.0-20211112202133-69e39bad7dc2/go.mod h1:9nx3DQGgdP8bBQD5qxJ1jj9UTztislL4KSBs9R2vV5Y=
@@ -250,13 +342,25 @@ golang.org/x/net v0.4.0/go.mod h1:MBQ8lrhLObU/6UmLb4fmbmk5OcyYmqtbGd/9yIeKjEE=
golang.org/x/net v0.23.0 h1:7EYJ93RZ9vYSZAIb2x3lnuvqO5zneoD6IvWjuhfxjTs=
golang.org/x/net v0.23.0/go.mod h1:JKghWKKOSdJwpW2GEx0Ja7fmaKnMsbu+MWVZTokSYmg=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20190423024810-112230192c58/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.11.0 h1:GGz8+XQP4FvTTrjZPzNKTMFtSXH80RAzG+5ghFPgK9w=
golang.org/x/sync v0.11.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk=
golang.org/x/sys v0.0.0-20180909124046-d0be0721c37e/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190412213103-97732733099d/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190904154756-749cb33beabd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190916202348-b4ddaad3f8a3/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191120155948-bd437916bb0e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200116001909-b77594299b42/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200223170610-d5e6a3e2c0ae/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200814200057-3d37ad5750ed/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210423082822-04245dca01da/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20210615035016-665e8c7367d1/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -269,36 +373,56 @@ golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.30.0 h1:QjkSwP/36a20jFYWkSue1YwXzLmsV5Gfq7Eiy72C1uc=
golang.org/x/sys v0.30.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.3.0/go.mod h1:q750SLmJuPmVoN1blW3UFBPREJfb1KmY3vwxfr+nFDA=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2/go.mod h1:bEr9sfX3Q8Zfm5fL9x+3itogRgK3+ptLWKqgva+5dAk=
golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ=
golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.5.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.21.0 h1:zyQAAkrwaneQ066sspRyJaG9VNi/YJ1NfzcGB3hZ/qo=
golang.org/x/text v0.21.0/go.mod h1:4IBbMaMmOPCJ8SecivzSH54+73PCFmPWxNTLm+vZkEQ=
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba h1:O8mE0/t419eoIwhTFpKVkHiTs/Igowgfkj25AcZrtiE=
golang.org/x/time v0.0.0-20210220033141-f8bda1e9f3ba/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 h1:go1bK/D/BFZV2I8cIQd1NKEZ+0owSTG1fDTci4IqFcE=
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
google.golang.org/protobuf v1.20.1-0.20200309200217-e05f789c0967/go.mod h1:A+miEFZTKqfCUM6K7xSMQL9OKL/b6hQv+e19PK+JZNE=
google.golang.org/protobuf v1.21.0/go.mod h1:47Nbq4nVaFHyn7ilMalzfO3qCViNmqZ2kzikPIcrTAo=
google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2l/sGQquU=
google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw=
google.golang.org/protobuf v1.28.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.28.1/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I=
google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI=
google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce h1:+JknDZhAj8YMt7GC73Ei8pv4MzjDUNPHgQWJdtMAaDU=
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce/go.mod h1:5AcXVHNjg+BDxry382+8OKon8SEWiKktQR07RKPsv1c=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 h1:uRGJdciOHaEIrze2W8Q3AKkepLTh2hOroT7a+7czfdQ=
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7/go.mod h1:dt/ZhP58zS4L8KSrWDmTeBkI65Dw0HsyUHuEVlX15mw=
gopkg.in/urfave/cli.v1 v1.20.0 h1:NdAVW6RYxDif9DhDHaAortIu956m2c0v+09AZBPTbE0=
gopkg.in/urfave/cli.v1 v1.20.0/go.mod h1:vuBzUtMdQeixQj8LVd+/98pzhxNGQoyuPBlsXHOQNO0=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.4.0 h1:D8xgwECY7CYvx+Y2n4sBz93Jn9JRvxdiyyo8CTfuKaY=
gopkg.in/yaml.v2 v2.4.0/go.mod h1:RDklbk79AGWmwhnvt/jBztapEOGDOx6ZbXqjP6csGnQ=
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=

View File

@@ -55,16 +55,17 @@ type Config struct {
Auth *Auth `json:"auth"`
}
// CircuitConfig circuit items.
type CircuitConfig struct {
AssetsPath string `json:"assets_path"`
ForkName string `json:"fork_name"`
MinProverVersion string `json:"min_prover_version"`
// AssetConfig contains the assets configured for each fork; the default vk file name is "OpenVmVk.json".
type AssetConfig struct {
AssetsPath string `json:"assets_path"`
ForkName string `json:"fork_name"`
Vkfile string `json:"vk_file,omitempty"`
}
// VerifierConfig load zk verifier config.
type VerifierConfig struct {
HighVersionCircuit *CircuitConfig `json:"high_version_circuit"`
MinProverVersion string `json:"min_prover_version"`
Verifiers []AssetConfig `json:"verifiers"`
}
// NewConfig returns a new instance of Config.
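For reference, a minimal sketch (assumed usage, not part of this diff) of how the new Verifiers list can be indexed by fork name, e.g. when locating each fork's assets path and vk file:
// indexAssets is a hypothetical helper: it maps fork name -> AssetConfig so the
// verifier can look up the assets path (and vk file) for a requested fork.
func indexAssets(cfg *VerifierConfig) map[string]AssetConfig {
	byFork := make(map[string]AssetConfig, len(cfg.Verifiers))
	for _, asset := range cfg.Verifiers {
		byFork[asset.ForkName] = asset
	}
	return byFork
}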

View File

@@ -20,11 +20,11 @@ func TestConfig(t *testing.T) {
"batch_collection_time_sec": 180,
"chunk_collection_time_sec": 180,
"verifier": {
"high_version_circuit": {
"min_prover_version": "v4.4.45",
"verifiers": [{
"assets_path": "assets",
"fork_name": "euclidV2",
"min_prover_version": "v4.4.45"
}
"fork_name": "euclidV2"
}]
},
"max_verifier_workers": 4
},

View File

@@ -1,12 +1,15 @@
package api
import (
"encoding/json"
"github.com/prometheus/client_golang/prometheus"
"github.com/scroll-tech/go-ethereum/log"
"github.com/scroll-tech/go-ethereum/params"
"gorm.io/gorm"
"scroll-tech/coordinator/internal/config"
"scroll-tech/coordinator/internal/logic/libzkp"
"scroll-tech/coordinator/internal/logic/verifier"
)
@@ -29,7 +32,7 @@ func InitController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.D
log.Info("verifier created", "openVmVerifier", vf.OpenVMVkMap)
// TODO: enable this when the libzkp has been updated
/*l2cfg := cfg.L2.Endpoint
l2cfg := cfg.L2.Endpoint
if l2cfg == nil {
panic("l2geth is not specified")
}
@@ -37,9 +40,9 @@ func InitController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.D
if err != nil {
panic(err)
}
libzkp.InitL2geth(string(l2cfgBytes))*/
libzkp.InitL2geth(string(l2cfgBytes))
Auth = NewAuthController(db, cfg, vf)
GetTask = NewGetTaskController(cfg, chainCfg, db, reg)
GetTask = NewGetTaskController(cfg, chainCfg, db, vf, reg)
SubmitProof = NewSubmitProofController(cfg, chainCfg, db, vf, reg)
}

View File

@@ -17,6 +17,7 @@ import (
"scroll-tech/coordinator/internal/config"
"scroll-tech/coordinator/internal/logic/provertask"
"scroll-tech/coordinator/internal/logic/verifier"
coordinatorType "scroll-tech/coordinator/internal/types"
)
@@ -25,13 +26,15 @@ type GetTaskController struct {
proverTasks map[message.ProofType]provertask.ProverTask
getTaskAccessCounter *prometheus.CounterVec
l2syncer *l2Syncer
}
// NewGetTaskController create a get prover task controller
func NewGetTaskController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.DB, reg prometheus.Registerer) *GetTaskController {
chunkProverTask := provertask.NewChunkProverTask(cfg, chainCfg, db, reg)
batchProverTask := provertask.NewBatchProverTask(cfg, chainCfg, db, reg)
bundleProverTask := provertask.NewBundleProverTask(cfg, chainCfg, db, reg)
func NewGetTaskController(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.DB, verifier *verifier.Verifier, reg prometheus.Registerer) *GetTaskController {
chunkProverTask := provertask.NewChunkProverTask(cfg, chainCfg, db, verifier.ChunkVk, reg)
batchProverTask := provertask.NewBatchProverTask(cfg, chainCfg, db, verifier.BatchVk, reg)
bundleProverTask := provertask.NewBundleProverTask(cfg, chainCfg, db, verifier.BundleVk, reg)
ptc := &GetTaskController{
proverTasks: make(map[message.ProofType]provertask.ProverTask),
@@ -44,6 +47,13 @@ func NewGetTaskController(cfg *config.Config, chainCfg *params.ChainConfig, db *
ptc.proverTasks[message.ProofTypeChunk] = chunkProverTask
ptc.proverTasks[message.ProofTypeBatch] = batchProverTask
ptc.proverTasks[message.ProofTypeBundle] = bundleProverTask
if syncer, err := createL2Syncer(cfg); err != nil {
log.Crit("can not init l2 syncer", "err", err)
} else {
ptc.l2syncer = syncer
}
return ptc
}
@@ -78,6 +88,17 @@ func (ptc *GetTaskController) GetTasks(ctx *gin.Context) {
return
}
if getTaskParameter.ProverHeight == 0 {
// help update the prover height using the internal l2geth
if blk, err := ptc.l2syncer.getLatestBlockNumber(ctx); err == nil {
getTaskParameter.ProverHeight = blk
} else {
nerr := fmt.Errorf("inner l2geth failure, err:%w", err)
types.RenderFailure(ctx, types.InternalServerError, nerr)
return
}
}
proofType := ptc.proofType(&getTaskParameter)
proverTask, isExist := ptc.proverTasks[proofType]
if !isExist {

View File

@@ -0,0 +1,71 @@
//go:build !mock_verifier
package api
import (
"errors"
"fmt"
"sync"
"time"
"github.com/gin-gonic/gin"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/scroll-tech/go-ethereum/log"
"scroll-tech/coordinator/internal/config"
)
type l2Syncer struct {
l2gethClient *ethclient.Client
lastBlockNumber struct {
sync.RWMutex
data uint64
t time.Time
}
}
func createL2Syncer(cfg *config.Config) (*l2Syncer, error) {
if cfg.L2 == nil || cfg.L2.Endpoint == nil {
return nil, fmt.Errorf("l2 endpoint is not set in config")
} else {
l2gethClient, err := ethclient.Dial(cfg.L2.Endpoint.Url)
if err != nil {
return nil, fmt.Errorf("dial l2geth endpoint fail, err: %s", err)
}
return &l2Syncer{
l2gethClient: l2gethClient,
}, nil
}
}
// getLatestBlockNumber gets the latest block number, using cache if available and not expired
func (syncer *l2Syncer) getLatestBlockNumber(ctx *gin.Context) (uint64, error) {
// First check if we have a cached value that's still valid
syncer.lastBlockNumber.RLock()
if !syncer.lastBlockNumber.t.IsZero() && time.Since(syncer.lastBlockNumber.t) < time.Second*10 {
blockNumber := syncer.lastBlockNumber.data
syncer.lastBlockNumber.RUnlock()
return blockNumber, nil
}
syncer.lastBlockNumber.RUnlock()
// If not cached or expired, fetch from the client
if syncer.l2gethClient == nil {
return 0, errors.New("L2 geth client not initialized")
}
blockNumber, err := syncer.l2gethClient.BlockNumber(ctx)
if err != nil {
return 0, fmt.Errorf("failed to get latest block number: %w", err)
}
// Update the cache
syncer.lastBlockNumber.Lock()
syncer.lastBlockNumber.data = blockNumber
syncer.lastBlockNumber.t = time.Now()
syncer.lastBlockNumber.Unlock()
log.Debug("updated block height reference", "height", blockNumber)
return blockNumber, nil
}

View File

@@ -0,0 +1,20 @@
//go:build mock_verifier
package api
import (
"scroll-tech/coordinator/internal/config"
"github.com/gin-gonic/gin"
)
type l2Syncer struct{}
func createL2Syncer(_ *config.Config) (*l2Syncer, error) {
return &l2Syncer{}, nil
}
// getLatestBlockNumber returns a fixed block number in this mock implementation
func (syncer *l2Syncer) getLatestBlockNumber(_ *gin.Context) (uint64, error) {
return 99999994, nil
}

View File

@@ -31,9 +31,11 @@ type LoginLogic struct {
func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic {
proverVersionHardForkMap := make(map[string][]string)
var highHardForks []string
highHardForks = append(highHardForks, cfg.ProverManager.Verifier.HighVersionCircuit.ForkName)
proverVersionHardForkMap[cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion] = highHardForks
var hardForks []string
for _, cfg := range cfg.ProverManager.Verifier.Verifiers {
hardForks = append(hardForks, cfg.ForkName)
}
proverVersionHardForkMap[cfg.ProverManager.Verifier.MinProverVersion] = hardForks
return &LoginLogic{
cfg: cfg,
@@ -56,8 +58,8 @@ func (l *LoginLogic) Check(login *types.LoginParameter) error {
return errors.New("auth message verify failure")
}
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", l.cfg.ProverManager.Verifier.HighVersionCircuit.MinProverVersion, login.Message.ProverVersion)
if !version.CheckScrollRepoVersion(login.Message.ProverVersion, l.cfg.ProverManager.Verifier.MinProverVersion) {
return fmt.Errorf("incompatible prover version. please upgrade your prover, minimum allowed version: %s, actual version: %s", l.cfg.ProverManager.Verifier.MinProverVersion, login.Message.ProverVersion)
}
vks := make(map[string]struct{})

View File

@@ -11,6 +11,7 @@ import "C" //nolint:typecheck
import (
"fmt"
"os"
"strings"
"unsafe"
"scroll-tech/common/types/message"
@@ -34,18 +35,10 @@ func InitVerifier(configJSON string) {
C.init_verifier(cConfig)
}
// Initialize the verifier
func InitL2geth(configJSON string) {
cConfig := goToCString(configJSON)
defer freeCString(cConfig)
C.init_l2geth(cConfig)
}
// Verify a chunk proof
func VerifyChunkProof(proofData, forkName string) bool {
cProof := goToCString(proofData)
cForkName := goToCString(forkName)
cForkName := goToCString(strings.ToLower(forkName))
defer freeCString(cProof)
defer freeCString(cForkName)
@@ -56,7 +49,7 @@ func VerifyChunkProof(proofData, forkName string) bool {
// Verify a batch proof
func VerifyBatchProof(proofData, forkName string) bool {
cProof := goToCString(proofData)
cForkName := goToCString(forkName)
cForkName := goToCString(strings.ToLower(forkName))
defer freeCString(cProof)
defer freeCString(cForkName)
@@ -67,7 +60,7 @@ func VerifyBatchProof(proofData, forkName string) bool {
// Verify a bundle proof
func VerifyBundleProof(proofData, forkName string) bool {
cProof := goToCString(proofData)
cForkName := goToCString(forkName)
cForkName := goToCString(strings.ToLower(forkName))
defer freeCString(cProof)
defer freeCString(cForkName)
@@ -96,8 +89,8 @@ func fromMessageTaskType(taskType int) int {
}
// Generate a universal task
func GenerateUniversalTask(taskType int, taskJSON, forkName string) (bool, string, string, []byte) {
return generateUniversalTask(fromMessageTaskType(taskType), taskJSON, forkName)
func GenerateUniversalTask(taskType int, taskJSON, forkName string, expectedVk []byte) (bool, string, string, []byte) {
return generateUniversalTask(fromMessageTaskType(taskType), taskJSON, strings.ToLower(forkName), expectedVk)
}
// Generate wrapped proof
@@ -127,7 +120,7 @@ func GenerateWrappedProof(proofJSON, metadata string, vkData []byte) string {
// Dumps a verification key to a file
func DumpVk(forkName, filePath string) error {
cForkName := goToCString(forkName)
cForkName := goToCString(strings.ToLower(forkName))
cFilePath := goToCString(filePath)
defer freeCString(cForkName)
defer freeCString(cFilePath)

View File

@@ -32,7 +32,13 @@ typedef struct {
// Generate a universal task based on task type and input JSON
// Returns a struct containing task data, metadata, and expected proof hash
HandlingResult gen_universal_task(int task_type, char* task, char* fork_name);
HandlingResult gen_universal_task(
int task_type,
char* task,
char* fork_name,
const unsigned char* expected_vk,
size_t expected_vk_len
);
// Release memory allocated for a HandlingResult returned by gen_universal_task
void release_task_result(HandlingResult result);

View File

@@ -11,7 +11,10 @@ import (
"github.com/scroll-tech/go-ethereum/common"
)
func generateUniversalTask(taskType int, taskJSON, forkName string) (bool, string, string, []byte) {
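// InitL2geth is a no-op in this mock implementation.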
func InitL2geth(configJSON string) {
}
func generateUniversalTask(taskType int, taskJSON, forkName string, expectedVk []byte) (bool, string, string, []byte) {
fmt.Printf("call mocked generate universal task %d, taskJson %s\n", taskType, taskJSON)
var metadata interface{}

View File

@@ -7,14 +7,29 @@ package libzkp
#include "libzkp.h"
*/
import "C" //nolint:typecheck
import "unsafe"
func generateUniversalTask(taskType int, taskJSON, forkName string) (bool, string, string, []byte) {
// InitL2geth initializes the internal l2geth handler in libzkp
func InitL2geth(configJSON string) {
cConfig := goToCString(configJSON)
defer freeCString(cConfig)
C.init_l2geth(cConfig)
}
func generateUniversalTask(taskType int, taskJSON, forkName string, expectedVk []byte) (bool, string, string, []byte) {
cTask := goToCString(taskJSON)
cForkName := goToCString(forkName)
defer freeCString(cTask)
defer freeCString(cForkName)
result := C.gen_universal_task(C.int(taskType), cTask, cForkName)
// Pass a pointer to the Go slice's backing array as a C array
var cVk *C.uchar
if len(expectedVk) > 0 {
cVk = (*C.uchar)(unsafe.Pointer(&expectedVk[0]))
}
result := C.gen_universal_task(C.int(taskType), cTask, cForkName, cVk, C.size_t(len(expectedVk)))
defer C.release_task_result(result)
// Check if the operation was successful

View File

@@ -36,12 +36,13 @@ type BatchProverTask struct {
}
// NewBatchProverTask creates a new batch collector
func NewBatchProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.DB, reg prometheus.Registerer) *BatchProverTask {
func NewBatchProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.DB, expectedVk map[string][]byte, reg prometheus.Registerer) *BatchProverTask {
bp := &BatchProverTask{
BaseProverTask: BaseProverTask{
db: db,
cfg: cfg,
chainCfg: chainCfg,
expectedVk: expectedVk,
blockOrm: orm.NewL2Block(db),
chunkOrm: orm.NewChunk(db),
batchOrm: orm.NewBatch(db),
@@ -83,10 +84,25 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
for i := 0; i < 5; i++ {
var getTaskError error
var tmpBatchTask *orm.Batch
tmpBatchTask, getTaskError = bp.batchOrm.GetAssignedBatch(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get assigned batch proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
if taskCtx.hasAssignedTask != nil {
tmpBatchTask, getTaskError = bp.batchOrm.GetBatchByHash(ctx.Copy(), taskCtx.hasAssignedTask.TaskID)
if getTaskError != nil {
log.Error("failed to get batch has assigned to prover", "taskID", taskCtx.hasAssignedTask.TaskID, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
} else if tmpBatchTask == nil {
// if the assigned batch has been dropped, assigning another one would cause too many issues
return nil, fmt.Errorf("prover with publicKey %s is already assigned a dropped batch. ProverName: %s, ProverVersion: %s",
taskCtx.PublicKey, taskCtx.ProverName, taskCtx.ProverVersion)
}
}
if tmpBatchTask == nil {
tmpBatchTask, getTaskError = bp.batchOrm.GetAssignedBatch(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get assigned batch proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
}
// Why do we need to get it again? To support assigning a task to multiple provers, we also need to assign `ProvingTaskAssigned`
@@ -114,29 +130,32 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, nil
}
// Don't dispatch the same failing job to the same prover
proverTasks, getFailedTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBatch, tmpBatchTask.Hash, 2)
if getFailedTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeBatch.String(), "task ID", tmpBatchTask.Hash, "error", getFailedTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty batch, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpBatchTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
return nil, nil
// we simply pick the batch which has already been assigned, so don't bother updating attempts or checking for previous failures
if taskCtx.hasAssignedTask == nil {
// Don't dispatch the same failing job to the same prover
proverTasks, getFailedTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBatch, tmpBatchTask.Hash, 2)
if getFailedTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeBatch.String(), "task ID", tmpBatchTask.Hash, "error", getFailedTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty batch, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpBatchTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
return nil, nil
}
}
}
rowsAffected, updateAttemptsErr := bp.batchOrm.UpdateBatchAttempts(ctx.Copy(), tmpBatchTask.Index, tmpBatchTask.ActiveAttempts, tmpBatchTask.TotalAttempts)
if updateAttemptsErr != nil {
log.Error("failed to update batch attempts", "height", getTaskParameter.ProverHeight, "err", updateAttemptsErr)
return nil, ErrCoordinatorInternalFailure
}
rowsAffected, updateAttemptsErr := bp.batchOrm.UpdateBatchAttempts(ctx.Copy(), tmpBatchTask.Index, tmpBatchTask.ActiveAttempts, tmpBatchTask.TotalAttempts)
if updateAttemptsErr != nil {
log.Error("failed to update batch attempts", "height", getTaskParameter.ProverHeight, "err", updateAttemptsErr)
return nil, ErrCoordinatorInternalFailure
}
if rowsAffected == 0 {
time.Sleep(100 * time.Millisecond)
continue
if rowsAffected == 0 {
time.Sleep(100 * time.Millisecond)
continue
}
}
batchTask = tmpBatchTask
@@ -149,19 +168,24 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
}
log.Info("start batch proof generation session", "task_id", batchTask.Hash, "public key", taskCtx.PublicKey, "prover name", taskCtx.ProverName)
proverTask := orm.ProverTask{
TaskID: batchTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
TaskType: int16(message.ProofTypeBatch),
ProverName: taskCtx.ProverName,
ProverVersion: taskCtx.ProverVersion,
ProvingStatus: int16(types.ProverAssigned),
FailureType: int16(types.ProverTaskFailureTypeUndefined),
// here why need use UTC time. see scroll/common/database/db.go
AssignedAt: utils.NowUTC(),
var proverTask *orm.ProverTask
if taskCtx.hasAssignedTask == nil {
proverTask = &orm.ProverTask{
TaskID: batchTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
TaskType: int16(message.ProofTypeBatch),
ProverName: taskCtx.ProverName,
ProverVersion: taskCtx.ProverVersion,
ProvingStatus: int16(types.ProverAssigned),
FailureType: int16(types.ProverTaskFailureTypeUndefined),
// why UTC time is needed here: see scroll/common/database/db.go
AssignedAt: utils.NowUTC(),
}
} else {
proverTask = taskCtx.hasAssignedTask
}
taskMsg, err := bp.formatProverTask(ctx.Copy(), &proverTask, batchTask, hardForkName)
taskMsg, err := bp.formatProverTask(ctx.Copy(), proverTask, batchTask, hardForkName)
if err != nil {
bp.recoverActiveAttempts(ctx, batchTask)
log.Error("format prover task failure", "task_id", batchTask.Hash, "err", err)
@@ -169,6 +193,7 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
}
if getTaskParameter.Universal {
var metadata []byte
taskMsg, metadata, err = bp.applyUniversal(taskMsg)
if err != nil {
bp.recoverActiveAttempts(ctx, batchTask)
@@ -179,10 +204,12 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
}
// Store session info.
if err = bp.proverTaskOrm.InsertProverTask(ctx.Copy(), &proverTask); err != nil {
bp.recoverActiveAttempts(ctx, batchTask)
log.Error("insert batch prover task info fail", "task_id", batchTask.Hash, "publicKey", taskCtx.PublicKey, "err", err)
return nil, ErrCoordinatorInternalFailure
if taskCtx.hasAssignedTask == nil {
if err = bp.proverTaskOrm.InsertProverTask(ctx.Copy(), proverTask); err != nil {
bp.recoverActiveAttempts(ctx, batchTask)
log.Error("insert batch prover task info fail", "task_id", batchTask.Hash, "publicKey", taskCtx.PublicKey, "err", err)
return nil, ErrCoordinatorInternalFailure
}
}
// notice uuid is set as a side effect of InsertProverTask
taskMsg.UUID = proverTask.UUID.String()

View File

@@ -33,12 +33,13 @@ type BundleProverTask struct {
}
// NewBundleProverTask creates a new bundle collector
func NewBundleProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.DB, reg prometheus.Registerer) *BundleProverTask {
func NewBundleProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.DB, expectedVk map[string][]byte, reg prometheus.Registerer) *BundleProverTask {
bp := &BundleProverTask{
BaseProverTask: BaseProverTask{
db: db,
chainCfg: chainCfg,
cfg: cfg,
expectedVk: expectedVk,
blockOrm: orm.NewL2Block(db),
chunkOrm: orm.NewChunk(db),
batchOrm: orm.NewBatch(db),
@@ -81,10 +82,25 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
for i := 0; i < 5; i++ {
var getTaskError error
var tmpBundleTask *orm.Bundle
tmpBundleTask, getTaskError = bp.bundleOrm.GetAssignedBundle(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get assigned bundle proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
if taskCtx.hasAssignedTask != nil {
tmpBundleTask, getTaskError = bp.bundleOrm.GetBundleByHash(ctx.Copy(), taskCtx.hasAssignedTask.TaskID)
if getTaskError != nil {
log.Error("failed to get bundle has assigned to prover", "taskID", taskCtx.hasAssignedTask.TaskID, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
} else if tmpBundleTask == nil {
// if the assigned bundle has been dropped, assigning another one would cause too many issues
return nil, fmt.Errorf("prover with publicKey %s is already assigned a dropped bundle. ProverName: %s, ProverVersion: %s",
taskCtx.PublicKey, taskCtx.ProverName, taskCtx.ProverVersion)
}
}
if tmpBundleTask == nil {
tmpBundleTask, getTaskError = bp.bundleOrm.GetAssignedBundle(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get assigned bundle proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
}
// Why do we need to get it again? To support assigning a task to multiple provers, we also need to assign `ProvingTaskAssigned`
@@ -112,31 +128,33 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
return nil, nil
}
// Don't dispatch the same failing job to the same prover
proverTasks, getTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBundle, tmpBundleTask.Hash, 2)
if getTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeBundle.String(), "task ID", tmpBundleTask.Hash, "error", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty bundle, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpBundleTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
return nil, nil
// we simply pick the bundle which has already been assigned, so don't bother updating attempts or checking for previous failures
if taskCtx.hasAssignedTask == nil {
// Don't dispatch the same failing job to the same prover
proverTasks, getTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBundle, tmpBundleTask.Hash, 2)
if getTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeBundle.String(), "task ID", tmpBundleTask.Hash, "error", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty bundle, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpBundleTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
return nil, nil
}
}
rowsAffected, updateAttemptsErr := bp.bundleOrm.UpdateBundleAttempts(ctx.Copy(), tmpBundleTask.Hash, tmpBundleTask.ActiveAttempts, tmpBundleTask.TotalAttempts)
if updateAttemptsErr != nil {
log.Error("failed to update bundle attempts", "height", getTaskParameter.ProverHeight, "err", updateAttemptsErr)
return nil, ErrCoordinatorInternalFailure
}
if rowsAffected == 0 {
time.Sleep(100 * time.Millisecond)
continue
}
}
rowsAffected, updateAttemptsErr := bp.bundleOrm.UpdateBundleAttempts(ctx.Copy(), tmpBundleTask.Hash, tmpBundleTask.ActiveAttempts, tmpBundleTask.TotalAttempts)
if updateAttemptsErr != nil {
log.Error("failed to update bundle attempts", "height", getTaskParameter.ProverHeight, "err", updateAttemptsErr)
return nil, ErrCoordinatorInternalFailure
}
if rowsAffected == 0 {
time.Sleep(100 * time.Millisecond)
continue
}
bundleTask = tmpBundleTask
break
}
@@ -147,19 +165,24 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
}
log.Info("start bundle proof generation session", "task index", bundleTask.Index, "public key", taskCtx.PublicKey, "prover name", taskCtx.ProverName)
proverTask := orm.ProverTask{
TaskID: bundleTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
TaskType: int16(message.ProofTypeBundle),
ProverName: taskCtx.ProverName,
ProverVersion: taskCtx.ProverVersion,
ProvingStatus: int16(types.ProverAssigned),
FailureType: int16(types.ProverTaskFailureTypeUndefined),
// here why need use UTC time. see scroll/common/database/db.go
AssignedAt: utils.NowUTC(),
var proverTask *orm.ProverTask
if taskCtx.hasAssignedTask == nil {
proverTask = &orm.ProverTask{
TaskID: bundleTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
TaskType: int16(message.ProofTypeBundle),
ProverName: taskCtx.ProverName,
ProverVersion: taskCtx.ProverVersion,
ProvingStatus: int16(types.ProverAssigned),
FailureType: int16(types.ProverTaskFailureTypeUndefined),
// why UTC time is needed here: see scroll/common/database/db.go
AssignedAt: utils.NowUTC(),
}
} else {
proverTask = taskCtx.hasAssignedTask
}
taskMsg, err := bp.formatProverTask(ctx.Copy(), &proverTask, hardForkName)
taskMsg, err := bp.formatProverTask(ctx.Copy(), proverTask, hardForkName)
if err != nil {
bp.recoverActiveAttempts(ctx, bundleTask)
log.Error("format bundle prover task failure", "task_id", bundleTask.Hash, "err", err)
@@ -179,10 +202,12 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
}
// Store session info.
if err = bp.proverTaskOrm.InsertProverTask(ctx.Copy(), &proverTask); err != nil {
bp.recoverActiveAttempts(ctx, bundleTask)
log.Error("insert bundle prover task info fail", "task_id", bundleTask.Hash, "publicKey", taskCtx.PublicKey, "err", err)
return nil, ErrCoordinatorInternalFailure
if taskCtx.hasAssignedTask == nil {
if err = bp.proverTaskOrm.InsertProverTask(ctx.Copy(), proverTask); err != nil {
bp.recoverActiveAttempts(ctx, bundleTask)
log.Error("insert bundle prover task info fail", "task_id", bundleTask.Hash, "publicKey", taskCtx.PublicKey, "err", err)
return nil, ErrCoordinatorInternalFailure
}
}
// notice uuid is set as a side effect of InsertProverTask
taskMsg.UUID = proverTask.UUID.String()

View File

@@ -33,12 +33,13 @@ type ChunkProverTask struct {
}
// NewChunkProverTask creates a new chunk prover task
func NewChunkProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.DB, reg prometheus.Registerer) *ChunkProverTask {
func NewChunkProverTask(cfg *config.Config, chainCfg *params.ChainConfig, db *gorm.DB, expectedVk map[string][]byte, reg prometheus.Registerer) *ChunkProverTask {
cp := &ChunkProverTask{
BaseProverTask: BaseProverTask{
db: db,
cfg: cfg,
chainCfg: chainCfg,
expectedVk: expectedVk,
chunkOrm: orm.NewChunk(db),
blockOrm: orm.NewL2Block(db),
proverTaskOrm: orm.NewProverTask(db),
@@ -79,12 +80,26 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
for i := 0; i < 5; i++ {
var getTaskError error
var tmpChunkTask *orm.Chunk
tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
if getTaskError != nil {
log.Error("failed to get assigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
if taskCtx.hasAssignedTask != nil {
log.Debug("retrieved assigned task chunk", "taskID", taskCtx.hasAssignedTask.TaskID, "prover", taskCtx.ProverName)
tmpChunkTask, getTaskError = cp.chunkOrm.GetChunkByHash(ctx.Copy(), taskCtx.hasAssignedTask.TaskID)
if getTaskError != nil {
log.Error("failed to get chunk has assigned to prover", "taskID", taskCtx.hasAssignedTask.TaskID, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
} else if tmpChunkTask == nil {
// if the assigned chunk has been dropped, assigning another one would cause too many issues
return nil, fmt.Errorf("prover with publicKey %s is already assigned a dropped chunk. ProverName: %s, ProverVersion: %s",
taskCtx.PublicKey, taskCtx.ProverName, taskCtx.ProverVersion)
}
}
if tmpChunkTask == nil {
tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
if getTaskError != nil {
log.Error("failed to get assigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
}
// Why do we need to get it again? To support assigning a task to multiple provers, we also need to assign `ProvingTaskAssigned`
// chunks to provers. But `proving_status in (1, 2)` would not use the postgres index, so the SQL needs to be split.
if tmpChunkTask == nil {
@@ -110,31 +125,33 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, nil
}
// Don't dispatch the same failing job to the same prover
proverTasks, getFailedTaskError := cp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeChunk, tmpChunkTask.Hash, 2)
if getFailedTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeChunk.String(), "task ID", tmpChunkTask.Hash, "error", getFailedTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty chunk, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpChunkTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
return nil, nil
// we simply pick the chunk which has already been assigned, so don't bother updating attempts or checking for previous failures
if taskCtx.hasAssignedTask == nil {
// Don't dispatch the same failing job to the same prover
proverTasks, getFailedTaskError := cp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeChunk, tmpChunkTask.Hash, 2)
if getFailedTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeChunk.String(), "task ID", tmpChunkTask.Hash, "error", getFailedTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty chunk, the prover already failed this task", "height", getTaskParameter.ProverHeight, "task ID", tmpChunkTask.Hash, "prover name", taskCtx.ProverName, "prover public key", taskCtx.PublicKey)
return nil, nil
}
}
rowsAffected, updateAttemptsErr := cp.chunkOrm.UpdateChunkAttempts(ctx.Copy(), tmpChunkTask.Index, tmpChunkTask.ActiveAttempts, tmpChunkTask.TotalAttempts)
if updateAttemptsErr != nil {
log.Error("failed to update chunk attempts", "height", getTaskParameter.ProverHeight, "err", updateAttemptsErr)
return nil, ErrCoordinatorInternalFailure
}
if rowsAffected == 0 {
time.Sleep(100 * time.Millisecond)
continue
}
}
rowsAffected, updateAttemptsErr := cp.chunkOrm.UpdateChunkAttempts(ctx.Copy(), tmpChunkTask.Index, tmpChunkTask.ActiveAttempts, tmpChunkTask.TotalAttempts)
if updateAttemptsErr != nil {
log.Error("failed to update chunk attempts", "height", getTaskParameter.ProverHeight, "err", updateAttemptsErr)
return nil, ErrCoordinatorInternalFailure
}
if rowsAffected == 0 {
time.Sleep(100 * time.Millisecond)
continue
}
chunkTask = tmpChunkTask
break
}
@@ -145,19 +162,24 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
}
log.Info("start chunk generation session", "task_id", chunkTask.Hash, "public key", taskCtx.PublicKey, "prover name", taskCtx.ProverName)
proverTask := orm.ProverTask{
TaskID: chunkTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
TaskType: int16(message.ProofTypeChunk),
ProverName: taskCtx.ProverName,
ProverVersion: taskCtx.ProverVersion,
ProvingStatus: int16(types.ProverAssigned),
FailureType: int16(types.ProverTaskFailureTypeUndefined),
// here why need use UTC time. see scroll/common/database/db.go
AssignedAt: utils.NowUTC(),
var proverTask *orm.ProverTask
if taskCtx.hasAssignedTask == nil {
proverTask = &orm.ProverTask{
TaskID: chunkTask.Hash,
ProverPublicKey: taskCtx.PublicKey,
TaskType: int16(message.ProofTypeChunk),
ProverName: taskCtx.ProverName,
ProverVersion: taskCtx.ProverVersion,
ProvingStatus: int16(types.ProverAssigned),
FailureType: int16(types.ProverTaskFailureTypeUndefined),
// why UTC time is needed here: see scroll/common/database/db.go
AssignedAt: utils.NowUTC(),
}
} else {
proverTask = taskCtx.hasAssignedTask
}
taskMsg, err := cp.formatProverTask(ctx.Copy(), &proverTask, chunkTask, hardForkName)
taskMsg, err := cp.formatProverTask(ctx.Copy(), proverTask, chunkTask, hardForkName)
if err != nil {
cp.recoverActiveAttempts(ctx, chunkTask)
log.Error("format prover task failure", "task_id", chunkTask.Hash, "err", err)
@@ -175,10 +197,12 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
proverTask.Metadata = metadata
}
if err = cp.proverTaskOrm.InsertProverTask(ctx.Copy(), &proverTask); err != nil {
cp.recoverActiveAttempts(ctx, chunkTask)
log.Error("insert chunk prover task fail", "task_id", chunkTask.Hash, "publicKey", taskCtx.PublicKey, "err", err)
return nil, ErrCoordinatorInternalFailure
if taskCtx.hasAssignedTask == nil {
if err = cp.proverTaskOrm.InsertProverTask(ctx.Copy(), proverTask); err != nil {
cp.recoverActiveAttempts(ctx, chunkTask)
log.Error("insert chunk prover task fail", "task_id", chunkTask.Hash, "publicKey", taskCtx.PublicKey, "err", err)
return nil, ErrCoordinatorInternalFailure
}
}
// notice uuid is set as a side effect of InsertProverTask
taskMsg.UUID = proverTask.UUID.String()

View File

@@ -38,9 +38,10 @@ type ProverTask interface {
// BaseProverTask a base prover task which contain series functions
type BaseProverTask struct {
cfg *config.Config
chainCfg *params.ChainConfig
db *gorm.DB
cfg *config.Config
chainCfg *params.ChainConfig
db *gorm.DB
expectedVk map[string][]byte
batchOrm *orm.Batch
chunkOrm *orm.Chunk
@@ -57,10 +58,11 @@ type proverTaskContext struct {
ProverProviderType uint8
HardForkNames map[string]struct{}
taskType message.ProofType
chunkTask *orm.Chunk
batchTask *orm.Batch
bundleTask *orm.Bundle
taskType message.ProofType
chunkTask *orm.Chunk
batchTask *orm.Batch
bundleTask *orm.Bundle
hasAssignedTask *orm.ProverTask
}
// hardForkName get the chunk/batch/bundle hard fork name
@@ -175,19 +177,22 @@ func (b *BaseProverTask) checkParameter(ctx *gin.Context) (*proverTaskContext, e
return nil, fmt.Errorf("public key %s is blocked from fetching tasks. ProverName: %s, ProverVersion: %s", publicKey, proverName, proverVersion)
}
isAssigned, err := b.proverTaskOrm.IsProverAssigned(ctx.Copy(), publicKey.(string))
assigned, err := b.proverTaskOrm.IsProverAssigned(ctx.Copy(), publicKey.(string))
if err != nil {
return nil, fmt.Errorf("failed to check if prover %s is assigned a task, err: %w", publicKey.(string), err)
}
if isAssigned {
return nil, fmt.Errorf("prover with publicKey %s is already assigned a task. ProverName: %s, ProverVersion: %s", publicKey, proverName, proverVersion)
}
ptc.hasAssignedTask = assigned
return &ptc, nil
}
func (b *BaseProverTask) applyUniversal(schema *coordinatorType.GetTaskSchema) (*coordinatorType.GetTaskSchema, []byte, error) {
ok, uTaskData, metadata, _ := libzkp.GenerateUniversalTask(schema.TaskType, schema.TaskData, schema.HardForkName)
expectedVk, ok := b.expectedVk[schema.HardForkName]
if !ok {
return nil, nil, fmt.Errorf("no expectedVk found from hardfork %s", schema.HardForkName)
}
ok, uTaskData, metadata, _ := libzkp.GenerateUniversalTask(schema.TaskType, schema.TaskData, schema.HardForkName, expectedVk)
if !ok {
return nil, nil, fmt.Errorf("can not generate universal task, see coordinator log for the reason")
}
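The three Assign implementations above share the same new branch structure around hasAssignedTask: if checkParameter found a task already assigned to the calling prover, the coordinator re-delivers that task and skips the failed-prover check, the attempt update, and the prover-task insert; otherwise it picks a fresh task as before. A condensed sketch of that flow in Go, with simplified stand-in types and function names (not the actual coordinator APIs):

package main

import (
    "errors"
    "fmt"
)

// Simplified stand-ins for orm.ProverTask and the chunk/batch/bundle records.
type assignedTask struct{ TaskID string }
type workItem struct{ Hash string }

// assign mirrors the shared structure of the updated Assign methods.
func assign(already *assignedTask, getByHash, pickNew func() (*workItem, error)) (*workItem, bool, error) {
    var item *workItem
    var err error
    if already != nil {
        // Re-deliver the task this prover is still assigned to.
        if item, err = getByHash(); err != nil {
            return nil, false, err
        }
        if item == nil {
            return nil, false, errors.New("prover is assigned a dropped task")
        }
        // reuse == true: skip failed-prover checks, attempt updates and the
        // prover-task insert; the existing row (and its UUID) is reused.
        return item, true, nil
    }
    // No outstanding assignment: pick a fresh task as before.
    if item, err = pickNew(); err != nil {
        return nil, false, err
    }
    return item, false, nil
}

func main() {
    item, reused, _ := assign(&assignedTask{TaskID: "0xabc"},
        func() (*workItem, error) { return &workItem{Hash: "0xabc"}, nil },
        func() (*workItem, error) { return nil, nil })
    fmt.Println(item.Hash, reused)
}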

View File

@@ -178,7 +178,20 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
if len(proverTask.Metadata) == 0 {
return errors.New("can not re-wrapping proof: no metadata has been recorded in advance")
}
proofParameter.Proof = libzkp.GenerateWrappedProof(proofParameter.Proof, string(proverTask.Metadata), []byte{})
var expectedVk []byte
switch message.ProofType(proofParameter.TaskType) {
case message.ProofTypeChunk:
expectedVk = m.verifier.ChunkVk[hardForkName]
case message.ProofTypeBatch:
expectedVk = m.verifier.BatchVk[hardForkName]
case message.ProofTypeBundle:
expectedVk = m.verifier.BundleVk[hardForkName]
}
if len(expectedVk) == 0 {
return errors.New("no vk specified matches the current hard fork, check your config")
}
proofParameter.Proof = libzkp.GenerateWrappedProof(proofParameter.Proof, string(proverTask.Metadata), expectedVk)
if proofParameter.Proof == "" {
return errors.New("can not re-wrapping proof, see coordinator log for reason")
}

View File

@@ -10,7 +10,13 @@ import (
// NewVerifier Sets up a mock verifier.
func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
return &Verifier{cfg: cfg, OpenVMVkMap: map[string]struct{}{"mock_vk": {}}}, nil
return &Verifier{
cfg: cfg,
OpenVMVkMap: map[string]struct{}{"mock_vk": {}},
ChunkVk: map[string][]byte{"euclidV2": []byte("mock_vk")},
BatchVk: map[string][]byte{"euclidV2": []byte("mock_vk")},
BundleVk: map[string][]byte{},
}, nil
}
// VerifyChunkProof return a mock verification result for a ChunkProof.

View File

@@ -11,4 +11,7 @@ const InvalidTestProof = "this is a invalid proof"
type Verifier struct {
cfg *config.VerifierConfig
OpenVMVkMap map[string]struct{}
ChunkVk map[string][]byte
BatchVk map[string][]byte
BundleVk map[string][]byte
}

View File

@@ -18,7 +18,7 @@ import (
"scroll-tech/coordinator/internal/logic/libzkp"
)
// This struct maps to `CircuitConfig` in libzkp/impl/src/verifier.rs
// This struct maps to `CircuitConfig` in libzkp/src/verifier.rs
// Define a brand new struct here is to eliminate side effects in case fields
// in `*config.CircuitConfig` being changed
type rustCircuitConfig struct {
@@ -26,24 +26,28 @@ type rustCircuitConfig struct {
AssetsPath string `json:"assets_path"`
}
func newRustCircuitConfig(cfg *config.CircuitConfig) *rustCircuitConfig {
func newRustCircuitConfig(cfg config.AssetConfig) *rustCircuitConfig {
return &rustCircuitConfig{
ForkName: cfg.ForkName,
AssetsPath: cfg.AssetsPath,
}
}
// This struct maps to `VerifierConfig` in coordinator/internal/logic/libzkp/impl/src/verifier.rs
// This struct maps to `VerifierConfig` in coordinator/internal/logic/libzkp/src/verifier.rs
// Define a brand new struct here is to eliminate side effects in case fields
// in `*config.VerifierConfig` being changed
type rustVerifierConfig struct {
HighVersionCircuit *rustCircuitConfig `json:"high_version_circuit"`
Circuits []*rustCircuitConfig `json:"circuits"`
}
func newRustVerifierConfig(cfg *config.VerifierConfig) *rustVerifierConfig {
return &rustVerifierConfig{
HighVersionCircuit: newRustCircuitConfig(cfg.HighVersionCircuit),
out := &rustVerifierConfig{}
for _, cfg := range cfg.Verifiers {
out.Circuits = append(out.Circuits, newRustCircuitConfig(cfg))
}
return out
}
type rustVkDump struct {
@@ -65,10 +69,15 @@ func NewVerifier(cfg *config.VerifierConfig) (*Verifier, error) {
v := &Verifier{
cfg: cfg,
OpenVMVkMap: make(map[string]struct{}),
ChunkVk: make(map[string][]byte),
BatchVk: make(map[string][]byte),
BundleVk: make(map[string][]byte),
}
if err := v.loadOpenVMVks(message.EuclidV2Fork); err != nil {
return nil, err
for _, cfg := range cfg.Verifiers {
if err := v.loadOpenVMVks(cfg); err != nil {
return nil, err
}
}
return v, nil
@@ -108,27 +117,28 @@ func (v *Verifier) VerifyBundleProof(proof *message.OpenVMBundleProof, forkName
return libzkp.VerifyBundleProof(string(buf), forkName), nil
}
func (v *Verifier) ReadVK(filePat string) (string, error) {
// func (v *Verifier) ReadVK(filePat string) (string, error) {
f, err := os.Open(filepath.Clean(filePat))
if err != nil {
return "", err
}
byt, err := io.ReadAll(f)
if err != nil {
return "", err
}
return base64.StdEncoding.EncodeToString(byt), nil
}
// f, err := os.Open(filepath.Clean(filePat))
// if err != nil {
// return "", err
// }
// byt, err := io.ReadAll(f)
// if err != nil {
// return "", err
// }
// return base64.StdEncoding.EncodeToString(byt), nil
// }
func (v *Verifier) loadOpenVMVks(forkName string) error {
tempFile := path.Join(os.TempDir(), "openVmVk.json")
err := libzkp.DumpVk(forkName, tempFile)
if err != nil {
return err
}
func (v *Verifier) loadOpenVMVks(cfg config.AssetConfig) error {
f, err := os.Open(filepath.Clean(tempFile))
vkFileName := cfg.Vkfile
if vkFileName == "" {
vkFileName = "openVmVk.json"
}
vkFile := path.Join(cfg.AssetsPath, vkFileName)
f, err := os.Open(filepath.Clean(vkFile))
if err != nil {
return err
}
@@ -144,5 +154,23 @@ func (v *Verifier) loadOpenVMVks(forkName string) error {
v.OpenVMVkMap[dump.Chunk] = struct{}{}
v.OpenVMVkMap[dump.Batch] = struct{}{}
v.OpenVMVkMap[dump.Bundle] = struct{}{}
log.Info("Load vks", "from", cfg.AssetsPath, "chunk", dump.Chunk, "batch", dump.Batch, "bundle", dump.Bundle)
decodedBytes, err := base64.StdEncoding.DecodeString(dump.Chunk)
if err != nil {
return err
}
v.ChunkVk[cfg.ForkName] = decodedBytes
decodedBytes, err = base64.StdEncoding.DecodeString(dump.Batch)
if err != nil {
return err
}
v.BatchVk[cfg.ForkName] = decodedBytes
decodedBytes, err = base64.StdEncoding.DecodeString(dump.Bundle)
if err != nil {
return err
}
v.BundleVk[cfg.ForkName] = decodedBytes
return nil
}
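For context, loadOpenVMVks now reads a pre-dumped vk file from each fork's assets directory instead of dumping it via libzkp at startup. A rough Go sketch of the expected file handling, assuming the dump JSON simply carries the three base64-encoded vks; the exact JSON field names of rustVkDump are not shown in this diff, so the tags below are guesses:

package main

import (
    "encoding/base64"
    "encoding/json"
    "fmt"
    "os"
    "path"
)

// vkDump mirrors the assumed shape of <assets_path>/openVmVk.json;
// the json tags here are illustrative guesses.
type vkDump struct {
    Chunk  string `json:"chunk"`
    Batch  string `json:"batch"`
    Bundle string `json:"bundle"`
}

func loadVks(assetsPath string) (chunk, batch, bundle []byte, err error) {
    raw, err := os.ReadFile(path.Join(assetsPath, "openVmVk.json"))
    if err != nil {
        return nil, nil, nil, err
    }
    var dump vkDump
    if err = json.Unmarshal(raw, &dump); err != nil {
        return nil, nil, nil, err
    }
    // Each entry is base64-encoded, matching the decode in loadOpenVMVks.
    if chunk, err = base64.StdEncoding.DecodeString(dump.Chunk); err != nil {
        return nil, nil, nil, err
    }
    if batch, err = base64.StdEncoding.DecodeString(dump.Batch); err != nil {
        return nil, nil, nil, err
    }
    bundle, err = base64.StdEncoding.DecodeString(dump.Bundle)
    return chunk, batch, bundle, err
}

func main() {
    _, _, _, err := loadVks("assets/euclidV2")
    fmt.Println(err)
}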

View File

@@ -29,11 +29,11 @@ func TestFFI(t *testing.T) {
as := assert.New(t)
cfg := &config.VerifierConfig{
HighVersionCircuit: &config.CircuitConfig{
AssetsPath: *assetsPathHi,
ForkName: "euclidV2",
MinProverVersion: "",
},
MinProverVersion: "",
Verifiers: []config.AssetConfig{{
AssetsPath: *assetsPathHi,
ForkName: "euclidV2",
}},
}
v, err := NewVerifier(cfg)

View File

@@ -57,17 +57,17 @@ func (*ProverTask) TableName() string {
}
// IsProverAssigned returns the prover task currently assigned to the prover with the given public key, or nil if there is none.
func (o *ProverTask) IsProverAssigned(ctx context.Context, publicKey string) (bool, error) {
func (o *ProverTask) IsProverAssigned(ctx context.Context, publicKey string) (*ProverTask, error) {
db := o.db.WithContext(ctx)
var task ProverTask
err := db.Where("prover_public_key = ? AND proving_status = ?", publicKey, types.ProverAssigned).First(&task).Error
if err != nil {
if errors.Is(err, gorm.ErrRecordNotFound) {
return false, nil
return nil, nil
}
return false, err
return nil, err
}
return true, nil
return &task, nil
}
// GetProverTasks get prover tasks
@@ -269,6 +269,24 @@ func (o *ProverTask) UpdateProverTaskProvingStatusAndFailureType(ctx context.Con
return nil
}
// UpdateProverTaskAssignedTime updates the assigned_at time of a specific ProverTask record.
func (o *ProverTask) UpdateProverTaskAssignedTime(ctx context.Context, uuid uuid.UUID, t time.Time, dbTX ...*gorm.DB) error {
db := o.db
if len(dbTX) > 0 && dbTX[0] != nil {
db = dbTX[0]
}
db = db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("uuid = ?", uuid)
updates := make(map[string]interface{})
updates["assigned_at"] = t
if err := db.Updates(updates).Error; err != nil {
return fmt.Errorf("ProverTask.UpdateProverTaskAssignedTime error: %w, uuid:%s, status: %v", err, uuid, t)
}
return nil
}
// UpdateProverTaskFailureType update the prover task failure type
func (o *ProverTask) UpdateProverTaskFailureType(ctx context.Context, uuid uuid.UUID, failureType types.ProverTaskFailureType, dbTX ...*gorm.DB) error {
db := o.db

View File

@@ -79,16 +79,17 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
tokenTimeout = 60
conf = &config.Config{
L2: &config.L2{
ChainID: 111,
ChainID: 111,
Endpoint: &config.L2Endpoint{},
},
ProverManager: &config.ProverManager{
ProversPerSession: proversPerSession,
Verifier: &config.VerifierConfig{
HighVersionCircuit: &config.CircuitConfig{
AssetsPath: "",
ForkName: "euclidV2",
MinProverVersion: "v4.4.89",
},
MinProverVersion: "v4.4.89",
Verifiers: []config.AssetConfig{{
AssetsPath: "",
ForkName: "euclidV2",
}},
},
BatchCollectionTimeSec: 10,
ChunkCollectionTimeSec: 10,

View File

@@ -26,10 +26,12 @@ pub fn checkout_chunk_task(
}
/// Generate required stuff for proving tasks
/// return (pi_hash, metadata, task)
pub fn gen_universal_task(
task_type: i32,
task_json: &str,
fork_name: &str,
expected_vk: &[u8],
interpreter: Option<impl ChunkInterpreter>,
) -> eyre::Result<(B256, String, String)> {
use proofs::*;
@@ -44,7 +46,7 @@ pub fn gen_universal_task(
Bundle(BundleProofMetadata),
}
let (pi_hash, metadata, u_task) = match task_type {
let (pi_hash, metadata, mut u_task) = match task_type {
x if x == TaskType::Chunk as i32 => {
let task = serde_json::from_str::<ChunkProvingTask>(task_json)?;
let (pi_hash, metadata, u_task) =
@@ -64,6 +66,8 @@ pub fn gen_universal_task(
_ => return Err(eyre::eyre!("unrecognized task type {task_type}")),
};
u_task.vk = Vec::from(expected_vk);
Ok((
pi_hash,
serde_json::to_string(&metadata)?,
@@ -106,7 +110,24 @@ pub fn verifier_init(config: &str) -> eyre::Result<()> {
pub fn verify_proof(proof: Vec<u8>, fork_name: &str, task_type: TaskType) -> eyre::Result<bool> {
let verifier = verifier::get_verifier(fork_name)?;
let ret = verifier.verify(task_type, proof)?;
let ret = verifier.lock().unwrap().verify(task_type, &proof)?;
if let Ok(debug_value) = std::env::var("ZKVM_DEBUG_PROOF") {
use std::time::{SystemTime, UNIX_EPOCH};
if !ret && debug_value.to_lowercase() == "true" {
// Dump the failed proof to a temporary file
let timestamp = SystemTime::now()
.duration_since(UNIX_EPOCH)
.unwrap_or_default()
.as_secs();
let filename = format!("/tmp/proof_{}.json", timestamp);
if let Err(e) = std::fs::write(&filename, &proof) {
eprintln!("Failed to write proof to file {}: {}", filename, e);
} else {
println!("Dumped failed proof to {}", filename);
}
}
}
Ok(ret)
}
@@ -115,7 +136,7 @@ pub fn verify_proof(proof: Vec<u8>, fork_name: &str, task_type: TaskType) -> eyr
pub fn dump_vk(fork_name: &str, file: &str) -> eyre::Result<()> {
let verifier = verifier::get_verifier(fork_name)?;
verifier.dump_vk(Path::new(file));
verifier.lock().unwrap().dump_vk(Path::new(file));
Ok(())
}

View File

@@ -179,7 +179,7 @@ impl<Metadata: ProofMetadata> WrappedProof<Metadata> {
/// Sanity checks on the wrapped proof:
///
/// - pi_hash computed in host does in fact match pi_hash computed in guest
pub fn sanity_check(&self, fork_name: ForkName) {
pub fn pi_hash_check(&self, fork_name: ForkName) -> bool {
let proof_pi = self.proof.public_values();
let expected_pi = self
@@ -192,10 +192,11 @@ impl<Metadata: ProofMetadata> WrappedProof<Metadata> {
.map(|&v| v as u32)
.collect::<Vec<_>>();
assert_eq!(
expected_pi, proof_pi,
"pi mismatch: expected={expected_pi:?}, found={proof_pi:?}"
);
let ret = expected_pi == proof_pi;
if !ret {
tracing::warn!("pi mismatch: expected={expected_pi:?}, found={proof_pi:?}");
}
ret
}
}

View File

@@ -9,13 +9,39 @@ pub use chunk::{ChunkProvingTask, ChunkTask};
pub use chunk_interpreter::ChunkInterpreter;
pub use scroll_zkvm_types::task::ProvingTask;
use crate::proofs::{BatchProofMetadata, BundleProofMetadata, ChunkProofMetadata};
use chunk_interpreter::{DummyInterpreter, TryFromWithInterpreter};
use crate::proofs::{self, BatchProofMetadata, BundleProofMetadata, ChunkProofMetadata};
use sbv_primitives::B256;
use scroll_zkvm_types::{
chunk::ChunkInfo,
public_inputs::{ForkName, MultiVersionPublicInputs},
};
use scroll_zkvm_types::public_inputs::{ForkName, MultiVersionPublicInputs};
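/// Check that every adjacent pair of aggregated proofs chains correctly (via pi-hash validation), turning any panic raised by the guest-side check into an error.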
fn check_aggregation_proofs<Metadata>(
proofs: &[proofs::WrappedProof<Metadata>],
fork_name: ForkName,
) -> eyre::Result<()>
where
Metadata: proofs::ProofMetadata,
{
use std::panic::{self, AssertUnwindSafe};
panic::catch_unwind(AssertUnwindSafe(|| {
for w in proofs.windows(2) {
w[1].metadata
.pi_hash_info()
.validate(w[0].metadata.pi_hash_info(), fork_name);
}
}))
.map_err(|e| {
let error_msg = if let Some(string) = e.downcast_ref::<String>() {
string.clone()
} else if let Some(str) = e.downcast_ref::<&str>() {
str.to_string()
} else {
"Unknown validation error occurred".to_string()
};
eyre::eyre!("Chunk data validation failed: {}", error_msg)
})?;
Ok(())
}
/// Generate required stuff for chunk proving
pub fn gen_universal_chunk_task(
@@ -23,11 +49,10 @@ pub fn gen_universal_chunk_task(
fork_name: ForkName,
interpreter: Option<impl ChunkInterpreter>,
) -> eyre::Result<(B256, ChunkProofMetadata, ProvingTask)> {
let chunk_info = if let Some(interpreter) = interpreter {
ChunkInfo::try_from_with_interpret(&mut task, interpreter)
} else {
ChunkInfo::try_from_with_interpret(&mut task, DummyInterpreter {})
}?;
if let Some(interpreter) = interpreter {
task.prepare_task_via_interpret(interpreter)?;
}
let chunk_info = task.precheck_and_build_metadata()?;
let proving_task = task.try_into()?;
let expected_pi_hash = chunk_info.pi_hash_by_fork(fork_name);
Ok((

View File

@@ -5,7 +5,7 @@ use sbv_primitives::{B256, U256};
use scroll_zkvm_types::{
batch::{
BatchHeader, BatchHeaderV6, BatchHeaderV7, BatchInfo, BatchWitness, EnvelopeV6, EnvelopeV7,
PointEvalWitness, ReferenceHeader, N_BLOB_BYTES,
PointEvalWitness, ReferenceHeader, ToArchievedWitness, N_BLOB_BYTES,
},
public_inputs::ForkName,
task::ProvingTask,
@@ -104,7 +104,7 @@ impl BatchProvingTask {
fn build_guest_input(&self) -> BatchWitness {
let fork_name = self.fork_name.to_lowercase().as_str().into();
// calculate point eval needed and compare with task input
// sanity check: calculate point eval needed and compare with task input
let (kzg_commitment, kzg_proof, challenge_digest) = {
let blob = point_eval::to_blob(&self.blob_bytes);
let commitment = point_eval::blob_to_kzg_commitment(&blob);
@@ -120,12 +120,12 @@ impl BatchProvingTask {
EnvelopeV6::from(self.blob_bytes.as_slice()).challenge_digest(versioned_hash)
}
BatchHeaderV::V7(_) => {
assert_eq!(
fork_name,
ForkName::EuclidV2,
"hardfork mismatch for da-codec@v7 header: found={fork_name:?}, expected={:?}",
ForkName::EuclidV2,
);
match fork_name {
ForkName::EuclidV2 => (),
_ => unreachable!("hardfork mismatch for da-codec@v6 header: found={fork_name:?}, expected={:?}",
[ForkName::EuclidV2],
),
}
let padded_blob_bytes = {
let mut padded_blob_bytes = self.blob_bytes.to_vec();
padded_blob_bytes.resize(N_BLOB_BYTES, 0);
@@ -175,79 +175,19 @@ impl BatchProvingTask {
pub fn precheck_and_build_metadata(&self) -> Result<BatchInfo> {
let fork_name = ForkName::from(self.fork_name.as_str());
let (parent_state_root, state_root, chain_id, withdraw_root) = (
self.chunk_proofs
.first()
.expect("at least one chunk in batch")
.metadata
.chunk_info
.prev_state_root,
self.chunk_proofs
.last()
.expect("at least one chunk in batch")
.metadata
.chunk_info
.post_state_root,
self.chunk_proofs
.last()
.expect("at least one chunk in batch")
.metadata
.chunk_info
.chain_id,
self.chunk_proofs
.last()
.expect("at least one chunk in batch")
.metadata
.chunk_info
.withdraw_root,
);
let (parent_batch_hash, prev_msg_queue_hash, post_msg_queue_hash) = match self.batch_header
{
BatchHeaderV::V6(h) => {
assert_eq!(
fork_name,
ForkName::EuclidV1,
"hardfork mismatch for da-codec@v6 header: found={fork_name:?}, expected={:?}",
ForkName::EuclidV1,
);
(h.parent_batch_hash, Default::default(), Default::default())
}
BatchHeaderV::V7(h) => {
assert_eq!(
fork_name,
ForkName::EuclidV2,
"hardfork mismatch for da-codec@v7 header: found={fork_name:?}, expected={:?}",
ForkName::EuclidV2,
);
(
h.parent_batch_hash,
self.chunk_proofs
.first()
.expect("at least one chunk in batch")
.metadata
.chunk_info
.prev_msg_queue_hash,
self.chunk_proofs
.last()
.expect("at least one chunk in batch")
.metadata
.chunk_info
.post_msg_queue_hash,
)
}
};
// for every aggregation task, there are two steps needed to build the metadata:
// 1. generate data for metadata from the witness
// 2. validate every adjacent proof pair
let witness = self.build_guest_input();
let archieved = ToArchievedWitness::create(&witness)
.map_err(|e| eyre::eyre!("failed to archive batch witness: {e}"))?;
let archieved_witness = archieved
.access()
.map_err(|e| eyre::eyre!("failed to access archived batch witness: {e}"))?;
let metadata: BatchInfo = archieved_witness.into();
let batch_hash = self.batch_header.batch_hash();
super::check_aggregation_proofs(self.chunk_proofs.as_slice(), fork_name)?;
Ok(BatchInfo {
parent_state_root,
parent_batch_hash,
state_root,
batch_hash,
chain_id,
withdraw_root,
prev_msg_queue_hash,
post_msg_queue_hash,
})
Ok(metadata)
}
}

View File

@@ -1,7 +1,8 @@
use crate::proofs::BatchProof;
use eyre::Result;
use scroll_zkvm_types::{
bundle::{BundleInfo, BundleWitness},
bundle::{BundleInfo, BundleWitness, ToArchievedWitness},
public_inputs::ForkName,
task::ProvingTask,
utils::{to_rkyv_bytes, RancorError},
};
@@ -50,57 +51,21 @@ impl BundleProvingTask {
}
pub fn precheck_and_build_metadata(&self) -> Result<BundleInfo> {
use eyre::eyre;
let err_prefix = format!("metadata_with_prechecks for task_id={}", self.identifier());
let fork_name = ForkName::from(self.fork_name.as_str());
// for every aggregation task, there are two steps needed to build the metadata:
// 1. generate data for metadata from the witness
// 2. validate every adjacent proof pair
let witness = self.build_guest_input();
let archieved = ToArchievedWitness::create(&witness)
.map_err(|e| eyre::eyre!("failed to archive bundle witness: {e}"))?;
let archieved_witness = archieved
.access()
.map_err(|e| eyre::eyre!("failed to access archived bundle witness: {e}"))?;
let metadata: BundleInfo = archieved_witness.into();
for w in self.batch_proofs.windows(2) {
if w[1].metadata.batch_info.chain_id != w[0].metadata.batch_info.chain_id {
return Err(eyre!("{err_prefix}: chain_id mismatch"));
}
super::check_aggregation_proofs(self.batch_proofs.as_slice(), fork_name)?;
if w[1].metadata.batch_info.parent_state_root != w[0].metadata.batch_info.state_root {
return Err(eyre!("{err_prefix}: state_root not chained"));
}
if w[1].metadata.batch_info.parent_batch_hash != w[0].metadata.batch_info.batch_hash {
return Err(eyre!("{err_prefix}: batch_hash not chained"));
}
}
let (first_batch, last_batch) = (
&self
.batch_proofs
.first()
.expect("at least one batch in bundle")
.metadata
.batch_info,
&self
.batch_proofs
.last()
.expect("at least one batch in bundle")
.metadata
.batch_info,
);
let chain_id = first_batch.chain_id;
let num_batches = u32::try_from(self.batch_proofs.len()).expect("num_batches: u32");
let prev_state_root = first_batch.parent_state_root;
let prev_batch_hash = first_batch.parent_batch_hash;
let post_state_root = last_batch.state_root;
let batch_hash = last_batch.batch_hash;
let withdraw_root = last_batch.withdraw_root;
let msg_queue_hash = last_batch.post_msg_queue_hash;
Ok(BundleInfo {
chain_id,
msg_queue_hash,
num_batches,
prev_state_root,
prev_batch_hash,
post_state_root,
batch_hash,
withdraw_root,
})
Ok(metadata)
}
}

View File

@@ -2,7 +2,7 @@ use super::chunk_interpreter::*;
use eyre::Result;
use sbv_primitives::{types::BlockWitness, B256};
use scroll_zkvm_types::{
chunk::{execute, ChunkInfo, ChunkWitness},
chunk::{execute, ChunkInfo, ChunkWitness, ToArchievedWitness},
task::ProvingTask,
utils::{to_rkyv_bytes, RancorError},
};
@@ -129,23 +129,33 @@ impl ChunkProvingTask {
fn insert_state(&mut self, node: sbv_primitives::Bytes) {
self.block_witnesses[0].states.push(node);
}
}
const MAX_FETCH_NODES_ATTEMPTS: usize = 15;
pub fn precheck_and_build_metadata(&self) -> Result<ChunkInfo> {
let witness = self.build_guest_input();
let archieved = ToArchievedWitness::create(&witness)
.map_err(|e| eyre::eyre!("failed to archive chunk witness: {e}"))?;
let archieved_witness = archieved
.access()
.map_err(|e| eyre::eyre!("failed to access archived chunk witness: {e}"))?;
impl TryFromWithInterpreter<&mut ChunkProvingTask> for ChunkInfo {
fn try_from_with_interpret(
value: &mut ChunkProvingTask,
let ret = ChunkInfo::try_from(archieved_witness).map_err(|e| eyre::eyre!("{e}"))?;
Ok(ret)
}
/// this method checks the validity of the current task (there may be missing storage nodes)
/// and tries to fix it until everything is ok
pub fn prepare_task_via_interpret(
&mut self,
interpreter: impl ChunkInterpreter,
) -> eyre::Result<Self> {
) -> eyre::Result<()> {
use eyre::eyre;
let err_prefix = format!(
"metadata_with_prechecks for task_id={:?}",
value.identifier()
self.identifier()
);
if value.block_witnesses.is_empty() {
if self.block_witnesses.is_empty() {
return Err(eyre!(
"{err_prefix}: chunk should contain at least one block",
));
@@ -156,8 +166,15 @@ impl TryFromWithInterpreter<&mut ChunkProvingTask> for ChunkInfo {
let err_parse_re = regex::Regex::new(pattern)?;
let mut attempts = 0;
loop {
match execute(&value.build_guest_input()) {
Ok(chunk_info) => return Ok(chunk_info),
let witness = self.build_guest_input();
let archieved = ToArchievedWitness::create(&witness)
.map_err(|e| eyre::eyre!("failed to archive chunk witness: {e}"))?;
let archieved_witness = archieved
.access()
.map_err(|e| eyre::eyre!("failed to access archived chunk witness: {e}"))?;
match execute(archieved_witness) {
Ok(_) => return Ok(()),
Err(e) => {
if let Some(caps) = err_parse_re.captures(&e) {
let hash = caps[2].to_string();
@@ -174,7 +191,7 @@ impl TryFromWithInterpreter<&mut ChunkProvingTask> for ChunkInfo {
hash.parse::<sbv_primitives::B256>().expect("should be hex");
let node = interpreter.try_fetch_storage_node(node_hash)?;
tracing::warn!("missing node fetched: {node}");
value.insert_state(node);
self.insert_state(node);
} else {
return Err(eyre!("{err_prefix}: {e}"));
}
@@ -183,3 +200,5 @@ impl TryFromWithInterpreter<&mut ChunkProvingTask> for ChunkInfo {
}
}
}
const MAX_FETCH_NODES_ATTEMPTS: usize = 15;

View File

@@ -4,7 +4,11 @@ mod euclidv2;
use euclidv2::EuclidV2Verifier;
use eyre::Result;
use serde::{Deserialize, Serialize};
use std::{cell::OnceCell, path::Path, rc::Rc};
use std::{
collections::HashMap,
path::Path,
sync::{Arc, Mutex, OnceLock},
};
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TaskType {
@@ -31,7 +35,7 @@ pub struct VKDump {
}
pub trait ProofVerifier {
fn verify(&self, task_type: TaskType, proof: Vec<u8>) -> Result<bool>;
fn verify(&self, task_type: TaskType, proof: &[u8]) -> Result<bool>;
fn dump_vk(&self, file: &Path);
}
@@ -43,36 +47,49 @@ pub struct CircuitConfig {
#[derive(Debug, Serialize, Deserialize)]
pub struct VerifierConfig {
pub high_version_circuit: CircuitConfig,
pub circuits: Vec<CircuitConfig>,
}
type HardForkName = String;
struct VerifierPair(HardForkName, Rc<Box<dyn ProofVerifier>>);
static mut VERIFIER_HIGH: OnceCell<VerifierPair> = OnceCell::new();
type VerifierType = Arc<Mutex<dyn ProofVerifier + Send>>;
static VERIFIERS: OnceLock<HashMap<HardForkName, VerifierType>> = OnceLock::new();
pub fn init(config: VerifierConfig) {
let verifier = EuclidV2Verifier::new(&config.high_version_circuit.assets_path);
unsafe {
VERIFIER_HIGH
.set(VerifierPair(
config.high_version_circuit.fork_name,
Rc::new(Box::new(verifier)),
))
.unwrap_unchecked();
let mut verifiers: HashMap<HardForkName, VerifierType> = Default::default();
for cfg in &config.circuits {
let canonical_fork_name = cfg.fork_name.to_lowercase();
let verifier = EuclidV2Verifier::new(&cfg.assets_path, canonical_fork_name.as_str().into());
let ret = verifiers.insert(canonical_fork_name, Arc::new(Mutex::new(verifier)));
assert!(
ret.is_none(),
"DO NOT init the same fork {} twice",
cfg.fork_name
);
tracing::info!("load verifier config for fork {}", cfg.fork_name);
}
let ret = VERIFIERS.set(verifiers).is_ok();
assert!(ret);
}
pub fn get_verifier(fork_name: &str) -> Result<Rc<Box<dyn ProofVerifier>>> {
unsafe {
if let Some(verifier) = VERIFIER_HIGH.get() {
if verifier.0 == fork_name {
return Ok(verifier.1.clone());
}
pub fn get_verifier(fork_name: &str) -> Result<Arc<Mutex<dyn ProofVerifier>>> {
if let Some(verifiers) = VERIFIERS.get() {
if let Some(verifier) = verifiers.get(fork_name) {
return Ok(verifier.clone());
}
Err(eyre::eyre!(
"failed to get verifier, key not found: {}, has {:?}",
fork_name,
verifiers.keys().collect::<Vec<_>>(),
))
} else {
Err(eyre::eyre!(
"failed to get verifier, not inited {}",
fork_name
))
}
Err(eyre::eyre!(
"failed to get verifier, key not found, {}",
fork_name
))
}
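For reference, a minimal sketch of how a caller might use the fork-keyed registry above, assuming init() has already been called; the wrapper function is illustrative only. Note that lookups must use the lowercased fork name, since init() canonicalizes keys that way.
fn verify_with_fork(fork_name: &str, task_type: TaskType, proof: &[u8]) -> eyre::Result<bool> {
    // Keys are stored lowercased by init(), so canonicalize before lookup.
    let verifier = get_verifier(&fork_name.to_lowercase())?;
    let guard = verifier
        .lock()
        .map_err(|e| eyre::eyre!("verifier lock poisoned: {e}"))?;
    guard.verify(task_type, proof)
}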

View File

@@ -1,4 +1,4 @@
use super::{ProofVerifier, TaskType, VKDump};
use super::{ProofVerifier, TaskType};
use eyre::Result;
@@ -6,61 +6,64 @@ use crate::{
proofs::{AsRootProof, BatchProof, BundleProof, ChunkProof, IntoEvmProof},
utils::panic_catch,
};
use scroll_zkvm_verifier_euclid::verifier::{BatchVerifier, BundleVerifierEuclidV2, ChunkVerifier};
use std::{fs::File, path::Path};
use scroll_zkvm_types::public_inputs::ForkName;
use scroll_zkvm_verifier_euclid::verifier::UniversalVerifier;
use std::path::Path;
pub struct EuclidV2Verifier {
chunk_verifier: ChunkVerifier,
batch_verifier: BatchVerifier,
bundle_verifier: BundleVerifierEuclidV2,
verifier: UniversalVerifier,
fork: ForkName,
}
impl EuclidV2Verifier {
pub fn new(assets_dir: &str) -> Self {
pub fn new(assets_dir: &str, fork: ForkName) -> Self {
let verifier_bin = Path::new(assets_dir).join("verifier.bin");
let config = Path::new(assets_dir).join("root-verifier-vm-config");
let exe = Path::new(assets_dir).join("root-verifier-committed-exe");
Self {
chunk_verifier: ChunkVerifier::setup(&config, &exe, &verifier_bin)
verifier: UniversalVerifier::setup(&config, &exe, &verifier_bin)
.expect("Setting up chunk verifier"),
batch_verifier: BatchVerifier::setup(&config, &exe, &verifier_bin)
.expect("Setting up batch verifier"),
bundle_verifier: BundleVerifierEuclidV2::setup(&config, &exe, &verifier_bin)
.expect("Setting up bundle verifier"),
fork,
}
}
}
impl ProofVerifier for EuclidV2Verifier {
fn verify(&self, task_type: super::TaskType, proof: Vec<u8>) -> Result<bool> {
fn verify(&self, task_type: super::TaskType, proof: &[u8]) -> Result<bool> {
panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof.as_slice()).unwrap();
self.chunk_verifier.verify_proof(proof.as_root_proof())
let proof = serde_json::from_slice::<ChunkProof>(proof).unwrap();
if !proof.pi_hash_check(self.fork) {
return false;
}
self.verifier
.verify_proof(proof.as_root_proof(), &proof.vk)
.unwrap()
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof.as_slice()).unwrap();
self.batch_verifier.verify_proof(proof.as_root_proof())
let proof = serde_json::from_slice::<BatchProof>(proof).unwrap();
if !proof.pi_hash_check(self.fork) {
return false;
}
self.verifier
.verify_proof(proof.as_root_proof(), &proof.vk)
.unwrap()
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof.as_slice()).unwrap();
self.bundle_verifier
.verify_proof_evm(&proof.into_evm_proof())
let proof = serde_json::from_slice::<BundleProof>(proof).unwrap();
if !proof.pi_hash_check(self.fork) {
return false;
}
let vk = proof.vk.clone();
let evm_proof = proof.into_evm_proof();
self.verifier.verify_proof_evm(&evm_proof, &vk).unwrap()
}
})
.map_err(|err_str: String| eyre::eyre!("{err_str}"))
}
fn dump_vk(&self, file: &Path) {
use base64::{prelude::BASE64_STANDARD, Engine};
let f = File::create(file).expect("Failed to open file to dump VK");
let dump = VKDump {
chunk_vk: BASE64_STANDARD.encode(self.chunk_verifier.get_app_vk()),
batch_vk: BASE64_STANDARD.encode(self.batch_verifier.get_app_vk()),
bundle_vk: BASE64_STANDARD.encode(self.bundle_verifier.get_app_vk()),
};
serde_json::to_writer(f, &dump).expect("Failed to dump VK");
fn dump_vk(&self, _file: &Path) {
panic!("dump vk has been deprecated");
}
}

View File

@@ -91,6 +91,8 @@ pub unsafe extern "C" fn gen_universal_task(
task_type: i32,
task: *const c_char,
fork_name: *const c_char,
expected_vk: *const u8,
expected_vk_len: usize,
) -> HandlingResult {
let mut interpreter = None;
let task_json = if task_type == TaskType::Chunk as i32 {
@@ -109,10 +111,22 @@ pub unsafe extern "C" fn gen_universal_task(
} else {
c_char_to_str(task).to_string()
};
let ret =
libzkp::gen_universal_task(task_type, &task_json, c_char_to_str(fork_name), interpreter);
if let Ok((pi_hash, task_json, meta_json)) = ret {
let expected_vk = if expected_vk_len > 0 {
std::slice::from_raw_parts(expected_vk, expected_vk_len)
} else {
&[]
};
let ret = libzkp::gen_universal_task(
task_type,
&task_json,
c_char_to_str(fork_name),
expected_vk,
interpreter,
);
if let Ok((pi_hash, meta_json, task_json)) = ret {
let expected_pi_hash = pi_hash.0.map(|byte| byte as c_char);
HandlingResult {
ok: true as c_char,
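As a defensive variant, the raw-pointer conversion above could be factored into a small helper; the explicit null check is an added assumption (the current code relies on expected_vk_len being 0 whenever the pointer is null):
/// Sketch: convert the (ptr, len) pair passed over FFI into a byte slice.
unsafe fn expected_vk_slice<'a>(ptr: *const u8, len: usize) -> &'a [u8] {
    if ptr.is_null() || len == 0 {
        &[]
    } else {
        std::slice::from_raw_parts(ptr, len)
    }
}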

View File

@@ -1,7 +1,7 @@
[package]
name = "prover"
version = "0.1.0"
edition = "2021"
version.workspace = true
edition.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html

View File

@@ -2,12 +2,13 @@ mod prover;
mod types;
mod zk_circuits_handler;
use clap::{ArgAction, Parser};
use clap::{ArgAction, Parser, Subcommand};
use prover::{LocalProver, LocalProverConfig};
use scroll_proving_sdk::{
prover::ProverBuilder,
prover::{types::ProofType, ProverBuilder},
utils::{get_version, init_tracing},
};
use std::{fs::File, path::Path};
#[derive(Parser, Debug)]
#[command(disable_version_flag = true)]
@@ -16,6 +17,9 @@ struct Args {
#[arg(long = "config", default_value = "conf/config.json")]
config_file: String,
#[arg(long = "forkname")]
fork_name: Option<String>,
/// Version of this prover
#[arg(short, long, action = ArgAction::SetTrue)]
version: bool,
@@ -23,6 +27,40 @@ struct Args {
/// Path of log file
#[arg(long = "log.file")]
log_file: Option<String>,
#[command(subcommand)]
command: Option<Commands>,
}
#[derive(Subcommand, Debug)]
enum Commands {
/// Dump vk of this prover
Dump {
/// File to save the vks
file_name: String,
},
}
fn dump_vk(file: &Path, prover: &LocalProver, fork_name: &str) -> eyre::Result<()> {
let f = File::create(file)?;
#[derive(Debug, serde::Serialize)]
struct VKDump {
pub chunk_vk: String,
pub batch_vk: String,
pub bundle_vk: String,
}
let handler = prover.new_handler(fork_name);
let dump = VKDump {
chunk_vk: handler.get_vk(ProofType::Chunk),
batch_vk: handler.get_vk(ProofType::Batch),
bundle_vk: handler.get_vk(ProofType::Bundle),
};
serde_json::to_writer(f, &dump)?;
Ok(())
}
#[tokio::main]
@@ -37,14 +75,25 @@ async fn main() -> eyre::Result<()> {
}
let cfg = LocalProverConfig::from_file(args.config_file)?;
let default_fork_name = cfg.circuits.keys().next().unwrap().clone();
let sdk_config = cfg.sdk_config.clone();
let local_prover = LocalProver::new(cfg);
let prover = ProverBuilder::new(sdk_config, local_prover)
.build()
.await
.map_err(|e| eyre::eyre!("build prover fail: {e}"))?;
let local_prover = LocalProver::new(cfg.clone());
prover.run().await;
match args.command {
Some(Commands::Dump { file_name }) => {
let fork_name = args.fork_name.unwrap_or(default_fork_name);
println!("dump vk for {fork_name}");
dump_vk(Path::new(&file_name), &local_prover, &fork_name)?;
}
None => {
let prover = ProverBuilder::new(sdk_config, local_prover)
.build()
.await
.map_err(|e| eyre::eyre!("build prover fail: {e}"))?;
prover.run().await;
}
}
Ok(())
}
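For illustration, a matching reader for the dumped file could look like this; the field names mirror the VKDump struct written by dump_vk above, while the reader function itself is hypothetical:
#[derive(Debug, serde::Deserialize)]
struct VkDumpFile {
    chunk_vk: String,
    batch_vk: String,
    bundle_vk: String,
}

fn read_vk_dump(path: &std::path::Path) -> eyre::Result<VkDumpFile> {
    let f = std::fs::File::open(path)?;
    // serde_json errors convert into eyre::Report via `?`.
    Ok(serde_json::from_reader(f)?)
}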

View File

@@ -1,6 +1,5 @@
use crate::zk_circuits_handler::{euclidV2::EuclidV2Handler, CircuitsHandler};
use async_trait::async_trait;
use base64::{prelude::BASE64_STANDARD, Engine};
use eyre::Result;
use scroll_proving_sdk::{
config::Config as SdkConfig,
@@ -9,6 +8,7 @@ use scroll_proving_sdk::{
GetVkRequest, GetVkResponse, ProveRequest, ProveResponse, QueryTaskRequest,
QueryTaskResponse, TaskStatus,
},
types::ProofType,
ProvingService,
},
};
@@ -16,7 +16,7 @@ use serde::{Deserialize, Serialize};
use std::{
collections::HashMap,
fs::File,
sync::Arc,
sync::{Arc, OnceLock},
time::{SystemTime, UNIX_EPOCH},
};
use tokio::{runtime::Handle, sync::Mutex, task::JoinHandle};
@@ -45,6 +45,9 @@ impl LocalProverConfig {
pub struct CircuitConfig {
pub hard_fork_name: String,
pub workspace_path: String,
/// Cached vk values to save some initial setup cost; for debugging only
#[serde(default)]
pub vks: HashMap<ProofType, String>,
}
pub struct LocalProver {
@@ -52,7 +55,7 @@ pub struct LocalProver {
next_task_id: u64,
current_task: Option<JoinHandle<Result<String>>>,
active_handler: Option<(String, Arc<dyn CircuitsHandler>)>,
handlers: HashMap<String, OnceLock<Arc<dyn CircuitsHandler>>>,
}
#[async_trait]
@@ -62,13 +65,13 @@ impl ProvingService for LocalProver {
}
async fn get_vks(&self, req: GetVkRequest) -> GetVkResponse {
let mut vks = vec![];
for hard_fork_name in self.config.circuits.keys() {
let handler = self.new_handler(hard_fork_name);
for (hard_fork_name, cfg) in self.config.circuits.iter() {
for proof_type in &req.proof_types {
let vk = handler.get_vk(*proof_type).await;
if let Some(vk) = vk {
vks.push(BASE64_STANDARD.encode(vk));
if let Some(vk) = cfg.vks.get(proof_type) {
vks.push(vk.clone())
} else {
let handler = self.get_or_init_handler(hard_fork_name);
vks.push(handler.get_vk(*proof_type));
}
}
}
@@ -76,11 +79,8 @@ impl ProvingService for LocalProver {
GetVkResponse { vks, error: None }
}
async fn prove(&mut self, req: ProveRequest) -> ProveResponse {
self.set_active_handler(&req.hard_fork_name);
match self
.do_prove(req, self.active_handler.as_ref().unwrap().1.clone())
.await
{
let handler = self.get_or_init_handler(&req.hard_fork_name);
match self.do_prove(req, handler).await {
Ok(resp) => resp,
Err(e) => ProveResponse {
status: TaskStatus::Failed,
@@ -133,11 +133,16 @@ impl ProvingService for LocalProver {
impl LocalProver {
pub fn new(config: LocalProverConfig) -> Self {
let handlers = config
.circuits
.keys()
.map(|k| (k.clone(), OnceLock::new()))
.collect();
Self {
config,
next_task_id: 0,
current_task: None,
active_handler: None,
handlers,
}
}
@@ -168,25 +173,25 @@ impl LocalProver {
})
}
fn set_active_handler(&mut self, hard_fork_name: &str) {
if let Some(handler) = &self.active_handler {
if handler.0 == hard_fork_name {
return;
}
}
self.active_handler = Some((hard_fork_name.to_string(), self.new_handler(hard_fork_name)));
fn get_or_init_handler(&self, hard_fork_name: &str) -> Arc<dyn CircuitsHandler> {
let lk = self
.handlers
.get(hard_fork_name)
.expect("coordinator should never sent unexpected forkname");
lk.get_or_init(|| self.new_handler(hard_fork_name)).clone()
}
fn new_handler(&self, hard_fork_name: &str) -> Arc<dyn CircuitsHandler> {
pub fn new_handler(&self, hard_fork_name: &str) -> Arc<dyn CircuitsHandler> {
// if we got assigned a task for an unknown hard fork, there is something wrong in the
// coordinator
let config = self.config.circuits.get(hard_fork_name).unwrap();
match hard_fork_name {
"euclidV2" => Arc::new(Arc::new(Mutex::new(EuclidV2Handler::new(
&config.workspace_path,
)))) as Arc<dyn CircuitsHandler>,
_ => unreachable!(),
// The new EuclidV2Handler is a universal handler
// We can add other handler implementations if needed
"some future forkname" => unreachable!(),
_ => Arc::new(Arc::new(Mutex::new(EuclidV2Handler::new(config))))
as Arc<dyn CircuitsHandler>,
}
}
}
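The handler map follows a per-fork lazy-initialization pattern: one OnceLock slot per configured fork, filled on first use and shared afterwards. Below is a self-contained sketch of the same pattern with an illustrative stand-in for the handler type (all names here are assumptions, not part of this change):
use std::{
    collections::HashMap,
    sync::{Arc, OnceLock},
};

struct Registry {
    // One slot per known fork; unknown forks are rejected up front.
    slots: HashMap<String, OnceLock<Arc<String>>>,
}

impl Registry {
    fn new(forks: &[&str]) -> Self {
        Self {
            slots: forks.iter().map(|f| (f.to_string(), OnceLock::new())).collect(),
        }
    }

    fn get_or_init(&self, fork: &str) -> Arc<String> {
        self.slots
            .get(fork)
            .expect("unknown fork")
            // Built only on first access; later calls reuse the same Arc.
            .get_or_init(|| Arc::new(format!("handler for {fork}")))
            .clone()
    }
}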

View File

@@ -11,7 +11,7 @@ use std::path::Path;
#[async_trait]
pub trait CircuitsHandler: Sync + Send {
async fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>>;
fn get_vk(&self, task_type: ProofType) -> String;
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String>;
}
@@ -54,14 +54,12 @@ impl Phase {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_config = workspace_path.join("bundle/openvm.toml");
let segment_len = Some((1 << 22) - 100);
match self {
Phase::EuclidV2 => ProverConfig {
dir_cache,
path_app_config,
segment_len,
path_app_exe: workspace_path.join("bundle/app.vmexe"),
..Default::default()
},
ProverConfig {
dir_cache,
path_app_config,
segment_len,
path_app_exe: workspace_path.join("bundle/app.vmexe"),
..Default::default()
}
}
}

View File

@@ -1,7 +1,13 @@
use std::{path::Path, sync::Arc};
use std::{
collections::HashMap,
path::Path,
sync::{Arc, OnceLock},
};
use super::{CircuitsHandler, Phase};
use crate::prover::CircuitConfig;
use async_trait::async_trait;
use base64::{prelude::BASE64_STANDARD, Engine};
use eyre::Result;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
use scroll_zkvm_prover_euclid::{BatchProver, BundleProverEuclidV2, ChunkProver};
@@ -11,12 +17,14 @@ pub struct EuclidV2Handler {
chunk_prover: ChunkProver,
batch_prover: BatchProver,
bundle_prover: BundleProverEuclidV2,
cached_vks: HashMap<ProofType, OnceLock<String>>,
}
unsafe impl Send for EuclidV2Handler {}
impl EuclidV2Handler {
pub fn new(workspace_path: &str) -> Self {
pub fn new(cfg: &CircuitConfig) -> Self {
let workspace_path = &cfg.workspace_path;
let p = Phase::EuclidV2;
let workspace_path = Path::new(workspace_path);
let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
@@ -28,46 +36,80 @@ impl EuclidV2Handler {
let bundle_prover = BundleProverEuclidV2::setup(p.phase_spec_bundle(workspace_path))
.expect("Failed to setup bundle prover");
let build_vk_cache = |proof_type: ProofType| {
let vk = if let Some(vk) = cfg.vks.get(&proof_type) {
OnceLock::from(vk.clone())
} else {
OnceLock::new()
};
(proof_type, vk)
};
Self {
chunk_prover,
batch_prover,
bundle_prover,
cached_vks: HashMap::from([
build_vk_cache(ProofType::Chunk),
build_vk_cache(ProofType::Batch),
build_vk_cache(ProofType::Bundle),
]),
}
}
pub fn get_vk_and_cache(&self, task_type: ProofType) -> String {
match task_type {
ProofType::Chunk => self.cached_vks[&ProofType::Chunk]
.get_or_init(|| BASE64_STANDARD.encode(self.chunk_prover.get_app_vk())),
ProofType::Batch => self.cached_vks[&ProofType::Batch]
.get_or_init(|| BASE64_STANDARD.encode(self.batch_prover.get_app_vk())),
ProofType::Bundle => self.cached_vks[&ProofType::Bundle]
.get_or_init(|| BASE64_STANDARD.encode(self.bundle_prover.get_evm_vk())),
_ => unreachable!("Unsupported proof type {:?}", task_type),
}
.clone()
}
}
#[async_trait]
impl CircuitsHandler for Arc<Mutex<EuclidV2Handler>> {
async fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
Some(match task_type {
ProofType::Chunk => self.try_lock().unwrap().chunk_prover.get_app_vk(),
ProofType::Batch => self.try_lock().unwrap().batch_prover.get_app_vk(),
ProofType::Bundle => self.try_lock().unwrap().bundle_prover.get_app_vk(),
_ => unreachable!("Unsupported proof type"),
})
fn get_vk(&self, task_type: ProofType) -> String {
self.try_lock()
.expect("get vk is on called before other entry is used")
.get_vk_and_cache(task_type)
}
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
let handler_self = self.lock().await;
let u_task: ProvingTask = serde_json::from_str(&prove_request.input)?;
let expected_vk = handler_self.get_vk_and_cache(prove_request.proof_type);
if BASE64_STANDARD.encode(&u_task.vk) != expected_vk {
eyre::bail!(
"vk is not match!, prove type {:?}, expected {}, get {}",
prove_request.proof_type,
expected_vk,
BASE64_STANDARD.encode(&u_task.vk),
);
}
let proof = match prove_request.proof_type {
ProofType::Chunk => self
.try_lock()
.unwrap()
ProofType::Chunk => handler_self
.chunk_prover
.gen_proof_universal(&u_task, false)?,
ProofType::Batch => self
.try_lock()
.unwrap()
ProofType::Batch => handler_self
.batch_prover
.gen_proof_universal(&u_task, false)?,
ProofType::Bundle => self
.try_lock()
.unwrap()
ProofType::Bundle => handler_self
.bundle_prover
.gen_proof_universal(&u_task, true)?,
_ => return Err(eyre::eyre!("Unsupported proof type")),
_ => {
return Err(eyre::eyre!(
"Unsupported proof type {:?}",
prove_request.proof_type
))
}
};
//TODO: check expected PI
Ok(serde_json::to_string(&proof)?)
}
}

1
database/.gitignore vendored
View File

@@ -1,2 +1,3 @@
/build/bin
.idea
localdbg

7
zkvm-prover/.work/.gitignore vendored Normal file
View File

@@ -0,0 +1,7 @@
*.vmexe
*.bin
*.sol
cache
db
*.json
?

View File

@@ -0,0 +1,34 @@
[app_fri_params.fri_params]
log_blowup = 1
log_final_poly_len = 0
num_queries = 100
proof_of_work_bits = 16
[app_vm_config.rv32i]
[app_vm_config.rv32m]
[app_vm_config.io]
[app_vm_config.keccak]
[app_vm_config.castf]
[app_vm_config.modular]
supported_modulus = [
"4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787",
"52435875175126190479447740508185965837690552500527637822603658699938581184513",
]
[app_vm_config.native]
[app_vm_config.pairing]
supported_curves = ["Bls12_381"]
[app_vm_config.sha256]
[app_vm_config.fp2]
supported_modulus = [
"4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787",
]
[[app_vm_config.ecc.supported_curves]]
modulus = "4002409555221667393417789825735904156556882819939007885332058136124031650490837864442687629129015664037894272559787"
scalar = "52435875175126190479447740508185965837690552500527637822603658699938581184513"
a = "0"
b = "4"

View File

@@ -0,0 +1,17 @@
[app_fri_params.fri_params]
log_blowup = 1
log_final_poly_len = 0
num_queries = 100
proof_of_work_bits = 16
[app_vm_config.rv32i]
[app_vm_config.rv32m]
[app_vm_config.io]
[app_vm_config.keccak]
[app_vm_config.castf]
[app_vm_config.native]

View File

@@ -0,0 +1,53 @@
[app_fri_params.fri_params]
log_blowup = 1
log_final_poly_len = 0
num_queries = 100
proof_of_work_bits = 16
[app_vm_config.rv32i]
[app_vm_config.io]
[app_vm_config.keccak]
[app_vm_config.rv32m]
range_tuple_checker_sizes = [256, 8192]
[app_vm_config.bigint]
range_tuple_checker_sizes = [256, 8192]
[app_vm_config.modular]
supported_modulus = [
"21888242871839275222246405745257275088696311157297823662689037894645226208583",
"21888242871839275222246405745257275088548364400416034343698204186575808495617",
"115792089237316195423570985008687907853269984665640564039457584007908834671663",
"115792089237316195423570985008687907852837564279074904382605163141518161494337",
"115792089210356248762697446949407573530086143415290314195533631308867097853951",
"115792089210356248762697446949407573529996955224135760342422259061068512044369"
]
[app_vm_config.fp2]
supported_modulus = ["21888242871839275222246405745257275088696311157297823662689037894645226208583"]
[app_vm_config.pairing]
supported_curves = ["Bn254"]
[app_vm_config.sha256]
[[app_vm_config.ecc.supported_curves]]
modulus = "115792089237316195423570985008687907853269984665640564039457584007908834671663"
scalar = "115792089237316195423570985008687907852837564279074904382605163141518161494337"
a = "0"
b = "7"
[[app_vm_config.ecc.supported_curves]]
modulus = "115792089210356248762697446949407573530086143415290314195533631308867097853951"
scalar = "115792089210356248762697446949407573529996955224135760342422259061068512044369"
a = "115792089210356248762697446949407573530086143415290314195533631308867097853948"
b = "41058363725152142129326129780047268409114441015993725554835256314039467401291"
[[app_vm_config.ecc.supported_curves]]
modulus = "21888242871839275222246405745257275088696311157297823662689037894645226208583"
scalar = "21888242871839275222246405745257275088548364400416034343698204186575808495617"
a = "0"
b = "3"

View File

@@ -1,5 +1,10 @@
.PHONY: prover lint tests_binary
RUST_MIN_STACK ?= 16777216
export RUST_MIN_STACK
CIRCUIT_STUFF = .work/chunk/app.vmexe .work/batch/app.vmexe .work/bundle/app.vmexe
ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
PLONKY3_VERSION=$(shell grep -m 1 "Plonky3.git" ../Cargo.lock | cut -d "#" -f2 | cut -c-7)
else
@@ -36,7 +41,7 @@ else
endif
prover:
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cd ../crates/prover-bin && cargo build --release
tests_binary:
cargo clean && cargo test --release --no-run
@@ -46,3 +51,9 @@ lint:
cargo check --all-features
cargo clippy --all-features --all-targets -- -D warnings
cargo fmt --all
$(CIRCUIT_STUFF):
bash .work/download-release.sh
test_run: $(CIRCUIT_STUFF)
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json

View File

@@ -1,31 +0,0 @@
{
"sdk_config": {
"prover_name_prefix": "prover-1",
"keys_dir": "keys",
"coordinator": {
"base_url": "http://localhost:8555",
"retry_count": 10,
"retry_wait_time_sec": 10,
"connection_timeout_sec": 30
},
"l2geth": {
"endpoint": "http://localhost:9999"
},
"prover": {
"circuit_type": 2,
"supported_proof_types": [
1,
2,
3
],
"circuit_version": "v0.13.1"
},
"db_path": "unique-db-path-for-prover-1"
},
"circuits": {
"euclidV2": {
"hard_fork_name": "euclidV2",
"workspace_path": "/home/ubuntu/prover-workdir"
}
}
}

View File

@@ -20,12 +20,17 @@
],
"circuit_version": "v0.13.1"
},
"health_listener_addr": "127.0.0.1:10080",
"db_path": ".work/db"
},
"circuits": {
"euclidV2": {
"hard_fork_name": "euclidV2",
"workspace_path": ".work"
}
"workspace_path": ".work/euclid"
},
"feynman": {
"hard_fork_name": "feynman",
"workspace_path": ".work/feynman1"
},
}
}

View File

@@ -0,0 +1,20 @@
#!/bin/bash
# release version
if [ -z "${SCROLL_ZKVM_VERSION}" ]; then
SCROLL_ZKVM_VERSION=$($SHELL ./print_high_zkvm_version.sh | cut -d' ' -f1|cut -c2-)
fi
echo $SCROLL_ZKVM_VERSION
# chunk-circuit exe
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/chunk/app.vmexe -O .work/chunk/app.vmexe
# batch-circuit exe
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/batch/app.vmexe -O .work/batch/app.vmexe
# bundle-circuit exe
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/app.vmexe -O .work/bundle/app.vmexe
# bundle-circuit exe, legacy version, may not exist
wget https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/$SCROLL_ZKVM_VERSION/bundle/app_euclidv1.vmexe -O .work/bundle/app_euclidv1.vmexe || echo "legacy app not exist for $SCROLL_ZKVM_VERSION"