Compare commits

...

8 Commits

Author SHA1 Message Date
georgehao
7ff5b190ec bump version to v4.5.44 (#1731) 2025-09-02 09:56:59 +08:00
Ho
b297edd28d [Feat] Prover loading assets (circuits) dynamically (#1717) 2025-08-29 19:32:44 +09:00
Péter Garamvölgyi
47c85d4983 Fix unique chunk hash (#1727) 2025-08-26 14:27:24 +02:00
Morty
1552e98b79 fix(bridge-history+rollup-relayer): update da-codec to prevent zstd deadlock (#1724)
Co-authored-by: yiweichi <yiweichi@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2025-08-25 16:23:01 +08:00
Péter Garamvölgyi
a65b3066a3 fix: remove unnecessary logs (#1725) 2025-08-22 15:02:20 +02:00
Morty
1f2b397bbd feat(bridge-history): add aws s3 blob client (#1716)
Co-authored-by: yiweichi <yiweichi@users.noreply.github.com>
Co-authored-by: colin <102356659+colinlyguo@users.noreply.github.com>
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2025-08-12 14:59:44 +08:00
colin
ae791a0714 fix(rollup-relayer): sanity checks (#1720) 2025-08-12 14:57:02 +08:00
colin
c012f7132d feat(rollup-relayer): add sanity checks before committing and finalizing (#1714)
Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
2025-08-11 17:49:29 +08:00
53 changed files with 1644 additions and 986 deletions

481
Cargo.lock generated

File diff suppressed because it is too large Load Diff

View File

@@ -17,9 +17,9 @@ repository = "https://github.com/scroll-tech/scroll"
version = "4.5.8"
[workspace.dependencies]
scroll-zkvm-prover-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", branch = "feat/0.5.1", package = "scroll-zkvm-prover" }
scroll-zkvm-verifier-euclid = { git = "https://github.com/scroll-tech/zkvm-prover", branch = "feat/0.5.1", package = "scroll-zkvm-verifier" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", branch = "feat/0.5.1" }
scroll-zkvm-prover = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "89a2dc1" }
scroll-zkvm-verifier = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "89a2dc1" }
scroll-zkvm-types = { git = "https://github.com/scroll-tech/zkvm-prover", rev = "89a2dc1" }
sbv-primitives = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/openvm-1.3", features = ["scroll"] }
sbv-utils = { git = "https://github.com/scroll-tech/stateless-block-verifier", branch = "chore/openvm-1.3" }

View File

@@ -10,7 +10,7 @@ require (
github.com/go-redis/redis/v8 v8.11.5
github.com/pressly/goose/v3 v3.16.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178
github.com/scroll-tech/go-ethereum v1.10.14-0.20250729113104-bd8f141bb3e9
github.com/stretchr/testify v1.9.0
github.com/urfave/cli/v2 v2.25.7

View File

@@ -309,8 +309,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6 h1:vb2XLvQwCf+F/ifP6P/lfeiQrHY6+Yb/E3R4KHXLqSE=
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250729113104-bd8f141bb3e9 h1:u371VK8eOU2Z/0SVf5KDI3eJc8msHSpJbav4do/8n38=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250729113104-bd8f141bb3e9/go.mod h1:pDCZ4iGvEGmdIe4aSAGBrb7XSrKEML6/L/wEMmNxOdk=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -38,6 +38,7 @@ type FetcherConfig struct {
BeaconNodeAPIEndpoint string `json:"BeaconNodeAPIEndpoint"`
BlobScanAPIEndpoint string `json:"BlobScanAPIEndpoint"`
BlockNativeAPIEndpoint string `json:"BlockNativeAPIEndpoint"`
AwsS3Endpoint string `json:"AwsS3Endpoint"`
}
// RedisConfig redis config

View File

@@ -39,6 +39,9 @@ type L1MessageFetcher struct {
// NewL1MessageFetcher creates a new L1MessageFetcher instance.
func NewL1MessageFetcher(ctx context.Context, cfg *config.FetcherConfig, db *gorm.DB, client *ethclient.Client) (*L1MessageFetcher, error) {
blobClient := blob_client.NewBlobClients()
if cfg.AwsS3Endpoint != "" {
blobClient.AddBlobClient(blob_client.NewAwsS3Client(cfg.AwsS3Endpoint))
}
if cfg.BeaconNodeAPIEndpoint != "" {
beaconNodeClient, err := blob_client.NewBeaconNodeClient(cfg.BeaconNodeAPIEndpoint)
if err != nil {

View File

@@ -15,7 +15,7 @@ require (
github.com/modern-go/reflect2 v1.0.2
github.com/orcaman/concurrent-map v1.0.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601
github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587
github.com/stretchr/testify v1.10.0
github.com/testcontainers/testcontainers-go v0.30.0
github.com/testcontainers/testcontainers-go/modules/compose v0.30.0
@@ -184,7 +184,7 @@ require (
github.com/rjeczalik/notify v0.9.1 // indirect
github.com/rs/cors v1.7.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435 // indirect
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 // indirect
github.com/scroll-tech/zktrie v0.8.4 // indirect
github.com/secure-systems-lab/go-securesystemslib v0.4.0 // indirect
github.com/serialx/hashring v0.0.0-20190422032157-8b2912629002 // indirect

View File

@@ -636,10 +636,10 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435 h1:X9fkvjrYBY79lGgKEPpUhuiJ4vWpWwzOVw4H8CU8L54=
github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601 h1:NEsjCG6uSvLRBlsP3+x6PL1kM+Ojs3g8UGotIPgJSz8=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601/go.mod h1:OblWe1+QrZwdpwO0j/LY3BSGuKT3YPUFBDQQgvvfStQ=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587 h1:wG1+gb+K4iLtxAHhiAreMdIjP5x9hB64duraN2+u1QU=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587/go.mod h1:YyfB2AyAtphlbIuDQgaxc2b9mo0zE4EBA1+qtXvzlmg=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
github.com/secure-systems-lab/go-securesystemslib v0.4.0 h1:b23VGrQhTA8cN2CbBw7/FulN9fTtqYUdS5+Oxzt+DUE=

View File

@@ -135,10 +135,18 @@ type BlockContextV2 struct {
NumL1Msgs uint16 `json:"num_l1_msgs"`
}
// OpenVMProofStat carries prover-side metric data attached to an OpenVMProof:
// the total VM cycle count and wall-clock timings for execution and proving.
type OpenVMProofStat struct {
	// Total number of VM cycles spent on this proof.
	TotalCycle uint64 `json:"total_cycles"`
	// Wall-clock execution time, in milliseconds.
	ExecutionTimeMills uint64 `json:"execution_time_mills"`
	// Wall-clock proving time, in milliseconds.
	ProvingTimeMills uint64 `json:"proving_time_mills"`
}
// Proof for flatten VM proof
type OpenVMProof struct {
Proof []byte `json:"proofs"`
PublicValues []byte `json:"public_values"`
Proof []byte `json:"proofs"`
PublicValues []byte `json:"public_values"`
Stat *OpenVMProofStat `json:"stat,omitempty"`
}
// Proof for flatten EVM proof
@@ -150,7 +158,8 @@ type OpenVMEvmProof struct {
// OpenVMChunkProof includes the proof info that are required for chunk verification and rollup.
type OpenVMChunkProof struct {
MetaData struct {
ChunkInfo *ChunkInfo `json:"chunk_info"`
ChunkInfo *ChunkInfo `json:"chunk_info"`
TotalGasUsed uint64 `json:"chunk_total_gas"`
} `json:"metadata"`
VmProof *OpenVMProof `json:"proof"`

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.5.38"
var tag = "v4.5.44"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -34,6 +34,13 @@ coordinator_cron:
coordinator_tool:
go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_tool ./cmd/tool
localsetup: coordinator_api ## Local setup: build coordinator_api, copy config, and setup releases
@echo "Copying configuration files..."
cp -r $(PWD)/conf $(PWD)/build/bin/
@echo "Setting up releases..."
cd $(PWD)/build && bash setup_releases.sh
#coordinator_api_skip_libzkp:
# go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator_api ./cmd/api

View File

@@ -0,0 +1,62 @@
#!/bin/bash
# Download zkvm verifier assets (verifier.bin, openVmVk.json) for every
# verifier entry declared in the coordinator config.
#
# Requires: SCROLL_ZKVM_VERSION env var; jq and wget on PATH.
# Reads:    bin/conf/config.json
# Writes:   assets into each verifier's assets_path (relative paths are
#           resolved under bin/).

RELEASE_BASE_URL="https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases"

# release version
if [ -z "${SCROLL_ZKVM_VERSION:-}" ]; then
    echo "SCROLL_ZKVM_VERSION not set"
    exit 1
fi

# set ASSET_DIR by reading from config.json
CONFIG_FILE="bin/conf/config.json"
if [ ! -f "$CONFIG_FILE" ]; then
    echo "Config file $CONFIG_FILE not found"
    exit 1
fi

# jq is required to parse the config; fail early with a clear message
if ! command -v jq >/dev/null 2>&1; then
    echo "jq is required but not installed"
    exit 1
fi

# get the number of verifiers in the array; guard against an empty result
# (jq failure) as well as "null"/0 so the -eq test below cannot crash
VERIFIER_COUNT=$(jq -r '.prover_manager.verifier.verifiers | length' "$CONFIG_FILE")
if [ -z "$VERIFIER_COUNT" ] || [ "$VERIFIER_COUNT" = "null" ] || [ "$VERIFIER_COUNT" -eq 0 ]; then
    echo "No verifiers found in config file"
    exit 1
fi
echo "Found $VERIFIER_COUNT verifier(s) in config"

# iterate through each verifier entry
for ((i = 0; i < VERIFIER_COUNT; i++)); do
    # extract assets_path for current verifier
    ASSETS_PATH=$(jq -r ".prover_manager.verifier.verifiers[$i].assets_path" "$CONFIG_FILE")
    FORK_NAME=$(jq -r ".prover_manager.verifier.verifiers[$i].fork_name" "$CONFIG_FILE")
    if [ "$ASSETS_PATH" = "null" ]; then
        echo "Warning: Could not find assets_path for verifier $i, skipping..."
        continue
    fi
    echo "Processing verifier $i ($FORK_NAME): assets_path=$ASSETS_PATH"

    # check if it's an absolute path (starts with /)
    if [[ "$ASSETS_PATH" = /* ]]; then
        # absolute path, use as is
        ASSET_DIR="$ASSETS_PATH"
    else
        # relative path, prefix with "bin/"
        ASSET_DIR="bin/$ASSETS_PATH"
    fi
    echo "Using ASSET_DIR: $ASSET_DIR"

    # create directory if it doesn't exist
    mkdir -p "$ASSET_DIR"

    # assets for verifier-only mode; abort on download failure so a
    # partial/empty file is never reported as success
    echo "Downloading assets for $FORK_NAME to $ASSET_DIR..."
    for ASSET in verifier.bin openVmVk.json; do
        if ! wget "${RELEASE_BASE_URL}/${SCROLL_ZKVM_VERSION}/verifier/${ASSET}" -O "${ASSET_DIR}/${ASSET}"; then
            echo "Failed to download ${ASSET} for $FORK_NAME"
            exit 1
        fi
    done
    echo "Completed downloading assets for $FORK_NAME"
    echo "---"
done
echo "All verifier assets downloaded successfully"

View File

@@ -12,7 +12,7 @@
{
"assets_path": "assets",
"fork_name": "euclidV2"
},
},
{
"assets_path": "assets",
"fork_name": "feynman"

View File

@@ -9,7 +9,7 @@ require (
github.com/google/uuid v1.6.0
github.com/mitchellh/mapstructure v1.5.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7
github.com/shopspring/decimal v1.3.1
github.com/stretchr/testify v1.10.0

View File

@@ -253,8 +253,8 @@ github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/russross/blackfriday/v2 v2.1.0 h1:JIOH55/0cWyOuilr9/qlrm0BSXldqnqwMsf35Ld67mk=
github.com/russross/blackfriday/v2 v2.1.0/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6 h1:vb2XLvQwCf+F/ifP6P/lfeiQrHY6+Yb/E3R4KHXLqSE=
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7 h1:1rN1qocsQlOyk1VCpIEF1J5pfQbLAi1pnMZSLQS37jQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7/go.mod h1:pDCZ4iGvEGmdIe4aSAGBrb7XSrKEML6/L/wEMmNxOdk=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -57,9 +57,10 @@ type Config struct {
// AssetConfig contain assets configurated for each fork, the defaul vkfile name is "OpenVmVk.json".
type AssetConfig struct {
AssetsPath string `json:"assets_path"`
ForkName string `json:"fork_name"`
Vkfile string `json:"vk_file,omitempty"`
AssetsPath string `json:"assets_path"`
ForkName string `json:"fork_name"`
Vkfile string `json:"vk_file,omitempty"`
MinProverVersion string `json:"min_prover_version,omitempty"`
}
// VerifierConfig load zk verifier config.

View File

@@ -24,18 +24,16 @@ type LoginLogic struct {
openVmVks map[string]struct{}
proverVersionHardForkMap map[string][]string
proverVersionHardForkMap map[string]string
}
// NewLoginLogic new a LoginLogic
func NewLoginLogic(db *gorm.DB, cfg *config.Config, vf *verifier.Verifier) *LoginLogic {
proverVersionHardForkMap := make(map[string][]string)
proverVersionHardForkMap := make(map[string]string)
var hardForks []string
for _, cfg := range cfg.ProverManager.Verifier.Verifiers {
hardForks = append(hardForks, cfg.ForkName)
proverVersionHardForkMap[cfg.ForkName] = cfg.MinProverVersion
}
proverVersionHardForkMap[cfg.ProverManager.Verifier.MinProverVersion] = hardForks
return &LoginLogic{
cfg: cfg,
@@ -101,9 +99,15 @@ func (l *LoginLogic) ProverHardForkName(login *types.LoginParameter) (string, er
}
proverVersion := proverVersionSplits[0]
if hardForkNames, ok := l.proverVersionHardForkMap[proverVersion]; ok {
return strings.Join(hardForkNames, ","), nil
var hardForkNames []string
for n, minVersion := range l.proverVersionHardForkMap {
if minVersion == "" || version.CheckScrollRepoVersion(proverVersion, minVersion) {
hardForkNames = append(hardForkNames, n)
}
}
if len(hardForkNames) == 0 {
return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
}
return "", fmt.Errorf("invalid prover prover_version:%s", login.Message.ProverVersion)
return strings.Join(hardForkNames, ","), nil
}

View File

@@ -71,6 +71,9 @@ type ProofReceiverLogic struct {
validateFailureProverTaskStatusNotOk prometheus.Counter
validateFailureProverTaskTimeout prometheus.Counter
validateFailureProverTaskHaveVerifier prometheus.Counter
proverSpeed *prometheus.GaugeVec
provingTime prometheus.Gauge
evmCyclePerGas prometheus.Gauge
ChunkTask provertask.ProverTask
BundleTask provertask.ProverTask
@@ -79,6 +82,7 @@ type ProofReceiverLogic struct {
// NewSubmitProofReceiverLogic create a proof receiver logic
func NewSubmitProofReceiverLogic(cfg *config.ProverManager, chainCfg *params.ChainConfig, db *gorm.DB, vf *verifier.Verifier, reg prometheus.Registerer) *ProofReceiverLogic {
return &ProofReceiverLogic{
chunkOrm: orm.NewChunk(db),
batchOrm: orm.NewBatch(db),
@@ -133,6 +137,18 @@ func NewSubmitProofReceiverLogic(cfg *config.ProverManager, chainCfg *params.Cha
Name: "coordinator_validate_failure_submit_have_been_verifier",
Help: "Total number of submit proof validate failure proof have been verifier.",
}),
evmCyclePerGas: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "evm_circuit_cycle_per_gas",
Help: "VM cycles cost for a gas unit cost in evm execution",
}),
provingTime: promauto.With(reg).NewGauge(prometheus.GaugeOpts{
Name: "chunk_proving_time",
Help: "Wall clock time for chunk proving in second",
}),
proverSpeed: promauto.With(reg).NewGaugeVec(prometheus.GaugeOpts{
Name: "prover_speed",
Help: "Cycle against running time of prover (in mhz)",
}, []string{"type", "phase"}),
}
}
@@ -204,12 +220,34 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofParameter coor
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyChunkProof(chunkProof, hardForkName)
if stat := chunkProof.VmProof.Stat; stat != nil {
if g, _ := m.proverSpeed.GetMetricWithLabelValues("chunk", "exec"); g != nil && stat.ExecutionTimeMills > 0 {
g.Set(float64(stat.TotalCycle) / float64(stat.ExecutionTimeMills*1000))
}
if g, _ := m.proverSpeed.GetMetricWithLabelValues("chunk", "proving"); g != nil && stat.ProvingTimeMills > 0 {
g.Set(float64(stat.TotalCycle) / float64(stat.ProvingTimeMills*1000))
}
if chunkProof.MetaData.TotalGasUsed > 0 {
cycle_per_gas := float64(stat.TotalCycle) / float64(chunkProof.MetaData.TotalGasUsed)
m.evmCyclePerGas.Set(cycle_per_gas)
}
m.provingTime.Set(float64(stat.ProvingTimeMills) / 1000)
}
case message.ProofTypeBatch:
batchProof := &message.OpenVMBatchProof{}
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &batchProof); unmarshalErr != nil {
return unmarshalErr
}
success, verifyErr = m.verifier.VerifyBatchProof(batchProof, hardForkName)
if stat := batchProof.VmProof.Stat; stat != nil {
if g, _ := m.proverSpeed.GetMetricWithLabelValues("batch", "exec"); g != nil && stat.ExecutionTimeMills > 0 {
g.Set(float64(stat.TotalCycle) / float64(stat.ExecutionTimeMills*1000))
}
if g, _ := m.proverSpeed.GetMetricWithLabelValues("batch", "proving"); g != nil && stat.ProvingTimeMills > 0 {
g.Set(float64(stat.TotalCycle) / float64(stat.ProvingTimeMills*1000))
}
}
case message.ProofTypeBundle:
bundleProof := &message.OpenVMBundleProof{}
if unmarshalErr := json.Unmarshal([]byte(proofParameter.Proof), &bundleProof); unmarshalErr != nil {

View File

@@ -4,6 +4,7 @@ package verifier
import (
"encoding/base64"
"encoding/hex"
"encoding/json"
"fmt"
"io"
@@ -129,6 +130,23 @@ const blocked_vks = `
D6YFHwTLZF/U2zpYJPQ3LwJZRm85yA5Vq2iFBqd3Mk4iwOUpS8sbOp3vg2+NDxhhKphgYpuUlykpdsoRhEt+cw==,
`
// decodeVkString decodes a verifying-key string, trying hex first and
// falling back to standard (padded) base64.
//
// It returns an error when the string decodes under neither scheme, or when
// the decoded result is empty: hex.DecodeString("") yields (nil, nil), so
// without the extra length guard an empty vk string would slip through the
// hex path as valid, while the base64 path already rejected it.
func decodeVkString(s string) ([]byte, error) {
	// Try hex decoding first; only accept a non-empty result.
	if b, err := hex.DecodeString(s); err == nil && len(b) > 0 {
		return b, nil
	}
	// Fallback to base64 decoding.
	b, err := base64.StdEncoding.DecodeString(s)
	if err != nil {
		return nil, err
	}
	if len(b) == 0 {
		return nil, fmt.Errorf("decode vk string %s fail (empty bytes)", s)
	}
	return b, nil
}
func (v *Verifier) loadOpenVMVks(cfg config.AssetConfig) error {
vkFileName := cfg.Vkfile
@@ -165,17 +183,17 @@ func (v *Verifier) loadOpenVMVks(cfg config.AssetConfig) error {
v.OpenVMVkMap[dump.Bundle] = struct{}{}
log.Info("Load vks", "from", cfg.AssetsPath, "chunk", dump.Chunk, "batch", dump.Batch, "bundle", dump.Bundle)
decodedBytes, err := base64.StdEncoding.DecodeString(dump.Chunk)
decodedBytes, err := decodeVkString(dump.Chunk)
if err != nil {
return err
}
v.ChunkVk[cfg.ForkName] = decodedBytes
decodedBytes, err = base64.StdEncoding.DecodeString(dump.Batch)
decodedBytes, err = decodeVkString(dump.Batch)
if err != nil {
return err
}
v.BatchVk[cfg.ForkName] = decodedBytes
decodedBytes, err = base64.StdEncoding.DecodeString(dump.Bundle)
decodedBytes, err = decodeVkString(dump.Bundle)
if err != nil {
return err
}

View File

@@ -584,7 +584,8 @@ func testTimeoutProof(t *testing.T) {
err = chunkOrm.UpdateBatchHashInRange(context.Background(), 0, 100, batch.Hash)
assert.NoError(t, err)
encodeData, err := json.Marshal(message.OpenVMChunkProof{VmProof: &message.OpenVMProof{}, MetaData: struct {
ChunkInfo *message.ChunkInfo `json:"chunk_info"`
ChunkInfo *message.ChunkInfo `json:"chunk_info"`
TotalGasUsed uint64 `json:"chunk_total_gas"`
}{ChunkInfo: &message.ChunkInfo{}}})
assert.NoError(t, err)
assert.NotEmpty(t, encodeData)

View File

@@ -1,16 +1,16 @@
[patch."https://github.com/openvm-org/openvm.git"]
openvm-build = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-continuations = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-instructions ={ git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-native-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-native-compiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-native-recursion = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-native-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-rv32im-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false, features = ["parallel", "bench-metrics", "evm-prove"] }
openvm-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.2.1-rc.1-pipe", default-features = false }
openvm-build = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-continuations = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-instructions ={ git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-native-circuit = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-native-compiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-native-recursion = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-native-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-rv32im-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
openvm-sdk = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false, features = ["parallel", "bench-metrics", "evm-prove"] }
openvm-transpiler = { git = "ssh://git@github.com/scroll-tech/openvm-gpu.git", branch = "patch-v1.3.0-pipe", default-features = false }
[patch."https://github.com/openvm-org/stark-backend.git"]
openvm-stark-backend = { git = "ssh://git@github.com/scroll-tech/openvm-stark-gpu.git", branch = "main", features = ["gpu"] }
@@ -42,4 +42,4 @@ p3-poseidon2-air = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", t
p3-symmetric = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-uni-stark = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-maybe-rayon = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" } # the "parallel" feature is NOT on by default to allow single-threaded benchmarking
p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }
p3-bn254-fr = { git = "ssh://git@github.com/scroll-tech/plonky3-gpu.git", tag = "v0.2.1" }

View File

@@ -4573,36 +4573,36 @@ dependencies = [
[[package]]
name = "openvm"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"bytemuck",
"num-bigint 0.4.6",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe)",
"openvm-platform 1.2.1-rc.0",
"openvm-rv32im-guest 1.2.1-rc.0",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-platform 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-rv32im-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"serde",
]
[[package]]
name = "openvm"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"bytemuck",
"getrandom 0.2.16",
"getrandom 0.3.3",
"num-bigint 0.4.6",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422)",
"openvm-platform 1.3.0",
"openvm-rv32im-guest 1.3.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-platform 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-rv32im-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"serde",
]
[[package]]
name = "openvm-algebra-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"derive_more 1.0.0",
@@ -4630,10 +4630,10 @@ dependencies = [
[[package]]
name = "openvm-algebra-complex-macros"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-macros-common 1.2.1-rc.0",
"openvm-macros-common 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"quote",
"syn 2.0.101",
]
@@ -4641,25 +4641,25 @@ dependencies = [
[[package]]
name = "openvm-algebra-complex-macros"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"openvm-macros-common 1.3.0",
"openvm-macros-common 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"quote",
"syn 2.0.101",
]
[[package]]
name = "openvm-algebra-guest"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"halo2curves-axiom",
"num-bigint 0.4.6",
"once_cell",
"openvm-algebra-complex-macros 1.2.1-rc.0",
"openvm-algebra-moduli-macros 1.2.1-rc.0",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe)",
"openvm-rv32im-guest 1.2.1-rc.0",
"openvm-algebra-complex-macros 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-algebra-moduli-macros 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-rv32im-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"serde-big-array",
"strum_macros 0.26.4",
]
@@ -4667,27 +4667,27 @@ dependencies = [
[[package]]
name = "openvm-algebra-guest"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"halo2curves-axiom",
"num-bigint 0.4.6",
"once_cell",
"openvm-algebra-complex-macros 1.3.0",
"openvm-algebra-moduli-macros 1.3.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422)",
"openvm-rv32im-guest 1.3.0",
"openvm-algebra-complex-macros 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-algebra-moduli-macros 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-rv32im-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"serde-big-array",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-algebra-moduli-macros"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"num-bigint 0.4.6",
"num-prime",
"openvm-macros-common 1.2.1-rc.0",
"openvm-macros-common 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"quote",
"syn 2.0.101",
]
@@ -4695,21 +4695,21 @@ dependencies = [
[[package]]
name = "openvm-algebra-moduli-macros"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"num-bigint 0.4.6",
"num-prime",
"openvm-macros-common 1.3.0",
"openvm-macros-common 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"quote",
"syn 2.0.101",
]
[[package]]
name = "openvm-algebra-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-algebra-guest 1.2.1-rc.0",
"openvm-algebra-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-instructions",
"openvm-instructions-derive",
"openvm-stark-backend",
@@ -4720,8 +4720,8 @@ dependencies = [
[[package]]
name = "openvm-bigint-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"derive_more 1.0.0",
@@ -4742,17 +4742,17 @@ dependencies = [
[[package]]
name = "openvm-bigint-guest"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-platform 1.2.1-rc.0",
"openvm-platform 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-bigint-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-bigint-guest",
"openvm-instructions",
@@ -4766,20 +4766,20 @@ dependencies = [
[[package]]
name = "openvm-build"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"cargo_metadata",
"eyre",
"openvm-platform 1.2.1-rc.0",
"openvm-platform 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"serde",
"serde_json",
]
[[package]]
name = "openvm-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"backtrace",
"cfg-if",
@@ -4809,8 +4809,8 @@ dependencies = [
[[package]]
name = "openvm-circuit-derive"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"itertools 0.14.0",
"quote",
@@ -4819,8 +4819,8 @@ dependencies = [
[[package]]
name = "openvm-circuit-primitives"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"itertools 0.14.0",
@@ -4834,8 +4834,8 @@ dependencies = [
[[package]]
name = "openvm-circuit-primitives-derive"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"itertools 0.14.0",
"quote",
@@ -4844,8 +4844,8 @@ dependencies = [
[[package]]
name = "openvm-continuations"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derivative",
"openvm-circuit",
@@ -4860,7 +4860,7 @@ dependencies = [
[[package]]
name = "openvm-custom-insn"
version = "0.1.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"proc-macro2",
"quote",
@@ -4870,7 +4870,7 @@ dependencies = [
[[package]]
name = "openvm-custom-insn"
version = "0.1.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"proc-macro2",
"quote",
@@ -4879,16 +4879,14 @@ dependencies = [
[[package]]
name = "openvm-ecc-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"derive_more 1.0.0",
"eyre",
"hex-literal",
"lazy_static",
"num-bigint 0.4.6",
"num-integer",
"num-traits",
"once_cell",
"openvm-algebra-circuit",
@@ -4909,19 +4907,19 @@ dependencies = [
[[package]]
name = "openvm-ecc-guest"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"ecdsa",
"elliptic-curve",
"group 0.13.0",
"halo2curves-axiom",
"once_cell",
"openvm 1.2.1-rc.0",
"openvm-algebra-guest 1.2.1-rc.0",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe)",
"openvm-ecc-sw-macros 1.2.1-rc.0",
"openvm-rv32im-guest 1.2.1-rc.0",
"openvm 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-algebra-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-ecc-sw-macros 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-rv32im-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"serde",
"strum_macros 0.26.4",
]
@@ -4929,28 +4927,28 @@ dependencies = [
[[package]]
name = "openvm-ecc-guest"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"ecdsa",
"elliptic-curve",
"group 0.13.0",
"halo2curves-axiom",
"once_cell",
"openvm 1.3.0",
"openvm-algebra-guest 1.3.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422)",
"openvm-ecc-sw-macros 1.3.0",
"openvm-rv32im-guest 1.3.0",
"openvm 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-algebra-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-ecc-sw-macros 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-rv32im-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"serde",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-ecc-sw-macros"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-macros-common 1.2.1-rc.0",
"openvm-macros-common 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"quote",
"syn 2.0.101",
]
@@ -4958,19 +4956,19 @@ dependencies = [
[[package]]
name = "openvm-ecc-sw-macros"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"openvm-macros-common 1.3.0",
"openvm-macros-common 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"quote",
"syn 2.0.101",
]
[[package]]
name = "openvm-ecc-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-ecc-guest 1.2.1-rc.0",
"openvm-ecc-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-instructions",
"openvm-instructions-derive",
"openvm-stark-backend",
@@ -4981,8 +4979,8 @@ dependencies = [
[[package]]
name = "openvm-instructions"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"backtrace",
"derive-new 0.6.0",
@@ -4998,8 +4996,8 @@ dependencies = [
[[package]]
name = "openvm-instructions-derive"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"quote",
"syn 2.0.101",
@@ -5007,8 +5005,8 @@ dependencies = [
[[package]]
name = "openvm-keccak256-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"derive_more 1.0.0",
@@ -5033,16 +5031,16 @@ dependencies = [
[[package]]
name = "openvm-keccak256-guest"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-platform 1.2.1-rc.0",
"openvm-platform 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
]
[[package]]
name = "openvm-keccak256-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-instructions",
"openvm-instructions-derive",
@@ -5055,8 +5053,8 @@ dependencies = [
[[package]]
name = "openvm-macros-common"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"syn 2.0.101",
]
@@ -5064,15 +5062,15 @@ dependencies = [
[[package]]
name = "openvm-macros-common"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"syn 2.0.101",
]
[[package]]
name = "openvm-mod-circuit-builder"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"itertools 0.14.0",
"num-bigint 0.4.6",
@@ -5090,8 +5088,8 @@ dependencies = [
[[package]]
name = "openvm-native-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"derive_more 1.0.0",
@@ -5117,8 +5115,8 @@ dependencies = [
[[package]]
name = "openvm-native-compiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"backtrace",
"itertools 0.14.0",
@@ -5141,8 +5139,8 @@ dependencies = [
[[package]]
name = "openvm-native-compiler-derive"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"quote",
"syn 2.0.101",
@@ -5150,8 +5148,8 @@ dependencies = [
[[package]]
name = "openvm-native-recursion"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"cfg-if",
"itertools 0.14.0",
@@ -5178,8 +5176,8 @@ dependencies = [
[[package]]
name = "openvm-native-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-instructions",
"openvm-transpiler",
@@ -5189,7 +5187,7 @@ dependencies = [
[[package]]
name = "openvm-pairing"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"group 0.13.0",
"halo2curves-axiom",
@@ -5197,24 +5195,24 @@ dependencies = [
"itertools 0.14.0",
"num-bigint 0.4.6",
"num-traits",
"openvm 1.3.0",
"openvm-algebra-complex-macros 1.3.0",
"openvm-algebra-guest 1.3.0",
"openvm-algebra-moduli-macros 1.3.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422)",
"openvm-ecc-guest 1.3.0",
"openvm-ecc-sw-macros 1.3.0",
"openvm-pairing-guest 1.3.0",
"openvm-platform 1.3.0",
"openvm-rv32im-guest 1.3.0",
"openvm 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-algebra-complex-macros 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-algebra-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-algebra-moduli-macros 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-ecc-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-ecc-sw-macros 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-pairing-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-platform 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-rv32im-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"rand 0.8.5",
"serde",
]
[[package]]
name = "openvm-pairing-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"derive_more 1.0.0",
@@ -5229,10 +5227,10 @@ dependencies = [
"openvm-circuit-primitives",
"openvm-circuit-primitives-derive",
"openvm-ecc-circuit",
"openvm-ecc-guest 1.2.1-rc.0",
"openvm-ecc-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-instructions",
"openvm-mod-circuit-builder",
"openvm-pairing-guest 1.2.1-rc.0",
"openvm-pairing-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-pairing-transpiler",
"openvm-rv32-adapters",
"openvm-rv32im-circuit",
@@ -5244,8 +5242,8 @@ dependencies = [
[[package]]
name = "openvm-pairing-guest"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"halo2curves-axiom",
"hex-literal",
@@ -5253,11 +5251,11 @@ dependencies = [
"lazy_static",
"num-bigint 0.4.6",
"num-traits",
"openvm 1.2.1-rc.0",
"openvm-algebra-guest 1.2.1-rc.0",
"openvm-algebra-moduli-macros 1.2.1-rc.0",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe)",
"openvm-ecc-guest 1.2.1-rc.0",
"openvm 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-algebra-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-algebra-moduli-macros 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-ecc-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"rand 0.8.5",
"serde",
"strum_macros 0.26.4",
@@ -5266,7 +5264,7 @@ dependencies = [
[[package]]
name = "openvm-pairing-guest"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"halo2curves-axiom",
"hex-literal",
@@ -5274,11 +5272,11 @@ dependencies = [
"lazy_static",
"num-bigint 0.4.6",
"num-traits",
"openvm 1.3.0",
"openvm-algebra-guest 1.3.0",
"openvm-algebra-moduli-macros 1.3.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422)",
"openvm-ecc-guest 1.3.0",
"openvm 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-algebra-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-algebra-moduli-macros 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-ecc-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"rand 0.8.5",
"serde",
"strum_macros 0.26.4",
@@ -5286,12 +5284,12 @@ dependencies = [
[[package]]
name = "openvm-pairing-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-instructions",
"openvm-instructions-derive",
"openvm-pairing-guest 1.2.1-rc.0",
"openvm-pairing-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-stark-backend",
"openvm-transpiler",
"rrs-lib",
@@ -5300,28 +5298,28 @@ dependencies = [
[[package]]
name = "openvm-platform"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"libm",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe)",
"openvm-rv32im-guest 1.2.1-rc.0",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-rv32im-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
]
[[package]]
name = "openvm-platform"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"libm",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422)",
"openvm-rv32im-guest 1.3.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-rv32im-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
]
[[package]]
name = "openvm-poseidon2-air"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derivative",
"lazy_static",
@@ -5337,8 +5335,8 @@ dependencies = [
[[package]]
name = "openvm-rv32-adapters"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"itertools 0.14.0",
@@ -5357,8 +5355,8 @@ dependencies = [
[[package]]
name = "openvm-rv32im-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"derive_more 1.0.0",
@@ -5380,10 +5378,10 @@ dependencies = [
[[package]]
name = "openvm-rv32im-guest"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe)",
"openvm-custom-insn 0.1.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"p3-field 0.1.0",
"strum_macros 0.26.4",
]
@@ -5391,21 +5389,21 @@ dependencies = [
[[package]]
name = "openvm-rv32im-guest"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422)",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"p3-field 0.1.0",
"strum_macros 0.26.4",
]
[[package]]
name = "openvm-rv32im-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-instructions",
"openvm-instructions-derive",
"openvm-rv32im-guest 1.2.1-rc.0",
"openvm-rv32im-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-stark-backend",
"openvm-transpiler",
"rrs-lib",
@@ -5416,8 +5414,8 @@ dependencies = [
[[package]]
name = "openvm-sdk"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"async-trait",
"bitcode",
@@ -5431,7 +5429,7 @@ dependencies = [
"itertools 0.14.0",
"metrics",
"num-bigint 0.4.6",
"openvm 1.2.1-rc.0",
"openvm 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-algebra-circuit",
"openvm-algebra-transpiler",
"openvm-bigint-circuit",
@@ -5471,16 +5469,16 @@ dependencies = [
[[package]]
name = "openvm-sha2"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"openvm-sha256-guest 1.3.0",
"openvm-sha256-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"sha2 0.10.9",
]
[[package]]
name = "openvm-sha256-air"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-circuit-primitives",
"openvm-stark-backend",
@@ -5490,8 +5488,8 @@ dependencies = [
[[package]]
name = "openvm-sha256-circuit"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"derive-new 0.6.0",
"derive_more 1.0.0",
@@ -5513,28 +5511,28 @@ dependencies = [
[[package]]
name = "openvm-sha256-guest"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-platform 1.2.1-rc.0",
"openvm-platform 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
]
[[package]]
name = "openvm-sha256-guest"
version = "1.3.0"
source = "git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422#4973d38cb3f2e14ebdd59e03802e65bb657ee422"
source = "git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0#5368d4756993fc1e51092499a816867cf4808de0"
dependencies = [
"openvm-platform 1.3.0",
"openvm-platform 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
]
[[package]]
name = "openvm-sha256-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"openvm-instructions",
"openvm-instructions-derive",
"openvm-sha256-guest 1.2.1-rc.0",
"openvm-sha256-guest 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-stark-backend",
"openvm-transpiler",
"rrs-lib",
@@ -5609,13 +5607,13 @@ dependencies = [
[[package]]
name = "openvm-transpiler"
version = "1.2.1-rc.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.2.1-rc.1-pipe#7dd6d1620d07c2c3faa5b91105cbb3d19ff0c9b0"
version = "1.3.0"
source = "git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe#07e2731a2afd8bcb05b76566331e68e1e4ef00d0"
dependencies = [
"elf",
"eyre",
"openvm-instructions",
"openvm-platform 1.2.1-rc.0",
"openvm-platform 1.3.0 (git+ssh://git@github.com/scroll-tech/openvm-gpu.git?branch=patch-v1.3.0-pipe)",
"openvm-stark-backend",
"rrs-lib",
"thiserror 1.0.69",
@@ -6609,6 +6607,7 @@ dependencies = [
"ctor",
"eyre",
"futures",
"futures-util",
"hex",
"http 1.3.1",
"once_cell",
@@ -7047,6 +7046,7 @@ dependencies = [
"url",
"wasm-bindgen",
"wasm-bindgen-futures",
"wasm-streams",
"web-sys",
"webpki-roots 1.0.0",
]
@@ -8662,7 +8662,7 @@ dependencies = [
[[package]]
name = "scroll-proving-sdk"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/scroll-proving-sdk.git?branch=refactor%2Fscroll#c144015870771db14b1b5d6071e4d3c4e9b48b9c"
source = "git+https://github.com/scroll-tech/scroll-proving-sdk.git?rev=4c36ab2#4c36ab29255481c34beb08ee7c3d8d4f5d7390c2"
dependencies = [
"anyhow",
"async-trait",
@@ -8692,7 +8692,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-prover"
version = "0.5.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?tag=v0.5.0rc1#0eb4c11df4909dc6096dfc98875038385578264a"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=89a2dc1#89a2dc19633f0e0de390177a7ba142bfdea164cc"
dependencies = [
"alloy-primitives",
"base64 0.22.1",
@@ -8714,8 +8714,8 @@ dependencies = [
"revm 24.0.0",
"rkyv",
"sbv-primitives",
"scroll-alloy-evm",
"scroll-zkvm-types",
"scroll-zkvm-types-batch",
"scroll-zkvm-types-chunk",
"scroll-zkvm-verifier",
"serde",
@@ -8730,15 +8730,17 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types"
version = "0.5.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?tag=v0.5.0rc1#0eb4c11df4909dc6096dfc98875038385578264a"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=89a2dc1#89a2dc19633f0e0de390177a7ba142bfdea164cc"
dependencies = [
"base64 0.22.1",
"bincode",
"c-kzg",
"openvm-continuations",
"openvm-native-recursion",
"openvm-sdk",
"openvm-stark-sdk",
"rkyv",
"sbv-primitives",
"scroll-zkvm-types-base",
"scroll-zkvm-types-batch",
"scroll-zkvm-types-bundle",
@@ -8750,7 +8752,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types-base"
version = "0.5.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?tag=v0.5.0rc1#0eb4c11df4909dc6096dfc98875038385578264a"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=89a2dc1#89a2dc19633f0e0de390177a7ba142bfdea164cc"
dependencies = [
"alloy-primitives",
"alloy-serde 1.0.16",
@@ -8765,17 +8767,17 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types-batch"
version = "0.5.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?tag=v0.5.0rc1#0eb4c11df4909dc6096dfc98875038385578264a"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=89a2dc1#89a2dc19633f0e0de390177a7ba142bfdea164cc"
dependencies = [
"alloy-primitives",
"halo2curves-axiom",
"itertools 0.14.0",
"openvm 1.3.0",
"openvm-ecc-guest 1.3.0",
"openvm 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-ecc-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-pairing",
"openvm-pairing-guest 1.3.0",
"openvm-pairing-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-sha2",
"openvm-sha256-guest 1.3.0",
"openvm-sha256-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"rkyv",
"scroll-zkvm-types-base",
"serde",
@@ -8785,7 +8787,7 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types-bundle"
version = "0.5.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?tag=v0.5.0rc1#0eb4c11df4909dc6096dfc98875038385578264a"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=89a2dc1#89a2dc19633f0e0de390177a7ba142bfdea164cc"
dependencies = [
"alloy-primitives",
"itertools 0.14.0",
@@ -8798,13 +8800,13 @@ dependencies = [
[[package]]
name = "scroll-zkvm-types-chunk"
version = "0.5.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?tag=v0.5.0rc1#0eb4c11df4909dc6096dfc98875038385578264a"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=89a2dc1#89a2dc19633f0e0de390177a7ba142bfdea164cc"
dependencies = [
"alloy-primitives",
"itertools 0.14.0",
"openvm 1.3.0",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=4973d38cb3f2e14ebdd59e03802e65bb657ee422)",
"openvm-rv32im-guest 1.3.0",
"openvm 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-custom-insn 0.1.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"openvm-rv32im-guest 1.3.0 (git+https://github.com/openvm-org/openvm.git?rev=5368d4756993fc1e51092499a816867cf4808de0)",
"revm-precompile 21.0.0 (git+https://github.com/scroll-tech/revm?branch=feat%2Freth-v74)",
"rkyv",
"sbv-core",
@@ -8818,11 +8820,12 @@ dependencies = [
[[package]]
name = "scroll-zkvm-verifier"
version = "0.5.0"
source = "git+https://github.com/scroll-tech/zkvm-prover?tag=v0.5.0rc1#0eb4c11df4909dc6096dfc98875038385578264a"
source = "git+https://github.com/scroll-tech/zkvm-prover?rev=89a2dc1#89a2dc19633f0e0de390177a7ba142bfdea164cc"
dependencies = [
"bincode",
"eyre",
"itertools 0.14.0",
"once_cell",
"openvm-circuit",
"openvm-continuations",
"openvm-native-circuit",
@@ -8832,7 +8835,9 @@ dependencies = [
"revm 24.0.0",
"scroll-zkvm-types",
"serde",
"serde_json",
"snark-verifier-sdk",
"tracing",
]
[[package]]
@@ -10237,6 +10242,7 @@ dependencies = [
"form_urlencoded",
"idna",
"percent-encoding",
"serde",
]
[[package]]
@@ -10414,6 +10420,19 @@ dependencies = [
"unicode-ident",
]
[[package]]
name = "wasm-streams"
version = "0.4.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "15053d8d85c7eccdbefef60f06769760a563c7f0a9d6902a13d35c7800b0ad65"
dependencies = [
"futures-util",
"js-sys",
"wasm-bindgen",
"wasm-bindgen-futures",
"web-sys",
]
[[package]]
name = "wasm-timer"
version = "0.2.5"

View File

@@ -6,7 +6,7 @@ edition.workspace = true
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
scroll-zkvm-types.workspace = true
scroll-zkvm-verifier-euclid.workspace = true
scroll-zkvm-verifier.workspace = true
alloy-primitives.workspace = true #depress the effect of "native-keccak"
sbv-primitives = {workspace = true, features = ["scroll-compress-ratio", "scroll"]}

View File

@@ -5,7 +5,7 @@ pub use verifier::{TaskType, VerifierConfig};
mod utils;
use sbv_primitives::B256;
use scroll_zkvm_types::util::vec_as_base64;
use scroll_zkvm_types::utils::vec_as_base64;
use serde::{Deserialize, Serialize};
use serde_json::value::RawValue;
use std::path::Path;

View File

@@ -7,10 +7,10 @@ use scroll_zkvm_types::{
batch::BatchInfo,
bundle::BundleInfo,
chunk::ChunkInfo,
proof::{EvmProof, OpenVmEvmProof, ProofEnum, RootProof},
proof::{EvmProof, OpenVmEvmProof, ProofEnum, StarkProof},
public_inputs::{ForkName, MultiVersionPublicInputs},
types_agg::{AggregationInput, ProgramCommitment},
util::vec_as_base64,
utils::vec_as_base64,
};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
@@ -40,7 +40,7 @@ pub struct WrappedProof<Metadata> {
}
pub trait AsRootProof {
fn as_root_proof(&self) -> &RootProof;
fn as_root_proof(&self) -> &StarkProof;
}
pub trait AsEvmProof {
@@ -61,17 +61,17 @@ pub type BatchProof = WrappedProof<BatchProofMetadata>;
pub type BundleProof = WrappedProof<BundleProofMetadata>;
impl AsRootProof for ChunkProof {
fn as_root_proof(&self) -> &RootProof {
fn as_root_proof(&self) -> &StarkProof {
self.proof
.as_root_proof()
.as_stark_proof()
.expect("batch proof use root proof")
}
}
impl AsRootProof for BatchProof {
fn as_root_proof(&self) -> &RootProof {
fn as_root_proof(&self) -> &StarkProof {
self.proof
.as_root_proof()
.as_stark_proof()
.expect("batch proof use root proof")
}
}
@@ -122,6 +122,8 @@ pub trait PersistableProof: Sized {
pub struct ChunkProofMetadata {
/// The chunk information describing the list of blocks contained within the chunk.
pub chunk_info: ChunkInfo,
/// Additional data for stat
pub chunk_total_gas: u64,
}
impl ProofMetadata for ChunkProofMetadata {

View File

@@ -44,12 +44,16 @@ pub fn gen_universal_chunk_task(
if let Some(interpreter) = interpreter {
task.prepare_task_via_interpret(interpreter)?;
}
let chunk_total_gas = task.stats().total_gas_used;
let chunk_info = task.precheck_and_build_metadata()?;
let proving_task = task.try_into()?;
let expected_pi_hash = chunk_info.pi_hash_by_fork(fork_name);
Ok((
expected_pi_hash,
ChunkProofMetadata { chunk_info },
ChunkProofMetadata {
chunk_info,
chunk_total_gas,
},
proving_task,
))
}

View File

@@ -91,7 +91,7 @@ impl TryFrom<BatchProvingTask> for ProvingTask {
aggregated_proofs: value
.chunk_proofs
.into_iter()
.map(|w_proof| w_proof.proof.into_root_proof().expect("expect root proof"))
.map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
.collect(),
serialized_witness: vec![to_rkyv_bytes::<RancorError>(&witness)?.into_vec()],
vk: Vec::new(),

View File

@@ -18,7 +18,7 @@ pub mod base64 {
pub mod point_eval {
use c_kzg;
use sbv_primitives::{types::eips::eip4844::BLS_MODULUS, B256 as H256, U256};
use scroll_zkvm_types::util::sha256_rv32;
use scroll_zkvm_types::utils::sha256_rv32;
/// Given the blob-envelope, translate it to a fixed size EIP-4844 blob.
///

View File

@@ -81,7 +81,7 @@ impl TryFrom<BundleProvingTask> for ProvingTask {
aggregated_proofs: value
.batch_proofs
.into_iter()
.map(|w_proof| w_proof.proof.into_root_proof().expect("expect root proof"))
.map(|w_proof| w_proof.proof.into_stark_proof().expect("expect root proof"))
.collect(),
serialized_witness: vec![witness.rkyv_serialize(None)?.to_vec()],
vk: Vec::new(),

View File

@@ -1,7 +1,6 @@
#![allow(static_mut_refs)]
mod euclidv2;
use euclidv2::EuclidV2Verifier;
mod universal;
use eyre::Result;
use serde::{Deserialize, Serialize};
use std::{
@@ -9,6 +8,7 @@ use std::{
path::Path,
sync::{Arc, Mutex, OnceLock},
};
use universal::Verifier;
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum TaskType {
@@ -61,7 +61,7 @@ pub fn init(config: VerifierConfig) {
for cfg in &config.circuits {
let canonical_fork_name = cfg.fork_name.to_lowercase();
let verifier = EuclidV2Verifier::new(&cfg.assets_path, canonical_fork_name.as_str().into());
let verifier = Verifier::new(&cfg.assets_path, canonical_fork_name.as_str().into());
let ret = verifiers.insert(canonical_fork_name, Arc::new(Mutex::new(verifier)));
assert!(
ret.is_none(),

View File

@@ -7,59 +7,47 @@ use crate::{
utils::panic_catch,
};
use scroll_zkvm_types::public_inputs::ForkName;
use scroll_zkvm_verifier_euclid::verifier::UniversalVerifier;
use scroll_zkvm_verifier::verifier::UniversalVerifier;
use std::path::Path;
pub struct EuclidV2Verifier {
pub struct Verifier {
verifier: UniversalVerifier,
fork: ForkName,
}
impl EuclidV2Verifier {
impl Verifier {
pub fn new(assets_dir: &str, fork: ForkName) -> Self {
let verifier_bin = Path::new(assets_dir).join("verifier.bin");
let config = Path::new(assets_dir).join("root-verifier-vm-config");
let exe = Path::new(assets_dir).join("root-verifier-committed-exe");
Self {
verifier: UniversalVerifier::setup(&config, &exe, &verifier_bin)
.expect("Setting up chunk verifier"),
verifier: UniversalVerifier::setup(&verifier_bin).expect("Setting up chunk verifier"),
fork,
}
}
}
impl ProofVerifier for EuclidV2Verifier {
impl ProofVerifier for Verifier {
fn verify(&self, task_type: super::TaskType, proof: &[u8]) -> Result<bool> {
panic_catch(|| match task_type {
TaskType::Chunk => {
let proof = serde_json::from_slice::<ChunkProof>(proof).unwrap();
if !proof.pi_hash_check(self.fork) {
return false;
}
self.verifier
.verify_proof(proof.as_root_proof(), &proof.vk)
.unwrap()
assert!(proof.pi_hash_check(self.fork));
UniversalVerifier::verify_stark_proof(proof.as_root_proof(), &proof.vk).unwrap()
}
TaskType::Batch => {
let proof = serde_json::from_slice::<BatchProof>(proof).unwrap();
if !proof.pi_hash_check(self.fork) {
return false;
}
self.verifier
.verify_proof(proof.as_root_proof(), &proof.vk)
.unwrap()
assert!(proof.pi_hash_check(self.fork));
UniversalVerifier::verify_stark_proof(proof.as_root_proof(), &proof.vk).unwrap()
}
TaskType::Bundle => {
let proof = serde_json::from_slice::<BundleProof>(proof).unwrap();
if !proof.pi_hash_check(self.fork) {
return false;
}
assert!(proof.pi_hash_check(self.fork));
let vk = proof.vk.clone();
let evm_proof = proof.into_evm_proof();
self.verifier.verify_proof_evm(&evm_proof, &vk).unwrap()
self.verifier.verify_evm_proof(&evm_proof, &vk).unwrap()
}
})
.map(|_| true)
.map_err(|err_str: String| eyre::eyre!("{err_str}"))
}

View File

@@ -7,8 +7,8 @@ edition.workspace = true
[dependencies]
scroll-zkvm-types.workspace = true
scroll-zkvm-prover-euclid.workspace = true
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", branch = "refactor/scroll" }
scroll-zkvm-prover.workspace = true
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "4c36ab2" }
serde.workspace = true
serde_json.workspace = true
once_cell.workspace =true
@@ -17,8 +17,9 @@ tiny-keccak = { workspace = true, features = ["sha3", "keccak"] }
eyre.workspace = true
futures = "0.3.30"
futures-util = "0.3"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest = { version = "0.12.4", features = ["gzip", "stream"] }
reqwest-middleware = "0.3"
reqwest-retry = "0.5"
hex = "0.4.3"
@@ -30,5 +31,5 @@ sled = "0.34.7"
http = "1.1.0"
clap = { version = "4.5", features = ["derive"] }
ctor = "0.2.8"
url = "2.5.4"
url = { version = "2.5.4", features = ["serde",] }
serde_bytes = "0.11.15"

View File

@@ -0,0 +1,7 @@
{
"feynman": {
"b68fdc3f28a5ce006280980df70cd3447e56913e5bca6054603ba85f0794c23a6618ea25a7991845bbc5fd571670ee47379ba31ace92d345bca59702a0d4112d": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/0.5.2/chunk/",
"9a3f66370f11e3303f1a1248921025104e83253efea43a70d221cf4e15fc145bf2be2f4468d1ac4a70e7682babb1c60417e21c7633d4b55b58f44703ec82b05a": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/0.5.2/batch/",
"1f8627277e1c1f6e1cc70c03e6fde06929e5ea27ca5b1d56e23b235dfeda282e22c0e5294bcb1b3a9def836f8d0f18612a9860629b9497292976ca11844b7e73": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/0.5.2/bundle/"
}
}

View File

@@ -34,11 +34,6 @@ struct Args {
#[derive(Subcommand, Debug)]
enum Commands {
/// Dump vk of this prover
Dump {
/// path to save the verifier's asset
asset_path: String,
},
Handle {
/// path to save the verifier's asset
task_path: String,
@@ -64,16 +59,10 @@ async fn main() -> eyre::Result<()> {
}
let cfg = LocalProverConfig::from_file(args.config_file)?;
let default_fork_name = cfg.circuits.keys().next().unwrap().clone();
let sdk_config = cfg.sdk_config.clone();
let local_prover = LocalProver::new(cfg.clone());
match args.command {
Some(Commands::Dump { asset_path }) => {
let fork_name = args.fork_name.unwrap_or(default_fork_name);
println!("dump assets for {fork_name} into {asset_path}");
local_prover.dump_verifier_assets(&fork_name, asset_path.as_ref())?;
}
Some(Commands::Handle { task_path }) => {
let file = File::open(Path::new(&task_path))?;
let reader = BufReader::new(file);

View File

@@ -1,4 +1,4 @@
use crate::zk_circuits_handler::{euclidV2::EuclidV2Handler, CircuitsHandler};
use crate::zk_circuits_handler::{universal::UniversalHandler, CircuitsHandler};
use async_trait::async_trait;
use eyre::Result;
use scroll_proving_sdk::{
@@ -16,12 +16,111 @@ use serde::{Deserialize, Serialize};
use std::{
collections::HashMap,
fs::File,
path::Path,
sync::{Arc, OnceLock},
path::{Path, PathBuf},
sync::{Arc, LazyLock},
time::{SystemTime, UNIX_EPOCH},
};
use tokio::{runtime::Handle, sync::Mutex, task::JoinHandle};
#[derive(Clone, Serialize, Deserialize)]
pub struct AssetsLocationData {
/// the base url to form a general downloading url for an asset, MUST HAVE A TRAILING SLASH
pub base_url: url::Url,
#[serde(default)]
/// a altered url for specififed vk
pub asset_detours: HashMap<String, url::Url>,
}
impl AssetsLocationData {
pub fn gen_asset_url(&self, vk_as_path: &str, proof_type: ProofType) -> Result<url::Url> {
Ok(self.base_url.join(
match proof_type {
ProofType::Chunk => format!("chunk/{vk_as_path}/"),
ProofType::Batch => format!("batch/{vk_as_path}/"),
ProofType::Bundle => format!("bundle/{vk_as_path}/"),
t => eyre::bail!("unrecognized proof type: {}", t as u8),
}
.as_str(),
)?)
}
pub fn validate(&self) -> Result<()> {
if !self.base_url.path().ends_with('/') {
eyre::bail!(
"base_url must have a trailing slash, got: {}",
self.base_url
);
}
Ok(())
}
pub async fn get_asset(
&self,
vk: &str,
url_base: &url::Url,
base_path: impl AsRef<Path>,
) -> Result<PathBuf> {
let download_files = ["app.vmexe", "openvm.toml"];
// Step 1: Create a local path for storage
let storage_path = base_path.as_ref().join(vk);
std::fs::create_dir_all(&storage_path)?;
// Step 2 & 3: Download each file if needed
let client = reqwest::Client::new();
for filename in download_files.iter() {
let local_file_path = storage_path.join(filename);
let download_url = url_base.join(filename)?;
// Check if file already exists
if local_file_path.exists() {
// Get file metadata to check size
if let Ok(metadata) = std::fs::metadata(&local_file_path) {
// Make a HEAD request to get remote file size
if let Ok(head_resp) = client.head(download_url.clone()).send().await {
if let Some(content_length) = head_resp.headers().get("content-length") {
if let Ok(remote_size) =
content_length.to_str().unwrap_or("0").parse::<u64>()
{
// If sizes match, skip download
if metadata.len() == remote_size {
println!("File {} already exists with matching size, skipping download", filename);
continue;
}
}
}
}
}
}
println!("Downloading {} from {}", filename, download_url);
let response = client.get(download_url).send().await?;
if !response.status().is_success() {
eyre::bail!(
"Failed to download {}: HTTP status {}",
filename,
response.status()
);
}
// Stream the content directly to file instead of loading into memory
let mut file = std::fs::File::create(&local_file_path)?;
let mut stream = response.bytes_stream();
use futures_util::StreamExt;
while let Some(chunk) = stream.next().await {
std::io::Write::write_all(&mut file, &chunk?)?;
}
}
// Step 4: Return the storage path
Ok(storage_path)
}
}
#[derive(Clone, Serialize, Deserialize)]
pub struct LocalProverConfig {
pub sdk_config: SdkConfig,
@@ -45,7 +144,11 @@ impl LocalProverConfig {
#[derive(Clone, Serialize, Deserialize)]
pub struct CircuitConfig {
pub hard_fork_name: String,
/// The path to save assets for a specified hard fork phase
pub workspace_path: String,
#[serde(flatten)]
/// The location data for dynamic loading
pub location_data: AssetsLocationData,
/// cached vk value to save some initial cost, for debugging only
#[serde(default)]
pub vks: HashMap<ProofType, String>,
@@ -56,7 +159,7 @@ pub struct LocalProver {
next_task_id: u64,
current_task: Option<JoinHandle<Result<String>>>,
handlers: HashMap<String, OnceLock<Arc<dyn CircuitsHandler>>>,
handlers: HashMap<String, Arc<dyn CircuitsHandler>>,
}
#[async_trait]
@@ -64,24 +167,15 @@ impl ProvingService for LocalProver {
fn is_local(&self) -> bool {
true
}
async fn get_vks(&self, req: GetVkRequest) -> GetVkResponse {
let mut vks = vec![];
for (hard_fork_name, cfg) in self.config.circuits.iter() {
for proof_type in &req.proof_types {
if let Some(vk) = cfg.vks.get(proof_type) {
vks.push(vk.clone())
} else {
let handler = self.get_or_init_handler(hard_fork_name);
vks.push(handler.get_vk(*proof_type).await);
}
}
async fn get_vks(&self, _: GetVkRequest) -> GetVkResponse {
// get vk has been deprecated in new prover with dynamic asset loading scheme
GetVkResponse {
vks: vec![],
error: None,
}
GetVkResponse { vks, error: None }
}
async fn prove(&mut self, req: ProveRequest) -> ProveResponse {
let handler = self.get_or_init_handler(&req.hard_fork_name);
match self.do_prove(req, handler).await {
match self.do_prove(req).await {
Ok(resp) => resp,
Err(e) => ProveResponse {
status: TaskStatus::Failed,
@@ -132,34 +226,91 @@ impl ProvingService for LocalProver {
}
}
static GLOBAL_ASSET_URLS: LazyLock<HashMap<String, HashMap<String, url::Url>>> =
LazyLock::new(|| {
const ASSETS_JSON: &str = include_str!("../assets_url_preset.json");
serde_json::from_str(ASSETS_JSON).expect("Failed to parse assets_url_preset.json")
});
impl LocalProver {
pub fn new(config: LocalProverConfig) -> Self {
let handlers = config
.circuits
.keys()
.map(|k| (k.clone(), OnceLock::new()))
.collect();
pub fn new(mut config: LocalProverConfig) -> Self {
for (fork_name, circuit_config) in config.circuits.iter_mut() {
// validate each base url
circuit_config.location_data.validate().unwrap();
let mut template_url_mapping = GLOBAL_ASSET_URLS
.get(&fork_name.to_lowercase())
.cloned()
.unwrap_or_default();
// apply default settings in template
for (key, url) in circuit_config.location_data.asset_detours.drain() {
template_url_mapping.insert(key, url);
}
circuit_config.location_data.asset_detours = template_url_mapping;
// validate each detours url
for url in circuit_config.location_data.asset_detours.values() {
assert!(
url.path().ends_with('/'),
"url {} must be end with /",
url.as_str()
);
}
}
Self {
config,
next_task_id: 0,
current_task: None,
handlers,
handlers: HashMap::new(),
}
}
async fn do_prove(
&mut self,
req: ProveRequest,
handler: Arc<dyn CircuitsHandler>,
) -> Result<ProveResponse> {
async fn do_prove(&mut self, req: ProveRequest) -> Result<ProveResponse> {
self.next_task_id += 1;
let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;
let req_clone = req.clone();
let prover_task = UniversalHandler::get_task_from_input(&req.input)?;
let vk = hex::encode(&prover_task.vk);
let handler = if let Some(handler) = self.handlers.get(&vk) {
handler.clone()
} else {
let base_config = self
.config
.circuits
.get(&req.hard_fork_name)
.ok_or_else(|| {
eyre::eyre!(
"coordinator sent unexpected forkname {}",
req.hard_fork_name
)
})?;
let url_base = if let Some(url) = base_config.location_data.asset_detours.get(&vk) {
url.clone()
} else {
base_config
.location_data
.gen_asset_url(&vk, req.proof_type)?
};
let asset_path = base_config
.location_data
.get_asset(&vk, &url_base, &base_config.workspace_path)
.await?;
let circuits_handler = Arc::new(Mutex::new(UniversalHandler::new(
&asset_path,
req.proof_type,
)?));
self.handlers.insert(vk, circuits_handler.clone());
circuits_handler
};
let handle = Handle::current();
let task_handle =
tokio::task::spawn_blocking(move || handle.block_on(handler.get_proof_data(req_clone)));
let is_evm = req.proof_type == ProofType::Bundle;
let task_handle = tokio::task::spawn_blocking(move || {
handle.block_on(handler.get_proof_data(&prover_task, is_evm))
});
self.current_task = Some(task_handle);
Ok(ProveResponse {
@@ -173,77 +324,4 @@ impl LocalProver {
..Default::default()
})
}
fn get_or_init_handler(&self, hard_fork_name: &str) -> Arc<dyn CircuitsHandler> {
let lk = self
.handlers
.get(hard_fork_name)
.expect("coordinator should never sent unexpected forkname");
lk.get_or_init(|| self.new_handler(hard_fork_name)).clone()
}
pub fn new_handler(&self, hard_fork_name: &str) -> Arc<dyn CircuitsHandler> {
// if we got assigned a task for an unknown hard fork, there is something wrong in the
// coordinator
let config = self.config.circuits.get(hard_fork_name).unwrap();
match hard_fork_name {
// The new EuclidV2Handler is a universal handler
// We can add other handler implements if needed
"some future forkname" => unreachable!(),
_ => Arc::new(Arc::new(Mutex::new(EuclidV2Handler::new(config))))
as Arc<dyn CircuitsHandler>,
}
}
pub fn dump_verifier_assets(&self, hard_fork_name: &str, out_path: &Path) -> Result<()> {
let config = self
.config
.circuits
.get(hard_fork_name)
.ok_or_else(|| eyre::eyre!("no corresponding config for fork {hard_fork_name}"))?;
if !config.vks.is_empty() {
eyre::bail!("clean vks cache first or we will have wrong dumped vk");
}
let workspace_path = &config.workspace_path;
let universal_prover = EuclidV2Handler::new(config);
let _ = universal_prover
.get_prover()
.dump_universal_verifier(Some(out_path))?;
#[derive(Debug, serde::Serialize)]
struct VKDump {
pub chunk_vk: String,
pub batch_vk: String,
pub bundle_vk: String,
}
let dump = VKDump {
chunk_vk: universal_prover.get_vk_and_cache(ProofType::Chunk),
batch_vk: universal_prover.get_vk_and_cache(ProofType::Batch),
bundle_vk: universal_prover.get_vk_and_cache(ProofType::Bundle),
};
let f = File::create(out_path.join("openVmVk.json"))?;
serde_json::to_writer(f, &dump)?;
// Copy verifier.bin from workspace bundle directory to output path
let bundle_verifier_path = Path::new(workspace_path)
.join("bundle")
.join("verifier.bin");
if bundle_verifier_path.exists() {
let dest_path = out_path.join("verifier.bin");
std::fs::copy(&bundle_verifier_path, &dest_path)
.map_err(|e| eyre::eyre!("Failed to copy verifier.bin: {}", e))?;
} else {
eprintln!(
"Warning: verifier.bin not found at {:?}",
bundle_verifier_path
);
}
Ok(())
}
}

View File

@@ -1,65 +1,13 @@
//pub mod euclid;
#[allow(non_snake_case)]
pub mod euclidV2;
pub mod universal;
use async_trait::async_trait;
use eyre::Result;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
use scroll_zkvm_prover_euclid::ProverConfig;
use std::path::Path;
use scroll_zkvm_types::ProvingTask;
#[async_trait]
pub trait CircuitsHandler: Sync + Send {
async fn get_vk(&self, task_type: ProofType) -> String;
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String>;
}
#[derive(Clone, Copy)]
pub(crate) enum Phase {
EuclidV2,
}
impl Phase {
pub fn phase_spec_chunk(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_exe = workspace_path.join("chunk/app.vmexe");
let path_app_config = workspace_path.join("chunk/openvm.toml");
let segment_len = Some((1 << 22) - 100);
ProverConfig {
dir_cache,
path_app_config,
path_app_exe,
segment_len,
..Default::default()
}
}
pub fn phase_spec_batch(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_exe = workspace_path.join("batch/app.vmexe");
let path_app_config = workspace_path.join("batch/openvm.toml");
let segment_len = Some((1 << 22) - 100);
ProverConfig {
dir_cache,
path_app_config,
path_app_exe,
segment_len,
..Default::default()
}
}
pub fn phase_spec_bundle(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_config = workspace_path.join("bundle/openvm.toml");
let segment_len = Some((1 << 22) - 100);
ProverConfig {
dir_cache,
path_app_config,
segment_len,
path_app_exe: workspace_path.join("bundle/app.vmexe"),
..Default::default()
}
}
async fn get_proof_data(&self, u_task: &ProvingTask, need_snark: bool) -> Result<String>;
}

View File

@@ -1,144 +0,0 @@
use std::{path::Path, sync::Arc};
use super::CircuitsHandler;
use anyhow::{anyhow, Result};
use async_trait::async_trait;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
use scroll_zkvm_prover_euclid::{
task::{batch::BatchProvingTask, bundle::BundleProvingTask, chunk::ChunkProvingTask},
BatchProver, BundleProverEuclidV1, ChunkProver, ProverConfig,
};
use tokio::sync::Mutex;
pub struct EuclidHandler {
chunk_prover: ChunkProver,
batch_prover: BatchProver,
bundle_prover: BundleProverEuclidV1,
}
#[derive(Clone, Copy)]
pub(crate) enum Phase {
EuclidV1,
EuclidV2,
}
impl Phase {
pub fn as_str(&self) -> &str {
match self {
Phase::EuclidV1 => "euclidv1",
Phase::EuclidV2 => "euclidv2",
}
}
pub fn phase_spec_chunk(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_exe = workspace_path.join("chunk/app.vmexe");
let path_app_config = workspace_path.join("chunk/openvm.toml");
let segment_len = Some((1 << 22) - 100);
ProverConfig {
dir_cache,
path_app_config,
path_app_exe,
segment_len,
..Default::default()
}
}
pub fn phase_spec_batch(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_exe = workspace_path.join("batch/app.vmexe");
let path_app_config = workspace_path.join("batch/openvm.toml");
let segment_len = Some((1 << 22) - 100);
ProverConfig {
dir_cache,
path_app_config,
path_app_exe,
segment_len,
..Default::default()
}
}
pub fn phase_spec_bundle(&self, workspace_path: &Path) -> ProverConfig {
let dir_cache = Some(workspace_path.join("cache"));
let path_app_config = workspace_path.join("bundle/openvm.toml");
let segment_len = Some((1 << 22) - 100);
match self {
Phase::EuclidV1 => ProverConfig {
dir_cache,
path_app_config,
segment_len,
path_app_exe: workspace_path.join("bundle/app_euclidv1.vmexe"),
..Default::default()
},
Phase::EuclidV2 => ProverConfig {
dir_cache,
path_app_config,
segment_len,
path_app_exe: workspace_path.join("bundle/app.vmexe"),
..Default::default()
},
}
}
}
unsafe impl Send for EuclidHandler {}
impl EuclidHandler {
pub fn new(workspace_path: &str) -> Self {
let p = Phase::EuclidV1;
let workspace_path = Path::new(workspace_path);
let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
.expect("Failed to setup chunk prover");
let batch_prover = BatchProver::setup(p.phase_spec_batch(workspace_path))
.expect("Failed to setup batch prover");
let bundle_prover = BundleProverEuclidV1::setup(p.phase_spec_bundle(workspace_path))
.expect("Failed to setup bundle prover");
Self {
chunk_prover,
batch_prover,
bundle_prover,
}
}
}
#[async_trait]
impl CircuitsHandler for Arc<Mutex<EuclidHandler>> {
async fn get_vk(&self, task_type: ProofType) -> Option<Vec<u8>> {
Some(match task_type {
ProofType::Chunk => self.try_lock().unwrap().chunk_prover.get_app_vk(),
ProofType::Batch => self.try_lock().unwrap().batch_prover.get_app_vk(),
ProofType::Bundle => self.try_lock().unwrap().bundle_prover.get_app_vk(),
_ => unreachable!("Unsupported proof type"),
})
}
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
match prove_request.proof_type {
ProofType::Chunk => {
let task: ChunkProvingTask = serde_json::from_str(&prove_request.input)?;
let proof = self.try_lock().unwrap().chunk_prover.gen_proof(&task)?;
Ok(serde_json::to_string(&proof)?)
}
ProofType::Batch => {
let task: BatchProvingTask = serde_json::from_str(&prove_request.input)?;
let proof = self.try_lock().unwrap().batch_prover.gen_proof(&task)?;
Ok(serde_json::to_string(&proof)?)
}
ProofType::Bundle => {
let batch_proofs: BundleProvingTask = serde_json::from_str(&prove_request.input)?;
let proof = self
.try_lock()
.unwrap()
.bundle_prover
.gen_proof_evm(&batch_proofs)?;
Ok(serde_json::to_string(&proof)?)
}
_ => Err(anyhow!("Unsupported proof type")),
}
}
}

View File

@@ -1,119 +0,0 @@
use std::{
collections::HashMap,
path::Path,
sync::{Arc, OnceLock},
};
use super::{CircuitsHandler, Phase};
use crate::prover::CircuitConfig;
use async_trait::async_trait;
use base64::{prelude::BASE64_STANDARD, Engine};
use eyre::Result;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, ProofType};
use scroll_zkvm_prover_euclid::{BatchProver, BundleProverEuclidV2, ChunkProver};
use scroll_zkvm_types::ProvingTask;
use tokio::sync::Mutex;
pub struct EuclidV2Handler {
chunk_prover: ChunkProver,
batch_prover: BatchProver,
bundle_prover: BundleProverEuclidV2,
cached_vks: HashMap<ProofType, OnceLock<String>>,
}
unsafe impl Send for EuclidV2Handler {}
impl EuclidV2Handler {
pub fn new(cfg: &CircuitConfig) -> Self {
let workspace_path = &cfg.workspace_path;
let p = Phase::EuclidV2;
let workspace_path = Path::new(workspace_path);
let chunk_prover = ChunkProver::setup(p.phase_spec_chunk(workspace_path))
.expect("Failed to setup chunk prover");
let batch_prover = BatchProver::setup(p.phase_spec_batch(workspace_path))
.expect("Failed to setup batch prover");
let bundle_prover = BundleProverEuclidV2::setup(p.phase_spec_bundle(workspace_path))
.expect("Failed to setup bundle prover");
let build_vk_cache = |proof_type: ProofType| {
let vk = if let Some(vk) = cfg.vks.get(&proof_type) {
OnceLock::from(vk.clone())
} else {
OnceLock::new()
};
(proof_type, vk)
};
Self {
chunk_prover,
batch_prover,
bundle_prover,
cached_vks: HashMap::from([
build_vk_cache(ProofType::Chunk),
build_vk_cache(ProofType::Batch),
build_vk_cache(ProofType::Bundle),
]),
}
}
/// get_prover get the inner prover, later we would replace chunk/batch/bundle_prover with
/// universal prover, before that, use bundle_prover as the represent one
pub fn get_prover(&self) -> &BundleProverEuclidV2 {
&self.bundle_prover
}
pub fn get_vk_and_cache(&self, task_type: ProofType) -> String {
match task_type {
ProofType::Chunk => self.cached_vks[&ProofType::Chunk]
.get_or_init(|| BASE64_STANDARD.encode(self.chunk_prover.get_app_vk())),
ProofType::Batch => self.cached_vks[&ProofType::Batch]
.get_or_init(|| BASE64_STANDARD.encode(self.batch_prover.get_app_vk())),
ProofType::Bundle => self.cached_vks[&ProofType::Bundle]
.get_or_init(|| BASE64_STANDARD.encode(self.bundle_prover.get_app_vk())),
_ => unreachable!("Unsupported proof type {:?}", task_type),
}
.clone()
}
}
#[async_trait]
impl CircuitsHandler for Arc<Mutex<EuclidV2Handler>> {
    /// Returns the (cached) base64 vk for `task_type`; takes the handler lock.
    async fn get_vk(&self, task_type: ProofType) -> String {
        self.lock().await.get_vk_and_cache(task_type)
    }
    /// Deserializes the proving task from `prove_request.input`, verifies that
    /// the task's vk matches this handler's vk for the requested proof type,
    /// then generates and returns the proof as a JSON string.
    ///
    /// The handler lock is held for the full proof generation.
    async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
        let handler_self = self.lock().await;
        let u_task: ProvingTask = serde_json::from_str(&prove_request.input)?;
        // Reject tasks built against a different circuit version: the task's
        // embedded vk must equal the vk this prover was set up with.
        let expected_vk = handler_self.get_vk_and_cache(prove_request.proof_type);
        if BASE64_STANDARD.encode(&u_task.vk) != expected_vk {
            eyre::bail!(
                "vk is not match!, prove type {:?}, expected {}, get {}",
                prove_request.proof_type,
                expected_vk,
                BASE64_STANDARD.encode(&u_task.vk),
            );
        }
        // Only the bundle proof is generated as an EVM-verifiable (snark) proof.
        let proof = match prove_request.proof_type {
            ProofType::Chunk => handler_self
                .chunk_prover
                .gen_proof_universal(&u_task, false)?,
            ProofType::Batch => handler_self
                .batch_prover
                .gen_proof_universal(&u_task, false)?,
            ProofType::Bundle => handler_self
                .bundle_prover
                .gen_proof_universal(&u_task, true)?,
            _ => {
                return Err(eyre::eyre!(
                    "Unsupported proof type {:?}",
                    prove_request.proof_type
                ))
            }
        };
        //TODO: check expected PI
        Ok(serde_json::to_string(&proof)?)
    }
}

View File

@@ -0,0 +1,63 @@
use std::path::Path;
use super::CircuitsHandler;
use async_trait::async_trait;
use base64::{prelude::BASE64_STANDARD, Engine};
use eyre::Result;
use scroll_proving_sdk::prover::ProofType;
use scroll_zkvm_prover::{Prover, ProverConfig};
use scroll_zkvm_types::ProvingTask;
use tokio::sync::Mutex;
/// Handler wrapping a single universal prover that loads its assets (app exe
/// and OpenVM config) dynamically from a workspace directory.
pub struct UniversalHandler {
    prover: Prover,
}
// SAFETY: NOTE(review) — asserts `Prover` contains no thread-affine state;
// this is not verifiable from this file, confirm in the zkvm-prover crate.
unsafe impl Send for UniversalHandler {}
impl UniversalHandler {
    /// Builds a prover from `app.vmexe` and `openvm.toml` found under
    /// `workspace_path`. The EVM (snark) backend is only initialized for
    /// bundle proofs, since only bundles are verified on-chain here.
    pub fn new(workspace_path: impl AsRef<Path>, proof_type: ProofType) -> Result<Self> {
        let path_app_exe = workspace_path.as_ref().join("app.vmexe");
        let path_app_config = workspace_path.as_ref().join("openvm.toml");
        // Segment length just below 2^22; the 100-cycle margin's purpose is
        // not evident here — presumably headroom for segment overhead. TODO confirm.
        let segment_len = Some((1 << 22) - 100);
        let config = ProverConfig {
            path_app_config,
            path_app_exe,
            segment_len,
        };
        let use_evm = proof_type == ProofType::Bundle;
        let prover = Prover::setup(config, use_evm, None)?;
        Ok(Self { prover })
    }
    /// get_prover get the inner prover, later we would replace chunk/batch/bundle_prover with
    /// universal prover, before that, use bundle_prover as the represent one
    pub fn get_prover(&self) -> &Prover {
        &self.prover
    }
    /// Deserializes a `ProvingTask` from its JSON string form.
    pub fn get_task_from_input(input: &str) -> Result<ProvingTask> {
        Ok(serde_json::from_str(input)?)
    }
}
#[async_trait]
impl CircuitsHandler for Mutex<UniversalHandler> {
    /// Generates a proof for `u_task` and returns it serialized as JSON.
    ///
    /// `need_snark` requests an EVM-verifiable proof; this fails early if the
    /// prover was set up without the EVM backend (see `UniversalHandler::new`).
    /// The handler lock is held for the full proof generation.
    async fn get_proof_data(&self, u_task: &ProvingTask, need_snark: bool) -> Result<String> {
        let handler_self = self.lock().await;
        if need_snark && handler_self.prover.evm_prover.is_none() {
            eyre::bail!(
                "do not init prover for evm (vk: {})",
                BASE64_STANDARD.encode(handler_self.get_prover().get_app_vk())
            )
        }
        let proof = handler_self
            .get_prover()
            .gen_proof_universal(u_task, need_snark)?;
        Ok(serde_json::to_string(&proof)?)
    }
}

View File

@@ -1408,13 +1408,12 @@ github.com/scroll-tech/da-codec v0.1.3-0.20250609113414-f33adf0904bd h1:NUol+dPt
github.com/scroll-tech/da-codec v0.1.3-0.20250609113414-f33adf0904bd/go.mod h1:gz5x3CsLy5htNTbv4PWRPBU9nSAujfx1U2XtFcXoFuk=
github.com/scroll-tech/da-codec v0.1.3-0.20250609154559-8935de62c148 h1:cyK1ifU2fRoMl8YWR9LOsZK4RvJnlG3RODgakj5I8VY=
github.com/scroll-tech/da-codec v0.1.3-0.20250609154559-8935de62c148/go.mod h1:gz5x3CsLy5htNTbv4PWRPBU9nSAujfx1U2XtFcXoFuk=
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e/go.mod h1:swB5NSp8pKNDuYsTxfR08bHS6L56i119PBx8fxvV8Cs=
github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22/go.mod h1:r9FwtxCtybMkTbWYCyBuevT9TW3zHmOTHqD082Uh+Oo=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250225152658-bcfdb48dd939/go.mod h1:AgU8JJxC7+nfs7R7ma35AU7dMAGW7wCw3dRZRefIKyQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250729113104-bd8f141bb3e9 h1:u371VK8eOU2Z/0SVf5KDI3eJc8msHSpJbav4do/8n38=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250729113104-bd8f141bb3e9/go.mod h1:pDCZ4iGvEGmdIe4aSAGBrb7XSrKEML6/L/wEMmNxOdk=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 h1:nn5Wsu0esKSJiIVhscUtVbo7ada43DJhG55ua/hjS5I=
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=

View File

@@ -15,7 +15,7 @@ require (
github.com/holiman/uint256 v1.3.2
github.com/mitchellh/mapstructure v1.5.0
github.com/prometheus/client_golang v1.16.0
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7
github.com/smartystreets/goconvey v1.8.0
github.com/spf13/viper v1.19.0

View File

@@ -285,8 +285,8 @@ github.com/sagikazarmark/locafero v0.4.0 h1:HApY1R9zGo4DBgr7dqsTH/JJxLTTsOt7u6ke
github.com/sagikazarmark/locafero v0.4.0/go.mod h1:Pe1W6UlPYUk/+wc/6KFhbORCfqzgYEpgQ3O5fPuL3H4=
github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6gto+ugjYE=
github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ=
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6 h1:vb2XLvQwCf+F/ifP6P/lfeiQrHY6+Yb/E3R4KHXLqSE=
github.com/scroll-tech/da-codec v0.1.3-0.20250626091118-58b899494da6/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7 h1:1rN1qocsQlOyk1VCpIEF1J5pfQbLAi1pnMZSLQS37jQ=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250626110859-cc9a1dd82de7/go.mod h1:pDCZ4iGvEGmdIe4aSAGBrb7XSrKEML6/L/wEMmNxOdk=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=

View File

@@ -290,6 +290,12 @@ func (r *Layer2Relayer) commitGenesisBatch(batchHash string, batchHeader []byte,
log.Info("Validium importGenesis", "calldata", common.Bytes2Hex(calldata))
} else {
// rollup mode: pass batchHeader and stateRoot
// Check state root is not zero
if stateRoot == (common.Hash{}) {
return fmt.Errorf("state root is zero")
}
calldata, packErr = r.l1RollupABI.Pack("importGenesisBatch", batchHeader, stateRoot)
if packErr != nil {
return fmt.Errorf("failed to pack rollup importGenesisBatch with batch header: %v and state root: %v. error: %v", common.Bytes2Hex(batchHeader), stateRoot, packErr)
@@ -388,8 +394,6 @@ func (r *Layer2Relayer) ProcessPendingBatches() {
// return if not hitting target price
if skip {
log.Debug("Skipping batch submission", "first batch index", dbBatches[0].Index, "backlog count", backlogCount, "reason", err)
log.Debug("first batch index", dbBatches[0].Index)
log.Debug("backlog count", backlogCount)
return
}
if err != nil {
@@ -502,6 +506,11 @@ func (r *Layer2Relayer) ProcessPendingBatches() {
log.Error("failed to construct normal payload", "codecVersion", codecVersion, "start index", firstBatch.Index, "end index", lastBatch.Index, "err", err)
return
}
if err = r.sanityChecksCommitBatchCodecV7CalldataAndBlobs(calldata, blobs); err != nil {
log.Error("Sanity check failed for calldata and blobs", "codecVersion", codecVersion, "start index", firstBatch.Index, "end index", lastBatch.Index, "err", err)
return
}
}
default:
log.Error("unsupported codec version in ProcessPendingBatches", "codecVersion", codecVersion, "start index", firstBatch, "end index", lastBatch.Index)
@@ -999,6 +1008,18 @@ func (r *Layer2Relayer) constructCommitBatchPayloadCodecV7(batchesToSubmit []*db
}
func (r *Layer2Relayer) constructCommitBatchPayloadValidium(batch *dbBatchWithChunks) ([]byte, uint64, uint64, error) {
// Check state root is not zero
stateRoot := common.HexToHash(batch.Batch.StateRoot)
if stateRoot == (common.Hash{}) {
return nil, 0, 0, fmt.Errorf("batch %d state root is zero", batch.Batch.Index)
}
// Check parent batch hash is not zero
parentBatchHash := common.HexToHash(batch.Batch.ParentBatchHash)
if parentBatchHash == (common.Hash{}) {
return nil, 0, 0, fmt.Errorf("batch %d parent batch hash is zero", batch.Batch.Index)
}
// Calculate metrics
var maxBlockHeight uint64
var totalGasUsed uint64
@@ -1018,6 +1039,7 @@ func (r *Layer2Relayer) constructCommitBatchPayloadValidium(batch *dbBatchWithCh
lastChunk := batch.Chunks[len(batch.Chunks)-1]
commitment := common.HexToHash(lastChunk.EndBlockHash)
version := encoding.CodecVersion(batch.Batch.CodecVersion)
calldata, err := r.validiumABI.Pack("commitBatch", version, common.HexToHash(batch.Batch.ParentBatchHash), common.HexToHash(batch.Batch.StateRoot), common.HexToHash(batch.Batch.WithdrawRoot), commitment[:])
if err != nil {
@@ -1028,6 +1050,12 @@ func (r *Layer2Relayer) constructCommitBatchPayloadValidium(batch *dbBatchWithCh
}
func (r *Layer2Relayer) constructFinalizeBundlePayloadCodecV7(dbBatch *orm.Batch, endChunk *orm.Chunk, aggProof *message.OpenVMBundleProof) ([]byte, error) {
// Check state root is not zero
stateRoot := common.HexToHash(dbBatch.StateRoot)
if stateRoot == (common.Hash{}) {
return nil, fmt.Errorf("batch %d state root is zero", dbBatch.Index)
}
if aggProof != nil { // finalizeBundle with proof.
calldata, packErr := r.l1RollupABI.Pack(
"finalizeBundlePostEuclidV2",

View File

@@ -0,0 +1,487 @@
package relayer
import (
"fmt"
"math/big"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/go-ethereum/accounts/abi"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
"scroll-tech/rollup/internal/orm"
)
// sanityChecksCommitBatchCodecV7CalldataAndBlobs performs comprehensive validation of the constructed
// transaction data (calldata and blobs) by parsing them and comparing against database records.
// This ensures the constructed transaction data is correct and consistent with the database state.
// sanityChecksCommitBatchCodecV7CalldataAndBlobs performs comprehensive validation of the constructed
// transaction data (calldata and blobs) by parsing them and comparing against database records.
// This ensures the constructed transaction data is correct and consistent with the database state.
//
// The check proceeds in four steps: parse the calldata, load the referenced
// batch range (and its L1 messages) from the database, validate calldata+blobs
// against those records, and finally validate the records' internal consistency.
func (r *Layer2Relayer) sanityChecksCommitBatchCodecV7CalldataAndBlobs(calldata []byte, blobs []*kzg4844.Blob) error {
	if r.l1RollupABI == nil {
		return fmt.Errorf("l1RollupABI is nil: cannot parse commitBatches calldata")
	}
	// Step 1: decode version, parent batch hash and last batch hash from calldata.
	calldataInfo, err := parseCommitBatchesCalldata(r.l1RollupABI, calldata)
	if err != nil {
		return fmt.Errorf("failed to parse calldata: %w", err)
	}
	// Step 2: load the batch range (parent+1 .. last) plus per-block L1 messages.
	batchesToValidate, l1MessagesWithBlockNumbers, err := r.getBatchesFromCalldata(calldataInfo)
	if err != nil {
		return fmt.Errorf("failed to get batches from database: %w", err)
	}
	// Step 3: cross-check the on-wire data against the database records.
	if err := r.validateCalldataAndBlobsAgainstDatabase(calldataInfo, blobs, batchesToValidate, l1MessagesWithBlockNumbers); err != nil {
		return fmt.Errorf("calldata and blobs validation failed: %w", err)
	}
	// Step 4: check the database records themselves are internally consistent.
	if err := r.validateDatabaseConsistency(batchesToValidate); err != nil {
		return fmt.Errorf("database consistency validation failed: %w", err)
	}
	return nil
}
// CalldataInfo holds parsed information from commitBatches calldata.
type CalldataInfo struct {
	// Version is the codec version argument of commitBatches.
	Version uint8
	// ParentBatchHash is the hash of the batch preceding the committed range.
	ParentBatchHash common.Hash
	// LastBatchHash is the hash of the final batch in the committed range.
	LastBatchHash common.Hash
}
// parseCommitBatchesCalldata parses the commitBatches calldata and extracts key
// information: the codec version, the parent batch hash and the last batch hash.
//
// The first 4 bytes of calldata are the method selector; they are checked
// against the ABI's commitBatches method ID before the arguments are unpacked,
// so calldata for a different method (or truncated calldata) is rejected
// instead of being misinterpreted or causing a slice-bounds panic.
func parseCommitBatchesCalldata(abi *abi.ABI, calldata []byte) (*CalldataInfo, error) {
	method, ok := abi.Methods["commitBatches"]
	if !ok {
		return nil, fmt.Errorf("commitBatches method not found in ABI")
	}
	// Guard: calldata[4:] below would panic on inputs shorter than the selector.
	if len(calldata) < 4 {
		return nil, fmt.Errorf("calldata too short: got %d bytes, need at least 4 for method selector", len(calldata))
	}
	// Reject calldata that targets a different method.
	if string(calldata[:4]) != string(method.ID) {
		return nil, fmt.Errorf("method selector mismatch: got %x, want %x (commitBatches)", calldata[:4], method.ID)
	}
	decoded, err := method.Inputs.Unpack(calldata[4:])
	if err != nil {
		return nil, fmt.Errorf("failed to unpack commitBatches calldata: %w", err)
	}
	if len(decoded) != 3 {
		return nil, fmt.Errorf("unexpected number of decoded parameters: got %d, want 3", len(decoded))
	}
	version, ok := decoded[0].(uint8)
	if !ok {
		return nil, fmt.Errorf("failed to type assert version to uint8")
	}
	// Hashes are ABI-decoded as fixed-size [32]uint8 arrays.
	parentBatchHashB, ok := decoded[1].([32]uint8)
	if !ok {
		return nil, fmt.Errorf("failed to type assert parentBatchHash to [32]uint8")
	}
	parentBatchHash := common.BytesToHash(parentBatchHashB[:])
	lastBatchHashB, ok := decoded[2].([32]uint8)
	if !ok {
		return nil, fmt.Errorf("failed to type assert lastBatchHash to [32]uint8")
	}
	lastBatchHash := common.BytesToHash(lastBatchHashB[:])
	return &CalldataInfo{
		Version:         version,
		ParentBatchHash: parentBatchHash,
		LastBatchHash:   lastBatchHash,
	}, nil
}
// getBatchesFromCalldata retrieves the relevant batches from database based on calldata information.
//
// It resolves the parent and last batch hashes to indices, loads every batch
// (with its chunks) in the range (parent+1 .. last), and collects the L1
// message transactions of those batches keyed by L2 block number. It also
// enforces two invariants while scanning: within a block, all L1 messages must
// precede any L2 transaction, and each chunk's recorded L1-message count must
// match the messages actually found in its block range.
func (r *Layer2Relayer) getBatchesFromCalldata(info *CalldataInfo) ([]*dbBatchWithChunks, map[uint64][]*types.TransactionData, error) {
	// Get the parent batch to determine the starting point
	parentBatch, err := r.batchOrm.GetBatchByHash(r.ctx, info.ParentBatchHash.Hex())
	if err != nil {
		return nil, nil, fmt.Errorf("failed to get parent batch by hash %s: %w", info.ParentBatchHash.Hex(), err)
	}
	// Get the last batch to determine the ending point
	lastBatch, err := r.batchOrm.GetBatchByHash(r.ctx, info.LastBatchHash.Hex())
	if err != nil {
		return nil, nil, fmt.Errorf("failed to get last batch by hash %s: %w", info.LastBatchHash.Hex(), err)
	}
	// Get all batches in the range (parent+1 to last)
	firstBatchIndex := parentBatch.Index + 1
	lastBatchIndex := lastBatch.Index
	// Check if the range is valid
	if firstBatchIndex > lastBatchIndex {
		return nil, nil, fmt.Errorf("no batches found in range: first index %d, last index %d", firstBatchIndex, lastBatchIndex)
	}
	var batchesToValidate []*dbBatchWithChunks
	l1MessagesWithBlockNumbers := make(map[uint64][]*types.TransactionData)
	for batchIndex := firstBatchIndex; batchIndex <= lastBatchIndex; batchIndex++ {
		dbBatch, err := r.batchOrm.GetBatchByIndex(r.ctx, batchIndex)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to get batch by index %d: %w", batchIndex, err)
		}
		// Get chunks for this batch
		dbChunks, err := r.chunkOrm.GetChunksInRange(r.ctx, dbBatch.StartChunkIndex, dbBatch.EndChunkIndex)
		if err != nil {
			return nil, nil, fmt.Errorf("failed to get chunks for batch %d: %w", batchIndex, err)
		}
		batchesToValidate = append(batchesToValidate, &dbBatchWithChunks{
			Batch:  dbBatch,
			Chunks: dbChunks,
		})
		// If there are L1 messages in this batch, retrieve L1 messages with block numbers
		for _, chunk := range dbChunks {
			if chunk.TotalL1MessagesPoppedInChunk > 0 {
				blockWithL1Messages, err := r.l2BlockOrm.GetL2BlocksInRange(r.ctx, chunk.StartBlockNumber, chunk.EndBlockNumber)
				if err != nil {
					return nil, nil, fmt.Errorf("failed to get L2 blocks for chunk %d: %w", chunk.Index, err)
				}
				var l1MessagesCount uint64
				for _, block := range blockWithL1Messages {
					bn := block.Header.Number.Uint64()
					// Tracks whether an L2 (non-message) tx was already seen in
					// this block: L1 messages must come first within a block.
					seenL2 := false
					for _, tx := range block.Transactions {
						if tx.Type == types.L1MessageTxType {
							if seenL2 {
								// Invariant violated: found an L1 message after an L2 transaction in the same block.
								return nil, nil, fmt.Errorf("L1 message after L2 transaction in block %d", bn)
							}
							l1MessagesWithBlockNumbers[bn] = append(l1MessagesWithBlockNumbers[bn], tx)
							l1MessagesCount++
						} else {
							seenL2 = true
						}
					}
				}
				// The per-chunk message count recorded in the DB must match what we counted.
				if chunk.TotalL1MessagesPoppedInChunk != l1MessagesCount {
					return nil, nil, fmt.Errorf("chunk %d has inconsistent L1 messages count: expected %d, got %d", chunk.Index, chunk.TotalL1MessagesPoppedInChunk, l1MessagesCount)
				}
			}
		}
	}
	return batchesToValidate, l1MessagesWithBlockNumbers, nil
}
// validateDatabaseConsistency performs comprehensive validation of database records:
// per-batch field checks, batch/chunk index continuity across the whole range,
// codec-version uniformity, and L1 message queue consistency.
//
// The first batch must contain at least one chunk; this is checked before any
// indexing so a batch with an empty chunk list returns an error instead of
// panicking with an index-out-of-range.
func (r *Layer2Relayer) validateDatabaseConsistency(batchesToValidate []*dbBatchWithChunks) error {
	if len(batchesToValidate) == 0 {
		return fmt.Errorf("no batches to validate")
	}
	// Guard before indexing Chunks[0]: an empty chunk list would otherwise panic.
	if len(batchesToValidate[0].Chunks) == 0 {
		return fmt.Errorf("batch %d has no chunks", batchesToValidate[0].Batch.Index)
	}
	// Get previous chunk for continuity check
	firstChunk := batchesToValidate[0].Chunks[0]
	if firstChunk.Index == 0 {
		return fmt.Errorf("genesis chunk should not be in normal batch submission flow, chunk index: %d", firstChunk.Index)
	}
	prevChunk, err := r.chunkOrm.GetChunkByIndex(r.ctx, firstChunk.Index-1)
	if err != nil {
		return fmt.Errorf("failed to get previous chunk %d for continuity check: %w", firstChunk.Index-1, err)
	}
	firstBatchCodecVersion := batchesToValidate[0].Batch.CodecVersion
	for i, batch := range batchesToValidate {
		// Validate codec version consistency
		if batch.Batch.CodecVersion != firstBatchCodecVersion {
			return fmt.Errorf("batch %d has different codec version %d, expected %d", batch.Batch.Index, batch.Batch.CodecVersion, firstBatchCodecVersion)
		}
		// Validate individual batch (also rejects batches with no chunks).
		if err := r.validateSingleBatchConsistency(batch, i, batchesToValidate); err != nil {
			return err
		}
		// Validate chunks in this batch
		if err := r.validateBatchChunksConsistency(batch, prevChunk); err != nil {
			return err
		}
		// Update prevChunk to the last chunk of this batch for next iteration
		if len(batch.Chunks) == 0 {
			return fmt.Errorf("batch %d has no chunks", batch.Batch.Index)
		}
		prevChunk = batch.Chunks[len(batch.Chunks)-1]
	}
	return nil
}
// validateSingleBatchConsistency validates a single batch's consistency:
// non-zero hash/parent-hash/state-root fields, index continuity with the
// previous batch (or, for the first batch in the range, with its parent batch
// loaded from the database), and L1 message queue hash consistency.
//
// i is the batch's position within allBatches (0 means first in the range).
func (r *Layer2Relayer) validateSingleBatchConsistency(batch *dbBatchWithChunks, i int, allBatches []*dbBatchWithChunks) error {
	if batch == nil || batch.Batch == nil {
		return fmt.Errorf("batch %d is nil", i)
	}
	if len(batch.Chunks) == 0 {
		return fmt.Errorf("batch %d has no chunks", batch.Batch.Index)
	}
	// Validate essential batch fields
	batchHash := common.HexToHash(batch.Batch.Hash)
	if batchHash == (common.Hash{}) {
		return fmt.Errorf("batch %d hash is zero", batch.Batch.Index)
	}
	if batch.Batch.Index == 0 {
		return fmt.Errorf("batch %d has zero index (only genesis batch should have index 0)", i)
	}
	parentBatchHash := common.HexToHash(batch.Batch.ParentBatchHash)
	if parentBatchHash == (common.Hash{}) {
		return fmt.Errorf("batch %d parent batch hash is zero", batch.Batch.Index)
	}
	stateRoot := common.HexToHash(batch.Batch.StateRoot)
	if stateRoot == (common.Hash{}) {
		return fmt.Errorf("batch %d state root is zero", batch.Batch.Index)
	}
	// Check batch index continuity
	if i > 0 {
		// Middle of the range: compare against the in-memory previous batch.
		prevBatch := allBatches[i-1]
		if batch.Batch.Index != prevBatch.Batch.Index+1 {
			return fmt.Errorf("batch index is not sequential: prev batch index %d, current batch index %d", prevBatch.Batch.Index, batch.Batch.Index)
		}
		if parentBatchHash != common.HexToHash(prevBatch.Batch.Hash) {
			return fmt.Errorf("parent batch hash does not match previous batch hash: expected %s, got %s", prevBatch.Batch.Hash, batch.Batch.ParentBatchHash)
		}
	} else {
		// For the first batch, verify continuity with parent batch from database
		parentBatch, err := r.batchOrm.GetBatchByHash(r.ctx, batch.Batch.ParentBatchHash)
		if err != nil {
			return fmt.Errorf("failed to get parent batch %s for batch %d: %w", batch.Batch.ParentBatchHash, batch.Batch.Index, err)
		}
		if batch.Batch.Index != parentBatch.Index+1 {
			return fmt.Errorf("first batch index is not sequential with parent: parent batch index %d, current batch index %d", parentBatch.Index, batch.Batch.Index)
		}
	}
	// Validate L1 message queue consistency
	if err := r.validateMessageQueueConsistency(batch.Batch.Index, batch.Chunks, common.HexToHash(batch.Batch.PrevL1MessageQueueHash), common.HexToHash(batch.Batch.PostL1MessageQueueHash)); err != nil {
		return err
	}
	return nil
}
// validateBatchChunksConsistency checks every chunk that belongs to a batch:
// each chunk must carry the batch's codec version, and each must be
// individually consistent with the chunk immediately preceding it (index,
// block range, and L1-message continuity). prevChunk is the chunk right
// before this batch's first chunk.
func (r *Layer2Relayer) validateBatchChunksConsistency(batch *dbBatchWithChunks, prevChunk *orm.Chunk) error {
	// First pass: codec versions of all chunks must match the batch's.
	for _, c := range batch.Chunks {
		if c.CodecVersion != batch.Batch.CodecVersion {
			return fmt.Errorf("batch %d chunk %d has different codec version %d, expected %d", batch.Batch.Index, c.Index, c.CodecVersion, batch.Batch.CodecVersion)
		}
	}
	// Second pass: validate each chunk against its predecessor, sliding the
	// predecessor forward as we go.
	prev := prevChunk
	for pos, c := range batch.Chunks {
		if err := r.validateSingleChunkConsistency(c, prev); err != nil {
			return fmt.Errorf("batch %d chunk %d: %w", batch.Batch.Index, pos, err)
		}
		prev = c
	}
	return nil
}
// validateSingleChunkConsistency validates one chunk against its immediate
// predecessor: non-zero hashes, a sane block range, sequential chunk index,
// contiguous block numbers, and a correct running L1-message count.
func (r *Layer2Relayer) validateSingleChunkConsistency(chunk *orm.Chunk, prevChunk *orm.Chunk) error {
	if chunk == nil {
		return fmt.Errorf("chunk is nil")
	}
	// The chunk hash must be set.
	if common.HexToHash(chunk.Hash) == (common.Hash{}) {
		return fmt.Errorf("chunk %d hash is zero", chunk.Index)
	}
	// Chunk indices must increase by exactly one.
	if chunk.Index != prevChunk.Index+1 {
		return fmt.Errorf("chunk index is not sequential: prev chunk index %d, current chunk index %d", prevChunk.Index, chunk.Index)
	}
	// The block range must be non-empty and well-ordered.
	switch {
	case chunk.StartBlockNumber == 0 && chunk.EndBlockNumber == 0:
		return fmt.Errorf("chunk %d has zero block range", chunk.Index)
	case chunk.StartBlockNumber > chunk.EndBlockNumber:
		return fmt.Errorf("chunk %d has invalid block range: start %d > end %d", chunk.Index, chunk.StartBlockNumber, chunk.EndBlockNumber)
	}
	// Boundary block hashes must be set.
	if common.HexToHash(chunk.StartBlockHash) == (common.Hash{}) {
		return fmt.Errorf("chunk %d start block hash is zero", chunk.Index)
	}
	if common.HexToHash(chunk.EndBlockHash) == (common.Hash{}) {
		return fmt.Errorf("chunk %d end block hash is zero", chunk.Index)
	}
	// Block numbers must continue exactly where the previous chunk ended.
	if prevChunk.EndBlockNumber+1 != chunk.StartBlockNumber {
		return fmt.Errorf("chunk is not continuous with previous chunk %d: prev end block %d, current start block %d", prevChunk.Index, prevChunk.EndBlockNumber, chunk.StartBlockNumber)
	}
	// The running count of popped L1 messages must carry over correctly.
	wantPoppedBefore := prevChunk.TotalL1MessagesPoppedBefore + prevChunk.TotalL1MessagesPoppedInChunk
	if chunk.TotalL1MessagesPoppedBefore != wantPoppedBefore {
		return fmt.Errorf("L1 messages popped before is incorrect: expected %d, got %d", wantPoppedBefore, chunk.TotalL1MessagesPoppedBefore)
	}
	return nil
}
// validateCalldataAndBlobsAgainstDatabase validates calldata and blobs against database records:
// one blob per batch, calldata version/parent-hash/last-hash must match the DB
// range boundaries, and every blob must decode to a batch whose recomputed
// hash equals the stored batch hash.
func (r *Layer2Relayer) validateCalldataAndBlobsAgainstDatabase(calldataInfo *CalldataInfo, blobs []*kzg4844.Blob, batchesToValidate []*dbBatchWithChunks, l1MessagesWithBlockNumbers map[uint64][]*types.TransactionData) error {
	// Validate blobs
	if len(blobs) == 0 {
		return fmt.Errorf("no blobs provided")
	}
	// Validate blob count: one blob per batch in codec v7.
	if len(blobs) != len(batchesToValidate) {
		return fmt.Errorf("blob count mismatch: got %d blobs, expected %d batches", len(blobs), len(batchesToValidate))
	}
	// Get first and last batches for validation, length check is already done above
	firstBatch := batchesToValidate[0].Batch
	lastBatch := batchesToValidate[len(batchesToValidate)-1].Batch
	// Validate codec version
	if calldataInfo.Version != uint8(firstBatch.CodecVersion) {
		return fmt.Errorf("version mismatch: calldata=%d, db=%d", calldataInfo.Version, firstBatch.CodecVersion)
	}
	// Validate parent batch hash
	if calldataInfo.ParentBatchHash != common.HexToHash(firstBatch.ParentBatchHash) {
		return fmt.Errorf("parentBatchHash mismatch: calldata=%s, db=%s", calldataInfo.ParentBatchHash.Hex(), firstBatch.ParentBatchHash)
	}
	// Validate last batch hash
	if calldataInfo.LastBatchHash != common.HexToHash(lastBatch.Hash) {
		return fmt.Errorf("lastBatchHash mismatch: calldata=%s, db=%s", calldataInfo.LastBatchHash.Hex(), lastBatch.Hash)
	}
	// Get codec for blob decoding
	codec, err := encoding.CodecFromVersion(encoding.CodecVersion(firstBatch.CodecVersion))
	if err != nil {
		return fmt.Errorf("failed to get codec: %w", err)
	}
	// Validate each blob against its corresponding batch
	for i, blob := range blobs {
		dbBatch := batchesToValidate[i].Batch
		if err := r.validateSingleBlobAgainstBatch(blob, dbBatch, codec, l1MessagesWithBlockNumbers); err != nil {
			return fmt.Errorf("blob validation failed for batch %d: %w", dbBatch.Index, err)
		}
	}
	return nil
}
// validateSingleBlobAgainstBatch validates a single blob against its batch data:
// the blob payload is decoded, reassembled into a DA batch (using the stored
// batch index/parent hash plus the collected L1 messages), and the resulting
// hash must equal the batch hash stored in the database.
func (r *Layer2Relayer) validateSingleBlobAgainstBatch(blob *kzg4844.Blob, dbBatch *orm.Batch, codec encoding.Codec, l1MessagesWithBlockNumbers map[uint64][]*types.TransactionData) error {
	// Decode blob payload
	payload, err := codec.DecodeBlob(blob)
	if err != nil {
		return fmt.Errorf("failed to decode blob: %w", err)
	}
	// Validate batch hash
	daBatch, err := assembleDABatchFromPayload(payload, dbBatch, codec, l1MessagesWithBlockNumbers)
	if err != nil {
		return fmt.Errorf("failed to assemble batch from payload: %w", err)
	}
	if daBatch.Hash() != common.HexToHash(dbBatch.Hash) {
		return fmt.Errorf("batch hash mismatch: decoded from blob=%s, db=%s", daBatch.Hash().Hex(), dbBatch.Hash)
	}
	return nil
}
// validateMessageQueueConsistency validates L1 message queue hash consistency
// for one batch: the prev/post queue hashes must be non-zero whenever L1
// messages were processed before / up to this batch, and must differ when the
// batch itself consumed any messages.
func (r *Layer2Relayer) validateMessageQueueConsistency(batchIndex uint64, chunks []*orm.Chunk, prevL1MsgQueueHash common.Hash, postL1MsgQueueHash common.Hash) error {
	if len(chunks) == 0 {
		return fmt.Errorf("batch %d has no chunks for message queue validation", batchIndex)
	}
	first, last := chunks[0], chunks[len(chunks)-1]
	// Sum the L1 messages consumed by this batch across all of its chunks.
	var poppedInBatch uint64
	for _, c := range chunks {
		poppedInBatch += c.TotalL1MessagesPoppedInChunk
	}
	// If there were L1 messages processed before this batch, prev hash should not be zero
	if first.TotalL1MessagesPoppedBefore > 0 && prevL1MsgQueueHash == (common.Hash{}) {
		return fmt.Errorf("batch %d prev L1 message queue hash is zero but %d L1 messages were processed before", batchIndex, first.TotalL1MessagesPoppedBefore)
	}
	// If there are any L1 messages processed up to this batch, post hash should not be zero
	poppedTotal := last.TotalL1MessagesPoppedBefore + last.TotalL1MessagesPoppedInChunk
	if poppedTotal > 0 && postL1MsgQueueHash == (common.Hash{}) {
		return fmt.Errorf("batch %d post L1 message queue hash is zero but %d L1 messages were processed in total", batchIndex, poppedTotal)
	}
	// Prev and post queue hashes should be different if L1 messages were processed in this batch
	if poppedInBatch > 0 && prevL1MsgQueueHash == postL1MsgQueueHash {
		return fmt.Errorf("batch %d has same prev and post L1 message queue hashes but processed %d L1 messages in this batch", batchIndex, poppedInBatch)
	}
	return nil
}
// assembleDABatchFromPayload reconstructs an encoding.DABatch from a decoded
// blob payload so its hash can be compared against the database record. The
// database supplies only the batch index and parent hash; blocks, queue
// hashes and transactions all come from the payload itself.
func assembleDABatchFromPayload(payload encoding.DABlobPayload, dbBatch *orm.Batch, codec encoding.Codec, l1MessagesWithBlockNumbers map[uint64][]*types.TransactionData) (encoding.DABatch, error) {
	blocks, err := assembleBlocksFromPayload(payload, l1MessagesWithBlockNumbers)
	if err != nil {
		return nil, fmt.Errorf("failed to assemble blocks from payload batch_index=%d codec_version=%d parent_batch_hash=%s: %w", dbBatch.Index, dbBatch.CodecVersion, dbBatch.ParentBatchHash, err)
	}
	batch := &encoding.Batch{
		Index:                  dbBatch.Index,                              // The database provides only batch index, other fields are derived from blob payload
		ParentBatchHash:        common.HexToHash(dbBatch.ParentBatchHash), // The first batch's parent hash is verified with calldata, subsequent batches are linked via dbBatch.ParentBatchHash and verified in database consistency checks
		PrevL1MessageQueueHash: payload.PrevL1MessageQueueHash(),
		PostL1MessageQueueHash: payload.PostL1MessageQueueHash(),
		Blocks:                 blocks,
		Chunks: []*encoding.Chunk{ // One chunk for this batch to pass sanity checks when building DABatch
			{
				Blocks:                 blocks,
				PrevL1MessageQueueHash: payload.PrevL1MessageQueueHash(),
				PostL1MessageQueueHash: payload.PostL1MessageQueueHash(),
			},
		},
	}
	daBatch, err := codec.NewDABatch(batch)
	if err != nil {
		return nil, fmt.Errorf("failed to build DABatch batch_index=%d codec_version=%d parent_batch_hash=%s: %w", dbBatch.Index, dbBatch.CodecVersion, dbBatch.ParentBatchHash, err)
	}
	return daBatch, nil
}
// assembleBlocksFromPayload converts the DA blocks and transactions of a blob
// payload into encoding.Block values, re-inserting the L1 message txs (keyed
// by block number) that are not carried in the blob itself.
func assembleBlocksFromPayload(payload encoding.DABlobPayload, l1MessagesWithBlockNumbers map[uint64][]*types.TransactionData) ([]*encoding.Block, error) {
	daBlocks := payload.Blocks()
	txns := payload.Transactions()
	// Each block has a parallel transaction list; the two slices must align.
	if len(daBlocks) != len(txns) {
		return nil, fmt.Errorf("mismatched number of blocks and transactions: %d blocks, %d transactions", len(daBlocks), len(txns))
	}
	blocks := make([]*encoding.Block, len(daBlocks))
	for i := range daBlocks {
		// Only the header fields present in the DA block are reconstructed.
		blocks[i] = &encoding.Block{
			Header: &types.Header{
				Number:   new(big.Int).SetUint64(daBlocks[i].Number()),
				Time:     daBlocks[i].Timestamp(),
				BaseFee:  daBlocks[i].BaseFee(),
				GasLimit: daBlocks[i].GasLimit(),
			},
		}
		// Ensure per-block ordering: [L1 messages][L2 transactions]. Prepend L1 messages (if any), then append L2 transactions.
		if l1Messages, ok := l1MessagesWithBlockNumbers[daBlocks[i].Number()]; ok {
			blocks[i].Transactions = l1Messages
		}
		blocks[i].Transactions = append(blocks[i].Transactions, encoding.TxsToTxsData(txns[i])...)
	}
	return blocks, nil
}

View File

@@ -0,0 +1,131 @@
package relayer
import (
"encoding/json"
"fmt"
"math/big"
"os"
"path/filepath"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"github.com/scroll-tech/da-codec/encoding"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
bridgeabi "scroll-tech/rollup/abi"
"scroll-tech/rollup/internal/orm"
)
// TestAssembleDABatch replays real commitBatches calldata plus its blobs
// (loaded from testdata) through parseCommitBatchesCalldata and
// assembleDABatchFromPayload, chaining each reassembled batch's hash into the
// next batch's parent hash. The fixed L1 messages below are the ones popped
// by the blocks in those blobs, keyed by L2 block number.
func TestAssembleDABatch(t *testing.T) {
	calldataHex := "0x9bbaa2ba0000000000000000000000000000000000000000000000000000000000000008146793a7d71663cd87ec9713f72242a3798d5e801050130a3e16efaa09fb803e58af2593dadc8b9fff75a2d27199cb97ec115bade109b8d691a512608ef180eb"
	blobsPath := filepath.Join("../../../testdata", "commit_batches_blobs.json")
	calldata, err := hexutil.Decode(strings.TrimSpace(calldataHex))
	assert.NoErrorf(t, err, "failed to decode calldata: %s", calldataHex)
	blobs, err := loadBlobsFromJSON(blobsPath)
	assert.NoErrorf(t, err, "failed to read blobs: %s", blobsPath)
	assert.NotEmpty(t, blobs, "no blobs provided")
	info, err := parseCommitBatchesCalldata(bridgeabi.ScrollChainABI, calldata)
	assert.NoError(t, err)
	codec, err := encoding.CodecFromVersion(encoding.CodecVersion(info.Version))
	assert.NoErrorf(t, err, "failed to get codec from version %d", info.Version)
	// Chain state carried across blobs: parent hash comes from calldata for
	// the first blob, then from the previous reassembled batch.
	parentBatchHash := info.ParentBatchHash
	// NOTE(review): 113571 is presumably the on-chain index of the first batch
	// in this fixture — confirm against the testdata's source transaction.
	index := uint64(113571)
	t.Logf("calldata parsed: version=%d parentBatchHash=%s lastBatchHash=%s blobs=%d", info.Version, info.ParentBatchHash.Hex(), info.LastBatchHash.Hex(), len(blobs))
	fromAddr := common.HexToAddress("0x61d8d3e7f7c656493d1d76aaa1a836cedfcbc27b")
	toAddr := common.HexToAddress("0xba50f5340fb9f3bd074bd638c9be13ecb36e603d")
	// L1 message transactions expected in the blobs' blocks, keyed by block number.
	l1MessagesWithBlockNumbers := map[uint64][]*types.TransactionData{
		11488527: {
			&types.TransactionData{
				Type:  types.L1MessageTxType,
				Nonce: 1072515,
				Gas:   340000,
				To:    &toAddr,
				Value: (*hexutil.Big)(big.NewInt(0)),
				Data:  "0x8ef1332e00000000000000000000000081f3843af1fbab046b771f0d440c04ebb2b7513f000000000000000000000000cec03800074d0ac0854bf1f34153cc4c8baeeb1e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000105d8300000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000084f03efa3700000000000000000000000000000000000000000000000000000000000024730000000000000000000000000000000000000000000000000000000000000040000000000000000000000000000000000000000000000000000000000000000171bdb6e3062daaee1845ba4cb1902169feb5a9b9555a882d45637d3bd29eb83500000000000000000000000000000000000000000000000000000000",
				From:  fromAddr,
			},
		},
		11488622: {
			&types.TransactionData{
				Type:  types.L1MessageTxType,
				Nonce: 1072516,
				Gas:   340000,
				To:    &toAddr,
				Value: (*hexutil.Big)(big.NewInt(0)),
				Data:  "0x8ef1332e00000000000000000000000081f3843af1fbab046b771f0d440c04ebb2b7513f000000000000000000000000cec03800074d0ac0854bf1f34153cc4c8baeeb1e00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000105d8400000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000084f03efa370000000000000000000000000000000000000000000000000000000000002474000000000000000000000000000000000000000000000000000000000000004000000000000000000000000000000000000000000000000000000000000000012aeb01535c1845b689bfce22e53029ec59ec75ea20f660d7c5fcd99f55b75b6900000000000000000000000000000000000000000000000000000000",
				From:  fromAddr,
			},
		},
		11489190: {
			&types.TransactionData{
				Type:  types.L1MessageTxType,
				Nonce: 1072517,
				Gas:   168000,
				To:    &toAddr,
				Value: (*hexutil.Big)(big.NewInt(0)),
				Data:  "0x8ef1332e0000000000000000000000003b1399523f819ea4c4d3e76dddefaf4226c6ba570000000000000000000000003b1399523f819ea4c4d3e76dddefaf4226c6ba5700000000000000000000000000000000000000000000000000000000000027100000000000000000000000000000000000000000000000000000000000105d8500000000000000000000000000000000000000000000000000000000000000a00000000000000000000000000000000000000000000000000000000000000000",
				From:  fromAddr,
			},
		},
	}
	for i, blob := range blobs {
		payload, decErr := codec.DecodeBlob(blob)
		assert.NoErrorf(t, decErr, "blob[%d] decode failed", i)
		if decErr != nil {
			continue
		}
		dbBatch := &orm.Batch{
			Index:           index,
			ParentBatchHash: parentBatchHash.Hex(),
		}
		daBatch, asmErr := assembleDABatchFromPayload(payload, dbBatch, codec, l1MessagesWithBlockNumbers)
		assert.NoErrorf(t, asmErr, "blob[%d] assemble failed", i)
		if asmErr == nil {
			t.Logf("blob[%d] DABatch hash=%s", i, daBatch.Hash().Hex())
		}
		// Advance chain state for the next blob.
		index += 1
		parentBatchHash = daBatch.Hash()
	}
}
// loadBlobsFromJSON reads a JSON file containing an array of hex-encoded
// blob payloads ("0x..." strings) and converts each entry into a fixed-size
// KZG blob. Every entry must decode to exactly len(kzg4844.Blob) bytes;
// otherwise an error identifying the offending index is returned.
func loadBlobsFromJSON(path string) ([]*kzg4844.Blob, error) {
	data, readErr := os.ReadFile(path)
	if readErr != nil {
		return nil, readErr
	}
	var encoded []hexutil.Bytes
	if err := json.Unmarshal(data, &encoded); err != nil {
		return nil, fmt.Errorf("invalid JSON; expect [\"0x...\"] array: %w", err)
	}
	// The required byte length is fixed by the kzg4844.Blob array type.
	var zero kzg4844.Blob
	expected := len(zero)
	blobs := make([]*kzg4844.Blob, 0, len(encoded))
	for idx, payload := range encoded {
		if len(payload) != expected {
			return nil, fmt.Errorf("blob[%d] length mismatch: got %d, want %d", idx, len(payload), expected)
		}
		dst := new(kzg4844.Blob)
		copy(dst[:], payload)
		blobs = append(blobs, dst)
	}
	return blobs, nil
}

View File

@@ -70,15 +70,18 @@ func testL2RelayerProcessPendingBatches(t *testing.T) {
_, err = chunkOrm.InsertChunk(context.Background(), chunk2, encoding.CodecV7, rutils.ChunkMetrics{})
assert.NoError(t, err)
batchOrm := orm.NewBatch(db)
genesisBatch, err := batchOrm.GetBatchByIndex(context.Background(), 0)
assert.NoError(t, err)
batch := &encoding.Batch{
Index: 1,
TotalL1MessagePoppedBefore: 0,
ParentBatchHash: common.Hash{},
ParentBatchHash: common.HexToHash(genesisBatch.Hash),
Chunks: []*encoding.Chunk{chunk1, chunk2},
Blocks: []*encoding.Block{block1, block2},
}
batchOrm := orm.NewBatch(db)
dbBatch, err := batchOrm.InsertBatch(context.Background(), batch, encoding.CodecV7, rutils.BatchMetrics{})
assert.NoError(t, err)

View File

@@ -81,6 +81,7 @@ func setupEnv(t *testing.T) {
block1 = &encoding.Block{}
err = json.Unmarshal(templateBlockTrace1, block1)
assert.NoError(t, err)
block1.Header.Number = big.NewInt(1)
chunk1 = &encoding.Chunk{Blocks: []*encoding.Block{block1}}
codec, err := encoding.CodecFromVersion(encoding.CodecV0)
assert.NoError(t, err)
@@ -94,6 +95,7 @@ func setupEnv(t *testing.T) {
block2 = &encoding.Block{}
err = json.Unmarshal(templateBlockTrace2, block2)
assert.NoError(t, err)
block2.Header.Number = big.NewInt(2)
chunk2 = &encoding.Chunk{Blocks: []*encoding.Block{block2}}
daChunk2, err := codec.NewDAChunk(chunk2, chunk1.NumL1Messages(0))
assert.NoError(t, err)

View File

@@ -266,6 +266,19 @@ func (o *Batch) GetBatchByIndex(ctx context.Context, index uint64) (*Batch, erro
return &batch, nil
}
// GetBatchByHash retrieves the batch by the given hash.
func (o *Batch) GetBatchByHash(ctx context.Context, hash string) (*Batch, error) {
	var batch Batch
	// Build the query in one chain; First returns gorm.ErrRecordNotFound
	// (wrapped below) when no row matches the hash.
	query := o.db.WithContext(ctx).Model(&Batch{}).Where("hash = ?", hash)
	if err := query.First(&batch).Error; err != nil {
		return nil, fmt.Errorf("Batch.GetBatchByHash error: %w, batch hash: %v", err, hash)
	}
	return &batch, nil
}
// InsertBatch inserts a new batch into the database.
func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, codecVersion encoding.CodecVersion, metrics rutils.BatchMetrics, dbTX ...*gorm.DB) (*Batch, error) {
if batch == nil {

File diff suppressed because one or more lines are too long

View File

@@ -5,8 +5,8 @@ go 1.22
toolchain go1.22.2
require (
github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435
github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178
github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587
github.com/stretchr/testify v1.10.0
gorm.io/gorm v1.25.7-0.20240204074919-46816ad31dde
)

View File

@@ -93,10 +93,10 @@ github.com/rjeczalik/notify v0.9.1 h1:CLCKso/QK1snAlnhNR/CNvNiFU2saUtjV0bx3EwNeC
github.com/rjeczalik/notify v0.9.1/go.mod h1:rKwnCoCGeuQnwBtTSPL9Dad03Vh2n40ePRrjvIXnJho=
github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8=
github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4=
github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435 h1:X9fkvjrYBY79lGgKEPpUhuiJ4vWpWwzOVw4H8CU8L54=
github.com/scroll-tech/da-codec v0.1.3-0.20250310095435-012aaee6b435/go.mod h1:yhTS9OVC0xQGhg7DN5iV5KZJvnSIlFWAxDdp+6jxQtY=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601 h1:NEsjCG6uSvLRBlsP3+x6PL1kM+Ojs3g8UGotIPgJSz8=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250305151038-478940e79601/go.mod h1:OblWe1+QrZwdpwO0j/LY3BSGuKT3YPUFBDQQgvvfStQ=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178 h1:4utngmJHXSOS5FoSdZhEV1xMRirpArbXvyoCZY9nYj0=
github.com/scroll-tech/da-codec v0.1.3-0.20250826112206-b4cce5c5d178/go.mod h1:Z6kN5u2khPhiqHyk172kGB7o38bH/nj7Ilrb/46wZGg=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587 h1:wG1+gb+K4iLtxAHhiAreMdIjP5x9hB64duraN2+u1QU=
github.com/scroll-tech/go-ethereum v1.10.14-0.20250625112225-a67863c65587/go.mod h1:YyfB2AyAtphlbIuDQgaxc2b9mo0zE4EBA1+qtXvzlmg=
github.com/scroll-tech/zktrie v0.8.4 h1:UagmnZ4Z3ITCk+aUq9NQZJNAwnWl4gSxsLb2Nl7IgRE=
github.com/scroll-tech/zktrie v0.8.4/go.mod h1:XvNo7vAk8yxNyTjBDj5WIiFzYW4bx/gJ78+NK6Zn6Uk=
github.com/shirou/gopsutil v3.21.11+incompatible h1:+1+c1VGhc88SSonWP6foOcLhvnKlUeu/erjjvaPEYiI=

View File

@@ -3,8 +3,6 @@
RUST_MIN_STACK ?= 16777216
export RUST_MIN_STACK
CIRCUIT_STUFF = .work/euclid/chunk/app.vmexe .work/feynman/chunk/app.vmexe
ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
PLONKY3_VERSION=$(shell grep -m 1 "Plonky3.git" ../Cargo.lock | cut -d "#" -f2 | cut -c-7)
else
@@ -53,16 +51,10 @@ lint:
cargo clippy --all-features --all-targets -- -D warnings
cargo fmt --all
$(CIRCUIT_STUFF):
@echo "Download stuff with download-release.sh, and put them into correct directory";
@exit 1;
test_run: $(CIRCUIT_STUFF)
test_run:
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json
test_e2e_run: $(CIRCUIT_STUFF) ${E2E_HANDLE_SET}
test_e2e_run: ${E2E_HANDLE_SET}
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json handle ${E2E_HANDLE_SET}
gen_verifier_stuff:
mkdir -p ${DUMP_DIR}
GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo run --release -p prover -- --config ./config.json --forkname feynman dump ${DUMP_DIR}

View File

@@ -24,13 +24,10 @@
"db_path": ".work/db"
},
"circuits": {
"euclidV2": {
"hard_fork_name": "euclidV2",
"workspace_path": ".work/euclid"
},
"feynman": {
"hard_fork_name": "feynman",
"workspace_path": ".work/feynman1"
},
"base_url": "https://circuit-release.s3.us-west-2.amazonaws.com/scroll-zkvm/releases/feynman/",
"workspace_path": ".work/feynman"
}
}
}