Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-12 15:38:18 -05:00)

Compare commits (6 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 9ee65119d8 | |
| | fb1c800532 | |
| | 2baad2ecad | |
| | bdf2968771 | |
| | 4d09e13b0c | |
| | a98a2ff4b5 | |
@@ -15,6 +15,7 @@ func Route(router *gin.Engine, conf *config.Config) {
	router.Use(cors.New(cors.Config{
		AllowOrigins:     []string{"*"},
		AllowMethods:     []string{"GET", "POST", "PUT", "DELETE"},
		AllowHeaders:     []string{"Origin", "Content-Type", "Authorization"},
		AllowCredentials: true,
		MaxAge:           12 * time.Hour,
	}))
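For reference, a minimal self-contained sketch of the CORS setup this hunk configures, assuming the usual github.com/gin-gonic/gin and github.com/gin-contrib/cors packages; the package name and the bare function signature are illustrative, and only the cors.Config values come from the hunk above.

```go
package route

import (
	"time"

	"github.com/gin-contrib/cors"
	"github.com/gin-gonic/gin"
)

// Route applies the CORS policy shown in the diff: any origin, the four HTTP
// verbs used here, and the Authorization header needed by the login/JWT flow
// elsewhere in this change set.
func Route(router *gin.Engine) {
	router.Use(cors.New(cors.Config{
		AllowOrigins:     []string{"*"},
		AllowMethods:     []string{"GET", "POST", "PUT", "DELETE"},
		AllowHeaders:     []string{"Origin", "Content-Type", "Authorization"},
		AllowCredentials: true,
		MaxAge:           12 * time.Hour,
	}))
}
```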
common/libzkp/impl/Cargo.lock (generated, 28 changed lines)
@@ -32,7 +32,7 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "ark-std",
 "env_logger 0.10.0",

@@ -432,7 +432,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "eth-types",
 "ethers-core",

@@ -1045,7 +1045,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "ethers-core",
 "ethers-signers",

@@ -1223,7 +1223,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "eth-types",
 "geth-utils",

@@ -1436,7 +1436,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "digest 0.7.6",
 "eth-types",

@@ -1476,7 +1476,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "env_logger 0.9.3",
 "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)",

@@ -2074,7 +2074,7 @@ dependencies = [
[[package]]
name = "keccak256"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "env_logger 0.9.3",
 "eth-types",

@@ -2261,7 +2261,7 @@ dependencies = [
[[package]]
name = "mock"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "eth-types",
 "ethers-core",

@@ -2276,7 +2276,7 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "bus-mapping",
 "eth-types",

@@ -2752,7 +2752,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.4.0"
-source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.1#15aac6e1484a42f723098fbc9d8783f374e7e90a"
+source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.3#337089ac40bac756d88b9ae30a3be1f82538b216"
dependencies = [
 "aggregator",
 "anyhow",

@@ -3621,7 +3621,7 @@ checksum = "62bb4feee49fdd9f707ef802e22365a35de4b7b299de4763d44bfea899442ff9"
[[package]]
name = "snark-verifier"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#f8bdcbee60348e5c996c04f19ff30522e6b276b0"
+source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#12c306ec57849921e690221b10b8a08189868d4a"
dependencies = [
 "bytes",
 "ethereum-types 0.14.1",

@@ -3645,7 +3645,7 @@ dependencies = [
[[package]]
name = "snark-verifier-sdk"
version = "0.0.1"
-source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#f8bdcbee60348e5c996c04f19ff30522e6b276b0"
+source = "git+https://github.com/scroll-tech/snark-verifier?branch=develop#12c306ec57849921e690221b10b8a08189868d4a"
dependencies = [
 "bincode",
 "env_logger 0.10.0",

@@ -4037,7 +4037,7 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "types"
version = "0.4.0"
-source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.1#15aac6e1484a42f723098fbc9d8783f374e7e90a"
+source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.3#337089ac40bac756d88b9ae30a3be1f82538b216"
dependencies = [
 "base64 0.13.1",
 "blake2",

@@ -4482,7 +4482,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
[[package]]
name = "zkevm-circuits"
version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?branch=develop#2855c13b5d3e6ec4056f823f56a33bf25d0080bb"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.3#2c8c749b3e4a61e89028289f4ff93157c5671d7b"
dependencies = [
 "array-init",
 "bus-mapping",
@@ -18,8 +18,8 @@ maingate = { git = "https://github.com/scroll-tech/halo2wrong", branch = "halo2-
halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = "0.3.1-derive-serde" }

[dependencies]
-prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.1" }
-types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.1" }
+prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.3" }
+types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.3" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }

log = "0.4"

@@ -33,8 +33,6 @@ once_cell = "1.8.0"

[profile.test]
opt-level = 3
# debug-assertions = true

[profile.release]
opt-level = 3
# debug-assertions = true
@@ -33,7 +33,7 @@ func (r ProofType) String() string {
	case ProofTypeBatch:
		return "proof type batch"
	default:
-		return "illegal proof type"
+		return fmt.Sprintf("illegal proof type: %d", r)
	}
}
@@ -118,7 +118,7 @@ func TestProveTypeString(t *testing.T) {
	assert.Equal(t, "proof type batch", proofTypeBatch.String())

	illegalProof := ProofType(3)
-	assert.Equal(t, "illegal proof type", illegalProof.String())
+	assert.Equal(t, "illegal proof type: 3", illegalProof.String())
}

func TestProofMsgPublicKey(t *testing.T) {
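A small self-contained sketch of the behaviour after this pair of hunks; the ProofType constants and the chunk-case string are assumptions filled in for illustration (the diff only shows the batch and default branches).

```go
package main

import "fmt"

// ProofType and its constants are assumed here (0 undefined, 1 chunk, 2 batch,
// matching the prover config comment later in this change set).
type ProofType uint8

const (
	ProofTypeUndefined ProofType = iota
	ProofTypeChunk
	ProofTypeBatch
)

func (r ProofType) String() string {
	switch r {
	case ProofTypeChunk:
		return "proof type chunk" // string assumed; not shown in the diff
	case ProofTypeBatch:
		return "proof type batch"
	default:
		// The change: unknown values are now reported with their numeric value.
		return fmt.Sprintf("illegal proof type: %d", r)
	}
}

func main() {
	fmt.Println(ProofTypeBatch) // proof type batch
	fmt.Println(ProofType(3))   // illegal proof type: 3
}
```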
@@ -5,7 +5,7 @@ import (
	"runtime/debug"
)

-var tag = "v4.1.0"
+var tag = "v4.1.6"

var commit = func() string {
	if info, ok := debug.ReadBuildInfo(); ok {
@@ -81,9 +81,11 @@ func (c *CoordinatorApp) MockConfig(store bool) error {
	}
	// Reset prover manager config for manager test cases.
	cfg.ProverManager = &coordinatorConfig.ProverManager{
-		ProversPerSession: 1,
-		Verifier:          &coordinatorConfig.VerifierConfig{MockMode: true},
-		CollectionTimeSec: 60,
+		ProversPerSession:  1,
+		Verifier:           &coordinatorConfig.VerifierConfig{MockMode: true},
+		CollectionTimeSec:  60,
+		SessionAttempts:    10,
+		MaxVerifierWorkers: 4,
	}
	cfg.DB.DSN = base.DBImg.Endpoint()
	cfg.L2.ChainID = 111
@@ -1,14 +1,14 @@
{
  "prover_manager": {
    "provers_per_session": 1,
-   "session_attempts": 2,
+   "session_attempts": 5,
    "collection_time_sec": 180,
    "verifier": {
      "mock_mode": true,
      "params_path": "",
      "assets_path": ""
    },
-   "max_verifier_workers": 10
+   "max_verifier_workers": 4
  },
  "db": {
    "driver_name": "postgres",

@@ -22,6 +22,6 @@
  "auth": {
    "secret": "prover secret key",
    "challenge_expire_duration_sec": 3600,
-   "login_expire_duration_sec": 3600
+   "login_expire_duration_sec": 10
  }
}
@@ -8,24 +8,19 @@ import (
	"scroll-tech/common/database"
)

-const (
-	defaultNumberOfVerifierWorkers      = 10
-	defaultNumberOfSessionRetryAttempts = 2
-)
-
// ProverManager loads sequencer configuration items.
type ProverManager struct {
	// The amount of provers to pick per proof generation session.
	ProversPerSession uint8 `json:"provers_per_session"`
	// Number of attempts that a session can be retried if previous attempts failed.
	// Currently we only consider proving timeout as failure here.
-	SessionAttempts uint8 `json:"session_attempts,omitempty"`
+	SessionAttempts uint8 `json:"session_attempts"`
	// Zk verifier config.
-	Verifier *VerifierConfig `json:"verifier,omitempty"`
+	Verifier *VerifierConfig `json:"verifier"`
	// Proof collection time (in seconds).
	CollectionTimeSec int `json:"collection_time_sec"`
	// Max number of workers in verifier worker pool
-	MaxVerifierWorkers int `json:"max_verifier_workers,omitempty"`
+	MaxVerifierWorkers int `json:"max_verifier_workers"`
}

// L2 loads l2geth configuration items.

@@ -38,7 +33,7 @@ type L2 struct {
type Auth struct {
	Secret                     string `json:"secret"`
	ChallengeExpireDurationSec int    `json:"challenge_expire_duration_sec"`
-	LoginExpireDurationSec     int    `json:"token_expire_duration_sec"` // unit: seconds
+	LoginExpireDurationSec     int    `json:"token_expire_duration_sec"`
}

// Config load configuration items.

@@ -69,12 +64,5 @@ func NewConfig(file string) (*Config, error) {
		return nil, err
	}

-	if cfg.ProverManager.MaxVerifierWorkers == 0 {
-		cfg.ProverManager.MaxVerifierWorkers = defaultNumberOfVerifierWorkers
-	}
-	if cfg.ProverManager.SessionAttempts == 0 {
-		cfg.ProverManager.SessionAttempts = defaultNumberOfSessionRetryAttempts
-	}
-
	return cfg, nil
}
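With the default-filling block and its constants gone, NewConfig reduces to reading and unmarshalling the file; zero values for session_attempts or max_verifier_workers are no longer silently replaced, which is why the sample configs in this change spell those fields out. A trimmed sketch under that assumption (the real Config carries more sections than shown here, and the actual decode path may differ):

```go
package config

import (
	"encoding/json"
	"os"
)

// ProverManager is trimmed to the fields this diff touches; Verifier and the
// other sections are omitted for brevity.
type ProverManager struct {
	ProversPerSession  uint8 `json:"provers_per_session"`
	SessionAttempts    uint8 `json:"session_attempts"`
	CollectionTimeSec  int   `json:"collection_time_sec"`
	MaxVerifierWorkers int   `json:"max_verifier_workers"`
}

// Config is likewise reduced to the prover_manager section.
type Config struct {
	ProverManager *ProverManager `json:"prover_manager"`
}

// NewConfig now just decodes the JSON file; whatever the file says is what
// the coordinator runs with, including zeroes.
func NewConfig(file string) (*Config, error) {
	buf, err := os.ReadFile(file)
	if err != nil {
		return nil, err
	}
	cfg := &Config{}
	if err := json.Unmarshal(buf, cfg); err != nil {
		return nil, err
	}
	return cfg, nil
}
```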
@@ -14,14 +14,14 @@ func TestConfig(t *testing.T) {
	configTemplate := `{
		"prover_manager": {
			"provers_per_session": 1,
-			"session_attempts": %d,
+			"session_attempts": 5,
			"collection_time_sec": 180,
			"verifier": {
				"mock_mode": true,
				"params_path": "",
				"agg_vk_path": ""
			},
-			"max_verifier_workers": %d
+			"max_verifier_workers": 4
		},
		"db": {
			"driver_name": "postgres",

@@ -46,8 +46,7 @@ func TestConfig(t *testing.T) {
		assert.NoError(t, tmpFile.Close())
		assert.NoError(t, os.Remove(tmpFile.Name()))
	}()
-	config := fmt.Sprintf(configTemplate, defaultNumberOfSessionRetryAttempts, defaultNumberOfVerifierWorkers)
-	_, err = tmpFile.WriteString(config)
+	_, err = tmpFile.WriteString(configTemplate)
	assert.NoError(t, err)

	cfg, err := NewConfig(tmpFile.Name())

@@ -88,36 +87,4 @@ func TestConfig(t *testing.T) {
		_, err = NewConfig(tmpFile.Name())
		assert.Error(t, err)
	})
-
-	t.Run("Default MaxVerifierWorkers", func(t *testing.T) {
-		tmpFile, err := os.CreateTemp("", "example")
-		assert.NoError(t, err)
-		defer func() {
-			assert.NoError(t, tmpFile.Close())
-			assert.NoError(t, os.Remove(tmpFile.Name()))
-		}()
-		config := fmt.Sprintf(configTemplate, defaultNumberOfSessionRetryAttempts, 0)
-		_, err = tmpFile.WriteString(config)
-		assert.NoError(t, err)
-
-		cfg, err := NewConfig(tmpFile.Name())
-		assert.NoError(t, err)
-		assert.Equal(t, defaultNumberOfVerifierWorkers, cfg.ProverManager.MaxVerifierWorkers)
-	})
-
-	t.Run("Default SessionAttempts", func(t *testing.T) {
-		tmpFile, err := os.CreateTemp("", "example")
-		assert.NoError(t, err)
-		defer func() {
-			assert.NoError(t, tmpFile.Close())
-			assert.NoError(t, os.Remove(tmpFile.Name()))
-		}()
-		config := fmt.Sprintf(configTemplate, 0, defaultNumberOfVerifierWorkers)
-		_, err = tmpFile.WriteString(config)
-		assert.NoError(t, err)
-
-		cfg, err := NewConfig(tmpFile.Name())
-		assert.NoError(t, err)
-		assert.Equal(t, uint8(defaultNumberOfSessionRetryAttempts), cfg.ProverManager.SessionAttempts)
-	})
}
@@ -31,8 +31,16 @@ func (a *AuthController) Login(c *gin.Context) (interface{}, error) {
	if err := c.ShouldBind(&login); err != nil {
		return "", fmt.Errorf("missing the public_key, err:%w", err)
	}

+	// check login parameter's token is equal to bearer token, the Authorization must be existed
+	// if not exist, the jwt token will intercept it
+	brearToken := c.GetHeader("Authorization")
+	if brearToken != "Bearer "+login.Message.Challenge {
+		return "", fmt.Errorf("check challenge failure for the not equal challenge string")
+	}
+
+	// check the challenge is used, if used, return failure
-	if err := a.loginLogic.InsertChallengeString(c, login.Signature); err != nil {
+	if err := a.loginLogic.InsertChallengeString(c, login.Message.Challenge); err != nil {
		return "", fmt.Errorf("login insert challenge string failure:%w", err)
	}
	return login, nil
@@ -20,6 +20,6 @@ func NewLoginLogic(db *gorm.DB) *LoginLogic {
}

// InsertChallengeString insert and check the challenge string is existed
-func (l *LoginLogic) InsertChallengeString(ctx *gin.Context, signature string) error {
-	return l.challengeOrm.InsertChallenge(ctx, signature)
+func (l *LoginLogic) InsertChallengeString(ctx *gin.Context, challenge string) error {
+	return l.challengeOrm.InsertChallenge(ctx, challenge)
}
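Taken together, the two hunks above tighten the login flow: the Authorization header must echo the challenge that was issued, and the challenge string itself (not the signature) is what gets inserted, so a reused challenge fails on insert. A minimal self-contained sketch of the header check; the function name and packaging are illustrative only.

```go
package main

import (
	"errors"
	"fmt"
)

// checkChallenge mirrors the new comparison in Login: the bearer token must
// be exactly "Bearer " plus the challenge handed out by /challenge.
func checkChallenge(authorizationHeader, challenge string) error {
	if authorizationHeader != "Bearer "+challenge {
		return errors.New("check challenge failure for the not equal challenge string")
	}
	return nil
}

func main() {
	fmt.Println(checkChallenge("Bearer abc123", "abc123")) // <nil>
	fmt.Println(checkChallenge("Bearer zzz", "abc123"))    // mismatch -> error
}
```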
@@ -87,7 +87,7 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.P
		if errors.Is(err, ErrValidatorFailureProofMsgStatusNotOk) {
			m.proofFailure(ctx, proofMsg.ID, pk, proofMsg.Type)
		}
-		return nil
+		return err
	}

	proofTime := time.Since(proverTask.CreatedAt)

@@ -134,23 +134,21 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.P
	}

	if verifyErr != nil || !success {
-		if verifyErr != nil {
-			// TODO: this is only a temp workaround for testnet, we should return err in real cases
-			log.Error("failed to verify zk proof", "proof id", proofMsg.ID, "prover pk", pk, "prove type",
-				proofMsg.Type, "proof time", proofTimeSec, "error", verifyErr)
-		}
		m.proofFailure(ctx, proofMsg.ID, pk, proofMsg.Type)

		// TODO: Prover needs to be slashed if proof is invalid.
		coordinatorProofsVerifiedFailedTimeTimer.Update(proofTime)

		log.Info("proof verified by coordinator failed", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
			"prover pk", pk, "prove type", proofMsg.Type, "proof time", proofTimeSec, "error", verifyErr)
-		return nil

+		if verifyErr == nil {
+			verifyErr = fmt.Errorf("verification succeeded and it's an invalid proof")
+		}
+		return verifyErr
	}

	if err := m.closeProofTask(ctx, proofMsg.ID, pk, proofMsg); err != nil {
		m.proofRecover(ctx, proofMsg.ID, pk, proofMsg.Type)
		return err
	}

	coordinatorProofsVerifiedSuccessTimeTimer.Update(proofTime)
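The net effect of the HandleZkProof hunks is that validation and verification problems now propagate to the caller (and from there to the error code asserted in the tests below) instead of being logged and swallowed. A condensed, self-contained sketch of the new failure path; the function and callback names are illustrative, not the coordinator's API.

```go
package main

import (
	"errors"
	"fmt"
)

// handleVerification condenses the new control flow: on a verifier error or
// an invalid proof the failure is recorded, and a non-nil error is always
// returned so the submit_proof endpoint can report it.
func handleVerification(success bool, verifyErr error, markFailure func()) error {
	if verifyErr != nil || !success {
		markFailure()
		if verifyErr == nil {
			verifyErr = fmt.Errorf("verification succeeded and it's an invalid proof")
		}
		return verifyErr
	}
	return nil
}

func main() {
	noop := func() {}
	fmt.Println(handleVerification(true, nil, noop))                             // <nil>
	fmt.Println(handleVerification(false, nil, noop))                            // invalid proof
	fmt.Println(handleVerification(false, errors.New("verifier crashed"), noop)) // verifier error
}
```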
@@ -343,8 +343,11 @@ func (o *Chunk) UpdateBatchHashInRange(ctx context.Context, startIndex uint64, e

// UpdateUnassignedChunkReturning update the unassigned batch which end_block_number <= height and return the update record
func (o *Chunk) UpdateUnassignedChunkReturning(ctx context.Context, height, limit int) ([]*Chunk, error) {
+	if height <= 0 {
+		return nil, errors.New("Chunk.UpdateUnassignedBatchReturning error: height must be larger than zero")
+	}
	if limit < 0 {
-		return nil, errors.New("limit must not be smaller than zero")
+		return nil, errors.New("Chunk.UpdateUnassignedBatchReturning error: limit must not be smaller than zero")
	}
	if limit == 0 {
		return nil, nil
@@ -17,6 +17,8 @@ func Route(router *gin.Engine, cfg *config.Config) {
func v1(router *gin.RouterGroup, conf *config.Config) {
	r := router.Group("/v1")

+	r.GET("/health", api.HealthCheck.HealthCheck)
+
	challengeMiddleware := middleware.ChallengeMiddleware(conf)
	r.GET("/challenge", challengeMiddleware.LoginHandler)

@@ -26,7 +28,6 @@ func v1(router *gin.RouterGroup, conf *config.Config) {
	// need jwt token api
	r.Use(loginMiddleware.MiddlewareFunc())
	{
-		r.GET("/healthz", api.HealthCheck.HealthCheck)
		r.POST("/get_task", api.GetTask.GetTasks)
		r.POST("/submit_proof", api.SubmitProof.SubmitProof)
	}
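The routing hunks move the health check out from behind the JWT middleware and rename it from /healthz to /health, so a prover can probe the coordinator without a token (the mock prover further down relies on exactly that). A runnable sketch of the pattern with stand-in handlers and a stand-in auth middleware; only the paths and the ordering are taken from the diff.

```go
package main

import (
	"net/http"

	"github.com/gin-gonic/gin"
)

func main() {
	router := gin.New()
	v1 := router.Group("/coordinator/v1")

	// Unauthenticated liveness probe, registered before any auth middleware.
	v1.GET("/health", func(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"errcode": 0}) })

	// Stand-in for the JWT login middleware used in the real router.
	requireToken := func(c *gin.Context) {
		if c.GetHeader("Authorization") == "" {
			c.AbortWithStatusJSON(http.StatusUnauthorized, gin.H{"errcode": 1})
			return
		}
		c.Next()
	}

	v1.Use(requireToken)
	{
		v1.POST("/get_task", func(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"errcode": 0}) })
		v1.POST("/submit_proof", func(c *gin.Context) { c.JSON(http.StatusOK, gin.H{"errcode": 0}) })
	}

	_ = router.Run(":8555") // port borrowed from the sample prover config below; illustrative
}
```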
@@ -2,7 +2,7 @@ package types

// GetTaskParameter for ProverTasks request parameter
type GetTaskParameter struct {
-	ProverHeight int `form:"prover_height" json:"prover_height" binding:"required"`
+	ProverHeight int `form:"prover_height" json:"prover_height"`
	TaskType     int `form:"task_type" json:"task_type"`
}
@@ -1,21 +0,0 @@
-package types
-
-import (
-	"scroll-tech/common/types"
-	"scroll-tech/common/types/message"
-)
-
-// ProversInfo is assigned provers info of a task (session)
-type ProversInfo struct {
-	ID               string            `json:"id"`
-	ProverStatusList []*ProverStatus   `json:"provers"`
-	StartTimestamp   int64             `json:"start_timestamp"`
-	ProveType        message.ProofType `json:"prove_type,omitempty"`
-}
-
-// ProverStatus is the prover name and prover prove status
-type ProverStatus struct {
-	PublicKey string                  `json:"public_key"`
-	Name      string                  `json:"name"`
-	Status    types.ProverProveStatus `json:"status"`
-}
@@ -170,9 +170,7 @@ func testHandshake(t *testing.T) {
	}()

	chunkProver := newMockProver(t, "prover_chunk_test", coordinatorURL, message.ProofTypeChunk)
-	token := chunkProver.connectToCoordinator(t)
-	assert.NotEmpty(t, token)
-	assert.True(t, chunkProver.healthCheck(t, token, types.Success))
+	assert.True(t, chunkProver.healthCheckSuccess(t))
}

func testFailedHandshake(t *testing.T) {

@@ -181,21 +179,17 @@ func testFailedHandshake(t *testing.T) {
	proofCollector, httpHandler := setupCoordinator(t, 1, coordinatorURL)
	defer func() {
		proofCollector.Stop()
-		assert.NoError(t, httpHandler.Shutdown(context.Background()))
	}()

	// Try to perform handshake without token
	chunkProver := newMockProver(t, "prover_chunk_test", coordinatorURL, message.ProofTypeChunk)
-	token := chunkProver.connectToCoordinator(t)
-	assert.NotEmpty(t, token)
-	assert.True(t, chunkProver.healthCheck(t, token, types.Success))
+	assert.True(t, chunkProver.healthCheckSuccess(t))

-	// Try to perform handshake with timeouted token
+	// Try to perform handshake with server shutdown
+	assert.NoError(t, httpHandler.Shutdown(context.Background()))
+	time.Sleep(time.Second)
	batchProver := newMockProver(t, "prover_batch_test", coordinatorURL, message.ProofTypeBatch)
-	token = chunkProver.connectToCoordinator(t)
-	assert.NotEmpty(t, token)
-	<-time.After(time.Duration(tokenTimeout+1) * time.Second)
-	assert.True(t, batchProver.healthCheck(t, token, types.ErrJWTTokenExpired))
+	assert.True(t, batchProver.healthCheckFailure(t))
}

func testValidProof(t *testing.T) {
@@ -235,7 +229,7 @@ func testValidProof(t *testing.T) {
		}
		proverTask := provers[i].getProverTask(t, proofType)
		assert.NotNil(t, proverTask)
-		provers[i].submitProof(t, proverTask, proofStatus)
+		provers[i].submitProof(t, proverTask, proofStatus, types.Success)
	}

	// verify proof status

@@ -296,7 +290,7 @@ func testInvalidProof(t *testing.T) {
		provers[i] = newMockProver(t, "prover_test"+strconv.Itoa(i), coordinatorURL, proofType)
		proverTask := provers[i].getProverTask(t, proofType)
		assert.NotNil(t, proverTask)
-		provers[i].submitProof(t, proverTask, verifiedFailed)
+		provers[i].submitProof(t, proverTask, verifiedFailed, types.ErrCoordinatorHandleZkProofFailure)
	}

	// verify proof status

@@ -357,7 +351,7 @@ func testProofGeneratedFailed(t *testing.T) {
		provers[i] = newMockProver(t, "prover_test"+strconv.Itoa(i), coordinatorURL, proofType)
		proverTask := provers[i].getProverTask(t, proofType)
		assert.NotNil(t, proverTask)
-		provers[i].submitProof(t, proverTask, generatedFailed)
+		provers[i].submitProof(t, proverTask, generatedFailed, types.ErrCoordinatorHandleZkProofFailure)
	}

	// verify proof status

@@ -431,12 +425,12 @@ func testTimeoutProof(t *testing.T) {
	chunkProver2 := newMockProver(t, "prover_test"+strconv.Itoa(2), coordinatorURL, message.ProofTypeChunk)
	proverChunkTask2 := chunkProver2.getProverTask(t, message.ProofTypeChunk)
	assert.NotNil(t, proverChunkTask2)
-	chunkProver2.submitProof(t, proverChunkTask2, verifiedSuccess)
+	chunkProver2.submitProof(t, proverChunkTask2, verifiedSuccess, types.Success)

	batchProver2 := newMockProver(t, "prover_test"+strconv.Itoa(3), coordinatorURL, message.ProofTypeBatch)
	proverBatchTask2 := batchProver2.getProverTask(t, message.ProofTypeBatch)
	assert.NotNil(t, proverBatchTask2)
-	batchProver2.submitProof(t, proverBatchTask2, verifiedSuccess)
+	batchProver2.submitProof(t, proverBatchTask2, verifiedSuccess, types.Success)

	// verify proof status, it should be verified now, because second prover sent valid proof
	chunkProofStatus2, err := chunkOrm.GetProvingStatusByHash(context.Background(), dbChunk.Hash)
@@ -108,17 +108,27 @@ func (r *mockProver) login(t *testing.T, challengeString string) string {
	return loginData.Token
}

-func (r *mockProver) healthCheck(t *testing.T, token string, errCode int) bool {
+func (r *mockProver) healthCheckSuccess(t *testing.T) bool {
	var result types.Response
	client := resty.New()
	resp, err := client.R().
		SetHeader("Content-Type", "application/json").
-		SetHeader("Authorization", fmt.Sprintf("Bearer %s", token)).
		SetResult(&result).
-		Get("http://" + r.coordinatorURL + "/coordinator/v1/healthz")
+		Get("http://" + r.coordinatorURL + "/coordinator/v1/health")
	assert.NoError(t, err)
	assert.Equal(t, http.StatusOK, resp.StatusCode())
-	assert.Equal(t, errCode, result.ErrCode)
+	assert.Equal(t, ctypes.Success, result.ErrCode)
	return true
}

+func (r *mockProver) healthCheckFailure(t *testing.T) bool {
+	var result types.Response
+	client := resty.New()
+	resp, err := client.R().
+		SetResult(&result).
+		Get("http://" + r.coordinatorURL + "/coordinator/v1/health")
+	assert.Error(t, err)
+	assert.Equal(t, 0, resp.StatusCode())
+	assert.Equal(t, 0, result.ErrCode)
+	return true
+}

@@ -151,7 +161,7 @@ func (r *mockProver) getProverTask(t *testing.T, proofType message.ProofType) *t
	return &result.Data
}

-func (r *mockProver) submitProof(t *testing.T, proverTaskSchema *types.GetTaskSchema, proofStatus proofStatus) {
+func (r *mockProver) submitProof(t *testing.T, proverTaskSchema *types.GetTaskSchema, proofStatus proofStatus, errCode int) {
	proof := &message.ProofMsg{
		ProofDetail: &message.ProofDetail{
			ID: proverTaskSchema.TaskID,

@@ -206,5 +216,5 @@ func (r *mockProver) submitProof(t *testing.T, proverTaskSchema *types.GetTaskSc
		Post("http://" + r.coordinatorURL + "/coordinator/v1/submit_proof")
	assert.NoError(t, err)
	assert.Equal(t, http.StatusOK, resp.StatusCode())
-	assert.Equal(t, ctypes.Success, result.ErrCode)
+	assert.Equal(t, errCode, result.ErrCode)
}
@@ -36,8 +36,8 @@ type LoginResponse struct {

// GetTaskRequest defines the request structure for GetTask API
type GetTaskRequest struct {
-	ProverHeight uint64            `json:"prover_height"`
	TaskType     message.ProofType `json:"task_type"`
+	ProverHeight uint64            `json:"prover_height,omitempty"`
}

// GetTaskResponse defines the response structure for GetTask API
@@ -1,19 +1,20 @@
{
-  "prover_name": "my_prover",
+  "prover_name": "prover-1",
  "keystore_path": "keystore.json",
  "keystore_password": "prover-pwd",
-  "db_path": "bbolt_db",
+  "db_path": "unique-db-path-for-prover-1",
  "core": {
-    "params_path": "params"
+    "params_path": "params",
+    "proof_type": 2
  },
  "coordinator": {
-    "base_url": "https://coordinator/v1",
+    "base_url": "http://localhost:8555",
    "retry_count": 10,
    "retry_wait_time_sec": 10,
    "connection_timeout_sec": 30
  },
  "l2geth": {
-    "endpoint": "/var/lib/jenkins/workspace/SequencerPipeline/MyPrivateNetwork/geth.ipc",
+    "endpoint": "http://localhost:9999",
    "confirmations": "0x1"
  }
}
@@ -19,13 +19,13 @@ type Config struct {
	Core        *ProverCoreConfig  `json:"core"`
	DBPath      string             `json:"db_path"`
	Coordinator *CoordinatorConfig `json:"coordinator"`
-	L2Geth      *L2GethConfig      `json:"l2geth"`
+	L2Geth      *L2GethConfig      `json:"l2geth,omitempty"` // only for chunk_prover
}

// ProverCoreConfig load zk prover config.
type ProverCoreConfig struct {
	ParamsPath string            `json:"params_path"`
-	ProofType  message.ProofType `json:"prove_type,omitempty"` // 0: chunk prover (default type), 1: batch prover
+	ProofType  message.ProofType `json:"proof_type,omitempty"` // 1: chunk prover (default type), 2: batch prover
	DumpDir    string            `json:"dump_dir,omitempty"`
}
@@ -36,8 +36,8 @@ type Prover struct {
	ctx               context.Context
	cfg               *config.Config
	coordinatorClient *client.CoordinatorClient
-	l2GethClient      *ethclient.Client
	stack             *store.Stack
+	l2GethClient      *ethclient.Client // only applicable for a chunk_prover
	proverCore        *core.ProverCore

	isClosed int64
@@ -60,10 +60,16 @@ func NewProver(ctx context.Context, cfg *config.Config) (*Prover, error) {
		return nil, err
	}

-	// Collect geth node.
-	l2GethClient, err := ethclient.DialContext(ctx, cfg.L2Geth.Endpoint)
-	if err != nil {
-		return nil, err
+	var l2GethClient *ethclient.Client
+	if cfg.Core.ProofType == message.ProofTypeChunk {
+		if cfg.L2Geth == nil || cfg.L2Geth.Endpoint == "" {
+			return nil, errors.New("Missing l2geth config for chunk prover")
+		}
+		// Connect l2geth node. Only applicable for a chunk_prover.
+		l2GethClient, err = ethclient.DialContext(ctx, cfg.L2Geth.Endpoint)
+		if err != nil {
+			return nil, err
+		}
	}

	// Create prover_core instance
@@ -145,6 +151,13 @@ func (r *Prover) proveAndSubmit() error {
		}
	}

+	defer func() {
+		err = r.stack.Delete(task.Task.ID)
+		if err != nil {
+			log.Error("prover stack pop failed!", "err", err)
+		}
+	}()
+
	var proofMsg *message.ProofDetail
	if task.Times <= 2 {
		// If panic times <= 2, try to proof the task.
@@ -153,44 +166,37 @@ func (r *Prover) proveAndSubmit() error {
		}

		log.Info("start to prove task", "task-type", task.Task.Type, "task-id", task.Task.ID)
-		proofMsg = r.prove(task)
-	} else {
-		// when the prover has more than 3 times panic,
-		// it will omit to prove the task, submit StatusProofError and then Delete the task.
-		proofMsg = &message.ProofDetail{
-			Status: message.StatusProofError,
-			Error:  "zk proving panic",
-			ID:     task.Task.ID,
-			Type:   task.Task.Type,
+		proofMsg, err = r.prove(task)
+		if err != nil { // handling error from prove
+			return fmt.Errorf("failed to prove task, task-type: %v, err: %v", task.Task.Type, err)
		}

+		return r.submitProof(proofMsg)
	}

-	defer func() {
-		err = r.stack.Delete(task.Task.ID)
-		if err != nil {
-			log.Error("prover stack pop failed!", "err", err)
-		}
-	}()
-
-	return r.submitProof(proofMsg)
+	// when the prover has more than 3 times panic,
+	// it will omit to prove the task, submit StatusProofError and then Delete the task.
+	return fmt.Errorf("zk proving panic for task, task-type: %v, task-id: %v", task.Task.Type, task.Task.ID)
}

// fetchTaskFromCoordinator fetches a new task from the server
func (r *Prover) fetchTaskFromCoordinator() (*store.ProvingTask, error) {
-	// get the latest confirmed block number
-	latestBlockNumber, err := putils.GetLatestConfirmedBlockNumber(r.ctx, r.l2GethClient, r.cfg.L2Geth.Confirmations)
-	if err != nil {
-		return nil, fmt.Errorf("failed to fetch latest confirmed block number: %v", err)
-	}
-
-	if latestBlockNumber == 0 {
-		return nil, fmt.Errorf("omit to prove task of the genesis block, latestBlockNumber: %v", latestBlockNumber)
-	}
-
	// prepare the request
	req := &client.GetTaskRequest{
-		ProverHeight: latestBlockNumber,
-		TaskType:     r.Type(),
+		TaskType: r.Type(),
	}

+	if req.TaskType == message.ProofTypeChunk {
+		// get the latest confirmed block number
+		latestBlockNumber, err := putils.GetLatestConfirmedBlockNumber(r.ctx, r.l2GethClient, r.cfg.L2Geth.Confirmations)
+		if err != nil {
+			return nil, fmt.Errorf("failed to fetch latest confirmed block number: %v", err)
+		}
+
+		if latestBlockNumber == 0 {
+			return nil, fmt.Errorf("omit to prove task of the genesis block, latestBlockNumber: %v", latestBlockNumber)
+		}
+		req.ProverHeight = latestBlockNumber
+	}

	// send the request
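The proveAndSubmit/fetchTaskFromCoordinator restructuring is easier to follow stripped of the surrounding plumbing: the stack entry is always deleted via defer, a proving failure is returned as an error rather than converted into a StatusProofError message, a task that has already panicked more than twice is abandoned with an explicit error, and ProverHeight is only looked up for chunk tasks. A self-contained sketch of that control flow; the helper signatures are simplified stand-ins, not the prover's real types.

```go
package main

import (
	"errors"
	"fmt"
)

// provingTask is a stripped-down stand-in for store.ProvingTask.
type provingTask struct {
	ID    string
	Times int // number of times proving this task has already panicked
}

// proveAndSubmit mirrors the new flow: defer the stack delete, return prove
// errors to the caller, and give up with an error after repeated panics.
func proveAndSubmit(task *provingTask,
	prove func(*provingTask) (string, error),
	submit func(string) error,
	deleteFromStack func(string) error) error {

	defer func() {
		if err := deleteFromStack(task.ID); err != nil {
			fmt.Println("prover stack pop failed!", err)
		}
	}()

	if task.Times <= 2 {
		proof, err := prove(task)
		if err != nil {
			return fmt.Errorf("failed to prove task %s: %w", task.ID, err)
		}
		return submit(proof)
	}
	return fmt.Errorf("zk proving panic for task %s", task.ID)
}

func main() {
	ok := func(*provingTask) (string, error) { return "proof-bytes", nil }
	bad := func(*provingTask) (string, error) { return "", errors.New("witness generation failed") }
	submit := func(string) error { return nil }
	del := func(string) error { return nil }

	fmt.Println(proveAndSubmit(&provingTask{ID: "a", Times: 0}, ok, submit, del))  // <nil>
	fmt.Println(proveAndSubmit(&provingTask{ID: "b", Times: 1}, bad, submit, del)) // prove error
	fmt.Println(proveAndSubmit(&provingTask{ID: "c", Times: 3}, ok, submit, del))  // give-up error
}
```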
@@ -238,8 +244,9 @@ func (r *Prover) fetchTaskFromCoordinator() (*store.ProvingTask, error) {
	return provingTask, nil
}

-func (r *Prover) prove(task *store.ProvingTask) (detail *message.ProofDetail) {
-	detail = &message.ProofDetail{
+// prove function tries to prove a task. It returns an error if the proof fails.
+func (r *Prover) prove(task *store.ProvingTask) (*message.ProofDetail, error) {
+	detail := &message.ProofDetail{
		ID:     task.Task.ID,
		Type:   task.Task.Type,
		Status: message.StatusOk,
@@ -249,30 +256,28 @@ func (r *Prover) prove(task *store.ProvingTask) (detail *message.ProofDetail) {
	case message.ProofTypeChunk:
		proof, err := r.proveChunk(task)
		if err != nil {
			log.Error("prove chunk failed!", "task-id", task.Task.ID, "err", err)
-			detail.Status = message.StatusProofError
-			detail.Error = err.Error()
-			return
+			return detail, err
		}
		detail.ChunkProof = proof
-		log.Info("prove chunk successfully!", "task-id", task.Task.ID)
-		return
+		log.Info("prove chunk success", "task-id", task.Task.ID)
+		return detail, nil

	case message.ProofTypeBatch:
		proof, err := r.proveBatch(task)
		if err != nil {
			log.Error("prove batch failed!", "task-id", task.Task.ID, "err", err)
-			detail.Status = message.StatusProofError
-			detail.Error = err.Error()
-			return
+			return detail, err
		}
		detail.BatchProof = proof
-		log.Info("prove batch successfully!", "task-id", task.Task.ID)
-		return
+		log.Info("prove batch success", "task-id", task.Task.ID)
+		return detail, nil

	default:
		log.Error("invalid task type", "task-id", task.Task.ID, "task-type", task.Task.Type)
-		return
+		err := fmt.Errorf("invalid task type: %v", task.Task.Type)
+		return detail, err
	}
}
@@ -282,7 +287,7 @@ func (r *Prover) proveChunk(task *store.ProvingTask) (*message.ChunkProof, error
	}
	traces, err := r.getSortedTracesByHashes(task.Task.ChunkTaskDetail.BlockHashes)
	if err != nil {
-		return nil, fmt.Errorf("get traces from eth node failed, block hashes: %v", task.Task.ChunkTaskDetail.BlockHashes)
+		return nil, fmt.Errorf("get traces from eth node failed, block hashes: %v, err: %v", task.Task.ChunkTaskDetail.BlockHashes, err)
	}
	return r.proverCore.ProveChunk(task.Task.ID, traces)
}