Compare commits

..

7 Commits

Author      SHA1        Message                                                               Date
Steven      e8f5251ae2  fix: upgrade libzkp to use scroll-prover v0.5.12 (#769)               2023-08-10 20:10:30 +08:00
            Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
HAOYUatHZ   d10438ae0f  ci(github): temp fix gacts/install-geth-tools (#770)                  2023-08-10 19:28:31 +08:00
maskpp      a7ce900aa7  ci(github): add prover_stats_api docker build action (#768)           2023-08-10 17:21:27 +08:00
            Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
maskpp      49ec0b8fa8  fix(integration test): fix TestCoordinatorProverInteraction (#765)    2023-08-10 17:20:25 +08:00
            Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
            Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
georgehao   244e5e915a  fix(coordinator): fix update proving_status bug (#759)                2023-08-10 17:02:53 +08:00
            Co-authored-by: HAOYUatHZ <37070449+HAOYUatHZ@users.noreply.github.com>
HAOYUatHZ   dc53d6d022  ci(github): speed up docker builds (#766)                             2023-08-10 14:59:51 +08:00
HAOYUatHZ   6dc89a9c0b  fix coordinator version in docker (#764)                              2023-08-10 00:24:54 +08:00
24 changed files with 340 additions and 195 deletions

View File

@@ -43,6 +43,8 @@ jobs:
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
with:
version: 1.10.19
- name: Lint
working-directory: 'bridge'
run: |
@@ -92,6 +94,8 @@ jobs:
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
with:
version: 1.10.19
- name: Build prerequisites
run: |
make dev_docker

View File

@@ -88,6 +88,8 @@ jobs:
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
with:
version: 1.10.19
- name: Build prerequisites
run: |
make dev_docker

View File

@@ -104,6 +104,8 @@ jobs:
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
with:
version: 1.10.19
- name: Build prerequisites
run: |
make dev_docker

View File

@@ -81,6 +81,8 @@ jobs:
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
with:
version: 1.10.19
- name: Build prerequisites
run: |
make dev_docker

View File

@@ -6,7 +6,7 @@ on:
- v**
jobs:
build-and-push:
event_watcher:
runs-on: ubuntu-latest
steps:
- name: Checkout code
@@ -27,6 +27,18 @@ jobs:
tags: scrolltech/event-watcher:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
gas_oracle:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push gas_oracle docker
uses: docker/build-push-action@v2
with:
@@ -36,6 +48,18 @@ jobs:
tags: scrolltech/gas-oracle:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
msg_relayer:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push msg_relayer docker
uses: docker/build-push-action@v2
with:
@@ -45,6 +69,18 @@ jobs:
tags: scrolltech/msg-relayer:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
rollup_relayer:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push rollup_relayer docker
uses: docker/build-push-action@v2
with:
@@ -54,6 +90,18 @@ jobs:
tags: scrolltech/rollup-relayer:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
bridgehistoryapi-cross-msg-fetcher:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push bridgehistoryapi-cross-msg-fetcher docker
uses: docker/build-push-action@v2
with:
@@ -63,6 +111,18 @@ jobs:
tags: scrolltech/bridgehistoryapi-cross-msg-fetcher:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
bridgehistoryapi-server:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push bridgehistoryapi-server docker
uses: docker/build-push-action@v2
with:
@@ -72,6 +132,18 @@ jobs:
tags: scrolltech/bridgehistoryapi-server:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
coordinator:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push coordinator docker
uses: docker/build-push-action@v2
with:
@@ -81,3 +153,24 @@ jobs:
tags: scrolltech/coordinator:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}
prover-stats-api:
runs-on: ubuntu-latest
steps:
- name: Checkout code
uses: actions/checkout@v2
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2
- name: Login to Docker Hub
uses: docker/login-action@v2
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Build and push prover-stats-api docker
uses: docker/build-push-action@v2
with:
context: .
file: ./build/dockerfiles/prover-stats-api.Dockerfile
push: true
tags: scrolltech/prover-stats-api:${{github.ref_name}}
# cache-from: type=gha,scope=${{ github.workflow }}
# cache-to: type=gha,scope=${{ github.workflow }}

View File

@@ -33,6 +33,8 @@ jobs:
version: '0.8.16'
- name: Install Geth Tools
uses: gacts/install-geth-tools@v1
with:
version: 1.10.19
- name: Build prerequisites
run: |
make dev_docker

View File

@@ -37,7 +37,7 @@ COPY . .
RUN cp -r ./common/libzkp/interface ./coordinator/internal/logic/verifier/lib
COPY --from=zkp-builder /app/target/release/libzkp.so ./coordinator/internal/logic/verifier/lib/
COPY --from=zkp-builder /app/target/release/libzktrie.so ./coordinator/internal/logic/verifier/lib/
RUN cd ./coordinator && go build -v -p 4 -o /bin/coordinator ./cmd && mv internal/logic/verifier/lib /bin/
RUN cd ./coordinator && make coordinator_skip_libzkp && mv ./build/bin/coordinator /bin/coordinator && mv internal/logic/verifier/lib /bin/
# Pull coordinator into a second stage deploy alpine container
FROM ubuntu:20.04
@@ -46,6 +46,6 @@ ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/li
RUN mkdir -p /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib
COPY --from=builder /bin/coordinator /bin/
RUN /bin/coordinator --version
ENTRYPOINT ["/bin/coordinator"]

View File

@@ -32,7 +32,7 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"ark-std",
"env_logger 0.10.0",
@@ -432,7 +432,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"eth-types",
"ethers-core",
@@ -1048,7 +1048,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"ethers-core",
"ethers-signers",
@@ -1225,7 +1225,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"eth-types",
"geth-utils",
@@ -1438,7 +1438,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"digest 0.7.6",
"eth-types",
@@ -1478,7 +1478,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"env_logger 0.9.3",
"gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)",
@@ -2076,7 +2076,7 @@ dependencies = [
[[package]]
name = "keccak256"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"env_logger 0.9.3",
"eth-types",
@@ -2263,7 +2263,7 @@ dependencies = [
[[package]]
name = "mock"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"eth-types",
"ethers-core",
@@ -2278,7 +2278,7 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"bus-mapping",
"eth-types",
@@ -2754,7 +2754,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.11#461072246b6a9320fb81e0d037ab978019f9cae4"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.12#ef945901bee26c4b79be8af60e259443392368fa"
dependencies = [
"aggregator",
"anyhow",
@@ -4039,7 +4039,7 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
[[package]]
name = "types"
version = "0.4.0"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.11#461072246b6a9320fb81e0d037ab978019f9cae4"
source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.12#ef945901bee26c4b79be8af60e259443392368fa"
dependencies = [
"base64 0.13.1",
"blake2",
@@ -4490,7 +4490,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
[[package]]
name = "zkevm-circuits"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.10#87cae118ffdcf3a085a7c3c24268f7a0df21fcd4"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
dependencies = [
"array-init",
"bus-mapping",

View File

@@ -20,8 +20,8 @@ maingate = { git = "https://github.com/scroll-tech/halo2wrong", branch = "halo2-
halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = "0.3.1-derive-serde" }
[dependencies]
prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.11" }
types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.11" }
prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.12" }
types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.12" }
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
log = "0.4"

View File

@@ -126,7 +126,7 @@ const (
ProvingTaskUnassigned
// ProvingTaskAssigned : proving_task is assigned to be proved
ProvingTaskAssigned
// ProvingTaskProved : proof has been returned by prover
// ProvingTaskProved DEPRECATED: proof has been returned by prover
ProvingTaskProved
// ProvingTaskVerified : proof is valid
ProvingTaskVerified

View File

@@ -20,12 +20,17 @@ var (
MessageRelayerApp MockAppName = "message-relayer-test"
// RollupRelayerApp the name of mock rollup-relayer app.
RollupRelayerApp MockAppName = "rollup-relayer-test"
// CoordinatorApp the name of mock coordinator app.
CoordinatorApp MockAppName = "coordinator-test"
// DBCliApp the name of mock database app.
DBCliApp MockAppName = "db_cli-test"
// ProverApp the name of mock prover app.
ProverApp MockAppName = "prover-test"
// CoordinatorApp the name of mock coordinator app.
CoordinatorApp MockAppName = "coordinator-test"
// ChunkProverApp the name of mock chunk prover app.
ChunkProverApp MockAppName = "chunkProver-test"
// BatchProverApp the name of mock batch prover app.
BatchProverApp MockAppName = "batchProver-test"
)
// RegisterSimulation register initializer function for integration-test.

View File

@@ -6,7 +6,7 @@ import (
"strings"
)
var tag = "v4.1.28"
var tag = "v4.1.31"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {
@@ -20,11 +20,13 @@ var commit = func() string {
}
}
}
return ""
// Set default value for integration test.
return "000000"
}()
// ZkVersion is commit-id of common/libzkp/impl/cargo.lock/scroll-prover and halo2, concated by a "-"
var ZkVersion string
// ZkVersion is commit-id of common/libzkp/impl/cargo.lock/scroll-prover and halo2, contacted by a "-"
// The default `000000-000000` is set for integration test, and will be overwritten by coordinator's & prover's actual compilations (see their Makefiles).
var ZkVersion = "000000-000000"
// Version denote the version of scroll protocol, including the l2geth, relayer, coordinator, prover, contracts and etc.
var Version = fmt.Sprintf("%s-%s-%s", tag, commit, ZkVersion)
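
For reference, a minimal sketch of how the version string defined in this hunk is assembled. The values below are the integration-test placeholders shown above; real builds derive commit from build info and inject ZkVersion at link time via the `-ldflags "-X ..."` flag used by the coordinator and prover Makefiles (see the coordinator Makefile hunk that follows).

```go
package main

import "fmt"

// Placeholder defaults mirroring the hunk above; actual builds replace them.
var (
	tag       = "v4.1.31"
	commit    = "000000"        // normally read from debug.ReadBuildInfo()
	ZkVersion = "000000-000000" // overwritten via `-ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}"`
)

func main() {
	version := fmt.Sprintf("%s-%s-%s", tag, commit, ZkVersion)
	fmt.Println(version) // prints: v4.1.31-000000-000000-000000
}
```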

View File

@@ -1,4 +1,4 @@
.PHONY: lint docker clean coordinator mock_coordinator
.PHONY: lint docker clean coordinator coordinator_skip_libzkp mock_coordinator
IMAGE_NAME=coordinator
IMAGE_VERSION=latest
@@ -25,6 +25,9 @@ libzkp:
coordinator: libzkp ## Builds the Coordinator instance.
go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator ./cmd
coordinator_skip_libzkp:
go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator ./cmd
mock_coordinator: ## Builds the mocked Coordinator instance.
go build -tags="mock_prover mock_verifier" -o $(PWD)/build/bin/coordinator ./cmd

View File

@@ -56,7 +56,7 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, fmt.Errorf("get prover version from context failed")
}
if !version.CheckScrollProverVersion(proverVersion.(string)) {
return nil, fmt.Errorf("incompatible prover version. please upgrade your prover")
return nil, fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s", proverVersion.(string), version.Version)
}
batchTasks, err := bp.batchOrm.UpdateUnassignedBatchReturning(ctx, 1)

View File

@@ -56,7 +56,7 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
return nil, fmt.Errorf("get prover version from context failed")
}
if !version.CheckScrollProverVersion(proverVersion.(string)) {
return nil, fmt.Errorf("incompatible prover version. please upgrade your prover")
return nil, fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s", version.Version, proverVersion.(string))
}
// load and send chunk tasks
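
The definition of version.CheckScrollProverVersion is not part of this diff, so the following is only a hypothetical sketch of a check of this shape: comparing the tag portion of the prover's reported version against the coordinator's own version, which is what makes the expected/actual values added to the error messages above useful. The body here is assumed, not the repository's implementation.

```go
package version

import "strings"

// Version is the coordinator's full version string, e.g. "v4.1.31-000000-000000-000000".
var Version = "v4.1.31-000000-000000-000000"

// CheckScrollProverVersion is a hypothetical stand-in: it accepts a prover whose
// tag (the part before the first '-') matches the coordinator's own tag.
func CheckScrollProverVersion(proverVersion string) bool {
	tagOf := func(v string) string {
		if i := strings.Index(v, "-"); i >= 0 {
			return v[:i]
		}
		return v
	}
	return tagOf(proverVersion) == tagOf(Version)
}
```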

View File

@@ -2,6 +2,7 @@ package submitproof
import (
"context"
"encoding/json"
"errors"
"fmt"
"time"
@@ -34,8 +35,12 @@ var (
ErrValidatorFailureProofMsgStatusNotOk = errors.New("validator failure proof msg status not ok")
// ErrValidatorFailureProverTaskEmpty get none prover task
ErrValidatorFailureProverTaskEmpty = errors.New("validator failure get none prover task for the proof")
// ErrValidatorFailureProverInfoHasProofValid proof is vaild
ErrValidatorFailureProverInfoHasProofValid = errors.New("validator failure prover task info has proof valid")
// ErrValidatorFailureProverTaskCannotSubmitTwice prove task can not submit proof twice
ErrValidatorFailureProverTaskCannotSubmitTwice = errors.New("validator failure prove task cannot submit proof twice")
// ErrValidatorFailureProofTimeout the submit proof is timeout
ErrValidatorFailureProofTimeout = errors.New("validator failure submit proof timeout")
// ErrValidatorFailureTaskHaveVerifiedSuccess have proved success and verified success
ErrValidatorFailureTaskHaveVerifiedSuccess = errors.New("validator failure chunk/batch have proved and verified success")
)
// ProofReceiverLogic the proof receiver logic
@@ -83,47 +88,15 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.P
return ErrValidatorFailureProverTaskEmpty
}
if err = m.validator(proverTask, pk, proofMsg); err != nil {
if errors.Is(err, ErrValidatorFailureProofMsgStatusNotOk) {
m.proofFailure(ctx, proofMsg.ID, pk, proofMsg.Type)
}
return err
}
proofTime := time.Since(proverTask.CreatedAt)
proofTimeSec := uint64(proofTime.Seconds())
// store proof content
var storeProofErr error
switch proofMsg.Type {
case message.ProofTypeChunk:
storeProofErr = m.db.Transaction(func(tx *gorm.DB) error {
if dbErr := m.chunkOrm.UpdateProofByHash(ctx, proofMsg.ID, proofMsg.ChunkProof, proofTimeSec, tx); dbErr != nil {
return fmt.Errorf("failed to store chunk proof into db, err:%w", dbErr)
}
if dbErr := m.chunkOrm.UpdateProvingStatus(ctx, proofMsg.ID, types.ProvingTaskProved, tx); dbErr != nil {
return fmt.Errorf("failed to update chunk task status as proved, error:%w", dbErr)
}
return nil
})
case message.ProofTypeBatch:
storeProofErr = m.db.Transaction(func(tx *gorm.DB) error {
if dbErr := m.batchOrm.UpdateProofByHash(ctx, proofMsg.ID, proofMsg.BatchProof, proofTimeSec, tx); dbErr != nil {
return fmt.Errorf("failed to store batch proof into db, error:%w", dbErr)
}
if dbErr := m.batchOrm.UpdateProvingStatus(ctx, proofMsg.ID, types.ProvingTaskProved, tx); dbErr != nil {
return fmt.Errorf("failed to update batch task status as proved, error:%w", dbErr)
}
return nil
})
}
if storeProofErr != nil {
m.proofFailure(ctx, proofMsg.ID, pk, proofMsg.Type)
log.Error("failed to store basic proof into db", "error", storeProofErr)
return storeProofErr
}
log.Info("handling zk proof", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
"prover pk", pk, "prove type", proverTask.TaskType, "proof time", proofTimeSec)
coordinatorProofsReceivedTotalCounter.Inc(1)
if err = m.validator(ctx, proverTask, pk, proofMsg); err != nil {
return err
}
var success bool
var verifyErr error
@@ -134,7 +107,7 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.P
}
if verifyErr != nil || !success {
m.proofFailure(ctx, proofMsg.ID, pk, proofMsg.Type)
m.proofFailure(ctx, proofMsg.ID, pk, proofMsg)
coordinatorProofsVerifiedFailedTimeTimer.Update(proofTime)
log.Info("proof verified by coordinator failed", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
@@ -149,8 +122,10 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.P
log.Info("proof verified and valid", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
"prover pk", pk, "prove type", proofMsg.Type, "proof time", proofTimeSec)
if err := m.closeProofTask(ctx, proofMsg.ID, pk, proofMsg); err != nil {
m.proofRecover(ctx, proofMsg.ID, pk, proofMsg.Type)
coordinatorProofsReceivedTotalCounter.Inc(1)
if err := m.closeProofTask(ctx, proofMsg.ID, pk, proofMsg, proofTimeSec); err != nil {
m.proofRecover(ctx, proofMsg.ID, pk, proofMsg)
return err
}
@@ -178,49 +153,78 @@ func (m *ProofReceiverLogic) checkAreAllChunkProofsReady(ctx context.Context, ch
return nil
}
func (m *ProofReceiverLogic) validator(proverTask *orm.ProverTask, pk string, proofMsg *message.ProofMsg) error {
func (m *ProofReceiverLogic) validator(ctx context.Context, proverTask *orm.ProverTask, pk string, proofMsg *message.ProofMsg) error {
// Ensure this prover is eligible to participate in the prover task.
if types.ProverProveStatus(proverTask.ProvingStatus) == types.ProverProofValid {
// In order to prevent DoS attacks, it is forbidden to repeatedly submit valid proofs.
// TODO: Defend invalid proof resubmissions by one of the following two methods:
// (i) slash the prover for each submission of invalid proof
// (ii) set the maximum failure retry times
log.Warn("prover has already submitted valid proof in proof session", "prover name", proverTask.ProverName,
"prover pk", proverTask.ProverPublicKey, "proof type", proverTask.TaskType, "proof id", proofMsg.ProofDetail.ID)
return ErrValidatorFailureProverInfoHasProofValid
log.Warn("the prover task cannot submit twice", "hash", proofMsg.ID, "prover pk", proverTask.ProverPublicKey,
"prover name", proverTask.ProverName, "proof type", proverTask.TaskType)
return ErrValidatorFailureProverTaskCannotSubmitTwice
}
proofTime := time.Since(proverTask.CreatedAt)
proofTimeSec := uint64(proofTime.Seconds())
log.Info("handling zk proof", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
"prover pk", pk, "prove type", proverTask.TaskType, "proof time", proofTimeSec)
if proofMsg.Status != message.StatusOk {
coordinatorProofsGeneratedFailedTimeTimer.Update(proofTime)
log.Info("proof generated by prover failed", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
"prover pk", pk, "prove type", proofMsg.Type, "proof time", proofTimeSec, "error", proofMsg.Error)
"prover pk", pk, "prove type", proofMsg.Type, "error", proofMsg.Error)
if updateErr := m.proverTaskOrm.UpdateProverTaskProvingStatus(ctx, proofMsg.Type, proofMsg.ID, pk, types.ProverProofInvalid); updateErr != nil {
log.Error("proof generated by prover failed update prover task proving status failure", "proof id", proofMsg.ID,
"prover name", proverTask.ProverName, "prover pk", pk, "prove type", proofMsg.Type, "error", proofMsg.Error)
}
return ErrValidatorFailureProofMsgStatusNotOk
}
// if prover task FailureType is SessionInfoFailureTimeout, the submit proof is timeout, need skip it
if types.ProverTaskFailureType(proverTask.FailureType) == types.ProverTaskFailureTypeTimeout {
log.Info("proof submit proof have timeout, skip this submit proof", "hash", proofMsg.ID, "proof type", proverTask.TaskType,
"prover name", proverTask.ProverName, "prover public key", pk, "proof time", proofTimeSec)
return ErrValidatorFailureProofTimeout
}
// store the proof to prover task
if updateTaskProofErr := m.updateProverTaskProof(ctx, pk, proofMsg); updateTaskProofErr != nil {
log.Warn("update prover task proof failure", "hash", proofMsg.ID, "public key", pk,
"prover name", proverTask.ProverName, "error", updateTaskProofErr)
}
// if the batch/chunk have proved and verifier success, need skip this submit proof
if m.checkIsTaskSuccess(ctx, proofMsg.ID, proofMsg.Type) {
log.Info("the prove task have proved and verifier success, skip this submit proof", "hash", proofMsg.ID,
"proof type", proverTask.TaskType, "prover name", proverTask.ProverName, "prover public key", pk)
return ErrValidatorFailureTaskHaveVerifiedSuccess
}
return nil
}
func (m *ProofReceiverLogic) proofFailure(ctx context.Context, hash string, pubKey string, proofMsgType message.ProofType) {
if err := m.updateProofStatus(ctx, hash, pubKey, proofMsgType, types.ProvingTaskFailed); err != nil {
func (m *ProofReceiverLogic) proofFailure(ctx context.Context, hash string, pubKey string, proofMsg *message.ProofMsg) {
log.Info("proof failure update proof status", "hash", hash, "public key", pubKey,
"proof type", proofMsg.Type.String(), "status", types.ProvingTaskFailed.String())
if err := m.updateProofStatus(ctx, hash, pubKey, proofMsg, types.ProvingTaskFailed, 0); err != nil {
log.Error("failed to updated proof status ProvingTaskFailed", "hash", hash, "pubKey", pubKey, "error", err)
}
coordinatorSessionsFailedTotalCounter.Inc(1)
}
func (m *ProofReceiverLogic) proofRecover(ctx context.Context, hash string, pubKey string, proofMsgType message.ProofType) {
if err := m.updateProofStatus(ctx, hash, pubKey, proofMsgType, types.ProvingTaskUnassigned); err != nil {
func (m *ProofReceiverLogic) proofRecover(ctx context.Context, hash string, pubKey string, proofMsg *message.ProofMsg) {
log.Info("proof recover update proof status", "hash", hash, "public key", pubKey,
"proof type", proofMsg.Type.String(), "status", types.ProvingTaskUnassigned.String())
if err := m.updateProofStatus(ctx, hash, pubKey, proofMsg, types.ProvingTaskUnassigned, 0); err != nil {
log.Error("failed to updated proof status ProvingTaskUnassigned", "hash", hash, "pubKey", pubKey, "error", err)
}
}
func (m *ProofReceiverLogic) closeProofTask(ctx context.Context, hash string, pubKey string, proofMsg *message.ProofMsg) error {
if err := m.updateProofStatus(ctx, hash, pubKey, proofMsg.Type, types.ProvingTaskVerified); err != nil {
func (m *ProofReceiverLogic) closeProofTask(ctx context.Context, hash string, pubKey string, proofMsg *message.ProofMsg, proofTimeSec uint64) error {
log.Info("proof close task update proof status", "hash", hash, "public key", pubKey,
"proof type", proofMsg.Type.String(), "status", types.ProvingTaskVerified.String())
if err := m.updateProofStatus(ctx, hash, pubKey, proofMsg, types.ProvingTaskVerified, proofTimeSec); err != nil {
log.Error("failed to updated proof status ProvingTaskVerified", "hash", hash, "pubKey", pubKey, "error", err)
return err
}
@@ -228,13 +232,7 @@ func (m *ProofReceiverLogic) closeProofTask(ctx context.Context, hash string, pu
}
// UpdateProofStatus update the chunk/batch task and session info status
func (m *ProofReceiverLogic) updateProofStatus(ctx context.Context, hash string, proverPublicKey string, proofMsgType message.ProofType, status types.ProvingStatus) error {
// if the prover task failure type is SessionInfoFailureTimeout,
// just skip update the status because the proof result come too late.
if m.checkIsTimeoutFailure(ctx, hash, proverPublicKey) {
return nil
}
func (m *ProofReceiverLogic) updateProofStatus(ctx context.Context, hash string, proverPublicKey string, proofMsg *message.ProofMsg, status types.ProvingStatus, proofTimeSec uint64) error {
var proverTaskStatus types.ProverProveStatus
switch status {
case types.ProvingTaskFailed, types.ProvingTaskUnassigned:
@@ -244,16 +242,31 @@ func (m *ProofReceiverLogic) updateProofStatus(ctx context.Context, hash string,
}
err := m.db.Transaction(func(tx *gorm.DB) error {
if updateErr := m.proverTaskOrm.UpdateProverTaskProvingStatus(ctx, proofMsgType, hash, proverPublicKey, proverTaskStatus, tx); updateErr != nil {
if updateErr := m.proverTaskOrm.UpdateProverTaskProvingStatus(ctx, proofMsg.Type, hash, proverPublicKey, proverTaskStatus, tx); updateErr != nil {
return updateErr
}
// if the block batch has proof verified, so the failed status not update block batch proving status
if status == types.ProvingTaskFailed && m.checkIsTaskSuccess(ctx, hash, proofMsgType) {
if status == types.ProvingTaskFailed && m.checkIsTaskSuccess(ctx, hash, proofMsg.Type) {
log.Info("update proof status ProvingTaskFailed skip because other prover have prove success", "hash", hash, "public key", proverPublicKey)
return nil
}
switch proofMsgType {
if status == types.ProvingTaskVerified {
var storeProofErr error
switch proofMsg.Type {
case message.ProofTypeChunk:
storeProofErr = m.chunkOrm.UpdateProofByHash(ctx, proofMsg.ID, proofMsg.ChunkProof, proofTimeSec, tx)
case message.ProofTypeBatch:
storeProofErr = m.batchOrm.UpdateProofByHash(ctx, proofMsg.ID, proofMsg.BatchProof, proofTimeSec, tx)
}
if storeProofErr != nil {
log.Error("failed to store chunk/batch proof into db", "hash", hash, "public key", proverPublicKey, "error", storeProofErr)
return storeProofErr
}
}
switch proofMsg.Type {
case message.ProofTypeChunk:
if err := m.chunkOrm.UpdateProvingStatus(ctx, hash, status, tx); err != nil {
log.Error("failed to update chunk proving_status as failed", "msg.ID", hash, "error", err)
@@ -272,7 +285,7 @@ func (m *ProofReceiverLogic) updateProofStatus(ctx context.Context, hash string,
return err
}
if status == types.ProvingTaskVerified && proofMsgType == message.ProofTypeChunk {
if status == types.ProvingTaskVerified && proofMsg.Type == message.ProofTypeChunk {
if checkReadyErr := m.checkAreAllChunkProofsReady(ctx, hash); checkReadyErr != nil {
log.Error("failed to check are all chunk proofs ready", "error", checkReadyErr)
return checkReadyErr
@@ -302,14 +315,19 @@ func (m *ProofReceiverLogic) checkIsTaskSuccess(ctx context.Context, hash string
return provingStatus == types.ProvingTaskVerified
}
func (m *ProofReceiverLogic) checkIsTimeoutFailure(ctx context.Context, hash, proverPublicKey string) bool {
proverTask, err := m.proverTaskOrm.GetProverTaskByTaskIDAndPubKey(ctx, hash, proverPublicKey)
if err != nil {
return false
func (m *ProofReceiverLogic) updateProverTaskProof(ctx context.Context, pk string, proofMsg *message.ProofMsg) error {
// store the proof to prover task
var proofBytes []byte
var marshalErr error
switch proofMsg.Type {
case message.ProofTypeChunk:
proofBytes, marshalErr = json.Marshal(proofMsg.ChunkProof)
case message.ProofTypeBatch:
proofBytes, marshalErr = json.Marshal(proofMsg.BatchProof)
}
if types.ProverTaskFailureType(proverTask.FailureType) == types.ProverTaskFailureTypeTimeout {
return true
if len(proofBytes) == 0 || marshalErr != nil {
return fmt.Errorf("updateProverTaskProof marshal proof error:%w", marshalErr)
}
return false
return m.proverTaskOrm.UpdateProverTaskProof(ctx, proofMsg.Type, proofMsg.ID, pk, proofBytes)
}
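
In outline, the refactored receiver now validates first, verifies, and only persists the chunk/batch proof together with the ProvingTaskVerified status inside one transaction; on a persistence failure the task is recovered to ProvingTaskUnassigned. Below is a simplified, self-contained sketch of that control flow; all type and function names are illustrative stand-ins, not the repository's definitions.

```go
package main

import (
	"errors"
	"fmt"
	"time"
)

// Illustrative stand-ins for the coordinator's types.
type proofMsg struct {
	ID       string
	StatusOk bool // prover reported success
}

type proverTask struct {
	CreatedAt       time.Time
	AlreadyVerified bool // a valid proof was already accepted for this task
}

var (
	errCannotSubmitTwice   = errors.New("prover task cannot submit proof twice")
	errProofMsgStatusNotOk = errors.New("proof msg status not ok")
)

// handle mirrors the new ordering: validate, verify, then store proof and mark verified together.
func handle(task proverTask, p proofMsg,
	verify func(proofMsg) (bool, error), // coordinator-side verifier call
	storeAndClose func(proofMsg, uint64) error, // one transaction: store proof, set ProvingTaskVerified
	markFailed, markUnassigned func(string),
) error {
	if task.AlreadyVerified { // validation: reject duplicate valid submissions
		return errCannotSubmitTwice
	}
	if !p.StatusOk { // validation: prover-side failure
		return errProofMsgStatusNotOk
	}
	proofTimeSec := uint64(time.Since(task.CreatedAt).Seconds())

	ok, err := verify(p)
	if err != nil || !ok {
		markFailed(p.ID) // ProvingTaskFailed
		return err
	}
	if err := storeAndClose(p, proofTimeSec); err != nil {
		markUnassigned(p.ID) // recover: ProvingTaskUnassigned, task can be reassigned
		return err
	}
	return nil
}

func main() {
	err := handle(
		proverTask{CreatedAt: time.Now()},
		proofMsg{ID: "chunk-1", StatusOk: true},
		func(proofMsg) (bool, error) { return true, nil },
		func(proofMsg, uint64) error { return nil },
		func(string) {}, func(string) {},
	)
	fmt.Println("handled, err =", err)
}
```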

View File

@@ -112,6 +112,20 @@ func (o *ProverTask) GetProverTaskByTaskIDAndPubKey(ctx context.Context, taskID,
return &proverTask, nil
}
// GetProvingStatusByTaskID retrieves the proving status of a prover task
func (o *ProverTask) GetProvingStatusByTaskID(ctx context.Context, taskID string) (types.ProverProveStatus, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Select("proving_status")
db = db.Where("task_id = ?", taskID)
var proverTask ProverTask
if err := db.Find(&proverTask).Error; err != nil {
return types.ProverProofInvalid, fmt.Errorf("ProverTask.GetProvingStatusByTaskID error: %w, taskID: %v", err, taskID)
}
return types.ProverProveStatus(proverTask.ProvingStatus), nil
}
// GetAssignedProverTasks get the assigned prover task
func (o *ProverTask) GetAssignedProverTasks(ctx context.Context, limit int) ([]ProverTask, error) {
db := o.db.WithContext(ctx)
@@ -146,6 +160,19 @@ func (o *ProverTask) SetProverTask(ctx context.Context, proverTask *ProverTask,
return nil
}
// UpdateProverTaskProof update the prover task's proof
func (o *ProverTask) UpdateProverTaskProof(ctx context.Context, proofType message.ProofType, taskID string, pk string, proof []byte) error {
db := o.db
db = db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type = ? AND task_id = ? AND prover_public_key = ?", int(proofType), taskID, pk)
if err := db.Update("proof", proof).Error; err != nil {
return fmt.Errorf("ProverTask.UpdateProverTaskProof error: %w, proof type: %v, taskID: %v, prover public key: %v", err, proofType.String(), taskID, pk)
}
return nil
}
// UpdateProverTaskProvingStatus updates the proving_status of a specific ProverTask record.
func (o *ProverTask) UpdateProverTaskProvingStatus(ctx context.Context, proofType message.ProofType, taskID string, pk string, status types.ProverProveStatus, dbTX ...*gorm.DB) error {
db := o.db
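
A brief usage sketch of the two new helpers, based only on the signatures above. It assumes the coordinator's internal orm, message and types packages, plus an initialized proverTaskOrm and surrounding variables, so it is illustrative rather than standalone.

```go
// Illustrative only; proverTaskOrm, ctx, taskID, proverPublicKey and chunkProof
// are assumed to exist in the surrounding coordinator code.
proofBytes, err := json.Marshal(chunkProof)
if err == nil {
	_ = proverTaskOrm.UpdateProverTaskProof(ctx, message.ProofTypeChunk, taskID, proverPublicKey, proofBytes)
}

// Read back the per-task proving status, e.g. from a test assertion loop.
status, err := proverTaskOrm.GetProvingStatusByTaskID(ctx, taskID)
if err == nil && status == types.ProverProofValid {
	// this prover task already holds a valid proof
}
```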

View File

@@ -38,10 +38,11 @@ var (
base *docker.App
db *gorm.DB
l2BlockOrm *orm.L2Block
chunkOrm *orm.Chunk
batchOrm *orm.Batch
db *gorm.DB
l2BlockOrm *orm.L2Block
chunkOrm *orm.Chunk
batchOrm *orm.Batch
proverTaskOrm *orm.ProverTask
wrappedBlock1 *types.WrappedBlock
wrappedBlock2 *types.WrappedBlock
@@ -130,6 +131,7 @@ func setEnv(t *testing.T) {
batchOrm = orm.NewBatch(db)
chunkOrm = orm.NewChunk(db)
l2BlockOrm = orm.NewL2Block(db)
proverTaskOrm = orm.NewProverTask(db)
templateBlockTrace, err := os.ReadFile("../../common/testdata/blockTrace_02.json")
assert.NoError(t, err)
@@ -365,8 +367,12 @@ func testProofGeneratedFailed(t *testing.T) {
tickStop = time.Tick(time.Minute)
)
var chunkProofStatus types.ProvingStatus
var batchProofStatus types.ProvingStatus
var (
chunkProofStatus types.ProvingStatus
batchProofStatus types.ProvingStatus
chunkProverTaskProvingStatus types.ProverProveStatus
batchProverTaskProvingStatus types.ProverProveStatus
)
for {
select {
@@ -375,7 +381,15 @@ func testProofGeneratedFailed(t *testing.T) {
assert.NoError(t, err)
batchProofStatus, err = batchOrm.GetProvingStatusByHash(context.Background(), batch.Hash)
assert.NoError(t, err)
if chunkProofStatus == types.ProvingTaskFailed && batchProofStatus == types.ProvingTaskFailed {
if chunkProofStatus == types.ProvingTaskAssigned && batchProofStatus == types.ProvingTaskAssigned {
return
}
chunkProverTaskProvingStatus, err = proverTaskOrm.GetProvingStatusByTaskID(context.Background(), dbChunk.Hash)
assert.NoError(t, err)
batchProverTaskProvingStatus, err = proverTaskOrm.GetProvingStatusByTaskID(context.Background(), batch.Hash)
assert.NoError(t, err)
if chunkProverTaskProvingStatus == types.ProverProofInvalid && batchProverTaskProvingStatus == types.ProverProofInvalid {
return
}
case <-tickStop:

View File

@@ -163,11 +163,16 @@ func (r *mockProver) getProverTask(t *testing.T, proofType message.ProofType) *t
}
func (r *mockProver) submitProof(t *testing.T, proverTaskSchema *types.GetTaskSchema, proofStatus proofStatus, errCode int) {
proofMsgStatus := message.StatusOk
if proofStatus == generatedFailed {
proofMsgStatus = message.StatusProofError
}
proof := &message.ProofMsg{
ProofDetail: &message.ProofDetail{
ID: proverTaskSchema.TaskID,
Type: message.ProofType(proverTaskSchema.TaskType),
Status: message.RespStatus(proofStatus),
Status: proofMsgStatus,
ChunkProof: &message.ChunkProof{},
BatchProof: &message.BatchProof{},
},

View File

@@ -31,7 +31,8 @@ func init() {
}
// Register `prover-test` app for integration-test.
utils.RegisterSimulation(app, utils.ProverApp)
utils.RegisterSimulation(app, utils.ChunkProverApp)
utils.RegisterSimulation(app, utils.BatchProverApp)
}
func action(ctx *cli.Context) error {

View File

@@ -6,11 +6,21 @@ import (
"time"
"scroll-tech/common/cmd"
"scroll-tech/common/utils"
"scroll-tech/common/version"
)
func TestRunProver(t *testing.T) {
prover := cmd.NewCmd("prover-test", "--version")
func TestRunChunkProver(t *testing.T) {
prover := cmd.NewCmd(string(utils.ChunkProverApp), "--version")
defer prover.WaitExit()
// wait result
prover.ExpectWithTimeout(t, true, time.Second*3, fmt.Sprintf("prover version %s", version.Version))
prover.RunApp(nil)
}
func TestRunBatchProver(t *testing.T) {
prover := cmd.NewCmd(string(utils.BatchProverApp), "--version")
defer prover.WaitExit()
// wait result
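
The diff context is cut off here; presumably TestRunBatchProver finishes symmetrically with TestRunChunkProver above, roughly as sketched below (a hedged guess for readability, not the verbatim file).

```go
	// Hypothetical continuation mirroring TestRunChunkProver above.
	prover.ExpectWithTimeout(t, true, time.Second*3, fmt.Sprintf("prover version %s", version.Version))
	prover.RunApp(nil)
}
```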

View File

@@ -4,15 +4,12 @@ import (
"encoding/json"
"fmt"
"os"
"sync"
"testing"
"time"
"github.com/google/uuid"
"github.com/scroll-tech/go-ethereum/rpc"
"golang.org/x/sync/errgroup"
proverConfig "scroll-tech/prover/config"
"scroll-tech/prover/config"
"scroll-tech/common/cmd"
"scroll-tech/common/docker"
@@ -31,7 +28,7 @@ func getIndex() int {
// ProverApp prover-test client manager.
type ProverApp struct {
Config *proverConfig.Config
Config *config.Config
base *docker.App
@@ -41,23 +38,30 @@ type ProverApp struct {
index int
name string
args []string
docker.AppAPI
}
// NewProverApp return a new proverApp manager.
func NewProverApp(base *docker.App, file string, httpURL string, proofType message.ProofType) *ProverApp {
uuid := uuid.New().String()
proverFile := fmt.Sprintf("/tmp/%s_%d_prover-config.json", uuid, base.Timestamp)
func NewProverApp(base *docker.App, mockName utils.MockAppName, file string, httpURL string) *ProverApp {
var proofType message.ProofType
switch mockName {
case utils.ChunkProverApp:
proofType = message.ProofTypeChunk
case utils.BatchProverApp:
proofType = message.ProofTypeBatch
default:
return nil
}
name := string(mockName)
proverFile := fmt.Sprintf("/tmp/%d_%s-config.json", base.Timestamp, name)
proverApp := &ProverApp{
base: base,
originFile: file,
proverFile: proverFile,
bboltDB: fmt.Sprintf("/tmp/%s_%d_bbolt_db", uuid, base.Timestamp),
bboltDB: fmt.Sprintf("/tmp/%d_%s_bbolt_db", base.Timestamp, name),
index: getIndex(),
name: string(utils.ProverApp),
args: []string{"--log.debug", "--config", proverFile},
name: name,
AppAPI: cmd.NewCmd(name, []string{"--log.debug", "--config", proverFile}...),
}
if err := proverApp.MockConfig(true, httpURL, proofType); err != nil {
panic(err)
@@ -66,18 +70,10 @@ func NewProverApp(base *docker.App, file string, httpURL string, proofType messa
}
// RunApp run prover-test child process by multi parameters.
func (r *ProverApp) RunApp(t *testing.T, args ...string) {
r.AppAPI = cmd.NewCmd(r.name, append(r.args, args...)...)
func (r *ProverApp) RunApp(t *testing.T) {
r.AppAPI.RunApp(func() bool { return r.AppAPI.WaitResult(t, time.Second*40, "prover start successfully") })
}
// RunAppWithExpectedResult runs the prover-test child process with multiple parameters,
// and checks for a specific expected result in the output.
func (r *ProverApp) RunAppWithExpectedResult(t *testing.T, expectedResult string, args ...string) {
r.AppAPI = cmd.NewCmd(r.name, append(r.args, args...)...)
r.AppAPI.RunApp(func() bool { return r.AppAPI.WaitResult(t, time.Second*40, expectedResult) })
}
// Free stop and release prover-test.
func (r *ProverApp) Free() {
if !utils.IsNil(r.AppAPI) {
@@ -90,7 +86,7 @@ func (r *ProverApp) Free() {
// MockConfig creates a new prover config.
func (r *ProverApp) MockConfig(store bool, httpURL string, proofType message.ProofType) error {
cfg, err := proverConfig.NewConfig(r.originFile)
cfg, err := config.NewConfig(r.originFile)
if err != nil {
return err
}
@@ -123,47 +119,3 @@ func (r *ProverApp) MockConfig(store bool, httpURL string, proofType message.Pro
}
return os.WriteFile(r.proverFile, data, 0600)
}
// ProverApps proverApp list.
type ProverApps []*ProverApp
// RunApps starts all the proverApps.
func (r ProverApps) RunApps(t *testing.T, args ...string) {
var eg errgroup.Group
for i := range r {
i := i
eg.Go(func() error {
r[i].RunApp(t, args...)
return nil
})
}
_ = eg.Wait()
}
// Free releases proverApps.
func (r ProverApps) Free() {
var wg sync.WaitGroup
wg.Add(len(r))
for i := range r {
i := i
go func() {
r[i].Free()
wg.Done()
}()
}
wg.Wait()
}
// WaitExit wait proverApps stopped.
func (r ProverApps) WaitExit() {
var wg sync.WaitGroup
wg.Add(len(r))
for i := range r {
i := i
go func() {
r[i].WaitExit()
wg.Done()
}()
}
wg.Wait()
}

View File

@@ -4,12 +4,10 @@ go 1.19
require (
github.com/go-resty/resty/v2 v2.7.0
github.com/google/uuid v1.3.0
github.com/scroll-tech/go-ethereum v1.10.14-0.20230804022247-26eeb40ea3ca
github.com/stretchr/testify v1.8.3
github.com/urfave/cli/v2 v2.25.7
go.etcd.io/bbolt v1.3.7
golang.org/x/sync v0.3.0
)
require (
@@ -21,6 +19,7 @@ require (
github.com/go-ole/go-ole v1.2.6 // indirect
github.com/go-stack/stack v1.8.1 // indirect
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
github.com/google/uuid v1.3.0 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
github.com/holiman/uint256 v1.2.3 // indirect
github.com/huin/goupnp v1.0.3 // indirect
@@ -45,6 +44,7 @@ require (
github.com/yusufpapurcu/wmi v1.2.2 // indirect
golang.org/x/crypto v0.11.0 // indirect
golang.org/x/net v0.12.0 // indirect
golang.org/x/sync v0.3.0 // indirect
golang.org/x/sys v0.10.0 // indirect
golang.org/x/time v0.3.0 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect

View File

@@ -22,7 +22,6 @@ import (
"scroll-tech/common/database"
"scroll-tech/common/docker"
"scroll-tech/common/types"
"scroll-tech/common/types/message"
"scroll-tech/common/utils"
"scroll-tech/common/version"
@@ -38,13 +37,11 @@ var (
)
func TestMain(m *testing.M) {
version.Version = "v1.2.3-aaa-bbb-ccc"
base = docker.NewDockerApp()
bridgeApp = bcmd.NewBridgeApp(base, "../../bridge/conf/config.json")
coordinatorApp = capp.NewCoordinatorApp(base, "../../coordinator/conf/config.json")
chunkProverApp = rapp.NewProverApp(base, "../../prover/config.json", coordinatorApp.HTTPEndpoint(), message.ProofTypeChunk)
batchProverApp = rapp.NewProverApp(base, "../../prover/config.json", coordinatorApp.HTTPEndpoint(), message.ProofTypeBatch)
chunkProverApp = rapp.NewProverApp(base, utils.ChunkProverApp, "../../prover/config.json", coordinatorApp.HTTPEndpoint())
batchProverApp = rapp.NewProverApp(base, utils.BatchProverApp, "../../prover/config.json", coordinatorApp.HTTPEndpoint())
m.Run()
bridgeApp.Free()
coordinatorApp.Free()
@@ -115,17 +112,21 @@ func TestCoordinatorProverInteraction(t *testing.T) {
assert.NoError(t, err)
err = chunkOrm.UpdateBatchHashInRange(context.Background(), 0, 0, batch.Hash)
assert.NoError(t, err)
t.Log(version.Version)
// Run coordinator app.
coordinatorApp.RunApp(t)
// Run prover app.
chunkProverApp.RunAppWithExpectedResult(t, "proof submitted successfully") // chunk prover login -> get task -> submit proof.
batchProverApp.RunAppWithExpectedResult(t, "proof submitted successfully") // batch prover login -> get task -> submit proof.
chunkProverApp.ExpectWithTimeout(t, true, time.Second*40, "proof submitted successfully") // chunk prover login -> get task -> submit proof.
batchProverApp.ExpectWithTimeout(t, true, time.Second*40, "proof submitted successfully") // batch prover login -> get task -> submit proof.
// All task has been proven, coordinator would not return any task.
chunkProverApp.ExpectWithTimeout(t, false, 60*time.Second, "get empty prover task")
batchProverApp.ExpectWithTimeout(t, false, 60*time.Second, "get empty prover task")
chunkProverApp.ExpectWithTimeout(t, true, 60*time.Second, "get empty prover task")
batchProverApp.ExpectWithTimeout(t, true, 60*time.Second, "get empty prover task")
chunkProverApp.RunApp(t)
batchProverApp.RunApp(t)
// Free apps.
chunkProverApp.WaitExit()
@@ -142,10 +143,12 @@ func TestProverReLogin(t *testing.T) {
// Run coordinator app.
coordinatorApp.RunApp(t) // login timeout: 1 sec
chunkProverApp.RunApp(t)
batchProverApp.RunApp(t)
// Run prover app.
chunkProverApp.RunAppWithExpectedResult(t, "re-login success") // chunk prover login.
batchProverApp.RunAppWithExpectedResult(t, "re-login success") // batch prover login.
chunkProverApp.WaitResult(t, time.Second*40, "re-login success") // chunk prover login.
batchProverApp.WaitResult(t, time.Second*40, "re-login success") // batch prover login.
// Free apps.
chunkProverApp.WaitExit()