Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-11 23:18:07 -05:00)
Compare commits: v4.1.33...chunk-info
4 commits
| Author | SHA1 | Date |
|---|---|---|
| | 628df733ee | |
| | 938f05064c | |
| | 28fa44d3b8 | |
| | 6b7ca50599 | |
.github/workflows/bridge.yml (vendored, 4 lines changed)

@@ -43,8 +43,6 @@ jobs:
           version: '0.8.16'
       - name: Install Geth Tools
         uses: gacts/install-geth-tools@v1
-        with:
-          version: 1.10.19
       - name: Lint
         working-directory: 'bridge'
         run: |
@@ -94,8 +92,6 @@ jobs:
           version: '0.8.16'
       - name: Install Geth Tools
         uses: gacts/install-geth-tools@v1
-        with:
-          version: 1.10.19
       - name: Build prerequisites
         run: |
           make dev_docker
.github/workflows/common.yml (vendored, 2 lines changed)

@@ -88,8 +88,6 @@ jobs:
           version: '0.8.16'
       - name: Install Geth Tools
         uses: gacts/install-geth-tools@v1
-        with:
-          version: 1.10.19
       - name: Build prerequisites
         run: |
           make dev_docker
.github/workflows/coordinator.yml (vendored, 2 lines changed)

@@ -104,8 +104,6 @@ jobs:
           version: '0.8.16'
       - name: Install Geth Tools
         uses: gacts/install-geth-tools@v1
-        with:
-          version: 1.10.19
       - name: Build prerequisites
         run: |
           make dev_docker
.github/workflows/database.yml (vendored, 2 lines changed)

@@ -81,8 +81,6 @@ jobs:
           version: '0.8.16'
       - name: Install Geth Tools
         uses: gacts/install-geth-tools@v1
-        with:
-          version: 1.10.19
       - name: Build prerequisites
         run: |
           make dev_docker
.github/workflows/docker.yaml (vendored, 95 lines changed)

@@ -6,7 +6,7 @@ on:
       - v**
 
 jobs:
-  event_watcher:
+  build-and-push:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout code
@@ -27,18 +27,6 @@ jobs:
           tags: scrolltech/event-watcher:${{github.ref_name}}
           # cache-from: type=gha,scope=${{ github.workflow }}
           # cache-to: type=gha,scope=${{ github.workflow }}
-  gas_oracle:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Build and push gas_oracle docker
         uses: docker/build-push-action@v2
         with:
@@ -48,18 +36,6 @@ jobs:
           tags: scrolltech/gas-oracle:${{github.ref_name}}
           # cache-from: type=gha,scope=${{ github.workflow }}
           # cache-to: type=gha,scope=${{ github.workflow }}
-  msg_relayer:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Build and push msg_relayer docker
         uses: docker/build-push-action@v2
         with:
@@ -69,18 +45,6 @@ jobs:
           tags: scrolltech/msg-relayer:${{github.ref_name}}
           # cache-from: type=gha,scope=${{ github.workflow }}
           # cache-to: type=gha,scope=${{ github.workflow }}
-  rollup_relayer:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Build and push rollup_relayer docker
        uses: docker/build-push-action@v2
         with:
@@ -90,18 +54,6 @@ jobs:
           tags: scrolltech/rollup-relayer:${{github.ref_name}}
           # cache-from: type=gha,scope=${{ github.workflow }}
           # cache-to: type=gha,scope=${{ github.workflow }}
-  bridgehistoryapi-cross-msg-fetcher:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Build and push bridgehistoryapi-cross-msg-fetcher docker
         uses: docker/build-push-action@v2
         with:
@@ -111,18 +63,6 @@ jobs:
           tags: scrolltech/bridgehistoryapi-cross-msg-fetcher:${{github.ref_name}}
           # cache-from: type=gha,scope=${{ github.workflow }}
           # cache-to: type=gha,scope=${{ github.workflow }}
-  bridgehistoryapi-server:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Build and push bridgehistoryapi-server docker
         uses: docker/build-push-action@v2
         with:
@@ -132,18 +72,6 @@ jobs:
           tags: scrolltech/bridgehistoryapi-server:${{github.ref_name}}
           # cache-from: type=gha,scope=${{ github.workflow }}
           # cache-to: type=gha,scope=${{ github.workflow }}
-  coordinator:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
       - name: Build and push coordinator docker
         uses: docker/build-push-action@v2
         with:
@@ -153,24 +81,3 @@ jobs:
           tags: scrolltech/coordinator:${{github.ref_name}}
           # cache-from: type=gha,scope=${{ github.workflow }}
           # cache-to: type=gha,scope=${{ github.workflow }}
-  prover-stats-api:
-    runs-on: ubuntu-latest
-    steps:
-      - name: Checkout code
-        uses: actions/checkout@v2
-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v2
-      - name: Login to Docker Hub
-        uses: docker/login-action@v2
-        with:
-          username: ${{ secrets.DOCKERHUB_USERNAME }}
-          password: ${{ secrets.DOCKERHUB_TOKEN }}
-      - name: Build and push prover-stats-api docker
-        uses: docker/build-push-action@v2
-        with:
-          context: .
-          file: ./build/dockerfiles/prover-stats-api.Dockerfile
-          push: true
-          tags: scrolltech/prover-stats-api:${{github.ref_name}}
-          # cache-from: type=gha,scope=${{ github.workflow }}
-          # cache-to: type=gha,scope=${{ github.workflow }}
.github/workflows/integration.yaml (vendored, 2 lines changed)

@@ -33,8 +33,6 @@ jobs:
           version: '0.8.16'
       - name: Install Geth Tools
         uses: gacts/install-geth-tools@v1
-        with:
-          version: 1.10.19
       - name: Build prerequisites
         run: |
           make dev_docker
@@ -110,7 +110,7 @@ func action(ctx *cli.Context) error {
 
 	go utils.Loop(subCtx, 2*time.Second, l2relayer.ProcessPendingBatches)
 
-	go utils.Loop(subCtx, 60*time.Second, l2relayer.ProcessCommittedBatches)
+	go utils.Loop(subCtx, 2*time.Second, l2relayer.ProcessCommittedBatches)
 
 	// Finish start all rollup relayer functions.
 	log.Info("Start rollup-relayer successfully")
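The hunk above only shortens the polling interval for `ProcessCommittedBatches` from 60s to 2s. For context, `utils.Loop(ctx, period, fn)` is called with a plain `func()`; a minimal sketch of such a helper is below. The body is an assumption for illustration, not the repository's actual implementation:

```go
package utils

import (
	"context"
	"time"
)

// Loop invokes fn every period until ctx is cancelled.
// Illustrative sketch only; the real utils.Loop may differ internally.
func Loop(ctx context.Context, period time.Duration, fn func()) {
	ticker := time.NewTicker(period)
	defer ticker.Stop()
	for {
		select {
		case <-ctx.Done():
			return
		case <-ticker.C:
			fn()
		}
	}
}
```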
@@ -376,14 +376,9 @@ func (r *Layer2Relayer) ProcessPendingBatches() {
 		txID := batch.Hash + "-commit"
 		txHash, err := r.rollupSender.SendTransaction(txID, &r.cfg.RollupContractAddress, big.NewInt(0), calldata, 0)
 		if err != nil {
-			log.Error(
-				"Failed to send commitBatch tx to layer1",
-				"index", batch.Index,
-				"hash", batch.Hash,
-				"RollupContractAddress", r.cfg.RollupContractAddress,
-				"calldata", common.Bytes2Hex(calldata),
-				"err", err,
-			)
+			if !errors.Is(err, sender.ErrNoAvailableAccount) && !errors.Is(err, sender.ErrFullPending) {
+				log.Error("Failed to send commitBatch tx to layer1 ", "err", err)
+			}
 			return
 		}
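The replacement collapses the verbose multi-field log into a single conditional log: expected backpressure errors from the sender are silenced, anything else is reported. A self-contained sketch of the pattern, using stand-ins for `sender.ErrNoAvailableAccount` and `sender.ErrFullPending`:

```go
package main

import (
	"errors"
	"log"
)

// Stand-ins for the sender package's sentinel errors referenced in the diff.
var (
	errNoAvailableAccount = errors.New("no available account")
	errFullPending        = errors.New("pending pool is full")
)

// logIfUnexpected mirrors the filtering in the new branch: transient
// backpressure conditions are expected and skipped; other errors are logged.
func logIfUnexpected(err error) {
	if !errors.Is(err, errNoAvailableAccount) && !errors.Is(err, errFullPending) {
		log.Printf("Failed to send commitBatch tx to layer1: %v", err)
	}
}

func main() {
	logIfUnexpected(errFullPending)              // silenced
	logIfUnexpected(errors.New("nonce too low")) // logged
}
```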
@@ -37,7 +37,7 @@ COPY . .
 RUN cp -r ./common/libzkp/interface ./coordinator/internal/logic/verifier/lib
 COPY --from=zkp-builder /app/target/release/libzkp.so ./coordinator/internal/logic/verifier/lib/
 COPY --from=zkp-builder /app/target/release/libzktrie.so ./coordinator/internal/logic/verifier/lib/
-RUN cd ./coordinator && make coordinator_skip_libzkp && mv ./build/bin/coordinator /bin/coordinator && mv internal/logic/verifier/lib /bin/
+RUN cd ./coordinator && go build -v -p 4 -o /bin/coordinator ./cmd && mv internal/logic/verifier/lib /bin/
 
 # Pull coordinator into a second stage deploy alpine container
 FROM ubuntu:20.04
@@ -46,6 +46,6 @@ ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/li
 RUN mkdir -p /src/coordinator/internal/logic/verifier/lib
 COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib
 COPY --from=builder /bin/coordinator /bin/
-RUN /bin/coordinator --version
+
 
 ENTRYPOINT ["/bin/coordinator"]
@@ -1,31 +0,0 @@
-# Download Go dependencies
-FROM scrolltech/go-alpine-builder:1.19 as base
-
-WORKDIR /src
-COPY go.work* ./
-COPY ./bridge/go.* ./bridge/
-COPY ./common/go.* ./common/
-COPY ./coordinator/go.* ./coordinator/
-COPY ./database/go.* ./database/
-COPY ./prover-stats-api/go.* ./prover-stats-api/
-COPY ./prover/go.* ./prover/
-COPY ./tests/integration-test/go.* ./tests/integration-test/
-COPY ./bridge-history-api/go.* ./bridge-history-api/
-# Support mainland environment.
-#ENV GOPROXY="https://goproxy.cn,direct"
-RUN go mod download -x
-
-
-# Build prover-stats-api
-FROM base as builder
-
-RUN --mount=target=. \
-    --mount=type=cache,target=/root/.cache/go-build \
-    cd /src/prover-stats-api/cmd/ && go build -v -p 4 -o /bin/prover-stats-api
-
-# Pull prover-stats-api into a second stage deploy alpine container \
-FROM alpine:latest
-
-COPY --from=builder /bin/prover-stats-api /bin/
-
-ENTRYPOINT ["prover-stats-api"]
common/libzkp/impl/Cargo.lock (generated, 24 lines changed)

@@ -32,7 +32,7 @@ dependencies = [
 [[package]]
 name = "aggregator"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "ark-std",
  "env_logger 0.10.0",
@@ -432,7 +432,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
 [[package]]
 name = "bus-mapping"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "eth-types",
  "ethers-core",
@@ -1048,7 +1048,7 @@ dependencies = [
 [[package]]
 name = "eth-types"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "ethers-core",
  "ethers-signers",
@@ -1225,7 +1225,7 @@ dependencies = [
 [[package]]
 name = "external-tracer"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "eth-types",
  "geth-utils",
@@ -1438,7 +1438,7 @@ dependencies = [
 [[package]]
 name = "gadgets"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "digest 0.7.6",
  "eth-types",
@@ -1478,7 +1478,7 @@ dependencies = [
 [[package]]
 name = "geth-utils"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "env_logger 0.9.3",
  "gobuild 0.1.0-alpha.2 (git+https://github.com/scroll-tech/gobuild.git)",
@@ -2076,7 +2076,7 @@ dependencies = [
 [[package]]
 name = "keccak256"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "env_logger 0.9.3",
  "eth-types",
@@ -2263,7 +2263,7 @@ dependencies = [
 [[package]]
 name = "mock"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "eth-types",
  "ethers-core",
@@ -2278,7 +2278,7 @@ dependencies = [
 [[package]]
 name = "mpt-zktrie"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "bus-mapping",
  "eth-types",
@@ -2754,7 +2754,7 @@ dependencies = [
 [[package]]
 name = "prover"
 version = "0.4.0"
-source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.12#ef945901bee26c4b79be8af60e259443392368fa"
+source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.8#785bef96944a27bc2d0ddb41623fcb77de527824"
 dependencies = [
  "aggregator",
  "anyhow",
@@ -4039,7 +4039,7 @@ checksum = "497961ef93d974e23eb6f433eb5fe1b7930b659f06d12dec6fc44a8f554c0bba"
 [[package]]
 name = "types"
 version = "0.4.0"
-source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.12#ef945901bee26c4b79be8af60e259443392368fa"
+source = "git+https://github.com/scroll-tech/scroll-prover?tag=v0.5.8#785bef96944a27bc2d0ddb41623fcb77de527824"
 dependencies = [
  "base64 0.13.1",
  "blake2",
@@ -4490,7 +4490,7 @@ checksum = "2a0956f1ba7c7909bfb66c2e9e4124ab6f6482560f6628b5aaeba39207c9aad9"
 [[package]]
 name = "zkevm-circuits"
 version = "0.1.0"
-source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.12#554bdcf00334bab45670b8daa72d687a7ff2b919"
+source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.5.8#ce1f59466c2fbe9ba6f320498b52b79a8e1a3258"
 dependencies = [
  "array-init",
  "bus-mapping",
@@ -20,8 +20,8 @@ maingate = { git = "https://github.com/scroll-tech/halo2wrong", branch = "halo2-
 halo2curves = { git = "https://github.com/scroll-tech/halo2curves.git", branch = "0.3.1-derive-serde" }
 
 [dependencies]
-prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.12" }
-types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.12" }
+prover = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.8" }
+types = { git = "https://github.com/scroll-tech/scroll-prover", tag = "v0.5.8" }
 halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "develop" }
 
 log = "0.4"
@@ -48,6 +48,9 @@ pub unsafe extern "C" fn gen_batch_proof(
     let chunk_proofs = serde_json::from_slice::<Vec<ChunkProof>>(&chunk_proofs).unwrap();
     assert_eq!(chunk_hashes.len(), chunk_proofs.len());
 
+    log::error!("gupeng - rust - 1 - chunk_hashes = {chunk_hashes:#?}");
+    log::error!("gupeng - rust - 1 - chunk_proofs = {chunk_proofs:?}");
+
     let chunk_hashes_proofs = chunk_hashes
         .into_iter()
         .zip(chunk_proofs.into_iter())
@@ -126,7 +126,7 @@ const (
 	ProvingTaskUnassigned
 	// ProvingTaskAssigned : proving_task is assigned to be proved
 	ProvingTaskAssigned
-	// ProvingTaskProved DEPRECATED: proof has been returned by prover
+	// ProvingTaskProved : proof has been returned by prover
 	ProvingTaskProved
 	// ProvingTaskVerified : proof is valid
 	ProvingTaskVerified
@@ -20,17 +20,12 @@ var (
 	MessageRelayerApp MockAppName = "message-relayer-test"
 	// RollupRelayerApp the name of mock rollup-relayer app.
 	RollupRelayerApp MockAppName = "rollup-relayer-test"
-
-	// DBCliApp the name of mock database app.
-	DBCliApp MockAppName = "db_cli-test"
-
 	// CoordinatorApp the name of mock coordinator app.
 	CoordinatorApp MockAppName = "coordinator-test"
-
-	// ChunkProverApp the name of mock chunk prover app.
-	ChunkProverApp MockAppName = "chunkProver-test"
-	// BatchProverApp the name of mock batch prover app.
-	BatchProverApp MockAppName = "batchProver-test"
+	// DBCliApp the name of mock database app.
+	DBCliApp MockAppName = "db_cli-test"
+	// ProverApp the name of mock prover app.
+	ProverApp MockAppName = "prover-test"
 )
 
 // RegisterSimulation register initializer function for integration-test.
@@ -3,10 +3,9 @@ package version
 
 import (
 	"fmt"
 	"runtime/debug"
-	"strings"
 )
 
-var tag = "v4.1.33"
+var tag = "v4.1.22"
 
 var commit = func() string {
 	if info, ok := debug.ReadBuildInfo(); ok {
@@ -20,29 +19,11 @@ var commit = func() string {
 			}
 		}
 	}
-	// Set default value for integration test.
-	return "000000"
+	return ""
 }()
 
-// ZkVersion is commit-id of common/libzkp/impl/cargo.lock/scroll-prover and halo2, contacted by a "-"
-// The default `000000-000000` is set for integration test, and will be overwritten by coordinator's & prover's actual compilations (see their Makefiles).
-var ZkVersion = "000000-000000"
+// ZkVersion is commit-id of common/libzkp/impl/cargo.lock/scroll-prover
+var ZkVersion string
 
 // Version denote the version of scroll protocol, including the l2geth, relayer, coordinator, prover, contracts and etc.
 var Version = fmt.Sprintf("%s-%s-%s", tag, commit, ZkVersion)
-
-// CheckScrollProverVersion check the "scroll-prover" version, if it's different from the local one, return false
-func CheckScrollProverVersion(proverVersion string) bool {
-	// note the the version is in fact in the format of "tag-commit-scroll_prover-halo2",
-	// so split-by-'-' length should be 4
-	remote := strings.Split(proverVersion, "-")
-	if len(remote) != 4 {
-		return false
-	}
-	local := strings.Split(Version, "-")
-	if len(local) != 4 {
-		return false
-	}
-	// compare the `scroll_prover` version
-	return remote[2] == local[2]
-}
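For context on what the deleted `CheckScrollProverVersion` does (the v4.1.33 side of this diff): `Version` strings have the shape `tag-commit-scroll_prover-halo2`, and only the third component must match between coordinator and prover. A worked example with hypothetical commit ids:

```go
package main

import (
	"fmt"
	"strings"
)

// Same comparison rule as the deleted CheckScrollProverVersion above:
// split both versions on "-" and require the scroll_prover component
// (index 2) to be equal.
func sameScrollProver(local, remote string) bool {
	l, r := strings.Split(local, "-"), strings.Split(remote, "-")
	if len(l) != 4 || len(r) != 4 {
		return false
	}
	return l[2] == r[2]
}

func main() {
	// Hypothetical version strings, not real release identifiers.
	local := "v4.1.33-000000-1111111-2222222"
	remote := "v4.1.30-abcdef0-1111111-2222222"
	fmt.Println(sameScrollProver(local, remote)) // true: same scroll_prover commit
}
```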
@@ -1,4 +1,4 @@
-.PHONY: lint docker clean coordinator coordinator_skip_libzkp mock_coordinator
+.PHONY: lint docker clean coordinator mock_coordinator
 
 IMAGE_NAME=coordinator
 IMAGE_VERSION=latest
@@ -25,9 +25,6 @@ libzkp:
 coordinator: libzkp ## Builds the Coordinator instance.
 	go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator ./cmd
 
-coordinator_skip_libzkp:
-	go build -ldflags "-X scroll-tech/common/version.ZkVersion=${ZK_VERSION}" -o $(PWD)/build/bin/coordinator ./cmd
-
 mock_coordinator: ## Builds the mocked Coordinator instance.
 	go build -tags="mock_prover mock_verifier" -o $(PWD)/build/bin/coordinator ./cmd
@@ -66,48 +66,50 @@ func (c *Collector) timeoutProofTask() {
 	for {
 		select {
 		case <-ticker.C:
-			timeout := time.Duration(c.cfg.ProverManager.CollectionTimeSec) * time.Second
-			assignedProverTasks, err := c.proverTaskOrm.GetTimeoutAssignedProverTasks(c.ctx, 10, timeout)
+			assignedProverTasks, err := c.proverTaskOrm.GetAssignedProverTasks(c.ctx, 10)
 			if err != nil {
 				log.Error("get unassigned session info failure", "error", err)
 				break
 			}
 
-			// here not update the block batch proving status failed, because the collector loop will check
-			// the attempt times. if reach the times, the collector will set the block batch proving status.
 			for _, assignedProverTask := range assignedProverTasks {
-				log.Warn("proof task have reach the timeout", "task id", assignedProverTask.TaskID,
-					"prover public key", assignedProverTask.ProverPublicKey, "prover name", assignedProverTask.ProverName, "task type", assignedProverTask.TaskType)
-				err = c.db.Transaction(func(tx *gorm.DB) error {
-					// update prover task proving status as ProverProofInvalid
-					if err = c.proverTaskOrm.UpdateProverTaskProvingStatus(c.ctx, message.ProofType(assignedProverTask.TaskType),
-						assignedProverTask.TaskID, assignedProverTask.ProverPublicKey, types.ProverProofInvalid, tx); err != nil {
-						log.Error("update prover task proving status failure", "hash", assignedProverTask.TaskID, "pubKey", assignedProverTask.ProverPublicKey, "err", err)
-						return err
-					}
-
-					// update prover task failure type
-					if err = c.proverTaskOrm.UpdateProverTaskFailureType(c.ctx, message.ProofType(assignedProverTask.TaskType),
-						assignedProverTask.TaskID, assignedProverTask.ProverPublicKey, types.ProverTaskFailureTypeTimeout, tx); err != nil {
-						log.Error("update prover task failure type failure", "hash", assignedProverTask.TaskID, "pubKey", assignedProverTask.ProverPublicKey, "err", err)
-						return err
-					}
-
-					// update the task to unassigned, let collector restart it
-					if message.ProofType(assignedProverTask.TaskType) == message.ProofTypeChunk {
-						if err = c.chunkOrm.UpdateProvingStatus(c.ctx, assignedProverTask.TaskID, types.ProvingTaskUnassigned, tx); err != nil {
-							log.Error("update chunk proving status to unassigned to restart it failure", "hash", assignedProverTask.TaskID, "err", err)
-						}
-					}
-					if message.ProofType(assignedProverTask.TaskType) == message.ProofTypeBatch {
-						if err = c.batchOrm.UpdateProvingStatus(c.ctx, assignedProverTask.TaskID, types.ProvingTaskUnassigned, tx); err != nil {
-							log.Error("update batch proving status to unassigned to restart it failure", "hash", assignedProverTask.TaskID, "err", err)
-						}
-					}
-					return nil
-				})
-				if err != nil {
-					log.Error("check task proof is timeout failure", "error", err)
-				}
+				timeoutDuration := time.Duration(c.cfg.ProverManager.CollectionTimeSec) * time.Second
+				// here not update the block batch proving status failed, because the collector loop will check
+				// the attempt times. if reach the times, the collector will set the block batch proving status.
+				if time.Since(assignedProverTask.AssignedAt) >= timeoutDuration {
+					log.Warn("proof task have reach the timeout", "task id", assignedProverTask.TaskID,
+						"prover public key", assignedProverTask.ProverPublicKey, "prover name", assignedProverTask.ProverName, "task type", assignedProverTask.TaskType)
+					err = c.db.Transaction(func(tx *gorm.DB) error {
+						// update prover task proving status as ProverProofInvalid
+						if err = c.proverTaskOrm.UpdateProverTaskProvingStatus(c.ctx, message.ProofType(assignedProverTask.TaskType),
+							assignedProverTask.TaskID, assignedProverTask.ProverPublicKey, types.ProverProofInvalid, tx); err != nil {
+							log.Error("update prover task proving status failure", "hash", assignedProverTask.TaskID, "pubKey", assignedProverTask.ProverPublicKey, "err", err)
+							return err
+						}
+
+						// update prover task failure type
+						if err = c.proverTaskOrm.UpdateProverTaskFailureType(c.ctx, message.ProofType(assignedProverTask.TaskType),
+							assignedProverTask.TaskID, assignedProverTask.ProverPublicKey, types.ProverTaskFailureTypeTimeout, tx); err != nil {
+							log.Error("update prover task failure type failure", "hash", assignedProverTask.TaskID, "pubKey", assignedProverTask.ProverPublicKey, "err", err)
+							return err
+						}
+
+						// update the task to unassigned, let collector restart it
+						if message.ProofType(assignedProverTask.TaskType) == message.ProofTypeChunk {
+							if err = c.chunkOrm.UpdateProvingStatus(c.ctx, assignedProverTask.TaskID, types.ProvingTaskUnassigned, tx); err != nil {
+								log.Error("update chunk proving status to unassigned to restart it failure", "hash", assignedProverTask.TaskID, "err", err)
+							}
+						}
+						if message.ProofType(assignedProverTask.TaskType) == message.ProofTypeBatch {
+							if err = c.batchOrm.UpdateProvingStatus(c.ctx, assignedProverTask.TaskID, types.ProvingTaskUnassigned, tx); err != nil {
+								log.Error("update batch proving status to unassigned to restart it failure", "hash", assignedProverTask.TaskID, "err", err)
+							}
+						}
+						return nil
+					})
+					if err != nil {
+						log.Error("check task proof is timeout failure", "error", err)
+					}
+				}
 			}
 		case <-c.ctx.Done():
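Net effect of the hunk above: v4.1.33 lets the database return only timed-out tasks (`GetTimeoutAssignedProverTasks` filters on `assigned_at` in SQL), while the chunk-info side fetches every assigned task and filters in Go with `time.Since`. A compilable sketch of the in-memory check:

```go
package main

import (
	"fmt"
	"time"
)

// timedOut reproduces the chunk-info side's in-memory filter: a task whose
// assignment is older than the collection timeout is eligible for restart.
func timedOut(assignedAt time.Time, timeout time.Duration) bool {
	return time.Since(assignedAt) >= timeout
}

func main() {
	assignedAt := time.Now().Add(-3 * time.Minute)
	fmt.Println(timedOut(assignedAt, time.Minute)) // true: assigned 3m ago, 1m timeout
}
```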
@@ -13,7 +13,6 @@ import (
 	"scroll-tech/common/types"
 	"scroll-tech/common/types/message"
 	"scroll-tech/common/utils"
-	"scroll-tech/common/version"
 
 	"scroll-tech/coordinator/internal/config"
 	"scroll-tech/coordinator/internal/orm"
@@ -43,29 +42,12 @@ func NewBatchProverTask(cfg *config.Config, db *gorm.DB) *BatchProverTask {
 func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinatorType.GetTaskParameter) (*coordinatorType.GetTaskSchema, error) {
 	publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
 	if !publicKeyExist {
-		return nil, fmt.Errorf("get public key from context failed")
+		return nil, fmt.Errorf("get public key from contex failed")
 	}
 
 	proverName, proverNameExist := ctx.Get(coordinatorType.ProverName)
 	if !proverNameExist {
-		return nil, fmt.Errorf("get prover name from context failed")
-	}
-
-	proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion)
-	if !proverVersionExist {
-		return nil, fmt.Errorf("get prover version from context failed")
-	}
-	if !version.CheckScrollProverVersion(proverVersion.(string)) {
-		return nil, fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s", proverVersion.(string), version.Version)
-	}
-
-	isAssigned, err := bp.proverTaskOrm.IsProverAssigned(ctx, publicKey.(string))
-	if err != nil {
-		return nil, fmt.Errorf("failed to check if prover is assigned a task: %w", err)
-	}
-
-	if isAssigned {
-		return nil, fmt.Errorf("prover with publicKey %s is already assigned a task", publicKey)
+		return nil, fmt.Errorf("get prover name from contex failed")
 	}
 
 	batchTasks, err := bp.batchOrm.UpdateUnassignedBatchReturning(ctx, 1)
@@ -93,7 +75,6 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
 		ProverPublicKey: publicKey.(string),
 		TaskType:        int16(message.ProofTypeBatch),
 		ProverName:      proverName.(string),
-		ProverVersion:   proverVersion.(string),
 		ProvingStatus:   int16(types.ProverAssigned),
 		FailureType:     int16(types.ProverTaskFailureTypeUndefined),
 		// here why need use UTC time. see scroll/common/databased/db.go
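Both versions of `Assign` rely on `gin.Context.Get`, which returns `(value, exists)`; the values are stored by earlier middleware and must be type-asserted before use. A minimal sketch of the retrieval pattern (key names and the middleware that sets them are assumptions inferred from the diff):

```go
package main

import (
	"fmt"

	"github.com/gin-gonic/gin"
)

// getStringFromContext shows the ctx.Get pattern used in Assign above:
// check existence first, then type-assert. The key is whatever the auth
// middleware stored earlier (assumed here for illustration).
func getStringFromContext(ctx *gin.Context, key string) (string, error) {
	value, exists := ctx.Get(key)
	if !exists {
		return "", fmt.Errorf("get %s from context failed", key)
	}
	s, ok := value.(string)
	if !ok {
		return "", fmt.Errorf("%s is not a string", key)
	}
	return s, nil
}
```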
@@ -13,7 +13,6 @@ import (
 	"scroll-tech/common/types"
 	"scroll-tech/common/types/message"
 	"scroll-tech/common/utils"
-	"scroll-tech/common/version"
 
 	"scroll-tech/coordinator/internal/config"
 	"scroll-tech/coordinator/internal/orm"
@@ -43,29 +42,12 @@ func NewChunkProverTask(cfg *config.Config, db *gorm.DB) *ChunkProverTask {
 func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinatorType.GetTaskParameter) (*coordinatorType.GetTaskSchema, error) {
 	publicKey, publicKeyExist := ctx.Get(coordinatorType.PublicKey)
 	if !publicKeyExist {
-		return nil, fmt.Errorf("get public key from context failed")
+		return nil, fmt.Errorf("get public key from contex failed")
 	}
 
 	proverName, proverNameExist := ctx.Get(coordinatorType.ProverName)
 	if !proverNameExist {
-		return nil, fmt.Errorf("get prover name from context failed")
-	}
-
-	proverVersion, proverVersionExist := ctx.Get(coordinatorType.ProverVersion)
-	if !proverVersionExist {
-		return nil, fmt.Errorf("get prover version from context failed")
-	}
-	if !version.CheckScrollProverVersion(proverVersion.(string)) {
-		return nil, fmt.Errorf("incompatible prover version. please upgrade your prover, expect version: %s, actual version: %s", version.Version, proverVersion.(string))
-	}
-
-	isAssigned, err := cp.proverTaskOrm.IsProverAssigned(ctx, publicKey.(string))
-	if err != nil {
-		return nil, fmt.Errorf("failed to check if prover is assigned a task: %w", err)
-	}
-
-	if isAssigned {
-		return nil, fmt.Errorf("prover with publicKey %s is already assigned a task", publicKey)
+		return nil, fmt.Errorf("get prover name from contex failed")
 	}
 
 	// load and send chunk tasks
@@ -95,7 +77,6 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
 		ProverPublicKey: publicKey.(string),
 		TaskType:        int16(message.ProofTypeChunk),
 		ProverName:      proverName.(string),
-		ProverVersion:   proverVersion.(string),
 		ProvingStatus:   int16(types.ProverAssigned),
 		FailureType:     int16(types.ProverTaskFailureTypeUndefined),
 		// here why need use UTC time. see scroll/common/databased/db.go
@@ -2,7 +2,6 @@ package submitproof
 
 import (
 	"context"
-	"encoding/json"
 	"errors"
 	"fmt"
 	"time"
@@ -35,12 +34,8 @@ var (
 	ErrValidatorFailureProofMsgStatusNotOk = errors.New("validator failure proof msg status not ok")
 	// ErrValidatorFailureProverTaskEmpty get none prover task
 	ErrValidatorFailureProverTaskEmpty = errors.New("validator failure get none prover task for the proof")
-	// ErrValidatorFailureProverTaskCannotSubmitTwice prove task can not submit proof twice
-	ErrValidatorFailureProverTaskCannotSubmitTwice = errors.New("validator failure prove task cannot submit proof twice")
-	// ErrValidatorFailureProofTimeout the submit proof is timeout
-	ErrValidatorFailureProofTimeout = errors.New("validator failure submit proof timeout")
-	// ErrValidatorFailureTaskHaveVerifiedSuccess have proved success and verified success
-	ErrValidatorFailureTaskHaveVerifiedSuccess = errors.New("validator failure chunk/batch have proved and verified success")
+	// ErrValidatorFailureProverInfoHasProofValid proof is vaild
+	ErrValidatorFailureProverInfoHasProofValid = errors.New("validator failure prover task info has proof valid")
 )
 
 // ProofReceiverLogic the proof receiver logic
@@ -79,7 +74,7 @@ func NewSubmitProofReceiverLogic(cfg *config.ProverManager, db *gorm.DB) *ProofR
 func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.ProofMsg) error {
 	pk := ctx.GetString(coordinatorType.PublicKey)
 	if len(pk) == 0 {
-		return fmt.Errorf("get public key from context failed")
+		return fmt.Errorf("get public key from contex failed")
 	}
 
 	proverTask, err := m.proverTaskOrm.GetProverTaskByTaskIDAndPubKey(ctx, proofMsg.ID, pk)
@@ -88,15 +83,47 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.P
 		return ErrValidatorFailureProverTaskEmpty
 	}
 
+	if err = m.validator(proverTask, pk, proofMsg); err != nil {
+		if errors.Is(err, ErrValidatorFailureProofMsgStatusNotOk) {
+			m.proofFailure(ctx, proofMsg.ID, pk, proofMsg.Type)
+		}
+		return err
+	}
+
 	proofTime := time.Since(proverTask.CreatedAt)
 	proofTimeSec := uint64(proofTime.Seconds())
 
 	log.Info("handling zk proof", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
 		"prover pk", pk, "prove type", proverTask.TaskType, "proof time", proofTimeSec)
 
-	if err = m.validator(ctx, proverTask, pk, proofMsg); err != nil {
-		return err
-	}
+	// store proof content
+	var storeProofErr error
+	switch proofMsg.Type {
+	case message.ProofTypeChunk:
+		storeProofErr = m.db.Transaction(func(tx *gorm.DB) error {
+			if dbErr := m.chunkOrm.UpdateProofByHash(ctx, proofMsg.ID, proofMsg.ChunkProof, proofTimeSec, tx); dbErr != nil {
+				return fmt.Errorf("failed to store chunk proof into db, err:%w", dbErr)
+			}
+			if dbErr := m.chunkOrm.UpdateProvingStatus(ctx, proofMsg.ID, types.ProvingTaskProved, tx); dbErr != nil {
+				return fmt.Errorf("failed to update chunk task status as proved, error:%w", dbErr)
+			}
+			return nil
+		})
+	case message.ProofTypeBatch:
+		storeProofErr = m.db.Transaction(func(tx *gorm.DB) error {
+			if dbErr := m.batchOrm.UpdateProofByHash(ctx, proofMsg.ID, proofMsg.BatchProof, proofTimeSec, tx); dbErr != nil {
+				return fmt.Errorf("failed to store batch proof into db, error:%w", dbErr)
+			}
+			if dbErr := m.batchOrm.UpdateProvingStatus(ctx, proofMsg.ID, types.ProvingTaskProved, tx); dbErr != nil {
+				return fmt.Errorf("failed to update batch task status as proved, error:%w", dbErr)
+			}
+			return nil
+		})
+	}
+	if storeProofErr != nil {
+		m.proofFailure(ctx, proofMsg.ID, pk, proofMsg.Type)
+		log.Error("failed to store basic proof into db", "error", storeProofErr)
+		return storeProofErr
+	}
 
+	coordinatorProofsReceivedTotalCounter.Inc(1)
+
 	var success bool
 	var verifyErr error
@@ -107,7 +134,7 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.P
 	}
 
 	if verifyErr != nil || !success {
-		m.proofFailure(ctx, proofMsg.ID, pk, proofMsg)
+		m.proofFailure(ctx, proofMsg.ID, pk, proofMsg.Type)
 		coordinatorProofsVerifiedFailedTimeTimer.Update(proofTime)
 
 		log.Info("proof verified by coordinator failed", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
@@ -122,10 +149,8 @@ func (m *ProofReceiverLogic) HandleZkProof(ctx *gin.Context, proofMsg *message.P
 	log.Info("proof verified and valid", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
 		"prover pk", pk, "prove type", proofMsg.Type, "proof time", proofTimeSec)
 
-	coordinatorProofsReceivedTotalCounter.Inc(1)
-
-	if err := m.closeProofTask(ctx, proofMsg.ID, pk, proofMsg, proofTimeSec); err != nil {
-		m.proofRecover(ctx, proofMsg.ID, pk, proofMsg)
+	if err := m.closeProofTask(ctx, proofMsg.ID, pk, proofMsg); err != nil {
+		m.proofRecover(ctx, proofMsg.ID, pk, proofMsg.Type)
 		return err
 	}
@@ -153,78 +178,49 @@ func (m *ProofReceiverLogic) checkAreAllChunkProofsReady(ctx context.Context, ch
 	return nil
 }
 
-func (m *ProofReceiverLogic) validator(ctx context.Context, proverTask *orm.ProverTask, pk string, proofMsg *message.ProofMsg) error {
+func (m *ProofReceiverLogic) validator(proverTask *orm.ProverTask, pk string, proofMsg *message.ProofMsg) error {
 	// Ensure this prover is eligible to participate in the prover task.
 	if types.ProverProveStatus(proverTask.ProvingStatus) == types.ProverProofValid {
-		// In order to prevent DoS attacks, it is forbidden to repeatedly submit valid proofs.
-		// TODO: Defend invalid proof resubmissions by one of the following two methods:
-		// (i) slash the prover for each submission of invalid proof
-		// (ii) set the maximum failure retry times
-		log.Warn("the prover task cannot submit twice", "hash", proofMsg.ID, "prover pk", proverTask.ProverPublicKey,
-			"prover name", proverTask.ProverName, "proof type", proverTask.TaskType)
-		return ErrValidatorFailureProverTaskCannotSubmitTwice
+		log.Warn("prover has already submitted valid proof in proof session", "prover name", proverTask.ProverName,
+			"prover pk", proverTask.ProverPublicKey, "proof type", proverTask.TaskType, "proof id", proofMsg.ProofDetail.ID)
+		return ErrValidatorFailureProverInfoHasProofValid
 	}
 
 	proofTime := time.Since(proverTask.CreatedAt)
 	proofTimeSec := uint64(proofTime.Seconds())
 
 	log.Info("handling zk proof", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
 		"prover pk", pk, "prove type", proverTask.TaskType, "proof time", proofTimeSec)
 
 	if proofMsg.Status != message.StatusOk {
 		coordinatorProofsGeneratedFailedTimeTimer.Update(proofTime)
 
 		log.Info("proof generated by prover failed", "proof id", proofMsg.ID, "prover name", proverTask.ProverName,
-			"prover pk", pk, "prove type", proofMsg.Type, "error", proofMsg.Error)
-		if updateErr := m.proverTaskOrm.UpdateProverTaskProvingStatus(ctx, proofMsg.Type, proofMsg.ID, pk, types.ProverProofInvalid); updateErr != nil {
-			log.Error("proof generated by prover failed update prover task proving status failure", "proof id", proofMsg.ID,
-				"prover name", proverTask.ProverName, "prover pk", pk, "prove type", proofMsg.Type, "error", proofMsg.Error)
-		}
+			"prover pk", pk, "prove type", proofMsg.Type, "proof time", proofTimeSec, "error", proofMsg.Error)
 		return ErrValidatorFailureProofMsgStatusNotOk
 	}
 
-	// if prover task FailureType is SessionInfoFailureTimeout, the submit proof is timeout, need skip it
-	if types.ProverTaskFailureType(proverTask.FailureType) == types.ProverTaskFailureTypeTimeout {
-		log.Info("proof submit proof have timeout, skip this submit proof", "hash", proofMsg.ID, "proof type", proverTask.TaskType,
-			"prover name", proverTask.ProverName, "prover public key", pk, "proof time", proofTimeSec)
-		return ErrValidatorFailureProofTimeout
-	}
-
-	// store the proof to prover task
-	if updateTaskProofErr := m.updateProverTaskProof(ctx, pk, proofMsg); updateTaskProofErr != nil {
-		log.Warn("update prover task proof failure", "hash", proofMsg.ID, "public key", pk,
-			"prover name", proverTask.ProverName, "error", updateTaskProofErr)
-	}
-
-	// if the batch/chunk have proved and verifier success, need skip this submit proof
-	if m.checkIsTaskSuccess(ctx, proofMsg.ID, proofMsg.Type) {
-		log.Info("the prove task have proved and verifier success, skip this submit proof", "hash", proofMsg.ID,
-			"proof type", proverTask.TaskType, "prover name", proverTask.ProverName, "prover public key", pk)
-		return ErrValidatorFailureTaskHaveVerifiedSuccess
-	}
 	return nil
 }
 
-func (m *ProofReceiverLogic) proofFailure(ctx context.Context, hash string, pubKey string, proofMsg *message.ProofMsg) {
-	log.Info("proof failure update proof status", "hash", hash, "public key", pubKey,
-		"proof type", proofMsg.Type.String(), "status", types.ProvingTaskFailed.String())
-
-	if err := m.updateProofStatus(ctx, hash, pubKey, proofMsg, types.ProvingTaskFailed, 0); err != nil {
+func (m *ProofReceiverLogic) proofFailure(ctx context.Context, hash string, pubKey string, proofMsgType message.ProofType) {
+	if err := m.updateProofStatus(ctx, hash, pubKey, proofMsgType, types.ProvingTaskFailed); err != nil {
 		log.Error("failed to updated proof status ProvingTaskFailed", "hash", hash, "pubKey", pubKey, "error", err)
 	}
 	coordinatorSessionsFailedTotalCounter.Inc(1)
 }
 
-func (m *ProofReceiverLogic) proofRecover(ctx context.Context, hash string, pubKey string, proofMsg *message.ProofMsg) {
-	log.Info("proof recover update proof status", "hash", hash, "public key", pubKey,
-		"proof type", proofMsg.Type.String(), "status", types.ProvingTaskUnassigned.String())
-
-	if err := m.updateProofStatus(ctx, hash, pubKey, proofMsg, types.ProvingTaskUnassigned, 0); err != nil {
+func (m *ProofReceiverLogic) proofRecover(ctx context.Context, hash string, pubKey string, proofMsgType message.ProofType) {
+	if err := m.updateProofStatus(ctx, hash, pubKey, proofMsgType, types.ProvingTaskUnassigned); err != nil {
 		log.Error("failed to updated proof status ProvingTaskUnassigned", "hash", hash, "pubKey", pubKey, "error", err)
 	}
 }
 
-func (m *ProofReceiverLogic) closeProofTask(ctx context.Context, hash string, pubKey string, proofMsg *message.ProofMsg, proofTimeSec uint64) error {
-	log.Info("proof close task update proof status", "hash", hash, "public key", pubKey,
-		"proof type", proofMsg.Type.String(), "status", types.ProvingTaskVerified.String())
-
-	if err := m.updateProofStatus(ctx, hash, pubKey, proofMsg, types.ProvingTaskVerified, proofTimeSec); err != nil {
+func (m *ProofReceiverLogic) closeProofTask(ctx context.Context, hash string, pubKey string, proofMsg *message.ProofMsg) error {
+	if err := m.updateProofStatus(ctx, hash, pubKey, proofMsg.Type, types.ProvingTaskVerified); err != nil {
 		log.Error("failed to updated proof status ProvingTaskVerified", "hash", hash, "pubKey", pubKey, "error", err)
 		return err
 	}
@@ -232,7 +228,13 @@ func (m *ProofReceiverLogic) closeProofTask(ctx context.Context, hash string, pu
 }
 
 // UpdateProofStatus update the chunk/batch task and session info status
-func (m *ProofReceiverLogic) updateProofStatus(ctx context.Context, hash string, proverPublicKey string, proofMsg *message.ProofMsg, status types.ProvingStatus, proofTimeSec uint64) error {
+func (m *ProofReceiverLogic) updateProofStatus(ctx context.Context, hash string, proverPublicKey string, proofMsgType message.ProofType, status types.ProvingStatus) error {
+	// if the prover task failure type is SessionInfoFailureTimeout,
+	// just skip update the status because the proof result come too late.
+	if m.checkIsTimeoutFailure(ctx, hash, proverPublicKey) {
+		return nil
+	}
+
 	var proverTaskStatus types.ProverProveStatus
 	switch status {
 	case types.ProvingTaskFailed, types.ProvingTaskUnassigned:
@@ -242,31 +244,16 @@ func (m *ProofReceiverLogic) updateProofStatus(ctx context.Context, hash string,
 	}
 
 	err := m.db.Transaction(func(tx *gorm.DB) error {
-		if updateErr := m.proverTaskOrm.UpdateProverTaskProvingStatus(ctx, proofMsg.Type, hash, proverPublicKey, proverTaskStatus, tx); updateErr != nil {
+		if updateErr := m.proverTaskOrm.UpdateProverTaskProvingStatus(ctx, proofMsgType, hash, proverPublicKey, proverTaskStatus, tx); updateErr != nil {
 			return updateErr
 		}
 
 		// if the block batch has proof verified, so the failed status not update block batch proving status
-		if status == types.ProvingTaskFailed && m.checkIsTaskSuccess(ctx, hash, proofMsg.Type) {
-			log.Info("update proof status ProvingTaskFailed skip because other prover have prove success", "hash", hash, "public key", proverPublicKey)
+		if status == types.ProvingTaskFailed && m.checkIsTaskSuccess(ctx, hash, proofMsgType) {
 			return nil
 		}
 
-		if status == types.ProvingTaskVerified {
-			var storeProofErr error
-			switch proofMsg.Type {
-			case message.ProofTypeChunk:
-				storeProofErr = m.chunkOrm.UpdateProofByHash(ctx, proofMsg.ID, proofMsg.ChunkProof, proofTimeSec, tx)
-			case message.ProofTypeBatch:
-				storeProofErr = m.batchOrm.UpdateProofByHash(ctx, proofMsg.ID, proofMsg.BatchProof, proofTimeSec, tx)
-			}
-			if storeProofErr != nil {
-				log.Error("failed to store chunk/batch proof into db", "hash", hash, "public key", proverPublicKey, "error", storeProofErr)
-				return storeProofErr
-			}
-		}
-
-		switch proofMsg.Type {
+		switch proofMsgType {
 		case message.ProofTypeChunk:
 			if err := m.chunkOrm.UpdateProvingStatus(ctx, hash, status, tx); err != nil {
 				log.Error("failed to update chunk proving_status as failed", "msg.ID", hash, "error", err)
@@ -285,7 +272,7 @@ func (m *ProofReceiverLogic) updateProofStatus(ctx context.Context, hash string,
 		return err
 	}
 
-	if status == types.ProvingTaskVerified && proofMsg.Type == message.ProofTypeChunk {
+	if status == types.ProvingTaskVerified && proofMsgType == message.ProofTypeChunk {
 		if checkReadyErr := m.checkAreAllChunkProofsReady(ctx, hash); checkReadyErr != nil {
 			log.Error("failed to check are all chunk proofs ready", "error", checkReadyErr)
 			return checkReadyErr
@@ -315,19 +302,14 @@ func (m *ProofReceiverLogic) checkIsTaskSuccess(ctx context.Context, hash string
 	return provingStatus == types.ProvingTaskVerified
 }
 
-func (m *ProofReceiverLogic) updateProverTaskProof(ctx context.Context, pk string, proofMsg *message.ProofMsg) error {
-	// store the proof to prover task
-	var proofBytes []byte
-	var marshalErr error
-	switch proofMsg.Type {
-	case message.ProofTypeChunk:
-		proofBytes, marshalErr = json.Marshal(proofMsg.ChunkProof)
-	case message.ProofTypeBatch:
-		proofBytes, marshalErr = json.Marshal(proofMsg.BatchProof)
-	}
-
-	if len(proofBytes) == 0 || marshalErr != nil {
-		return fmt.Errorf("updateProverTaskProof marshal proof error:%w", marshalErr)
-	}
-	return m.proverTaskOrm.UpdateProverTaskProof(ctx, proofMsg.Type, proofMsg.ID, pk, proofBytes)
+func (m *ProofReceiverLogic) checkIsTimeoutFailure(ctx context.Context, hash, proverPublicKey string) bool {
+	proverTask, err := m.proverTaskOrm.GetProverTaskByTaskIDAndPubKey(ctx, hash, proverPublicKey)
+	if err != nil {
+		return false
+	}
+
+	if types.ProverTaskFailureType(proverTask.FailureType) == types.ProverTaskFailureTypeTimeout {
+		return true
+	}
+	return false
 }
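Taken together, the proof_receiver hunks move proof persistence out of `validator` and into `HandleZkProof`, and replace the in-validator timeout error with a `checkIsTimeoutFailure` lookup at status-update time. A summary sketch of the resulting receive path (a paraphrase of the diff above, not repository code; the function values stand in for the real methods):

```go
package submitproof

import "errors"

// handleZkProof paraphrases the chunk-info receive path: validate, persist
// the proof as Proved, verify, then either close the task as Verified or put
// it back to Unassigned so the collector can hand it out again.
func handleZkProof(
	validate func() error,           // reject duplicate valid proofs / failed generation
	storeProved func() error,        // write proof + mark ProvingTaskProved in one DB transaction
	verify func() (bool, error),     // run the verifier
	closeVerified func() error,      // mark ProvingTaskVerified
	markFailed func(),               // mark ProvingTaskFailed (skipped if the task already timed out)
	recoverUnassigned func(),        // restore ProvingTaskUnassigned
) error {
	if err := validate(); err != nil {
		return err
	}
	if err := storeProved(); err != nil {
		markFailed()
		return err
	}
	ok, err := verify()
	if err != nil || !ok {
		markFailed()
		return errors.New("proof verification failed")
	}
	if err := closeVerified(); err != nil {
		recoverUnassigned()
		return err
	}
	return nil
}
```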
@@ -11,7 +11,6 @@ import (
 
 	"scroll-tech/common/types"
 	"scroll-tech/common/types/message"
-	"scroll-tech/common/utils"
 )
 
 // ProverTask is assigned provers info of chunk/batch proof prover task
@@ -23,7 +22,6 @@ type ProverTask struct {
 	// prover
 	ProverPublicKey string `json:"prover_public_key" gorm:"column:prover_public_key"`
 	ProverName      string `json:"prover_name" gorm:"column:prover_name"`
-	ProverVersion   string `json:"prover_version" gorm:"column:prover_version"`
 
 	// task
 	TaskID string `json:"task_id" gorm:"column:task_id"`
@@ -52,20 +50,6 @@ func (*ProverTask) TableName() string {
 	return "prover_task"
 }
 
-// IsProverAssigned checks if a prover with the given public key has been assigned a task.
-func (o *ProverTask) IsProverAssigned(ctx context.Context, publicKey string) (bool, error) {
-	db := o.db.WithContext(ctx)
-	var task ProverTask
-	err := db.Where("prover_public_key = ? AND proving_status = ?", publicKey, types.ProverAssigned).First(&task).Error
-	if err != nil {
-		if err == gorm.ErrRecordNotFound {
-			return false, nil
-		}
-		return false, err
-	}
-	return true, nil
-}
-
 // GetProverTasks get prover tasks
 func (o *ProverTask) GetProverTasks(ctx context.Context, fields map[string]interface{}, orderByList []string, offset, limit int) ([]ProverTask, error) {
 	db := o.db.WithContext(ctx)
@@ -127,26 +111,11 @@ func (o *ProverTask) GetProverTaskByTaskIDAndPubKey(ctx context.Context, taskID,
 	return &proverTask, nil
 }
 
-// GetProvingStatusByTaskID retrieves the proving status of a prover task
-func (o *ProverTask) GetProvingStatusByTaskID(ctx context.Context, taskID string) (types.ProverProveStatus, error) {
-	db := o.db.WithContext(ctx)
-	db = db.Model(&ProverTask{})
-	db = db.Select("proving_status")
-	db = db.Where("task_id = ?", taskID)
-
-	var proverTask ProverTask
-	if err := db.Find(&proverTask).Error; err != nil {
-		return types.ProverProofInvalid, fmt.Errorf("ProverTask.GetProvingStatusByTaskID error: %w, taskID: %v", err, taskID)
-	}
-	return types.ProverProveStatus(proverTask.ProvingStatus), nil
-}
-
-// GetTimeoutAssignedProverTasks get the timeout and assigned proving_status prover task
-func (o *ProverTask) GetTimeoutAssignedProverTasks(ctx context.Context, limit int, timeout time.Duration) ([]ProverTask, error) {
+// GetAssignedProverTasks get the assigned prover task
+func (o *ProverTask) GetAssignedProverTasks(ctx context.Context, limit int) ([]ProverTask, error) {
 	db := o.db.WithContext(ctx)
 	db = db.Model(&ProverTask{})
 	db = db.Where("proving_status", int(types.ProverAssigned))
-	db = db.Where("assigned_at < ?", utils.NowUTC().Add(-timeout))
 	db = db.Limit(limit)
 
 	var proverTasks []ProverTask
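This is the ORM half of the collector change above. Roughly, the two query shapes differ only in the `assigned_at` predicate; an illustrative sketch of both (simplified stand-in model, placeholder-style conditions):

```go
package orm

import (
	"time"

	"gorm.io/gorm"
)

// ProverTask is a simplified stand-in for the model in the diff.
type ProverTask struct {
	ProvingStatus int16
	AssignedAt    time.Time
}

const proverAssigned = 1 // stand-in for int(types.ProverAssigned)

// timeoutAssigned mirrors the v4.1.33 shape: the timeout lives in SQL,
// e.g. WHERE proving_status = 1 AND assigned_at < now() - timeout LIMIT n.
func timeoutAssigned(db *gorm.DB, timeout time.Duration, limit int) ([]ProverTask, error) {
	var tasks []ProverTask
	err := db.Model(&ProverTask{}).
		Where("proving_status = ?", proverAssigned).
		Where("assigned_at < ?", time.Now().UTC().Add(-timeout)).
		Limit(limit).
		Find(&tasks).Error
	return tasks, err
}

// assigned mirrors the chunk-info shape: fetch every assigned task and let
// the collector filter with time.Since in Go.
func assigned(db *gorm.DB, limit int) ([]ProverTask, error) {
	var tasks []ProverTask
	err := db.Model(&ProverTask{}).
		Where("proving_status = ?", proverAssigned).
		Limit(limit).
		Find(&tasks).Error
	return tasks, err
}
```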
@@ -167,7 +136,7 @@ func (o *ProverTask) SetProverTask(ctx context.Context, proverTask *ProverTask,
 	db = db.Model(&ProverTask{})
 	db = db.Clauses(clause.OnConflict{
 		Columns:   []clause.Column{{Name: "task_type"}, {Name: "task_id"}, {Name: "prover_public_key"}},
-		DoUpdates: clause.AssignmentColumns([]string{"prover_version", "proving_status", "failure_type", "assigned_at"}),
+		DoUpdates: clause.AssignmentColumns([]string{"proving_status", "failure_type", "assigned_at"}),
 	})
 
 	if err := db.Create(&proverTask).Error; err != nil {
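`clause.OnConflict` turns `Create` into an upsert keyed on (task_type, task_id, prover_public_key); the only difference between the two sides is whether `prover_version` is refreshed when a task is re-assigned. A compact, self-contained sketch of the pattern (GORM v2 API; the model is a minimal stand-in):

```go
package orm

import (
	"time"

	"gorm.io/gorm"
	"gorm.io/gorm/clause"
)

// Minimal stand-in for the ProverTask model in the diff.
type ProverTask struct {
	TaskType        int16
	TaskID          string
	ProverPublicKey string
	ProvingStatus   int16
	FailureType     int16
	AssignedAt      time.Time
}

// upsertProverTask shows the clause.OnConflict pattern from SetProverTask:
// on a (task_type, task_id, prover_public_key) collision the listed columns
// are overwritten instead of inserting a duplicate row.
func upsertProverTask(db *gorm.DB, task *ProverTask) error {
	return db.Clauses(clause.OnConflict{
		Columns:   []clause.Column{{Name: "task_type"}, {Name: "task_id"}, {Name: "prover_public_key"}},
		DoUpdates: clause.AssignmentColumns([]string{"proving_status", "failure_type", "assigned_at"}),
	}).Create(task).Error
}
```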
@@ -176,19 +145,6 @@ func (o *ProverTask) SetProverTask(ctx context.Context, proverTask *ProverTask,
 	return nil
 }
 
-// UpdateProverTaskProof update the prover task's proof
-func (o *ProverTask) UpdateProverTaskProof(ctx context.Context, proofType message.ProofType, taskID string, pk string, proof []byte) error {
-	db := o.db
-	db = db.WithContext(ctx)
-	db = db.Model(&ProverTask{})
-	db = db.Where("task_type = ? AND task_id = ? AND prover_public_key = ?", int(proofType), taskID, pk)
-
-	if err := db.Update("proof", proof).Error; err != nil {
-		return fmt.Errorf("ProverTask.UpdateProverTaskProof error: %w, proof type: %v, taskID: %v, prover public key: %v", err, proofType.String(), taskID, pk)
-	}
-	return nil
-}
-
 // UpdateProverTaskProvingStatus updates the proving_status of a specific ProverTask record.
 func (o *ProverTask) UpdateProverTaskProvingStatus(ctx context.Context, proofType message.ProofType, taskID string, pk string, status types.ProverProveStatus, dbTX ...*gorm.DB) error {
 	db := o.db
@@ -23,7 +23,6 @@ import (
 	"scroll-tech/common/docker"
 	"scroll-tech/common/types"
 	"scroll-tech/common/types/message"
-	"scroll-tech/common/version"
 
 	"scroll-tech/coordinator/internal/config"
 	"scroll-tech/coordinator/internal/controller/api"
@@ -38,11 +37,10 @@ var (
 
 	base *docker.App
 
-	db            *gorm.DB
-	l2BlockOrm    *orm.L2Block
-	chunkOrm      *orm.Chunk
-	batchOrm      *orm.Batch
-	proverTaskOrm *orm.ProverTask
+	db         *gorm.DB
+	l2BlockOrm *orm.L2Block
+	chunkOrm   *orm.Chunk
+	batchOrm   *orm.Batch
 
 	wrappedBlock1 *types.WrappedBlock
 	wrappedBlock2 *types.WrappedBlock
@@ -109,8 +107,6 @@ func setupCoordinator(t *testing.T, proversPerSession uint8, coordinatorURL stri
 }
 
 func setEnv(t *testing.T) {
-	version.Version = "v1.2.3-aaa-bbb-ccc"
-
 	base = docker.NewDockerApp()
 	base.RunDBImage(t)
@@ -131,7 +127,6 @@ func setEnv(t *testing.T) {
 	batchOrm = orm.NewBatch(db)
 	chunkOrm = orm.NewChunk(db)
 	l2BlockOrm = orm.NewL2Block(db)
-	proverTaskOrm = orm.NewProverTask(db)
 
 	templateBlockTrace, err := os.ReadFile("../../common/testdata/blockTrace_02.json")
 	assert.NoError(t, err)
@@ -367,12 +362,8 @@ func testProofGeneratedFailed(t *testing.T) {
 		tickStop = time.Tick(time.Minute)
 	)
 
-	var (
-		chunkProofStatus             types.ProvingStatus
-		batchProofStatus             types.ProvingStatus
-		chunkProverTaskProvingStatus types.ProverProveStatus
-		batchProverTaskProvingStatus types.ProverProveStatus
-	)
+	var chunkProofStatus types.ProvingStatus
+	var batchProofStatus types.ProvingStatus
 
 	for {
 		select {
@@ -381,15 +372,7 @@ func testProofGeneratedFailed(t *testing.T) {
 			assert.NoError(t, err)
 			batchProofStatus, err = batchOrm.GetProvingStatusByHash(context.Background(), batch.Hash)
 			assert.NoError(t, err)
-			if chunkProofStatus == types.ProvingTaskAssigned && batchProofStatus == types.ProvingTaskAssigned {
-				return
-			}
-
-			chunkProverTaskProvingStatus, err = proverTaskOrm.GetProvingStatusByTaskID(context.Background(), dbChunk.Hash)
-			assert.NoError(t, err)
-			batchProverTaskProvingStatus, err = proverTaskOrm.GetProvingStatusByTaskID(context.Background(), batch.Hash)
-			assert.NoError(t, err)
-			if chunkProverTaskProvingStatus == types.ProverProofInvalid && batchProverTaskProvingStatus == types.ProverProofInvalid {
+			if chunkProofStatus == types.ProvingTaskFailed && batchProofStatus == types.ProvingTaskFailed {
 				return
 			}
 		case <-tickStop:
@@ -14,7 +14,6 @@ import (
 
 	ctypes "scroll-tech/common/types"
 	"scroll-tech/common/types/message"
-	"scroll-tech/common/version"
 
 	"scroll-tech/coordinator/internal/logic/verifier"
 	"scroll-tech/coordinator/internal/types"
@@ -79,7 +78,7 @@ func (r *mockProver) login(t *testing.T, challengeString string) string {
 		Identity: &message.Identity{
 			Challenge:     challengeString,
 			ProverName:    "test",
-			ProverVersion: version.Version,
+			ProverVersion: "v1.0.0",
 		},
 	}
 	assert.NoError(t, authMsg.SignWithKey(r.privKey))
@@ -163,16 +162,11 @@ func (r *mockProver) getProverTask(t *testing.T, proofType message.ProofType) *t
|
||||
}
|
||||
|
||||
func (r *mockProver) submitProof(t *testing.T, proverTaskSchema *types.GetTaskSchema, proofStatus proofStatus, errCode int) {
|
||||
proofMsgStatus := message.StatusOk
|
||||
if proofStatus == generatedFailed {
|
||||
proofMsgStatus = message.StatusProofError
|
||||
}
|
||||
|
||||
proof := &message.ProofMsg{
|
||||
ProofDetail: &message.ProofDetail{
|
||||
ID: proverTaskSchema.TaskID,
|
||||
Type: message.ProofType(proverTaskSchema.TaskType),
|
||||
Status: proofMsgStatus,
|
||||
Status: message.RespStatus(proofStatus),
|
||||
ChunkProof: &message.ChunkProof{},
|
||||
BatchProof: &message.BatchProof{},
|
||||
},
|
||||
|
||||
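Note: submitProof no longer maps the test's proofStatus onto message.StatusOk / message.StatusProofError by hand; it casts it straight to message.RespStatus. That only works while the two enums stay value-aligned. A hedged sketch of the assumption (the concrete constants and their order are guesses, not taken from this diff):

```go
// Hedged sketch of the value alignment the direct cast relies on; if the
// test-local proofStatus and message.RespStatus ever diverge, the cast
// silently sends the wrong status code.
type proofStatus uint32

const (
	verifiedSuccess proofStatus = iota // assumed to line up with message.StatusOk
	generatedFailed                    // assumed to line up with message.StatusProofError
)

// submitProof now performs this conversion directly:
func toRespStatus(s proofStatus) message.RespStatus {
	return message.RespStatus(s)
}
```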
@@ -8,7 +8,6 @@ create table prover_task
    -- prover
    prover_public_key VARCHAR NOT NULL,
    prover_name VARCHAR NOT NULL,
-   prover_version VARCHAR NOT NULL,

    -- task
    task_id VARCHAR NOT NULL,
@@ -1,8 +1,4 @@
-.PHONY: clean build test docker
-
-IMAGE_NAME=prover-stats-api
-IMAGE_VERSION=latest
-REPO_ROOT_DIR=./..
+.PHONY: clean build test

build:
	GOBIN=$(PWD)/build/bin go build -o $(PWD)/build/bin/prover-stats-api ./cmd
@@ -18,6 +14,3 @@ test:

lint: ## Lint the files - used for CI
	GOBIN=$(PWD)/build/bin go run ../build/lint.go
-
-docker:
-	DOCKER_BUILDKIT=1 docker build -t scrolltech/${IMAGE_NAME}:${IMAGE_VERSION} ${REPO_ROOT_DIR}/ -f ${REPO_ROOT_DIR}/build/dockerfiles/prover-stats-api.Dockerfile
@@ -20,7 +20,6 @@ type ProverTask struct {
	TaskID          string `json:"task_id" gorm:"column:task_id"`
	ProverPublicKey string `json:"prover_public_key" gorm:"column:prover_public_key"`
	ProverName      string `json:"prover_name" gorm:"column:prover_name"`
-	ProverVersion   string `json:"prover_version" gorm:"column:prover_version"`
	TaskType        int16  `json:"task_type" gorm:"column:task_type;default:0"`
	ProvingStatus   int16  `json:"proving_status" gorm:"column:proving_status;default:0"`
	FailureType     int16  `json:"failure_type" gorm:"column:failure_type;default:0"`
@@ -95,7 +94,7 @@ func (o *ProverTask) SetProverTask(ctx context.Context, proverTask *ProverTask,
	db = db.Model(&ProverTask{})
	db = db.Clauses(clause.OnConflict{
		Columns:   []clause.Column{{Name: "task_type"}, {Name: "task_id"}, {Name: "prover_public_key"}},
-		DoUpdates: clause.AssignmentColumns([]string{"prover_version", "proving_status", "failure_type", "assigned_at"}),
+		DoUpdates: clause.AssignmentColumns([]string{"proving_status", "failure_type", "assigned_at"}),
	})

	if err := db.Create(&proverTask).Error; err != nil {
@@ -31,8 +31,7 @@ func init() {
	}

	// Register `prover-test` app for integration-test.
-	utils.RegisterSimulation(app, utils.ChunkProverApp)
-	utils.RegisterSimulation(app, utils.BatchProverApp)
+	utils.RegisterSimulation(app, utils.ProverApp)
}

func action(ctx *cli.Context) error {
@@ -6,21 +6,11 @@ import (
	"time"

	"scroll-tech/common/cmd"
	"scroll-tech/common/utils"
-	"scroll-tech/common/version"
)

-func TestRunChunkProver(t *testing.T) {
-	prover := cmd.NewCmd(string(utils.ChunkProverApp), "--version")
-	defer prover.WaitExit()
-
-	// wait result
-	prover.ExpectWithTimeout(t, true, time.Second*3, fmt.Sprintf("prover version %s", version.Version))
-	prover.RunApp(nil)
-}
-
-func TestRunBatchProver(t *testing.T) {
-	prover := cmd.NewCmd(string(utils.BatchProverApp), "--version")
+func TestRunProver(t *testing.T) {
+	prover := cmd.NewCmd("prover-test", "--version")
	defer prover.WaitExit()

	// wait result
@@ -4,12 +4,15 @@ import (
	"encoding/json"
	"fmt"
	"os"
+	"sync"
	"testing"
	"time"

+	"github.com/google/uuid"
	"github.com/scroll-tech/go-ethereum/rpc"
+	"golang.org/x/sync/errgroup"

-	"scroll-tech/prover/config"
+	proverConfig "scroll-tech/prover/config"

	"scroll-tech/common/cmd"
	"scroll-tech/common/docker"
@@ -28,7 +31,7 @@ func getIndex() int {

// ProverApp prover-test client manager.
type ProverApp struct {
-	Config *config.Config
+	Config *proverConfig.Config

	base *docker.App
@@ -38,30 +41,23 @@ type ProverApp struct {

	index int
	name  string
+	args  []string
	docker.AppAPI
}

// NewProverApp return a new proverApp manager.
-func NewProverApp(base *docker.App, mockName utils.MockAppName, file string, httpURL string) *ProverApp {
-	var proofType message.ProofType
-	switch mockName {
-	case utils.ChunkProverApp:
-		proofType = message.ProofTypeChunk
-	case utils.BatchProverApp:
-		proofType = message.ProofTypeBatch
-	default:
-		return nil
-	}
-	name := string(mockName)
-	proverFile := fmt.Sprintf("/tmp/%d_%s-config.json", base.Timestamp, name)
+func NewProverApp(base *docker.App, file string, httpURL string, proofType message.ProofType) *ProverApp {
+	uuid := uuid.New().String()
+
+	proverFile := fmt.Sprintf("/tmp/%s_%d_prover-config.json", uuid, base.Timestamp)
	proverApp := &ProverApp{
		base:       base,
		originFile: file,
		proverFile: proverFile,
-		bboltDB:    fmt.Sprintf("/tmp/%d_%s_bbolt_db", base.Timestamp, name),
+		bboltDB:    fmt.Sprintf("/tmp/%s_%d_bbolt_db", uuid, base.Timestamp),
		index:      getIndex(),
-		name:       name,
-		AppAPI:     cmd.NewCmd(name, []string{"--log.debug", "--config", proverFile}...),
+		name:       string(utils.ProverApp),
+		args:       []string{"--log.debug", "--config", proverFile},
	}
	if err := proverApp.MockConfig(true, httpURL, proofType); err != nil {
		panic(err)
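With the mock-app-name dispatch gone, callers pick the proof type explicitly and the same binary is configured as either a chunk or a batch prover. This mirrors how the integration test's TestMain hunk further below constructs both flavors:

```go
// Constructing both prover flavors from the single prover-test binary;
// taken from the TestMain change later in this diff.
chunkProverApp := rapp.NewProverApp(base, "../../prover/config.json", coordinatorApp.HTTPEndpoint(), message.ProofTypeChunk)
batchProverApp := rapp.NewProverApp(base, "../../prover/config.json", coordinatorApp.HTTPEndpoint(), message.ProofTypeBatch)
```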
@@ -70,10 +66,18 @@ func NewProverApp(base *docker.App, mockName utils.MockAppName, file string, htt
}

// RunApp run prover-test child process by multi parameters.
-func (r *ProverApp) RunApp(t *testing.T) {
+func (r *ProverApp) RunApp(t *testing.T, args ...string) {
+	r.AppAPI = cmd.NewCmd(r.name, append(r.args, args...)...)
	r.AppAPI.RunApp(func() bool { return r.AppAPI.WaitResult(t, time.Second*40, "prover start successfully") })
}

+// RunAppWithExpectedResult runs the prover-test child process with multiple parameters,
+// and checks for a specific expected result in the output.
+func (r *ProverApp) RunAppWithExpectedResult(t *testing.T, expectedResult string, args ...string) {
+	r.AppAPI = cmd.NewCmd(r.name, append(r.args, args...)...)
+	r.AppAPI.RunApp(func() bool { return r.AppAPI.WaitResult(t, time.Second*40, expectedResult) })
+}
+
// Free stop and release prover-test.
func (r *ProverApp) Free() {
	if !utils.IsNil(r.AppAPI) {
@@ -86,7 +90,7 @@ func (r *ProverApp) Free() {

// MockConfig creates a new prover config.
func (r *ProverApp) MockConfig(store bool, httpURL string, proofType message.ProofType) error {
-	cfg, err := config.NewConfig(r.originFile)
+	cfg, err := proverConfig.NewConfig(r.originFile)
	if err != nil {
		return err
	}
@@ -119,3 +123,47 @@ func (r *ProverApp) MockConfig(store bool, httpURL string, proofType message.Pro
	}
	return os.WriteFile(r.proverFile, data, 0600)
}
+
+// ProverApps proverApp list.
+type ProverApps []*ProverApp
+
+// RunApps starts all the proverApps.
+func (r ProverApps) RunApps(t *testing.T, args ...string) {
+	var eg errgroup.Group
+	for i := range r {
+		i := i
+		eg.Go(func() error {
+			r[i].RunApp(t, args...)
+			return nil
+		})
+	}
+	_ = eg.Wait()
+}
+
+// Free releases proverApps.
+func (r ProverApps) Free() {
+	var wg sync.WaitGroup
+	wg.Add(len(r))
+	for i := range r {
+		i := i
+		go func() {
+			r[i].Free()
+			wg.Done()
+		}()
+	}
+	wg.Wait()
+}
+
+// WaitExit wait proverApps stopped.
+func (r ProverApps) WaitExit() {
+	var wg sync.WaitGroup
+	wg.Add(len(r))
+	for i := range r {
+		i := i
+		go func() {
+			r[i].WaitExit()
+			wg.Done()
+		}()
+	}
+	wg.Wait()
+}
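Two details in the new ProverApps helpers are worth flagging. This module targets Go 1.19 (see the go.mod hunk below), so the `i := i` copy is required to pin the loop variable before each goroutine captures it; per-iteration loop variables only arrived in Go 1.22. And RunApps uses errgroup.Group even though every closure returns nil, which leaves room to propagate startup errors later, while Free and WaitExit use a plain sync.WaitGroup since those calls cannot fail. A reduced sketch of the capture pitfall (`apps` and `use` are placeholders):

```go
// Without the inner copy, every goroutine reads the shared loop variable,
// which may already hold its final value by the time the goroutines run.
for i := range apps {
	i := i // pin the current index for this goroutine (needed pre-Go 1.22)
	go func() {
		use(apps[i])
	}()
}
```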
@@ -13,7 +13,6 @@ import (
	"github.com/scroll-tech/go-ethereum/core/types"
	"github.com/stretchr/testify/assert"

-	scrollTypes "scroll-tech/common/types"
	"scroll-tech/common/types/message"

	"scroll-tech/prover/config"
@@ -21,68 +20,16 @@ import (
)

var (
-	paramsPath    = flag.String("params", "/assets/test_params", "params dir")
-	proofDumpPath = flag.String("dump", "/assets/proof_data", "the path proofs dump to")
-	tracePath1    = flag.String("trace1", "/assets/traces/1_transfer.json", "chunk trace 1")
-	tracePath2    = flag.String("trace2", "/assets/traces/10_transfer.json", "chunk trace 2")
+	paramsPath      = flag.String("params", "/assets/test_params", "params dir")
+	proofDumpPath   = flag.String("dump", "/assets/proof_data", "the path proofs dump to")
+	tracePath1      = flag.String("trace1", "/assets/traces/1_transfer.json", "chunk trace 1")
+	tracePath2      = flag.String("trace2", "/assets/traces/10_transfer.json", "chunk trace 2")
+	batchDetailPath = flag.String("batch_detail", "/assets/traces/full_proof_3.json", "batch detail")
)

func TestFFI(t *testing.T) {
	as := assert.New(t)

-	chunkProverConfig := &config.ProverCoreConfig{
-		DumpDir:    *proofDumpPath,
-		ParamsPath: *paramsPath,
-		ProofType:  message.ProofTypeChunk,
-	}
-	chunkProverCore, err := core.NewProverCore(chunkProverConfig)
-	as.NoError(err)
-	t.Log("Constructed chunk prover")
-
-	chunkTrace1 := readChunkTrace(*tracePath1, as)
-	chunkTrace2 := readChunkTrace(*tracePath2, as)
-	t.Log("Loaded chunk traces")
-
-	chunkInfo1, err := chunkProverCore.TracesToChunkInfo(chunkTrace1)
-	as.NoError(err)
-	chunkInfo2, err := chunkProverCore.TracesToChunkInfo(chunkTrace2)
-	as.NoError(err)
-	t.Log("Converted to chunk infos")
-
-	wrappedBlock1 := &scrollTypes.WrappedBlock{
-		Header:       chunkTrace1[0].Header,
-		Transactions: chunkTrace1[0].Transactions,
-		WithdrawRoot: chunkTrace1[0].WithdrawTrieRoot,
-	}
-	chunk1 := &scrollTypes.Chunk{Blocks: []*scrollTypes.WrappedBlock{wrappedBlock1}}
-	chunkHash1, err := chunk1.Hash(0)
-	as.NoError(err)
-	as.Equal(chunkInfo1.PostStateRoot, wrappedBlock1.Header.Root)
-	as.Equal(chunkInfo1.WithdrawRoot, wrappedBlock1.WithdrawRoot)
-	as.Equal(chunkInfo1.DataHash, chunkHash1)
-	t.Log("Successful to check chunk info 1")
-
-	wrappedBlock2 := &scrollTypes.WrappedBlock{
-		Header:       chunkTrace2[0].Header,
-		Transactions: chunkTrace2[0].Transactions,
-		WithdrawRoot: chunkTrace2[0].WithdrawTrieRoot,
-	}
-	chunk2 := &scrollTypes.Chunk{Blocks: []*scrollTypes.WrappedBlock{wrappedBlock2}}
-	chunkHash2, err := chunk2.Hash(chunk1.NumL1Messages(0))
-	as.NoError(err)
-	as.Equal(chunkInfo2.PostStateRoot, wrappedBlock2.Header.Root)
-	as.Equal(chunkInfo2.WithdrawRoot, wrappedBlock2.WithdrawRoot)
-	as.Equal(chunkInfo2.DataHash, chunkHash2)
-	t.Log("Successful to check chunk info 2")
-
-	chunkProof1, err := chunkProverCore.ProveChunk("chunk_proof1", chunkTrace1)
-	as.NoError(err)
-	t.Log("Generated and dumped chunk proof 1")
-
-	chunkProof2, err := chunkProverCore.ProveChunk("chunk_proof2", chunkTrace2)
-	as.NoError(err)
-	t.Log("Generated and dumped chunk proof 2")
-
	batchProverConfig := &config.ProverCoreConfig{
		DumpDir:    *proofDumpPath,
		ParamsPath: *paramsPath,
@@ -91,13 +38,27 @@ func TestFFI(t *testing.T) {
	batchProverCore, err := core.NewProverCore(batchProverConfig)
	as.NoError(err)

-	chunkInfos := []*message.ChunkInfo{chunkInfo1, chunkInfo2}
-	chunkProofs := []*message.ChunkProof{chunkProof1, chunkProof2}
+	// gupeng
+	batchDetail := readBatchDetail(*batchDetailPath, as)
+	chunkInfos := batchDetail.ChunkInfos
+	chunkProofs := batchDetail.ChunkProofs

	_, err = batchProverCore.ProveBatch("batch_proof", chunkInfos, chunkProofs)
	as.NoError(err)
	t.Log("Generated and dumped batch proof")
}

+func readBatchDetail(filePat string, as *assert.Assertions) *message.BatchTaskDetail {
+	f, err := os.Open(filePat)
+	as.NoError(err)
+	byt, err := io.ReadAll(f)
+	as.NoError(err)
+
+	batchDetail := &message.BatchTaskDetail{}
+	as.NoError(json.Unmarshal(byt, batchDetail))
+
+	return batchDetail
+}
func readChunkTrace(filePat string, as *assert.Assertions) []*types.BlockTrace {
	f, err := os.Open(filePat)
	as.NoError(err)
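Note: readBatchDetail treats the -batch_detail fixture as a JSON-encoded message.BatchTaskDetail and feeds its chunk infos and proofs straight into ProveBatch; it needs `encoding/json`, `io`, and `os` imported in this file, which the import hunk shown here (covering only the scrollTypes removal) does not prove. Only the two fields used above are confirmed by this diff; a hedged sketch of the shape being deserialized (the JSON tags are guesses):

```go
// Hedged sketch of the fixture shape readBatchDetail expects; only
// ChunkInfos and ChunkProofs are confirmed by the test, and they must
// line up index-by-index for ProveBatch.
type batchTaskDetailSketch struct {
	ChunkInfos  []*message.ChunkInfo  `json:"chunk_infos"`
	ChunkProofs []*message.ChunkProof `json:"chunk_proofs"`
}
```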
@@ -4,10 +4,12 @@ go 1.19

require (
	github.com/go-resty/resty/v2 v2.7.0
+	github.com/google/uuid v1.3.0
	github.com/scroll-tech/go-ethereum v1.10.14-0.20230804022247-26eeb40ea3ca
	github.com/stretchr/testify v1.8.3
	github.com/urfave/cli/v2 v2.25.7
	go.etcd.io/bbolt v1.3.7
+	golang.org/x/sync v0.3.0
)

require (
@@ -19,7 +21,6 @@ require (
	github.com/go-ole/go-ole v1.2.6 // indirect
	github.com/go-stack/stack v1.8.1 // indirect
	github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
-	github.com/google/uuid v1.3.0 // indirect
	github.com/gorilla/websocket v1.5.0 // indirect
	github.com/holiman/uint256 v1.2.3 // indirect
	github.com/huin/goupnp v1.0.3 // indirect
@@ -44,7 +45,6 @@ require (
	github.com/yusufpapurcu/wmi v1.2.2 // indirect
	golang.org/x/crypto v0.11.0 // indirect
	golang.org/x/net v0.12.0 // indirect
-	golang.org/x/sync v0.3.0 // indirect
	golang.org/x/sys v0.10.0 // indirect
	golang.org/x/time v0.3.0 // indirect
	gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
@@ -22,8 +22,8 @@ import (
	"scroll-tech/common/database"
	"scroll-tech/common/docker"
	"scroll-tech/common/types"
+	"scroll-tech/common/types/message"
	"scroll-tech/common/utils"
-	"scroll-tech/common/version"

	bcmd "scroll-tech/bridge/cmd"
)
@@ -40,8 +40,8 @@ func TestMain(m *testing.M) {
	base = docker.NewDockerApp()
	bridgeApp = bcmd.NewBridgeApp(base, "../../bridge/conf/config.json")
	coordinatorApp = capp.NewCoordinatorApp(base, "../../coordinator/conf/config.json")
-	chunkProverApp = rapp.NewProverApp(base, utils.ChunkProverApp, "../../prover/config.json", coordinatorApp.HTTPEndpoint())
-	batchProverApp = rapp.NewProverApp(base, utils.BatchProverApp, "../../prover/config.json", coordinatorApp.HTTPEndpoint())
+	chunkProverApp = rapp.NewProverApp(base, "../../prover/config.json", coordinatorApp.HTTPEndpoint(), message.ProofTypeChunk)
+	batchProverApp = rapp.NewProverApp(base, "../../prover/config.json", coordinatorApp.HTTPEndpoint(), message.ProofTypeBatch)
	m.Run()
	bridgeApp.Free()
	coordinatorApp.Free()
@@ -112,21 +112,17 @@ func TestCoordinatorProverInteraction(t *testing.T) {
	assert.NoError(t, err)
	err = chunkOrm.UpdateBatchHashInRange(context.Background(), 0, 0, batch.Hash)
	assert.NoError(t, err)
-	t.Log(version.Version)

	// Run coordinator app.
	coordinatorApp.RunApp(t)

	// Run prover app.
-	chunkProverApp.ExpectWithTimeout(t, true, time.Second*40, "proof submitted successfully") // chunk prover login -> get task -> submit proof.
-	batchProverApp.ExpectWithTimeout(t, true, time.Second*40, "proof submitted successfully") // batch prover login -> get task -> submit proof.
+	chunkProverApp.RunAppWithExpectedResult(t, "proof submitted successfully") // chunk prover login -> get task -> submit proof.
+	batchProverApp.RunAppWithExpectedResult(t, "proof submitted successfully") // batch prover login -> get task -> submit proof.

	// All task has been proven, coordinator would not return any task.
-	chunkProverApp.ExpectWithTimeout(t, true, 60*time.Second, "get empty prover task")
-	batchProverApp.ExpectWithTimeout(t, true, 60*time.Second, "get empty prover task")
-
-	chunkProverApp.RunApp(t)
-	batchProverApp.RunApp(t)
+	chunkProverApp.ExpectWithTimeout(t, false, 60*time.Second, "get empty prover task")
+	batchProverApp.ExpectWithTimeout(t, false, 60*time.Second, "get empty prover task")

	// Free apps.
	chunkProverApp.WaitExit()
@@ -143,12 +139,10 @@ func TestProverReLogin(t *testing.T) {

	// Run coordinator app.
	coordinatorApp.RunApp(t) // login timeout: 1 sec
-	chunkProverApp.RunApp(t)
-	batchProverApp.RunApp(t)

	// Run prover app.
-	chunkProverApp.WaitResult(t, time.Second*40, "re-login success") // chunk prover login.
-	batchProverApp.WaitResult(t, time.Second*40, "re-login success") // batch prover login.
+	chunkProverApp.RunAppWithExpectedResult(t, "re-login success") // chunk prover login.
+	batchProverApp.RunAppWithExpectedResult(t, "re-login success") // batch prover login.

	// Free apps.
	chunkProverApp.WaitExit()
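Taken together, the integration tests now follow a start-and-assert lifecycle per prover: RunAppWithExpectedResult launches the child process and blocks until the expected log line appears (within the 40-second budget set in the helper), and WaitExit reaps it. A condensed sketch of the lifecycle, using only calls that appear in the hunks above:

```go
// Condensed prover lifecycle as exercised by TestProverReLogin above.
coordinatorApp.RunApp(t)                                       // start coordinator
chunkProverApp.RunAppWithExpectedResult(t, "re-login success") // start prover, wait for the log line
chunkProverApp.WaitExit()                                      // reap the child process
```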