Compare commits


2 Commits

Author  SHA1        Message                                      Date
colin   8471838cd4  fix(rollup-relayer): catch errors (#1427)    2024-07-10 22:56:16 +08:00
colin   c812288250  build: update dockerfiles (#1426)            2024-07-10 19:42:27 +08:00
                    Co-authored-by: colinlyguo <colinlyguo@users.noreply.github.com>
12 changed files with 90 additions and 30 deletions

View File

@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM golang:1.21-alpine3.19 as base
+FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
 WORKDIR /src
 COPY go.mod* ./
@@ -11,11 +11,13 @@ FROM base as builder
 RUN --mount=target=. \
     --mount=type=cache,target=/root/.cache/go-build \
-    cd /src/bridge-history-api/cmd/api && go build -v -p 4 -o /bin/bridgehistoryapi-api
+    cd /src/bridge-history-api/cmd/api && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" go build -v -p 4 -o /bin/bridgehistoryapi-api
-# Pull bridgehistoryapi-api into a second stage deploy alpine container
-FROM alpine:latest
+# Pull bridgehistoryapi-api into a second stage deploy ubuntu container
+FROM ubuntu:20.04
+ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
 COPY --from=builder /bin/bridgehistoryapi-api /bin/
 WORKDIR /app
-ENTRYPOINT ["bridgehistoryapi-api"]
+ENTRYPOINT ["bridgehistoryapi-api"]

View File

@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM golang:1.21-alpine3.19 as base
+FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
 WORKDIR /src
 COPY go.mod* ./
@@ -11,11 +11,13 @@ FROM base as builder
 RUN --mount=target=. \
     --mount=type=cache,target=/root/.cache/go-build \
-    cd /src/bridge-history-api/cmd/fetcher && go build -v -p 4 -o /bin/bridgehistoryapi-fetcher
+    cd /src/bridge-history-api/cmd/fetcher && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" go build -v -p 4 -o /bin/bridgehistoryapi-fetcher
-# Pull bridgehistoryapi-fetcher into a second stage deploy alpine container
-FROM alpine:latest
+# Pull bridgehistoryapi-fetcher into a second stage deploy ubuntu container
+FROM ubuntu:20.04
+ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
 COPY --from=builder /bin/bridgehistoryapi-fetcher /bin/
 WORKDIR /app
-ENTRYPOINT ["bridgehistoryapi-fetcher"]
+ENTRYPOINT ["bridgehistoryapi-fetcher"]

View File

@@ -23,7 +23,6 @@ COPY ./rollup/go.* ./rollup/
 COPY ./common/go.* ./common/
 COPY ./coordinator/go.* ./coordinator/
 COPY ./database/go.* ./database/
-COPY ./prover/go.* ./prover/
 COPY ./tests/integration-test/go.* ./tests/integration-test/
 COPY ./bridge-history-api/go.* ./bridge-history-api/
 RUN go mod download -x
@@ -34,11 +33,12 @@ FROM base as builder
 COPY . .
 RUN cp -r ./common/libzkp/interface ./coordinator/internal/logic/verifier/lib
 COPY --from=zkp-builder /app/target/release/libzkp.so ./coordinator/internal/logic/verifier/lib/
-RUN cd ./coordinator && make coordinator_api_skip_libzkp && mv ./build/bin/coordinator_api /bin/coordinator_api && mv internal/logic/verifier/lib /bin/
+RUN cd ./coordinator && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" make coordinator_api_skip_libzkp && mv ./build/bin/coordinator_api /bin/coordinator_api && mv internal/logic/verifier/lib /bin/
-# Pull coordinator into a second stage deploy alpine container
+# Pull coordinator into a second stage deploy ubuntu container
 FROM ubuntu:20.04
 ENV LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/src/coordinator/internal/logic/verifier/lib
+ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
 # ENV CHAIN_ID=534353
 RUN mkdir -p /src/coordinator/internal/logic/verifier/lib
 COPY --from=builder /bin/lib /src/coordinator/internal/logic/verifier/lib

View File

@@ -1,5 +1,5 @@
 # Download Go dependencies
-FROM scrolltech/go-alpine-builder:1.21 as base
+FROM scrolltech/go-rust-builder:go-1.21-rust-nightly-2023-12-03 as base
 WORKDIR /src
 COPY go.work* ./
@@ -7,7 +7,6 @@ COPY ./rollup/go.* ./rollup/
 COPY ./common/go.* ./common/
 COPY ./coordinator/go.* ./coordinator/
 COPY ./database/go.* ./database/
-COPY ./prover/go.* ./prover/
 COPY ./tests/integration-test/go.* ./tests/integration-test/
 COPY ./bridge-history-api/go.* ./bridge-history-api/
 RUN go mod download -x
@@ -16,10 +15,13 @@ RUN go mod download -x
 FROM base as builder
 RUN --mount=target=. \
     --mount=type=cache,target=/root/.cache/go-build \
-    cd /src/coordinator/cmd/cron/ && go build -v -p 4 -o /bin/coordinator_cron
+    cd /src/coordinator/cmd/cron/ && CGO_LDFLAGS="-Wl,--no-as-needed -ldl" go build -v -p 4 -o /bin/coordinator_cron
-# Pull coordinator into a second stage deploy alpine container
-FROM alpine:latest
+# Pull coordinator into a second stage deploy ubuntu container
+FROM ubuntu:20.04
+ENV CGO_LDFLAGS="-Wl,--no-as-needed -ldl"
 COPY --from=builder /bin/coordinator_cron /bin/
 WORKDIR /app
-ENTRYPOINT ["coordinator_cron"]
+ENTRYPOINT ["coordinator_cron"]

View File

@@ -7,7 +7,6 @@ COPY ./rollup/go.* ./rollup/
 COPY ./common/go.* ./common/
 COPY ./coordinator/go.* ./coordinator/
 COPY ./database/go.* ./database/
-COPY ./prover/go.* ./prover/
 COPY ./tests/integration-test/go.* ./tests/integration-test/
 COPY ./bridge-history-api/go.* ./bridge-history-api/
 RUN go mod download -x

View File

@@ -7,7 +7,6 @@ COPY ./rollup/go.* ./rollup/
 COPY ./common/go.* ./common/
 COPY ./coordinator/go.* ./coordinator/
 COPY ./database/go.* ./database/
-COPY ./prover/go.* ./prover/
 COPY ./tests/integration-test/go.* ./tests/integration-test/
 COPY ./bridge-history-api/go.* ./bridge-history-api/
 RUN go mod download -x
@@ -19,11 +18,11 @@ RUN --mount=target=. \
     --mount=type=cache,target=/root/.cache/go-build \
     cd /src/rollup/cmd/gas_oracle/ && CGO_LDFLAGS="-ldl" go build -v -p 4 -o /bin/gas_oracle
-# Pull gas_oracle into a second stage deploy alpine container
+# Pull gas_oracle into a second stage deploy ubuntu container
 FROM ubuntu:20.04
 ENV CGO_LDFLAGS="-ldl"
 COPY --from=builder /bin/gas_oracle /bin/
 WORKDIR /app
-ENTRYPOINT ["gas_oracle"]
+ENTRYPOINT ["gas_oracle"]

View File

@@ -7,7 +7,6 @@ COPY ./rollup/go.* ./rollup/
 COPY ./common/go.* ./common/
 COPY ./coordinator/go.* ./coordinator/
 COPY ./database/go.* ./database/
-COPY ./prover/go.* ./prover/
 COPY ./tests/integration-test/go.* ./tests/integration-test/
 COPY ./bridge-history-api/go.* ./bridge-history-api/
 RUN go mod download -x
@@ -19,7 +18,7 @@ RUN --mount=target=. \
     --mount=type=cache,target=/root/.cache/go-build \
     cd /src/rollup/cmd/rollup_relayer/ && CGO_LDFLAGS="-ldl" go build -v -p 4 -o /bin/rollup_relayer
-# Pull rollup_relayer into a second stage deploy alpine container
+# Pull rollup_relayer into a second stage deploy ubuntu container
 FROM ubuntu:20.04
 ENV CGO_LDFLAGS="-ldl"

View File

@@ -5,7 +5,7 @@ import (
 	"runtime/debug"
 )

-var tag = "v4.4.25"
+var tag = "v4.4.27"

 var commit = func() string {
 	if info, ok := debug.ReadBuildInfo(); ok {
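The commit identifier is resolved from the build metadata the Go toolchain embeds. A minimal sketch of that pattern, assuming the closure continues by scanning the build settings for the VCS revision (the repository's actual implementation may differ):

package version

import "runtime/debug"

var tag = "v4.4.27"

// commit is resolved once at package init time from the build info that the
// Go toolchain records when building inside a git checkout (Go 1.18+).
var commit = func() string {
	if info, ok := debug.ReadBuildInfo(); ok {
		for _, setting := range info.Settings {
			// "vcs.revision" carries the full git commit hash.
			if setting.Key == "vcs.revision" && len(setting.Value) >= 8 {
				return setting.Value[:8] // a short hash is enough for version strings
			}
		}
	}
	return "unknown"
}()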

View File

@@ -240,7 +240,8 @@ func (p *BatchProposer) proposeBatch() error {
 		batch.Chunks = append(batch.Chunks, chunk)
 		metrics, calcErr := utils.CalculateBatchMetrics(&batch, codecVersion)
-		if errors.Is(calcErr, &encoding.CompressedDataCompatibilityError{}) {
+		var compressErr *encoding.CompressedDataCompatibilityError
+		if errors.As(calcErr, &compressErr) {
 			if i == 0 {
 				// The first chunk fails compressed data compatibility check, manual fix is needed.
 				return fmt.Errorf("the first chunk fails compressed data compatibility check; start block number: %v, end block number: %v", dbChunks[0].StartBlockNumber, dbChunks[0].EndBlockNumber)
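The switch from errors.Is to errors.As is the substance of the rollup-relayer fix: errors.Is compares plain struct errors by identity, so a freshly allocated &encoding.CompressedDataCompatibilityError{} never equals the instance wrapped inside the returned error, and the branch was unreachable; errors.As matches by dynamic type instead. A minimal, self-contained sketch of the difference, using LocalCompatErr as a hypothetical stand-in for the da-codec error type:

package main

import (
	"errors"
	"fmt"
)

// LocalCompatErr stands in for encoding.CompressedDataCompatibilityError.
type LocalCompatErr struct{ Err error }

func (e *LocalCompatErr) Error() string { return "compressed data compatibility: " + e.Err.Error() }
func (e *LocalCompatErr) Unwrap() error { return e.Err }

func main() {
	// An error chain like the one returned by the metrics calculation.
	err := fmt.Errorf("estimate blob size: %w", &LocalCompatErr{Err: errors.New("batch too large")})

	// errors.Is compares with == (no custom Is method here), so a new
	// &LocalCompatErr{} is never identical to the wrapped pointer: prints false.
	fmt.Println(errors.Is(err, &LocalCompatErr{}))

	// errors.As matches by type and fills the target: prints true.
	var target *LocalCompatErr
	fmt.Println(errors.As(err, &target))
}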

View File

@@ -242,7 +242,8 @@ func (p *ChunkProposer) proposeChunk() error {
 		chunk.Blocks = append(chunk.Blocks, block)
 		metrics, calcErr := utils.CalculateChunkMetrics(&chunk, codecVersion)
-		if errors.Is(calcErr, &encoding.CompressedDataCompatibilityError{}) {
+		var compressErr *encoding.CompressedDataCompatibilityError
+		if errors.As(calcErr, &compressErr) {
 			if i == 0 {
 				// The first block fails compressed data compatibility check, manual fix is needed.
 				return fmt.Errorf("the first block fails compressed data compatibility check; block number: %v", block.Header.Number)

View File

@@ -92,7 +92,8 @@ func CalculateChunkMetrics(chunk *encoding.Chunk, codecVersion encoding.CodecVer
 		metrics.L1CommitUncompressedBatchBytesSize, metrics.L1CommitBlobSize, err = codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize(chunk)
 		metrics.EstimateBlobSizeTime = time.Since(start)
 		if err != nil {
-			if errors.Is(err, &encoding.CompressedDataCompatibilityError{}) {
+			var compressErr *encoding.CompressedDataCompatibilityError
+			if errors.As(err, &compressErr) {
 				return nil, err
 			} else {
 				return nil, fmt.Errorf("failed to estimate codecv2 chunk L1 commit batch size and blob size: %w", err)
@@ -176,7 +177,8 @@ func CalculateBatchMetrics(batch *encoding.Batch, codecVersion encoding.CodecVer
 		metrics.L1CommitUncompressedBatchBytesSize, metrics.L1CommitBlobSize, err = codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize(batch)
 		metrics.EstimateBlobSizeTime = time.Since(start)
 		if err != nil {
-			if errors.Is(err, &encoding.CompressedDataCompatibilityError{}) {
+			var compressErr *encoding.CompressedDataCompatibilityError
+			if errors.As(err, &compressErr) {
 				return nil, err
 			} else {
 				return nil, fmt.Errorf("failed to estimate codecv2 batch L1 commit batch size and blob size: %w", err)

View File

@@ -0,0 +1,53 @@
package utils

import (
	"errors"
	"math/big"
	"testing"

	"github.com/agiledragon/gomonkey/v2"
	"github.com/scroll-tech/da-codec/encoding"
	"github.com/scroll-tech/da-codec/encoding/codecv2"
	"github.com/scroll-tech/go-ethereum/common"
	"github.com/scroll-tech/go-ethereum/core/types"
	"github.com/stretchr/testify/assert"
)

// regression test
func TestCompressedDataCompatibilityErrorCatching(t *testing.T) {
	block := &encoding.Block{
		Header: &types.Header{
			Number: big.NewInt(0),
		},
		RowConsumption: &types.RowConsumption{},
	}

	chunk := &encoding.Chunk{
		Blocks: []*encoding.Block{block},
	}

	batch := &encoding.Batch{
		Index:                      0,
		TotalL1MessagePoppedBefore: 0,
		ParentBatchHash:            common.Hash{},
		Chunks:                     []*encoding.Chunk{chunk},
	}

	patchGuard1 := gomonkey.ApplyFunc(codecv2.EstimateChunkL1CommitBatchSizeAndBlobSize, func(b *encoding.Chunk) (uint64, uint64, error) {
		return 0, 0, &encoding.CompressedDataCompatibilityError{Err: errors.New("test-error-1")}
	})
	defer patchGuard1.Reset()

	var compressErr *encoding.CompressedDataCompatibilityError

	_, err := CalculateChunkMetrics(chunk, encoding.CodecV2)
	assert.Error(t, err)
	assert.ErrorAs(t, err, &compressErr)

	patchGuard2 := gomonkey.ApplyFunc(codecv2.EstimateBatchL1CommitBatchSizeAndBlobSize, func(b *encoding.Batch) (uint64, uint64, error) {
		return 0, 0, &encoding.CompressedDataCompatibilityError{Err: errors.New("test-error-2")}
	})
	defer patchGuard2.Reset()

	_, err = CalculateBatchMetrics(batch, encoding.CodecV2)
	assert.Error(t, err)
	assert.ErrorAs(t, err, &compressErr)
}
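A note on running this regression test: gomonkey.ApplyFunc patches the target function's entry point at runtime, so the stubs only take effect when the compiler has not inlined the patched estimate functions. The usual precaution for gomonkey-based tests (an assumption here, since the diff does not show the test invocation) is to disable inlining, e.g. go test -gcflags=all=-l.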