feat: Decompression Prover: improved error reporting (#786)

* feat: decompression circuit improved error reporting

* increase max blob size to 774.54KB

* fix: use blob decompression response

---------

Signed-off-by: Arya Tabaie <arya.pourtabatabaie@gmail.com>
Author: Arya Tabaie
Date: 2025-03-17 16:31:36 -05:00
Committed by: GitHub
Parent commit: 3b8c5c94cc
Commit: aa405ebbda

3 changed files with 20 additions and 1 deletion


@@ -4,6 +4,7 @@ import (
	"bytes"
	"encoding/base64"
	"fmt"
	blob_v0 "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v0"
	blob_v1 "github.com/consensys/linea-monorepo/prover/lib/compressor/blob/v1"
@@ -11,6 +12,7 @@ import (
	fr381 "github.com/consensys/gnark-crypto/ecc/bls12-381/fr"
	"github.com/consensys/linea-monorepo/prover/circuits"
	"github.com/consensys/linea-monorepo/prover/circuits/blobdecompression"
	v1 "github.com/consensys/linea-monorepo/prover/circuits/blobdecompression/v1"
	"github.com/consensys/linea-monorepo/prover/circuits/dummy"
	"github.com/consensys/linea-monorepo/prover/config"
	"github.com/consensys/linea-monorepo/prover/lib/compressor/blob"
@@ -144,6 +146,17 @@ func Prove(cfg *config.Config, req *Request) (*Response, error) {
		emPlonk.GetNativeVerifierOptions(ecc.BW6_761.ScalarField(), ecc.BLS12_377.ScalarField()),
	}
	// Add the MaxUncompressedBytes field
	// This is actually not required for an assignment
	// But in case the proof fails, ProveCheck will use
	// the assignment as a circuit for the test engine
	switch c := assignment.(type) {
	case *v1.Circuit:
		c.MaxBlobPayloadNbBytes = maxUncompressedBytes
	default:
		logrus.Warnf("decompression circuit of type %T. test engine might give an incorrect result.", c)
	}
	// This actually runs the decompression prover
	logrus.Infof("running the decompression prover")
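
The switch above matters because gnark's test engine derives the circuit shape (e.g. slice lengths and size fields) from whatever struct it is handed; if ProveCheck falls back to re-running the assignment as the circuit, fields such as MaxBlobPayloadNbBytes must already be populated on the assignment. A minimal, self-contained sketch of that idea, using a toy circuit (not the actual v1.Circuit) and gnark's test.IsSolved:

```go
package main

import (
	"fmt"

	"github.com/consensys/gnark-crypto/ecc"
	"github.com/consensys/gnark/frontend"
	"github.com/consensys/gnark/test"
)

// toyCircuit is a stand-in for v1.Circuit: the length of Payload is part of
// the circuit's shape, so it must be set on any struct used as the circuit.
type toyCircuit struct {
	Payload []frontend.Variable
	Sum     frontend.Variable `gnark:",public"`
}

func (c *toyCircuit) Define(api frontend.API) error {
	sum := frontend.Variable(0)
	for i := range c.Payload {
		sum = api.Add(sum, c.Payload[i])
	}
	api.AssertIsEqual(sum, c.Sum)
	return nil
}

func main() {
	// The assignment carries both the witness values and, implicitly, the
	// circuit shape (len(Payload) == 3).
	assignment := &toyCircuit{
		Payload: []frontend.Variable{1, 2, 3},
		Sum:     6,
	}
	// Reuse the assignment as the circuit for the test engine, analogous to
	// what the comment in the diff describes ProveCheck doing on failure.
	if err := test.IsSolved(assignment, assignment, ecc.BLS12_377.ScalarField()); err != nil {
		fmt.Println("test engine:", err)
		return
	}
	fmt.Println("constraints satisfied")
}
```

Here the same struct serves as both circuit definition and witness, which is why an under-populated assignment (missing its size fields) could make the test engine report an incorrect result, as the warning above notes.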


@@ -258,10 +258,16 @@ func AssignFPI(blobBytes []byte, dictStore dictionary.Store, eip4844Enabled bool
		return
	}
	if len(r.RawPayload) > blob.MaxUncompressedBytes {
		err = fmt.Errorf("decompression circuit assignment: blob payload too large : %d. max %d", len(r.RawPayload), blob.MaxUncompressedBytes)
		return
	}
	if r.Header.NbBatches() > MaxNbBatches {
		err = fmt.Errorf("decompression circuit assignment : too many batches in the header : %d. max %d", r.Header.NbBatches(), MaxNbBatches)
		return
	}
	batchEnds := make([]int, r.Header.NbBatches())
	if r.Header.NbBatches() > 0 {
		batchEnds[0] = r.Header.BatchSizes[0]
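
The new checks follow a simple guard-clause pattern: validate the decoded blob against the circuit's limits up front and return a descriptive error, instead of failing later inside the assignment with an opaque message. A self-contained sketch of the same pattern, using illustrative local constants rather than the real blob.MaxUncompressedBytes / MaxNbBatches values:

```go
package main

import "fmt"

// Illustrative stand-ins; the real limits live in the blob and v1 packages.
const (
	maxUncompressedBytes = 793125
	maxNbBatches         = 100 // hypothetical value, for demonstration only
)

// checkBlobLimits mirrors the guard clauses added in AssignFPI: each limit
// violation is reported with both the offending value and the maximum.
func checkBlobLimits(payloadLen, nbBatches int) error {
	if payloadLen > maxUncompressedBytes {
		return fmt.Errorf("blob payload too large: %d, max %d", payloadLen, maxUncompressedBytes)
	}
	if nbBatches > maxNbBatches {
		return fmt.Errorf("too many batches in the header: %d, max %d", nbBatches, maxNbBatches)
	}
	return nil
}

func main() {
	if err := checkBlobLimits(800_000, 3); err != nil {
		fmt.Println(err) // blob payload too large: 800000, max 793125
	}
}
```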


@@ -24,7 +24,7 @@ const (
	NbElemsEncodingBytes = 2
	// These also impact the circuit constraints (compile / setup time)
-	MaxUncompressedBytes = 756240 // ~738.5KB defines the max size we can handle for a blob (uncompressed) input
+	MaxUncompressedBytes = 793125 // ~774.54KB defines the max size we can handle for a blob (uncompressed) input
	MaxUsableBytes = 32 * 4096 // defines the number of bytes available in a blob
)
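
As a sanity check on the sizes quoted in the comments and in the commit message (taking 1 KB = 1024 bytes, matching the source comments), a quick sketch:

```go
package main

import "fmt"

func main() {
	const (
		oldMax         = 756240    // previous MaxUncompressedBytes
		newMax         = 793125    // new MaxUncompressedBytes
		maxUsableBytes = 32 * 4096 // bytes available in a blob
	)
	fmt.Printf("old max uncompressed: %.2f KB\n", float64(oldMax)/1024)         // ~738.52 KB
	fmt.Printf("new max uncompressed: %.2f KB\n", float64(newMax)/1024)         // ~774.54 KB
	fmt.Printf("usable blob size:     %.2f KB\n", float64(maxUsableBytes)/1024) // 128.00 KB
}
```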