Prover: all the fixes for beta-v1.a (#90)

* feat(limits): unhardcode the keccak limits
* limits(shomei): double the state limits
* fix(rlpaddr): uses the right stamp column for rlpaddr
* fix(ecpair): fix an invalid constraint and a bug in the assignment
* log(plonk): logs which circuit is failing in plonk-in-wizard
* log(projection): print a more informative message when a projection query fails
* feat(csv): adds more options to FmtCSV
* fix(mimc): removes an unnecessary constraint for mimc padding
* fix(mimc): address the edge-case where the mimc data size is a multiple of 31
* fix(sha2): uses the correct selectors to extract the sha2 hashes
* chores(makefile): don't recreate the zkevm.bin when linting or running the checker
* chore(make): adds the bin/checker in the .PHONY
* chores(cs): bump the constraints to rc7
* fix(arithmetization): fix invalid casting into a variable
This commit is contained in:
AlexandreBelling
2024-09-25 12:15:08 +08:00
committed by GitHub
parent 5256e04ced
commit bba9677418
17 changed files with 128 additions and 43 deletions

View File

@@ -29,6 +29,7 @@ CORSET := PATH="${CORSET_ROOT}/target/release":$$PATH
lib/compressor-and-shnarf-calculator-local \
docker \
bin/prover \
bin/checker \
corset \
testdata \
@@ -102,7 +103,7 @@ bin/compression-aggregation-sample:
##
## corset-checker
##
bin/checker: zkevm/arithmetization/zkevm.bin
bin/checker:
mkdir -p bin
rm -f $@
go build -o $@ ./cmd/dev-tools/corset-checker
@@ -140,7 +141,7 @@ test:
##
## Run the CI linting
##
ci-lint: zkevm/arithmetization/zkevm.bin
ci-lint:
golangci-lint run --timeout 5m
##

View File

@@ -95,7 +95,11 @@ func setDefaultTracesLimit() {
viper.SetDefault("traces_limits.SHF_REFERENCE_TABLE", 4096)
viper.SetDefault("traces_limits.INSTRUCTION_DECODER", 512)
// Shomei limits
viper.SetDefault("traces_limits.SHOMEI_MERKLE_PROOFS", 16384)
// Large Limits
// Arithmetization modules
viper.SetDefault("traces_limits_large.ADD", 1048576)
viper.SetDefault("traces_limits_large.BIN", 524288)
@@ -152,4 +156,7 @@ func setDefaultTracesLimit() {
viper.SetDefault("traces_limits_large.SHF_REFERENCE_TABLE", 4096)
viper.SetDefault("traces_limits_large.INSTRUCTION_DECODER", 512)
// Shomei limits
viper.SetDefault("traces_limits_large.SHOMEI_MERKLE_PROOFS", 32768)
}

View File

@@ -64,6 +64,8 @@ type TracesLimits struct {
BlockL1Size int `mapstructure:"BLOCK_L1_SIZE"`
BlockL2L1Logs int `mapstructure:"BLOCK_L2_L1_LOGS"`
BlockTransactions int `mapstructure:"BLOCK_TRANSACTIONS"`
ShomeiMerkleProofs int `mapstructure:"SHOMEI_MERKLE_PROOFS"`
}
func (tl *TracesLimits) Checksum() string {

View File

@@ -179,7 +179,7 @@ func (ctx compilationCtx) runGnarkPlonkProver(
)
if err != nil {
utils.Panic("Error in the solver: %v", err)
utils.Panic("Error in the solver: circ=%v err=%v", ctx.name, err)
}
// Once the solver has finished, return the solution

View File

@@ -289,8 +289,24 @@ func (pa projectionProverAction) Run(run *wizard.ProverRuntime) {
cntB++
}
for i := range rowsA {
fmt.Printf("row=%v %v %v\n", i, strings.Join(rowsA[i], " "), strings.Join(rowsB[i], " "))
larger := max(len(rowsA), len(rowsB))
for i := 0; i < larger; i++ {
var (
fa = "* * * * * *"
fb = "* * * * * *"
)
if i < len(rowsA) {
fa = strings.Join(rowsA[i], " ")
}
if i < len(rowsB) {
fb = strings.Join(rowsB[i], " ")
}
fmt.Printf("row=%v %v %v\n", i, fa, fb)
}
logrus.Errorf("projection query %v failed", pa.Name)

View File

@@ -17,7 +17,9 @@ import (
type cfg struct {
// The number of rows in the trace
nbRows int
nbRows int
skipPrePaddingZero bool
filterOn ifaces.Column
}
type Option func(*cfg) error
@@ -30,6 +32,21 @@ func WithNbRows(nbRows int) Option {
}
}
// SkipPrepaddingZero skips the zeroes at the beginning of the file
func SkipPrepaddingZero(c *cfg) error {
c.skipPrePaddingZero = true
return nil
}
// FilterOn sets the CSV printer to ignore rows where the provided filter
// column is zero.
func FilterOn(col ifaces.Column) Option {
return func(c *cfg) error {
c.filterOn = col
return nil
}
}
type CsvTrace struct {
mapped map[string][]field.Element
@@ -54,13 +71,20 @@ func MustOpenCsvFile(fName string) *CsvTrace {
// FmtCsv is a utility function that can be used in order to print a set of column
// in a csv format so that debugging and testcase generation are simpler.
func FmtCsv(w io.Writer, run *wizard.ProverRuntime, cols []ifaces.Column) error {
func FmtCsv(w io.Writer, run *wizard.ProverRuntime, cols []ifaces.Column, options []Option) error {
var (
header = []string{}
assignment = [][]field.Element{}
header = []string{}
assignment = [][]field.Element{}
cfg = cfg{}
foundNonZero = false
filterCol []field.Element
)
for _, op := range options {
op(&cfg)
}
for i := range cols {
header = append(header, string(cols[i].GetColID()))
assignment = append(assignment, cols[i].GetColAssignment(run).IntoRegVecSaveAlloc())
@@ -68,11 +92,23 @@ func FmtCsv(w io.Writer, run *wizard.ProverRuntime, cols []ifaces.Column) error
fmt.Fprintf(w, "%v\n", strings.Join(header, ","))
if cfg.filterOn != nil {
filterCol = cfg.filterOn.GetColAssignment(run).IntoRegVecSaveAlloc()
}
for r := range assignment[0] {
fmtVals := []string{}
var (
fmtVals = []string{}
allZeroes = true
)
for c := range assignment {
if !assignment[c][r].IsZero() {
allZeroes = false
}
if assignment[c][r].IsUint64() {
fmtVals = append(fmtVals, assignment[c][r].String())
continue
@@ -81,7 +117,17 @@ func FmtCsv(w io.Writer, run *wizard.ProverRuntime, cols []ifaces.Column) error
fmtVals = append(fmtVals, "0x"+assignment[c][r].Text(16))
}
fmt.Fprintf(w, "%v\n", strings.Join(fmtVals, ","))
if !allZeroes {
foundNonZero = true
}
if filterCol != nil && filterCol[r].IsZero() {
continue
}
if !cfg.skipPrePaddingZero || !allZeroes || foundNonZero {
fmt.Fprintf(w, "%v\n", strings.Join(fmtVals, ","))
}
}
return nil

View File

@@ -160,7 +160,7 @@ func (s *schemaScanner) addConstraintInComp(name string, corsetCS schema.Constra
wExpr = wExpr.ReconstructBottomUp(
func(e *symbolic.Expression, children []*symbolic.Expression) (new *symbolic.Expression) {
v, isV := e.Operator.(*symbolic.Variable)
v, isV := e.Operator.(symbolic.Variable)
if !isV {
return e.SameWithNewChildren(children)
}

File diff suppressed because one or more lines are too long

View File

@@ -24,16 +24,6 @@ import (
"github.com/consensys/linea-monorepo/prover/zkevm/prover/statemanager/accumulator"
)
const (
// @TODO: the keccak limits are hardcoded currently, in the future we should
// instead take the limits from the trace limit file. Note, neither of these
// limits are actually enforced by the prover as the keccak module is not
// connected to the rest of the arithmetization. Thus, it is easy to just
// ignore the overflowing keccaks and the state merkle-proofs.
keccakLimit = 1 << 13
merkleProofLimit = 1 << 13
)
var (
fullZkEvm *ZkEvm
fullZkEvmCheckOnly *ZkEvm
@@ -141,7 +131,7 @@ func fullZKEVMWithSuite(tl *config.TracesLimits, suite compilationSuite) *ZkEvm
},
Statemanager: statemanager.Settings{
AccSettings: accumulator.Settings{
MaxNumProofs: merkleProofLimit,
MaxNumProofs: tl.ShomeiMerkleProofs,
Name: "SM_ACCUMULATOR",
MerkleTreeDepth: 40,
},
@@ -152,7 +142,7 @@ func fullZKEVMWithSuite(tl *config.TracesLimits, suite compilationSuite) *ZkEvm
Version: "beta-v1",
},
Keccak: keccak.Settings{
MaxNumKeccakf: keccakLimit,
MaxNumKeccakf: tl.BlockKeccak,
},
Ecdsa: ecdsa.Settings{
MaxNbEcRecover: tl.PrecompileEcrecoverEffectiveCalls,

View File

@@ -50,6 +50,8 @@ type ECPair struct {
*UnalignedPairingData
*UnalignedG2MembershipData
CptPrevEqualCurrID wizard.ProverAction
AlignedG2MembershipData *plonk.Alignment
AlignedMillerLoopCircuit *plonk.Alignment
AlignedFinalExpCircuit *plonk.Alignment

View File

@@ -10,10 +10,14 @@ import (
// Assign assigns the data to the circuit
func (ec *ECPair) Assign(run *wizard.ProverRuntime) {
// assign data to the pairing check part
ec.assignPairingData(run)
// assign data to the membership check part
ec.assignMembershipData(run)
// assign the column telling whether the previous and the current row have
// the same id.
ec.CptPrevEqualCurrID.Run(run)
// general assignments
var (
@@ -22,6 +26,7 @@ func (ec *ECPair) Assign(run *wizard.ProverRuntime) {
srcIsPairingPulling = ec.UnalignedPairingData.IsPulling.GetColAssignment(run).IntoRegVecSaveAlloc()
srcIsPairingComputed = ec.UnalignedPairingData.IsComputed.GetColAssignment(run).IntoRegVecSaveAlloc()
)
if len(srcIsG2Pulling) != len(srcIsG2Computed) || len(srcIsG2Pulling) != len(srcIsPairingPulling) || len(srcIsG2Pulling) != len(srcIsPairingComputed) {
utils.Panic("ECPair: input length mismatch")
}
@@ -95,10 +100,6 @@ func (ec *ECPair) assignPairingData(run *wizard.ProverRuntime) {
// inputs.
nbInputs := 1 // we start with 1 because we always have at least one pairing input
for {
// first we do a sanity check - we should expect to see IsResult but if the input is shorter then panic
if currPos+nbInputs*(nbG1Limbs+nbG2Limbs+2) >= len(srcLimbs) {
utils.Panic("ECPair: not enough data for pairing at pos %d", currPos)
}
if srcIsRes[currPos+nbInputs*(nbG1Limbs+nbG2Limbs)].IsOne() {
break
}

View File

@@ -1,7 +1,10 @@
package ecpair
import (
"github.com/consensys/linea-monorepo/prover/maths/field"
"github.com/consensys/linea-monorepo/prover/protocol/column"
"github.com/consensys/linea-monorepo/prover/protocol/column/verifiercol"
"github.com/consensys/linea-monorepo/prover/protocol/dedicated"
"github.com/consensys/linea-monorepo/prover/protocol/dedicated/projection"
"github.com/consensys/linea-monorepo/prover/protocol/ifaces"
"github.com/consensys/linea-monorepo/prover/protocol/wizard"
@@ -94,16 +97,27 @@ func (ec *ECPair) csConstantWhenIsComputing(comp *wizard.CompiledIOP) {
func (ec *ECPair) csInstanceIDChangeWhenNewInstance(comp *wizard.CompiledIOP) {
// when we are at the first line of the new instance then the instance ID
// should change
prevEqualCurrID, cptPrevEqualCurrID := dedicated.IsZero(
comp,
sym.Sub(
ec.UnalignedPairingData.InstanceID,
column.Shift(ec.UnalignedPairingData.InstanceID, -1),
),
)
ec.CptPrevEqualCurrID = cptPrevEqualCurrID
// IF IS_ACTIVE AND FIRST_LINE AND INSTANCE_ID != 0 => INSTANCE_ID_{i} = INSTANCE_ID_{i-1} + 1
// And the constraint does not apply on the first row.
comp.InsertGlobal(
roundNr,
ifaces.QueryIDf("%v_INSTANCE_ID_CHANGE", nameECPair),
sym.Mul(
column.Shift(verifiercol.NewConstantCol(field.One(), ec.IsActive.Size()), -1), // this "useless" line helps cancelling the constraint on the first row
ec.IsActive,
ec.UnalignedPairingData.IsFirstLineOfInstance,
ec.UnalignedPairingData.InstanceID,
sym.Sub(ec.UnalignedPairingData.InstanceID, column.Shift(ec.UnalignedPairingData.InstanceID, -1), 1),
prevEqualCurrID,
),
)
}

View File

@@ -149,14 +149,16 @@ func ImportAndPad(comp *wizard.CompiledIOP, inp ImportAndPadInputs, numRows int)
sym.Sub(res.IsNewHash, sym.Mul(res.IsInserted, column.Shift(res.IsPadded, -1))),
)
// before IsActive transits to 0, there should be a padding zone.
// IsActive[i] * (1-IsActive[i+1]) * (1-IsPadded[i]) =0
comp.InsertGlobal(0, ifaces.QueryIDf("%v_LAST_HASH_HAS_PADDING", inp.Name),
sym.Mul(res.IsActive,
sym.Sub(1, column.Shift(res.IsActive, 1)),
sym.Sub(1, res.IsPadded),
),
)
if inp.PaddingStrategy != generic.MiMCUsecase {
// before IsActive transits to 0, there should be a padding zone.
// IsActive[i] * (1-IsActive[i+1]) * (1-IsPadded[i]) =0
comp.InsertGlobal(0, ifaces.QueryIDf("%v_LAST_HASH_HAS_PADDING", inp.Name),
sym.Mul(res.IsActive,
sym.Sub(1, column.Shift(res.IsActive, 1)),
sym.Sub(1, res.IsPadded),
),
)
}
// to handle the above constraint for the case where isActive[i] = 1 for all i .
// IsPadded[last-row]= isActive[last-row]

View File

@@ -49,6 +49,10 @@ func (sp mimcPadder) pushPaddingRows(byteStringSize int, ipad *importationAssign
accPaddedBytes = 0
)
if remainToPad == blocksize {
remainToPad = 0
}
for remainToPad > 0 {
currNbBytes := utils.Min(remainToPad, 16)
accPaddedBytes += currNbBytes

View File

@@ -105,14 +105,14 @@ func getShakiraArithmetization(comp *wizard.CompiledIOP) generic.GenericByteModu
func getRlpAddArithmetization(comp *wizard.CompiledIOP) generic.GenericByteModule {
return generic.GenericByteModule{
Data: generic.GenDataModule{
HashNum: comp.Columns.GetHandle("rlpaddr.COUNTER"),
HashNum: comp.Columns.GetHandle("rlpaddr.STAMP"),
Index: comp.Columns.GetHandle("rlpaddr.INDEX"),
Limb: comp.Columns.GetHandle("rlpaddr.LIMB"),
NBytes: comp.Columns.GetHandle("rlpaddr.nBYTES"),
ToHash: comp.Columns.GetHandle("rlpaddr.LC"),
},
Info: generic.GenInfoModule{
HashNum: comp.Columns.GetHandle("rlpaddr.COUNTER"),
HashNum: comp.Columns.GetHandle("rlpaddr.STAMP"),
HashLo: comp.Columns.GetHandle("rlpaddr.DEP_ADDR_LO"),
HashHi: comp.Columns.GetHandle("rlpaddr.RAW_ADDR_HI"),
IsHashLo: comp.Columns.GetHandle("rlpaddr.SELECTOR_KECCAK_RES"),

View File

@@ -115,13 +115,13 @@ func newSha2SingleProvider(comp *wizard.CompiledIOP, inp Sha2SingleProviderInput
projection.InsertProjection(comp, "SHA2_RES_HI",
[]ifaces.Column{cSha2.HashHi},
[]ifaces.Column{inp.Provider.Info.HashHi},
cSha2.IsActive,
cSha2.IsEffFirstLaneOfNewHash,
inp.Provider.Info.IsHashHi,
)
projection.InsertProjection(comp, "SHA2_RES_LO",
[]ifaces.Column{cSha2.HashLo},
[]ifaces.Column{inp.Provider.Info.HashLo},
cSha2.IsActive,
cSha2.IsEffFirstLaneOfNewHash,
inp.Provider.Info.IsHashLo,
)