Prover: beta v2 (#796)

* feat(state): connect the Hub and the StateSummary

* Merged main into prover/beta-v2

* constraints: bump to beta-v2.0-rc1

* use beta-v2.0-rc1 zkevm.bin generated by go-corset

* bump go-corset to v0.9.1

* fix go-corset compile errors

* bump constraints to beta v2.0-rc1.1

* go-corset v0.9.1 with metadata

* fix ReadZkevmBin error to align with go-corset v0.9.1

* add optimization level to CheckerZkEvm

* add detailed logs to ReadZkevmBin

* feat(state-manager):adding arithmetization column names for ACP and SCP

* bump go-corset to v0.9.2

* fix go-corset v0.9.2 compatibility

* constraints: bump to beta-v2.1-rc1

* use zkevm.bin without 2B block gas limit for testing

* fix(state manager):batch numbers on prover side + updated 20-byte address calculation

* feat(go-corset): go-corset as submodule (#743)

* remove corset submodule

* add go-corset submodule

* use go-corset in Makefile

* use go-corset v0.9.2

* update go-corset submodule url

* fix(go-corset): typo in submodule url

* fix(state-manager): fixed Shomei issue through skippable traces + missing HUB processed address constraints + cosmetic refactoring

* fix(state-manager):fixed wrong begin/end markers for account segments due to skipped Shomei traces

* fix(state-manager): added an intermediary keccakcodehash column in state summary for the HUB connection + its constraints

* (bump prover/beta-v2 branch): constraints beta-v2.1-rc4 and go-corset v1.0.1 (#766)

- bump constraints from beta-v2.1-rc1 to rc4
- bump go-corset from v0.9.2 to v1.0.1
- this commit bumps the prover/beta-v2 branch to the latest version of constraints and go-corset without rebasing onto main

* fix(state-manager): account insertion's storage segments are excluded in the lookup from HUB to state summary.

* fix(state-manager): added missing constraints for the insertion edge case.

* (bump beta-v2 branch): constraints beta-v2.1-rc6 and go-corset v1.0.2

* fix(state-manager): address edge case for SSLOAD out of gas exception behavior that results in no traces on Shomei side, but rows on the HUB side.

* remove polluting changes

* fix(state-manager): address edge case for SSLOAD out of gas exception behavior that results in no traces on Shomei side, but rows on the HUB side, proper commit.

* fix(state-manager): address bug in the constancy constraint for the INSERT filter .

* minor(alignment): better error message when overflowing a precompile limit

* remove old v3 testdata before merging origin/main

* bump: constraints beta-v2.1-rc10 and go-corset v1.0.3

* chore: remove unnecessary code

* revert pnpm-lock.yaml to main branch version

* update go.sum

* revert state-recovery/appcore/logic/build.gradle to main version

Remove the files to revert back to the main version. Its directory structure has changed.

Signed-off-by: Leo Jeong <dreamerty@postech.ac.kr>

* Revert "revert state-recovery/appcore/logic/build.gradle to main version"

This reverts commit 97f6c92230e511a66a29fd597bf059c9c003adfe.

* revert back to the mainnet build.gradle

* fixup(test): fix a test in the state-summary

* minor(csv): comment out the csv tracing

---------

Signed-off-by: Leo Jeong <dreamerty@postech.ac.kr>
Co-authored-by: AlexandreBelling <alexandrebelling8@gmail.com>
Co-authored-by: Bogdan Ursu <bogdanursuoffice@gmail.com>
This commit is contained in:
Leo Jeong
2025-03-24 07:13:49 +09:00
committed by GitHub
parent 77239f43a1
commit a46b0ef5a3
18 changed files with 1319 additions and 202 deletions

View File

@@ -42,13 +42,6 @@ go-corset:
zkevm/arithmetization/zkevm.bin: go-corset
cd ../constraints && $(GO_CORSET) make zkevm.bin && mv zkevm.bin ../prover/zkevm/arithmetization
##
## zkevm.bin without 2B block gas limit constraint to test old traces
##
zkevm/arithmetization/zkevm_for_old_trace.bin: go-corset
cd ../constraints && $(GO_CORSET) make zkevm_for_old_replay_tests.bin &&\
mv zkevm_for_old_replay_tests.bin ../prover/zkevm/arithmetization/zkevm_for_old_trace.bin
##
## Generate the setup for the execution prover (to be run with S3 access)
##

View File

@@ -71,6 +71,8 @@ type DecodedTrace struct {
// Can be any type of trace in the 5 possible types, for either the
// world-state or a storage-trie.
Underlying accumulator.Trace
// decides whether the trace should be skipped
IsSkipped bool
}
func (dec *DecodedTrace) UnmarshalJSON(data []byte) error {

View File

@@ -10,7 +10,7 @@ require (
github.com/consensys/compress v0.2.5
github.com/consensys/gnark v0.11.1-0.20250107100237-2cb190338a01
github.com/consensys/gnark-crypto v0.14.1-0.20241217134352-810063550bd4
github.com/consensys/go-corset v1.0.1
github.com/consensys/go-corset v1.0.3
github.com/crate-crypto/go-kzg-4844 v1.1.0
github.com/dlclark/regexp2 v1.11.2
github.com/fxamacker/cbor/v2 v2.7.0

View File

@@ -100,8 +100,8 @@ github.com/consensys/gnark v0.11.1-0.20250107100237-2cb190338a01 h1:YCHI04nMKFC6
github.com/consensys/gnark v0.11.1-0.20250107100237-2cb190338a01/go.mod h1:8YNyW/+XsYiLRzROLaj/PSktYO4VAdv6YW1b1P3UsZk=
github.com/consensys/gnark-crypto v0.14.1-0.20241217134352-810063550bd4 h1:Kp6egjRqKZf4469dfAWqFe6gi3MRs4VvNHmTfEjUlS8=
github.com/consensys/gnark-crypto v0.14.1-0.20241217134352-810063550bd4/go.mod h1:GMPeN3dUSslNBYJsK3WTjIGd3l0ccfMbcEh/d5knFrc=
github.com/consensys/go-corset v1.0.1 h1:rO3ffDUz9DhWVIEQ5LKJhrFoU228QeAG+vAU0plR1r0=
github.com/consensys/go-corset v1.0.1/go.mod h1:JKJTywyjBE0Goco4DokW4BAkF6R+jtoo3XgkICwxrcw=
github.com/consensys/go-corset v1.0.3 h1:CZ04qi9NaMkMJnfv19UUHGhEXJZwpDNrAo5fwDbT1OM=
github.com/consensys/go-corset v1.0.3/go.mod h1:JKJTywyjBE0Goco4DokW4BAkF6R+jtoo3XgkICwxrcw=
github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=
github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc=

View File

@@ -2,6 +2,7 @@ package plonk
import (
"fmt"
"os"
"sync"
"github.com/consensys/gnark-crypto/ecc"
@@ -16,6 +17,7 @@ import (
"github.com/consensys/linea-monorepo/prover/protocol/wizard"
"github.com/consensys/linea-monorepo/prover/symbolic"
"github.com/consensys/linea-monorepo/prover/utils"
"github.com/sirupsen/logrus"
"golang.org/x/net/context"
"golang.org/x/sync/errgroup"
)
@@ -84,8 +86,19 @@ func (ci *CircuitAlignmentInput) NbInstances() int {
// prepareWitnesses prepares the witnesses for every circuit instance. It is
// called inside the Once so that we do not prepare the witnesses multiple
// times. Safe to call multiple times, it is idempotent after the first call.
// The function checks how many instances of the circuit are called and panics
// if this uncovers an overflow.
func (ci *CircuitAlignmentInput) prepareWitnesses(run *wizard.ProverRuntime) {
ci.witnessesOnce.Do(func() {
if err := ci.checkNbCircuitInvocation(run); err != nil {
// Don't use the fatal level here because we want to control the exit code
// to be 77.
logrus.Errorf("fatal=%v", err)
os.Exit(77)
}
if ci.InputFiller == nil {
ci.InputFiller = func(_, _ int) field.Element { return field.Zero() }
}
@@ -100,6 +113,7 @@ func (ci *CircuitAlignmentInput) prepareWitnesses(run *wizard.ProverRuntime) {
totalInputs++
}
}
// prepare witness for every circuit instance NB! keep in mind that we only
// have public inputs. So the public and private inputs match. Due to
// interface definition we have to return both but in practice have only a
@@ -178,6 +192,31 @@ func (ci *CircuitAlignmentInput) NumEffWitnesses(run *wizard.ProverRuntime) int
return ci.numEffWitnesses
}
// checkNbCircuitInvocation checks that the number of times the circuit is called
// does not go above the [maxNbInstance] limit and returns an error if it does.
func (ci *CircuitAlignmentInput) checkNbCircuitInvocation(run *wizard.ProverRuntime) error {
var (
mask = ci.DataToCircuitMask.GetColAssignment(run).IntoRegVecSaveAlloc()
count = 0
)
for i := range mask {
if mask[i].IsOne() {
count++
}
}
if count > ci.nbPublicInputs*ci.NbCircuitInstances {
return fmt.Errorf(
"[circuit-alignement] too many inputs circuit=%v nb-public-input-required=%v nb-public-input-per-circuit=%v nb-circuits-available=%v nb-circuit-required=%v",
ci.Name, count, ci.nbPublicInputs, ci.NbCircuitInstances, utils.DivCeil(count, ci.nbPublicInputs),
)
}
return nil
}
// Alignment is the prepared structure where the Data field is aligned to gnark
// circuit PI column. It considers the cases where we call multiple instances of
// the circuit so that the inputs for every circuit is padded to power of two

View File

@@ -6,7 +6,6 @@ import (
"github.com/consensys/linea-monorepo/prover/protocol/compiler/dummy"
"github.com/consensys/linea-monorepo/prover/protocol/dedicated/plonk"
"github.com/consensys/linea-monorepo/prover/protocol/ifaces"
"github.com/consensys/linea-monorepo/prover/protocol/wizard"
"github.com/consensys/linea-monorepo/prover/utils/csvtraces"
)
@@ -226,48 +225,48 @@ func writeModule(t *testing.T, run *wizard.ProverRuntime, outFile string, mod *E
t.Fatal(err)
}
defer w.Close()
csvtraces.FmtCsv(w, run, []ifaces.Column{
// // module activation
// mod.IsActive,
// csvtraces.FmtCsv(w, run, []ifaces.Column{
// // module activation
// mod.IsActive,
// // source
// mod.ECPairSource.ID,
// mod.ECPairSource.Index,
// mod.ECPairSource.Limb,
// mod.ECPairSource.SuccessBit,
// mod.ECPairSource.AccPairings,
// mod.ECPairSource.TotalPairings,
// mod.ECPairSource.IsEcPairingData,
// mod.ECPairSource.IsEcPairingResult,
// mod.ECPairSource.CsEcpairing,
// mod.ECPairSource.CsG2Membership,
// // source
// mod.ECPairSource.ID,
// mod.ECPairSource.Index,
// mod.ECPairSource.Limb,
// mod.ECPairSource.SuccessBit,
// mod.ECPairSource.AccPairings,
// mod.ECPairSource.TotalPairings,
// mod.ECPairSource.IsEcPairingData,
// mod.ECPairSource.IsEcPairingResult,
// mod.ECPairSource.CsEcpairing,
// mod.ECPairSource.CsG2Membership,
// // for pairing module test
// mod.UnalignedPairingData.IsActive,
// mod.UnalignedPairingData.Index,
// mod.UnalignedPairingData.InstanceID,
// mod.UnalignedPairingData.IsFirstLineOfInstance,
// mod.UnalignedPairingData.IsAccumulatorInit,
// mod.UnalignedPairingData.IsFirstLineOfPrevAccumulator,
// mod.UnalignedPairingData.IsAccumulatorPrev,
// mod.UnalignedPairingData.IsFirstLineOfCurrAccumulator,
// mod.UnalignedPairingData.IsAccumulatorCurr,
// mod.UnalignedPairingData.IsResultOfInstance,
// mod.UnalignedPairingData.IsComputed,
// mod.UnalignedPairingData.IsPulling,
// mod.UnalignedPairingData.PairID,
// mod.UnalignedPairingData.TotalPairs,
// mod.UnalignedPairingData.Limb,
// mod.UnalignedPairingData.ToMillerLoopCircuitMask,
// mod.UnalignedPairingData.ToFinalExpCircuitMask,
// // for pairing module test
// mod.UnalignedPairingData.IsActive,
// mod.UnalignedPairingData.Index,
// mod.UnalignedPairingData.InstanceID,
// mod.UnalignedPairingData.IsFirstLineOfInstance,
// mod.UnalignedPairingData.IsAccumulatorInit,
// mod.UnalignedPairingData.IsFirstLineOfPrevAccumulator,
// mod.UnalignedPairingData.IsAccumulatorPrev,
// mod.UnalignedPairingData.IsFirstLineOfCurrAccumulator,
// mod.UnalignedPairingData.IsAccumulatorCurr,
// mod.UnalignedPairingData.IsResultOfInstance,
// mod.UnalignedPairingData.IsComputed,
// mod.UnalignedPairingData.IsPulling,
// mod.UnalignedPairingData.PairID,
// mod.UnalignedPairingData.TotalPairs,
// mod.UnalignedPairingData.Limb,
// mod.UnalignedPairingData.ToMillerLoopCircuitMask,
// mod.UnalignedPairingData.ToFinalExpCircuitMask,
// // for subgroup module module test
// mod.UnalignedG2MembershipData.IsComputed,
// mod.UnalignedG2MembershipData.IsPulling,
// mod.UnalignedG2MembershipData.Limb,
// mod.UnalignedG2MembershipData.SuccessBit,
// mod.UnalignedG2MembershipData.ToG2MembershipCircuitMask,
},
[]csvtraces.Option{csvtraces.InHex},
)
// // for subgroup module module test
// mod.UnalignedG2MembershipData.IsComputed,
// mod.UnalignedG2MembershipData.IsPulling,
// mod.UnalignedG2MembershipData.Limb,
// mod.UnalignedG2MembershipData.SuccessBit,
// mod.UnalignedG2MembershipData.ToG2MembershipCircuitMask,
// },
// []csvtraces.Option{csvtraces.InHex},
// )
}

View File

@@ -176,29 +176,32 @@ func (am *Module) Assign(
)
for _, trace := range traces {
switch t := trace.Underlying.(type) {
case statemanager.UpdateTraceST:
pushUpdateRows(builder, t)
case statemanager.UpdateTraceWS:
pushUpdateRows(builder, t)
case statemanager.InsertionTraceST:
pushInsertionRows(builder, t)
case statemanager.InsertionTraceWS:
pushInsertionRows(builder, t)
case statemanager.DeletionTraceST:
pushDeletionRows(builder, t)
case statemanager.DeletionTraceWS:
pushDeletionRows(builder, t)
case statemanager.ReadZeroTraceST:
pushReadZeroRows(builder, t)
case statemanager.ReadZeroTraceWS:
pushReadZeroRows(builder, t)
case statemanager.ReadNonZeroTraceST:
pushReadNonZeroRows(builder, t)
case statemanager.ReadNonZeroTraceWS:
pushReadNonZeroRows(builder, t)
default:
utils.Panic("Unexpected type : %T", t)
// only assign the traces that are flagged as not to be skipped
if !trace.IsSkipped {
switch t := trace.Underlying.(type) {
case statemanager.UpdateTraceST:
pushUpdateRows(builder, t)
case statemanager.UpdateTraceWS:
pushUpdateRows(builder, t)
case statemanager.InsertionTraceST:
pushInsertionRows(builder, t)
case statemanager.InsertionTraceWS:
pushInsertionRows(builder, t)
case statemanager.DeletionTraceST:
pushDeletionRows(builder, t)
case statemanager.DeletionTraceWS:
pushDeletionRows(builder, t)
case statemanager.ReadZeroTraceST:
pushReadZeroRows(builder, t)
case statemanager.ReadZeroTraceWS:
pushReadZeroRows(builder, t)
case statemanager.ReadNonZeroTraceST:
pushReadNonZeroRows(builder, t)
case statemanager.ReadNonZeroTraceWS:
pushReadNonZeroRows(builder, t)
default:
utils.Panic("Unexpected type : %T", t)
}
}
}

View File

@@ -1,11 +1,23 @@
package statemanager
import (
"fmt"
"github.com/consensys/linea-monorepo/prover/maths/common/smartvectors"
"github.com/consensys/linea-monorepo/prover/maths/field"
"github.com/consensys/linea-monorepo/prover/protocol/column/verifiercol"
"github.com/consensys/linea-monorepo/prover/protocol/ifaces"
"github.com/consensys/linea-monorepo/prover/protocol/wizard"
sym "github.com/consensys/linea-monorepo/prover/symbolic"
"github.com/consensys/linea-monorepo/prover/zkevm/prover/statemanager/mimccodehash"
"github.com/consensys/linea-monorepo/prover/zkevm/prover/statemanager/statesummary"
)
const (
ACP = "acp"
SCP = "scp"
ADDR_MULTIPLIER = "340282366920938463463374607431768211456" // 2^{16*8}
)
// romLex returns the columns of the arithmetization.RomLex module of interest
// to justify the consistency between them and the MiMCCodeHash module
func romLex(comp *wizard.CompiledIOP) *mimccodehash.RomLexInput {
@@ -33,12 +45,184 @@ func rom(comp *wizard.CompiledIOP) *mimccodehash.RomInput {
// perspective of the Hub that are of interest for checking consistency with
// the stateSummary
func acp(comp *wizard.CompiledIOP) statesummary.HubColumnSet {
panic("not available yet")
size := comp.Columns.GetHandle("hub.acp_ADDRESS_HI").Size()
// the prover-side state manager uses a single field element for 20-bytes addresses
// and we need to create this column ourselves
if !comp.Columns.Exists("HUB_acp_PROVER_SIDE_ADDRESS_IDENTIFIER") {
combinedAddr := comp.InsertCommit(0,
"HUB_acp_PROVER_SIDE_ADDRESS_IDENTIFIER",
size,
)
// constrain the processed HUB addresses
addrHI := comp.Columns.GetHandle("hub.acp_ADDRESS_HI")
addrLO := comp.Columns.GetHandle("hub.acp_ADDRESS_LO")
comp.InsertGlobal(
0,
ifaces.QueryIDf("STATE_MANAGER_ACP_HUB_PROCESSED_ADDRESSES_GLOBAL_CONSTRAINT"),
sym.Sub(
combinedAddr,
sym.Mul(
addrHI,
field.NewFromString(ADDR_MULTIPLIER),
),
addrLO,
),
)
}
constantZero := verifiercol.NewConstantCol(field.Zero(), size)
res := statesummary.HubColumnSet{
Address: comp.Columns.GetHandle("HUB_acp_PROVER_SIDE_ADDRESS_IDENTIFIER"),
AddressHI: comp.Columns.GetHandle("hub.acp_ADDRESS_HI"),
AddressLO: comp.Columns.GetHandle("hub.acp_ADDRESS_LO"),
Nonce: comp.Columns.GetHandle("hub.acp_NONCE"),
NonceNew: comp.Columns.GetHandle("hub.acp_NONCE_NEW"),
CodeHashHI: comp.Columns.GetHandle("hub.acp_CODE_HASH_HI"),
CodeHashLO: comp.Columns.GetHandle("hub.acp_CODE_HASH_LO"),
CodeHashHINew: comp.Columns.GetHandle("hub.acp_CODE_HASH_HI_NEW"),
CodeHashLONew: comp.Columns.GetHandle("hub.acp_CODE_HASH_LO_NEW"),
CodeSizeOld: comp.Columns.GetHandle("hub.acp_CODE_SIZE"),
CodeSizeNew: comp.Columns.GetHandle("hub.acp_CODE_SIZE_NEW"),
BalanceOld: comp.Columns.GetHandle("hub.acp_BALANCE"),
BalanceNew: comp.Columns.GetHandle("hub.acp_BALANCE_NEW"),
KeyHI: constantZero,
KeyLO: constantZero,
ValueHICurr: constantZero,
ValueLOCurr: constantZero,
ValueHINext: constantZero,
ValueLONext: constantZero,
DeploymentNumber: comp.Columns.GetHandle("hub.acp_DEPLOYMENT_NUMBER"),
DeploymentNumberInf: comp.Columns.GetHandle("hub.acp_DEPLOYMENT_NUMBER"),
BlockNumber: comp.Columns.GetHandle("hub.acp_REL_BLK_NUM"),
Exists: comp.Columns.GetHandle("hub.acp_EXISTS"),
ExistsNew: comp.Columns.GetHandle("hub.acp_EXISTS_NEW"),
PeekAtAccount: comp.Columns.GetHandle("hub.acp_PEEK_AT_ACCOUNT"),
PeekAtStorage: constantZero,
FirstAOC: comp.Columns.GetHandle("hub.acp_FIRST_IN_CNF"),
LastAOC: comp.Columns.GetHandle("hub.acp_FINAL_IN_CNF"),
FirstKOC: constantZero,
LastKOC: constantZero,
FirstAOCBlock: comp.Columns.GetHandle("hub.acp_FIRST_IN_BLK"),
LastAOCBlock: comp.Columns.GetHandle("hub.acp_FINAL_IN_BLK"),
FirstKOCBlock: constantZero,
LastKOCBlock: constantZero,
MinDeplBlock: comp.Columns.GetHandle("hub.acp_DEPLOYMENT_NUMBER_FIRST_IN_BLOCK"),
MaxDeplBlock: comp.Columns.GetHandle("hub.acp_DEPLOYMENT_NUMBER_FINAL_IN_BLOCK"),
}
return res
}
// scp returns the columns of the arithmetization corresponding to the SCP
// scp returns the columns of the arithmetization corresponding to the SCP
// perspective of the Hub that are of interest for checking consistency with
// the stateSummary
func scp(comp *wizard.CompiledIOP) statesummary.HubColumnSet {
panic("not available yet")
size := comp.Columns.GetHandle("hub.scp_ADDRESS_HI").Size()
// the prover-side state manager uses a single field element for 20-bytes addresses
// and we need to create this column ourselves
if !comp.Columns.Exists("HUB_scp_PROVER_SIDE_ADDRESS_IDENTIFIER") {
combinedAddr := comp.InsertCommit(0,
"HUB_scp_PROVER_SIDE_ADDRESS_IDENTIFIER",
size,
)
// constrain the processed HUB addresses
addrHI := comp.Columns.GetHandle("hub.scp_ADDRESS_HI")
addrLO := comp.Columns.GetHandle("hub.scp_ADDRESS_LO")
comp.InsertGlobal(
0,
ifaces.QueryIDf("STATE_MANAGER_SCP_HUB_PROCESSED_ADDRESSES_GLOBAL_CONSTRAINT"),
sym.Sub(
combinedAddr,
sym.Mul(
addrHI,
field.NewFromString(ADDR_MULTIPLIER),
),
addrLO,
),
)
}
constantZero := verifiercol.NewConstantCol(field.Zero(), size)
res := statesummary.HubColumnSet{
Address: comp.Columns.GetHandle("HUB_scp_PROVER_SIDE_ADDRESS_IDENTIFIER"),
AddressHI: comp.Columns.GetHandle("hub.scp_ADDRESS_HI"),
AddressLO: comp.Columns.GetHandle("hub.scp_ADDRESS_LO"),
Nonce: constantZero,
NonceNew: constantZero,
CodeHashHI: constantZero,
CodeHashLO: constantZero,
CodeHashHINew: constantZero,
CodeHashLONew: constantZero,
CodeSizeOld: constantZero,
CodeSizeNew: constantZero,
BalanceOld: constantZero,
BalanceNew: constantZero,
KeyHI: comp.Columns.GetHandle("hub.scp_STORAGE_KEY_HI"),
KeyLO: comp.Columns.GetHandle("hub.scp_STORAGE_KEY_LO"),
ValueHICurr: comp.Columns.GetHandle("hub.scp_VALUE_CURR_HI"),
ValueLOCurr: comp.Columns.GetHandle("hub.scp_VALUE_CURR_LO"),
ValueHINext: comp.Columns.GetHandle("hub.scp_VALUE_NEXT_HI"),
ValueLONext: comp.Columns.GetHandle("hub.scp_VALUE_NEXT_LO"),
DeploymentNumber: comp.Columns.GetHandle("hub.scp_DEPLOYMENT_NUMBER"),
DeploymentNumberInf: comp.Columns.GetHandle("hub.scp_DEPLOYMENT_NUMBER"),
BlockNumber: comp.Columns.GetHandle("hub.scp_REL_BLK_NUM"),
Exists: constantZero,
ExistsNew: constantZero,
PeekAtAccount: constantZero,
PeekAtStorage: comp.Columns.GetHandle("hub.scp_PEEK_AT_STORAGE"),
FirstAOC: constantZero,
LastAOC: constantZero,
FirstKOC: comp.Columns.GetHandle("hub.scp_FIRST_IN_CNF"),
LastKOC: comp.Columns.GetHandle("hub.scp_FINAL_IN_CNF"),
FirstAOCBlock: constantZero,
LastAOCBlock: constantZero,
FirstKOCBlock: comp.Columns.GetHandle("hub.scp_FIRST_IN_BLK"),
LastKOCBlock: comp.Columns.GetHandle("hub.scp_FINAL_IN_BLK"),
MinDeplBlock: comp.Columns.GetHandle("hub.scp_DEPLOYMENT_NUMBER_FIRST_IN_BLOCK"),
MaxDeplBlock: comp.Columns.GetHandle("hub.scp_DEPLOYMENT_NUMBER_FINAL_IN_BLOCK"),
}
return res
}
/*
assignHubAddresses is a function that combines addressHI and addressLO from
the arithmetization columns into a single column.
*/
func assignHubAddresses(run *wizard.ProverRuntime) {
assignHubAddressesSubdomain := func(domainName string) {
addressHI := run.GetColumn(ifaces.ColID(fmt.Sprintf("hub.%s_ADDRESS_HI", domainName)))
addressLO := run.GetColumn(ifaces.ColID(fmt.Sprintf("hub.%s_ADDRESS_LO", domainName)))
size := addressHI.Len()
newVect := make([]field.Element, size)
for i := range newVect {
elemHi := addressHI.Get(i)
bytesHi := elemHi.Bytes()
elemLo := addressLO.Get(i)
bytesLo := elemLo.Bytes()
newBytes := make([]byte, field.Bytes)
// set the high part
for j := 0; j < 4; j++ {
newBytes[12+j] = bytesHi[32-(4-j)]
}
// set the low part
for j := 4; j < 20; j++ {
newBytes[12+j] = bytesLo[16+(j-4)]
}
newVect[i].SetBytes(newBytes)
}
run.AssignColumn(
ifaces.ColID(fmt.Sprintf("HUB_%s_PROVER_SIDE_ADDRESS_IDENTIFIER", domainName)),
smartvectors.NewRegular(newVect),
)
}
// assign the addresses column in each of the submodules
assignHubAddressesSubdomain(ACP)
assignHubAddressesSubdomain(SCP)
}

View File

@@ -1,9 +1,14 @@
package statemanager
import (
"fmt"
"os"
"github.com/consensys/linea-monorepo/prover/backend/execution/statemanager"
"github.com/consensys/linea-monorepo/prover/maths/field"
"github.com/consensys/linea-monorepo/prover/protocol/wizard"
"github.com/consensys/linea-monorepo/prover/utils"
"github.com/consensys/linea-monorepo/prover/utils/types"
"github.com/consensys/linea-monorepo/prover/zkevm/prover/statemanager/accumulator"
"github.com/consensys/linea-monorepo/prover/zkevm/prover/statemanager/accumulatorsummary"
"github.com/consensys/linea-monorepo/prover/zkevm/prover/statemanager/codehashconsistency"
@@ -58,44 +63,114 @@ func NewStateManager(comp *wizard.CompiledIOP, settings Settings) *StateManager
return sm
}
// NewStateManagerNoHub instantiates the [StateManager] module but ignores the
// connection with the Hub columns.
func NewStateManagerNoHub(comp *wizard.CompiledIOP, settings Settings) *StateManager {
sm := &StateManager{
StateSummary: statesummary.NewModule(comp, settings.stateSummarySize()),
accumulator: accumulator.NewModule(comp, settings.AccSettings),
mimcCodeHash: mimccodehash.NewModule(comp, mimccodehash.Inputs{
Name: "MiMCCodeHash",
Size: settings.MiMCCodeHashSize,
}),
}
sm.accumulatorSummaryConnector = *accumulatorsummary.NewModule(
comp,
accumulatorsummary.Inputs{
Name: "ACCUMULATOR_SUMMARY",
Accumulator: sm.accumulator,
},
)
sm.accumulatorSummaryConnector.ConnectToStateSummary(comp, &sm.StateSummary)
sm.mimcCodeHash.ConnectToRom(comp, rom(comp), romLex(comp))
sm.codeHashConsistency = codehashconsistency.NewModule(comp, "CODEHASHCONSISTENCY", &sm.StateSummary, &sm.mimcCodeHash)
return sm
}
// Assign assigns the submodules of the state-manager. It requires the
// arithmetization columns to be assigned first.
func (sm *StateManager) Assign(run *wizard.ProverRuntime, shomeiTraces [][]statemanager.DecodedTrace) {
assignHubAddresses(run)
printAllShomeiTraces(&shomeiTraces)
addSkipFlags(&shomeiTraces)
sm.StateSummary.Assign(run, shomeiTraces)
sm.accumulator.Assign(run, utils.Join(shomeiTraces...))
sm.accumulatorSummaryConnector.Assign(run)
sm.mimcCodeHash.Assign(run)
sm.codeHashConsistency.Assign(run)
// csvtraces.FmtCsv(
// files.MustOverwrite("./alex-csv/arith.csv"),
// run,
// []ifaces.Column{
// run.Spec.Columns.GetHandle("HUB_acp_PROVER_SIDE_ADDRESS_IDENTIFIER"),
// run.Spec.Columns.GetHandle("hub.acp_ADDRESS_HI"),
// run.Spec.Columns.GetHandle("hub.acp_ADDRESS_LO"),
// run.Spec.Columns.GetHandle("hub.acp_BALANCE"),
// run.Spec.Columns.GetHandle("hub.acp_NONCE"),
// run.Spec.Columns.GetHandle("hub.acp_CODE_SIZE"),
// run.Spec.Columns.GetHandle("hub.acp_CODE_HASH_HI"),
// run.Spec.Columns.GetHandle("hub.acp_CODE_HASH_LO"),
// run.Spec.Columns.GetHandle("hub.acp_REL_BLK_NUM"),
// run.Spec.Columns.GetHandle("hub.acp_EXISTS"),
// run.Spec.Columns.GetHandle("hub.acp_EXISTS_NEW"),
// run.Spec.Columns.GetHandle("hub.acp_PEEK_AT_ACCOUNT"),
// run.Spec.Columns.GetHandle("hub.acp_FIRST_IN_BLK"),
// run.Spec.Columns.GetHandle("hub.acp_IS_PRECOMPILE"),
// },
// []csvtraces.Option{},
// )
// csvtraces.FmtCsv(
// files.MustOverwrite("./alex-csv/ss.csv"),
// run,
// []ifaces.Column{
// sm.StateSummary.Account.Address,
// sm.StateSummary.Account.Initial.Balance,
// sm.StateSummary.Account.Initial.Nonce,
// sm.StateSummary.Account.Initial.CodeSize,
// sm.StateSummary.Account.Initial.KeccakCodeHash.Hi,
// sm.StateSummary.Account.Initial.KeccakCodeHash.Lo,
// sm.StateSummary.BatchNumber,
// sm.StateSummary.Account.Initial.Exists,
// sm.StateSummary.Account.Final.Exists,
// sm.StateSummary.IsInitialDeployment,
// sm.StateSummary.IsStorage,
// },
// []csvtraces.Option{},
// )
// csvtraces.FmtCsv(
// files.MustOverwrite("./alex-csv/hub.csv"),
// run,
// []ifaces.Column{
// run.Spec.Columns.GetHandle("hub.RELATIVE_BLOCK_NUMBER"),
// },
// []csvtraces.Option{},
// )
// csvtraces.FmtCsv(
// files.MustOverwrite("./alex-csv/scparith.csv"),
// run,
// []ifaces.Column{
// run.Spec.Columns.GetHandle("HUB_scp_PROVER_SIDE_ADDRESS_IDENTIFIER"),
// run.Spec.Columns.GetHandle("hub.scp_ADDRESS_HI"),
// run.Spec.Columns.GetHandle("hub.scp_ADDRESS_LO"),
// run.Spec.Columns.GetHandle("hub.scp_STORAGE_KEY_HI"),
// run.Spec.Columns.GetHandle("hub.scp_STORAGE_KEY_LO"),
// run.Spec.Columns.GetHandle("hub.scp_VALUE_CURR_HI"),
// run.Spec.Columns.GetHandle("hub.scp_VALUE_CURR_LO"),
// run.Spec.Columns.GetHandle("hub.scp_VALUE_NEXT_HI"),
// run.Spec.Columns.GetHandle("hub.scp_VALUE_NEXT_LO"),
// run.Spec.Columns.GetHandle("hub.scp_DEPLOYMENT_NUMBER"),
// run.Spec.Columns.GetHandle("hub.scp_DEPLOYMENT_NUMBER"),
// run.Spec.Columns.GetHandle("hub.scp_REL_BLK_NUM"),
// run.Spec.Columns.GetHandle("hub.scp_PEEK_AT_STORAGE"),
// run.Spec.Columns.GetHandle("hub.scp_FIRST_IN_CNF"),
// run.Spec.Columns.GetHandle("hub.scp_FINAL_IN_CNF"),
// run.Spec.Columns.GetHandle("hub.scp_FIRST_IN_BLK"),
// run.Spec.Columns.GetHandle("hub.scp_FINAL_IN_BLK"),
// run.Spec.Columns.GetHandle("hub.scp_DEPLOYMENT_NUMBER_FIRST_IN_BLOCK"),
// run.Spec.Columns.GetHandle("hub.scp_DEPLOYMENT_NUMBER_FINAL_IN_BLOCK"),
// run.Spec.Columns.GetHandle("hub.scp_EXISTS_FIRST_IN_BLOCK"),
// run.Spec.Columns.GetHandle("hub.scp_EXISTS_FINAL_IN_BLOCK"),
// //run.Spec.Columns.GetHandle("hub.scp_TX_EXEC"),
// },
// []csvtraces.Option{},
// )
// csvtraces.FmtCsv(
// files.MustOverwrite("./alex-csv/scpss.csv"),
// run,
// []ifaces.Column{
// sm.StateSummary.Account.Address,
// sm.StateSummary.Storage.Key.Hi,
// sm.StateSummary.Storage.Key.Lo,
// sm.StateSummary.Storage.OldValue.Hi,
// sm.StateSummary.Storage.OldValue.Lo,
// sm.StateSummary.Storage.NewValue.Hi,
// sm.StateSummary.Storage.NewValue.Lo,
// sm.StateSummary.BatchNumber,
// sm.StateSummary.IsFinalDeployment,
// sm.StateSummary.Account.Final.Exists,
// sm.StateSummary.IsStorage,
// },
// []csvtraces.Option{},
// )
}
// stateSummarySize returns the number of rows to give to the state-summary
@@ -103,3 +178,221 @@ func (sm *StateManager) Assign(run *wizard.ProverRuntime, shomeiTraces [][]state
func (s *Settings) stateSummarySize() int {
return utils.NextPowerOfTwo(s.AccSettings.MaxNumProofs)
}
// addSkipFlags adds skip flags to redundant shomei traces
func addSkipFlags(shomeiTraces *[][]statemanager.DecodedTrace) {
// AddressAndKey is a struct used as a key in order to identify skippable traces
// in our maps
type AddressAndKey struct {
address types.Bytes32
storageKey types.Bytes32
}
// iterate over all the Shomei blocks
for blockNo, vec := range *shomeiTraces {
var (
curAddress = types.EthAddress{}
err error
)
// instantiate the map for the current block
traceMap := make(map[AddressAndKey]int)
// now we process the traces themselves
for i, trace := range vec {
// compute the current address account
curAddress, err = trace.GetRelatedAccount()
if err != nil {
panic(err)
}
x := *(&field.Element{}).SetBytes(curAddress[:])
if trace.Location != statemanager.WS_LOCATION {
// we have a STORAGE trace
// prepare the search key
searchKey := AddressAndKey{
address: x.Bytes(),
storageKey: trace.Underlying.HKey(statemanager.MIMC_CONFIG),
}
previousIndex, isFound := traceMap[searchKey]
if isFound {
// set the previous trace as a skippable trace
(*shomeiTraces)[blockNo][previousIndex].IsSkipped = true
} else {
// when not found, add its index to the map (if a duplicate is found later)
// this stored index will be then used to make the current trace skippable
traceMap[searchKey] = i
}
}
}
}
}
func printAllShomeiTraces(shomeiTraces *[][]statemanager.DecodedTrace) {
// AddressAndKey is a struct used as a key in order to identify skippable traces
// in our maps
type AddressAndKey struct {
address types.Bytes32
storageKey types.Bytes32
}
file, err := os.OpenFile("shomeifull.txt", os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
if err != nil {
fmt.Println("Error opening file:", err)
}
// iterate over all the Shomei blocks
for blockNo, vec := range *shomeiTraces {
batchNumber := blockNo + 1
for _, trace := range vec {
curAddress, err := trace.GetRelatedAccount()
if err != nil {
panic(err)
}
accountAddress := curAddress
switch t := trace.Underlying.(type) {
case statemanager.ReadZeroTraceST:
// BEGIN LOGGING
// Open the file in append mode, create it if it doesn't exist
// Write the text to the file
if _, err := file.WriteString(
fmt.Sprintln("READZEROST") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " STORAGE KEY "+t.Key.Hex()+" %d", batchNumber) +
fmt.Sprintln("IS SKIPPED ", trace.IsSkipped),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.ReadNonZeroTraceST:
// BEGIN LOGGING
// Write the text to the file
if _, err := file.WriteString(
fmt.Sprintln("READNONZEROST") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " STORAGE KEY "+t.Key.Hex()+" %d"+" STORAGE VALUE "+t.Value.Hex(), batchNumber) +
fmt.Sprintln("IS SKIPPED ", trace.IsSkipped),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.InsertionTraceST:
// BEGIN LOGGING
if err != nil {
fmt.Println("Error opening file:", err)
}
// Write the text to the file
if _, err := file.WriteString(
fmt.Sprintln("INSERTST") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " STORAGE KEY "+t.Key.Hex()+" %d"+" STORAGE VALUE "+t.Val.Hex(), batchNumber) +
fmt.Sprintln("IS SKIPPED ", trace.IsSkipped),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.UpdateTraceST:
// BEGIN LOGGING
if err != nil {
fmt.Println("Error opening file:", err)
}
// Write the text to the file
if _, err := file.WriteString(
fmt.Sprintln("UPDATEST") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " STORAGE KEY "+t.Key.Hex()+" %d"+" STORAGE VALUE + "+t.OldValue.Hex()+" "+t.NewValue.Hex(), batchNumber) +
fmt.Sprintln("IS SKIPPED ", trace.IsSkipped),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.DeletionTraceST:
// BEGIN LOGGING
if err != nil {
fmt.Println("Error opening file:", err)
}
// Write the text to the file
if _, err := file.WriteString(
fmt.Sprintln("DELETEST") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " STORAGE KEY "+t.Key.Hex()+" %d"+" STORAGE VALUE + "+t.DeletedValue.Hex(), batchNumber) +
fmt.Sprintln("IS SKIPPED ", trace.IsSkipped),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.ReadZeroTraceWS:
// BEGIN LOGGING
if err != nil {
fmt.Println("Error opening file:", err)
}
// Write the text to the file
if _, err := file.WriteString(
fmt.Sprintln("READZEROWS") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " %d", batchNumber),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.ReadNonZeroTraceWS:
// BEGIN LOGGING
if err != nil {
fmt.Println("Error opening file:", err)
}
// Write the text to the file
if _, err := file.WriteString(
fmt.Sprintln("READNONZEROWS") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " %d", batchNumber),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.InsertionTraceWS:
// BEGIN LOGGING
// Write the text to the file
if err != nil {
fmt.Println("Error opening file:", err)
}
//x := *(&field.Element{}).SetBytes(accountAddress[:])
if _, err := file.WriteString(
fmt.Sprintln("INSERTWS") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " %d", batchNumber),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.UpdateTraceWS:
// BEGIN LOGGING
if err != nil {
fmt.Println("Error opening file:", err)
}
//x := *(&field.Element{}).SetBytes(accountAddress[:])
if _, err := file.WriteString(
fmt.Sprintln("UPDATEWS") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " %d", batchNumber),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
case statemanager.DeletionTraceWS:
// BEGIN LOGGING
if err != nil {
fmt.Println("Error opening file:", err)
}
//x := *(&field.Element{}).SetBytes(accountAddress[:])
if _, err := file.WriteString(
fmt.Sprintln("DELETEWS") +
fmt.Sprintln("ADDRESS ", accountAddress.Hex(), " %d", batchNumber),
); err != nil {
fmt.Println("Error writing to file:", err)
}
// END LOGGING
default:
panic("unknown trace type")
}
}
}
file.Close()
}

View File

@@ -10,6 +10,7 @@ import (
sym "github.com/consensys/linea-monorepo/prover/symbolic"
"github.com/consensys/linea-monorepo/prover/utils/types"
"github.com/consensys/linea-monorepo/prover/zkevm/prover/common"
types2 "github.com/ethereum/go-ethereum/core/types"
)
var (
@@ -152,6 +153,9 @@ type Account struct {
Exists, Nonce, Balance, MiMCCodeHash, CodeSize, StorageRoot ifaces.Column
// KeccakCodeHash stores the keccak code hash of the account.
KeccakCodeHash common.HiLoColumns
// ExpectedHubCodeHash is almost the same as the KeccakCodeHash, with the difference
// that when the account does not exist, it contains the keccak hash of the empty string
ExpectedHubCodeHash common.HiLoColumns
// HasEmptyCodeHash is an indicator column indicating whether the current
// account has an empty codehash
HasEmptyCodeHash ifaces.Column
@@ -179,6 +183,7 @@ func newAccount(comp *wizard.CompiledIOP, size int, name string) Account {
CodeSize: createCol("CODESIZE"),
StorageRoot: createCol("STORAGE_ROOT"),
KeccakCodeHash: common.NewHiLoColumns(comp, size, name+"_KECCAK_CODE_HASH"),
ExpectedHubCodeHash: common.NewHiLoColumns(comp, size, name+"_EXPECTED_HUB_CODE_HASH"),
ExistsAndHasNonEmptyCodeHash: createCol("EXISTS_AND_NON_EMPTY_CODEHASH"),
}
@@ -232,6 +237,7 @@ func newAccountPeekAssignmentBuilder(ap *AccountPeek) accountPeekAssignmentBuild
type accountAssignmentBuilder struct {
exists, nonce, balance, miMCCodeHash, codeSize, storageRoot *common.VectorBuilder
keccakCodeHash common.HiLoAssignmentBuilder
expectedHubCodeHash common.HiLoAssignmentBuilder
existsAndHasNonEmptyCodeHash *common.VectorBuilder
}
@@ -247,6 +253,7 @@ func newAccountAssignmentBuilder(ap *Account) accountAssignmentBuilder {
storageRoot: common.NewVectorBuilder(ap.StorageRoot),
existsAndHasNonEmptyCodeHash: common.NewVectorBuilder(ap.ExistsAndHasNonEmptyCodeHash),
keccakCodeHash: common.NewHiLoAssignmentBuilder(ap.KeccakCodeHash),
expectedHubCodeHash: common.NewHiLoAssignmentBuilder(ap.ExpectedHubCodeHash),
}
}
@@ -262,10 +269,14 @@ func (ss *accountAssignmentBuilder) pushAll(acc types.Account) {
ss.balance.PushBytes32(types.LeftPadToBytes32(acc.Balance.Bytes()))
ss.exists.PushOne()
ss.keccakCodeHash.Push(acc.KeccakCodeHash)
// if account exists push the same Keccak code hash
ss.expectedHubCodeHash.Push(acc.KeccakCodeHash)
} else {
ss.balance.PushZero()
ss.exists.PushZero()
ss.keccakCodeHash.PushZeroes()
// if account does not exist push empty codehash
ss.expectedHubCodeHash.Push(types.FullBytes32(types2.EmptyCodeHash))
}
ss.codeSize.PushInt(int(acc.CodeSize))
@@ -290,10 +301,14 @@ func (ss *accountAssignmentBuilder) pushOverrideStorageRoot(
ss.balance.PushBytes32(types.LeftPadToBytes32(acc.Balance.Bytes()))
ss.exists.PushOne()
ss.keccakCodeHash.Push(acc.KeccakCodeHash)
// if account exists push the same codehash
ss.expectedHubCodeHash.Push(acc.KeccakCodeHash)
} else {
ss.balance.PushZero()
ss.exists.PushZero()
ss.keccakCodeHash.PushZeroes()
// if account does not exist push empty codehash
ss.expectedHubCodeHash.Push(types.FullBytes32(types2.EmptyCodeHash))
}
ss.codeSize.PushInt(int(acc.CodeSize))
@@ -310,6 +325,7 @@ func (ss *accountAssignmentBuilder) PadAndAssign(run *wizard.ProverRuntime) {
ss.nonce.PadAndAssign(run)
ss.balance.PadAndAssign(run)
ss.keccakCodeHash.PadAssign(run, types.FullBytes32{})
ss.expectedHubCodeHash.PadAssign(run, types.FullBytes32{})
ss.miMCCodeHash.PadAndAssign(run)
ss.storageRoot.PadAndAssign(run)
ss.codeSize.PadAndAssign(run)

View File

@@ -1,6 +1,7 @@
package statesummary
import (
"github.com/consensys/linea-monorepo/prover/protocol/column"
"sync"
"github.com/consensys/linea-monorepo/prover/maths/common/smartvectors"
@@ -62,6 +63,13 @@ func (ss *Module) assignArithmetizationLink(run *wizard.ProverRuntime) {
runConcurrent([]wizard.ProverAction{
ss.arithmetizationLink.scpSelector.ComputeSelectorMinDeplBlock,
ss.arithmetizationLink.scpSelector.ComputeSelectorMaxDeplBlock,
ss.arithmetizationLink.scpSelector.ComputeSelectorEmptySTValueHi,
ss.arithmetizationLink.scpSelector.ComputeSelectorEmptySTValueLo,
ss.arithmetizationLink.scpSelector.ComputeSelectorEmptySTValueNextHi,
ss.arithmetizationLink.scpSelector.ComputeSelectorEmptySTValueNextLo,
ss.arithmetizationLink.scpSelector.ComputeSelectorSTKeyDiffHi,
ss.arithmetizationLink.scpSelector.ComputeSelectorSTKeyDiffLo,
ss.arithmetizationLink.scpSelector.ComputeSelectorBlockNoDiff,
})
}
@@ -76,7 +84,6 @@ type HubColumnSet struct {
// account data
AddressHI, AddressLO ifaces.Column
Nonce, NonceNew ifaces.Column
MimcCodeHash, MimcCodeHashNew ifaces.Column
CodeHashHI, CodeHashLO, CodeHashHINew, CodeHashLONew ifaces.Column
CodeSizeOld, CodeSizeNew ifaces.Column
BalanceOld, BalanceNew ifaces.Column
@@ -107,6 +114,18 @@ These columns are 1 at indices where the deployment number is equal to MinDeplBl
type scpSelector struct {
SelectorMinDeplBlock, SelectorMaxDeplBlock ifaces.Column
ComputeSelectorMinDeplBlock, ComputeSelectorMaxDeplBlock wizard.ProverAction
// selectors for empty keys, current values
SelectorEmptySTValueHi, SelectorEmptySTValueLo ifaces.Column
ComputeSelectorEmptySTValueHi, ComputeSelectorEmptySTValueLo wizard.ProverAction
// selectors for empty keys, next values
SelectorEmptySTValueNextHi, SelectorEmptySTValueNextLo ifaces.Column
ComputeSelectorEmptySTValueNextHi, ComputeSelectorEmptySTValueNextLo wizard.ProverAction
// storage key difference selectors
SelectorSTKeyDiffHi, SelectorSTKeyDiffLo ifaces.Column
ComputeSelectorSTKeyDiffHi, ComputeSelectorSTKeyDiffLo wizard.ProverAction
// block number key difference selectors
SelectorBlockNoDiff ifaces.Column
ComputeSelectorBlockNoDiff wizard.ProverAction
}
/*
@@ -125,11 +144,72 @@ func newScpSelector(comp *wizard.CompiledIOP, smc HubColumnSet) scpSelector {
sym.Sub(smc.DeploymentNumber, smc.MaxDeplBlock),
)
// ST value selectors
SelectorEmptySTValueHi, ComputeSelectorEmptySTValueHi := dedicated.IsZero(
comp,
ifaces.ColumnAsVariable(smc.ValueHICurr),
)
SelectorEmptySTValueLo, ComputeSelectorEmptySTValueLo := dedicated.IsZero(
comp,
ifaces.ColumnAsVariable(smc.ValueLOCurr),
)
SelectorEmptySTValueNextHi, ComputeSelectorEmptySTValueNextHi := dedicated.IsZero(
comp,
ifaces.ColumnAsVariable(smc.ValueHINext),
)
SelectorEmptySTValueNextLo, ComputeSelectorEmptySTValueNextLo := dedicated.IsZero(
comp,
ifaces.ColumnAsVariable(smc.ValueLONext),
)
// storage key diff selectors
SelectorSTKeyDiffHi, ComputeSelectorSTKeyDiffHi := dedicated.IsZero(
comp,
sym.Sub(
smc.KeyHI,
column.Shift(smc.KeyHI, -1),
),
)
SelectorSTKeyDiffLo, ComputeSelectorSTKeyDiffLo := dedicated.IsZero(
comp,
sym.Sub(
smc.KeyLO,
column.Shift(smc.KeyLO, -1),
),
)
// compute selectors for the block number difference
SelectorBlockNoDiff, ComputeSelectorBlockNoDiff := dedicated.IsZero(
comp,
sym.Sub(
smc.BlockNumber,
column.Shift(smc.BlockNumber, -1),
),
)
res := scpSelector{
SelectorMinDeplBlock: SelectorMinDeplNoBlock,
SelectorMaxDeplBlock: SelectorMaxDeplNoBlock,
ComputeSelectorMinDeplBlock: ComputeSelectorMinDeplNoBlock,
ComputeSelectorMaxDeplBlock: ComputeSelectorMaxDeplNoBlock,
// ST selectors, current
SelectorEmptySTValueHi: SelectorEmptySTValueHi,
SelectorEmptySTValueLo: SelectorEmptySTValueLo,
ComputeSelectorEmptySTValueHi: ComputeSelectorEmptySTValueHi,
ComputeSelectorEmptySTValueLo: ComputeSelectorEmptySTValueLo,
// ST selectors, next
SelectorEmptySTValueNextHi: SelectorEmptySTValueNextHi,
SelectorEmptySTValueNextLo: SelectorEmptySTValueNextLo,
ComputeSelectorEmptySTValueNextHi: ComputeSelectorEmptySTValueNextHi,
ComputeSelectorEmptySTValueNextLo: ComputeSelectorEmptySTValueNextLo,
// ST Key diff
SelectorSTKeyDiffHi: SelectorSTKeyDiffHi,
SelectorSTKeyDiffLo: SelectorSTKeyDiffLo,
ComputeSelectorSTKeyDiffHi: ComputeSelectorSTKeyDiffHi,
ComputeSelectorSTKeyDiffLo: ComputeSelectorSTKeyDiffLo,
// Block Number Diff
SelectorBlockNoDiff: SelectorBlockNoDiff,
ComputeSelectorBlockNoDiff: ComputeSelectorBlockNoDiff,
}
return res
@@ -159,8 +239,8 @@ func accountIntegrationDefineInitial(comp *wizard.CompiledIOP, ss Module, smc Hu
ss.Account.Initial.Balance,
ss.Account.Initial.Nonce,
ss.Account.Initial.CodeSize,
ss.Account.Initial.KeccakCodeHash.Hi,
ss.Account.Initial.KeccakCodeHash.Lo,
ss.Account.Initial.ExpectedHubCodeHash.Hi,
ss.Account.Initial.ExpectedHubCodeHash.Lo,
ss.BatchNumber,
ss.Account.Initial.Exists,
}
@@ -192,6 +272,7 @@ func accountIntegrationDefineInitial(comp *wizard.CompiledIOP, ss Module, smc Hu
filterSummary,
)
//isWarm := comp.Columns.GetHandle("hub.acp_WARMTH")
// Now we define the constraints for our filters
comp.InsertGlobal(
0,
@@ -201,6 +282,11 @@ func accountIntegrationDefineInitial(comp *wizard.CompiledIOP, ss Module, smc Hu
sym.Mul(
smc.PeekAtAccount,
smc.FirstAOCBlock,
/*
sym.Sub(
1,
isWarm,
),*/
),
),
)
@@ -226,8 +312,20 @@ accountIntegrationAssignInitial assigns the columns used to check initial accoun
data consistency using the lookups from AccountIntegrationDefineInitial
*/
func accountIntegrationAssignInitial(run *wizard.ProverRuntime, ss Module, smc HubColumnSet) {
/*
isWarm := run.Spec.Columns.GetHandle("hub.acp_WARMTH")
isNotPrewarmingPhase := make([]field.Element, smc.AddressHI.Size())
for i := range isNotPrewarmingPhase {
fieldOne := field.One()
isWarmElem := isWarm.GetColAssignmentAt(run, i)
isNotPrewarmingPhase[i].Sub(&fieldOne, &isWarmElem)
}*/
svfilterArith := smartvectors.Mul(smc.PeekAtAccount.GetColAssignment(run), smc.FirstAOCBlock.GetColAssignment(run))
svfilterArith := smartvectors.Mul(
smc.PeekAtAccount.GetColAssignment(run),
smc.FirstAOCBlock.GetColAssignment(run),
//smartvectors.NewRegular(isNotPrewarmingPhase),
)
run.AssignColumn("FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_ACCOUNT_INITIAL_ARITHMETIZATION", svfilterArith)
@@ -256,12 +354,31 @@ the corresponding columns in the arithmetization.
func accountIntegrationDefineFinal(comp *wizard.CompiledIOP, ss Module, smc HubColumnSet) {
filterArith := comp.InsertCommit(0, "FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_ACCOUNT_FINAL_ARITHMETIZATION", smc.AddressHI.Size())
filterSummary := comp.InsertCommit(0, "FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_ACCOUNT_FINAL_SUMMARY", ss.IsStorage.Size())
stateSummaryTable := []ifaces.Column{ss.Account.Address, ss.Account.Final.Balance, ss.Account.Final.Nonce, ss.Account.Final.CodeSize, ss.Account.Final.KeccakCodeHash.Hi, ss.Account.Final.KeccakCodeHash.Lo, ss.BatchNumber, ss.Account.Final.Exists}
arithTable := []ifaces.Column{smc.Address, smc.BalanceNew, smc.NonceNew, smc.CodeSizeNew, smc.CodeHashHINew, smc.CodeHashLONew, smc.BlockNumber, smc.ExistsNew}
stateSummaryTable := []ifaces.Column{
ss.Account.Address,
ss.Account.Final.Balance,
ss.Account.Final.Nonce,
ss.Account.Final.CodeSize,
ss.Account.Final.ExpectedHubCodeHash.Hi,
ss.Account.Final.ExpectedHubCodeHash.Lo,
ss.BatchNumber,
ss.Account.Final.Exists,
}
arithTable := []ifaces.Column{
smc.Address,
smc.BalanceNew,
smc.NonceNew,
smc.CodeSizeNew,
smc.CodeHashHINew,
smc.CodeHashLONew,
smc.BlockNumber,
smc.ExistsNew,
}
comp.InsertInclusionDoubleConditional(0, "LOOKUP_STATE_MGR_ARITH_TO_STATE_SUMMARY_FINAL_ACCOUNT", stateSummaryTable, arithTable, filterSummary, filterArith)
comp.InsertInclusionDoubleConditional(0, "LOOKUP_STATE_MGR_ARITH_TO_STATE_SUMMARY_FINAL_ACCOUNT_REVERSED", arithTable, stateSummaryTable, filterArith, filterSummary)
//isWarmNew := comp.Columns.GetHandle("hub.acp_WARMTH_NEW")
// Now we define the constraints for our filters
comp.InsertGlobal(
0,
@@ -271,6 +388,12 @@ func accountIntegrationDefineFinal(comp *wizard.CompiledIOP, ss Module, smc HubC
sym.Mul(
smc.PeekAtAccount,
smc.LastAOCBlock,
/*
sym.Sub(
// remove prewarming slots
1,
isWarmNew,
),*/
),
),
)
@@ -295,7 +418,19 @@ func accountIntegrationDefineFinal(comp *wizard.CompiledIOP, ss Module, smc HubC
accountIntegrationAssignFinal assigns the columns used to check initial account data consistency using the lookups from accountIntegrationAssignFinal
*/
func accountIntegrationAssignFinal(run *wizard.ProverRuntime, ss Module, smc HubColumnSet) {
filterArith := smartvectors.Mul(smc.PeekAtAccount.GetColAssignment(run), smc.LastAOCBlock.GetColAssignment(run))
/*
isWarmNew := run.Spec.Columns.GetHandle("hub.acp_WARMTH_NEW")
isNotPrewarmingPhase := make([]field.Element, smc.AddressHI.Size())
for i := range isNotPrewarmingPhase {
fieldOne := field.One()
isWarmNewElem := isWarmNew.GetColAssignmentAt(run, i)
isNotPrewarmingPhase[i].Sub(&fieldOne, &isWarmNewElem)
}*/
filterArith := smartvectors.Mul(
smc.PeekAtAccount.GetColAssignment(run),
smc.LastAOCBlock.GetColAssignment(run),
//smartvectors.NewRegular(isNotPrewarmingPhase),
)
run.AssignColumn("FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_ACCOUNT_FINAL_ARITHMETIZATION", filterArith)
@@ -327,11 +462,42 @@ func storageIntegrationDefineInitial(comp *wizard.CompiledIOP, ss Module, smc Hu
filterArithReversed := comp.InsertCommit(0, "FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_INITIAL_ARITHMETIZATION_REVERSED", smc.AddressHI.Size())
summaryTable := []ifaces.Column{ss.Account.Address, ss.Storage.Key.Hi, ss.Storage.Key.Lo, ss.Storage.OldValue.Hi, ss.Storage.OldValue.Lo, ss.BatchNumber}
arithTable := []ifaces.Column{smc.Address, smc.KeyHI, smc.KeyLO, smc.ValueHICurr, smc.ValueLOCurr, smc.BlockNumber}
comp.InsertInclusionDoubleConditional(0, "LOOKUP_STATE_MGR_ARITH_TO_STATE_SUMMARY_INIT_STORAGE", summaryTable, arithTable, filterSummary, filterArith)
comp.InsertInclusionDoubleConditional(0, "LOOKUP_STATE_MGR_ARITH_TO_STATE_SUMMARY_INIT_STORAGE_REVERSE", arithTable, summaryTable, filterArithReversed, filterSummary)
isExceptionalOperation := comp.Columns.GetHandle("hub.scp_EXCEPTIONAL_OPERATION")
summaryTable := []ifaces.Column{
ss.Account.Address,
ss.Storage.Key.Hi,
ss.Storage.Key.Lo,
ss.Storage.OldValue.Hi,
ss.Storage.OldValue.Lo,
ss.BatchNumber,
}
arithTable := []ifaces.Column{
smc.Address,
smc.KeyHI,
smc.KeyLO,
smc.ValueHICurr,
smc.ValueLOCurr,
smc.BlockNumber,
}
comp.InsertInclusionDoubleConditional(
0,
"LOOKUP_STATE_MGR_ARITH_TO_STATE_SUMMARY_INIT_STORAGE",
summaryTable,
arithTable,
filterSummary,
filterArith,
)
comp.InsertInclusionDoubleConditional(
0,
"LOOKUP_STATE_MGR_ARITH_TO_STATE_SUMMARY_INIT_STORAGE_REVERSE",
arithTable,
summaryTable,
filterArithReversed,
filterSummary,
)
//isWarm := comp.Columns.GetHandle("hub.scp_WARMTH")
// Now we define the constraints for our filters
comp.InsertGlobal(
0,
@@ -342,6 +508,16 @@ func storageIntegrationDefineInitial(comp *wizard.CompiledIOP, ss Module, smc Hu
sc.SelectorMinDeplBlock,
smc.PeekAtStorage,
smc.FirstKOCBlock,
sym.Sub(
1,
isExceptionalOperation,
),
/*
sym.Sub(
// remove rows that contain prewarming slots
1,
isWarm,
),*/
),
),
)
@@ -354,6 +530,13 @@ func storageIntegrationDefineInitial(comp *wizard.CompiledIOP, ss Module, smc Hu
sym.Mul(
smc.PeekAtStorage,
smc.FirstKOCBlock,
/*
sym.Sub(
// remove rows that contain prewarming slots
1,
isWarm,
)
*/
),
),
)
@@ -389,14 +572,45 @@ func storageIntegrationAssignInitial(run *wizard.ProverRuntime, ss Module, smc H
}
svSelectorMinDeplBlock := smartvectors.NewRegular(selectorMinDeplBlock)
filterArith := smartvectors.Mul(svSelectorMinDeplBlock, smc.PeekAtStorage.GetColAssignment(run), smc.FirstKOCBlock.GetColAssignment(run))
run.AssignColumn("FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_INITIAL_ARITHMETIZATION", filterArith)
isExceptionalOperation := run.Spec.Columns.GetHandle("hub.scp_EXCEPTIONAL_OPERATION")
isNotExceptionalOperation := make([]field.Element, smc.AddressHI.Size())
for i := range isNotExceptionalOperation {
fieldOne := field.One()
isExceptionElem := isExceptionalOperation.GetColAssignmentAt(run, i)
isNotExceptionalOperation[i].Sub(&fieldOne, &isExceptionElem)
}
/*
isWarm := run.Spec.Columns.GetHandle("hub.scp_WARMTH")
isNotPrewarmingPhase := make([]field.Element, smc.AddressHI.Size())
for i := range isNotPrewarmingPhase {
fieldOne := field.One()
isWarmElem := isWarm.GetColAssignmentAt(run, i)
isNotPrewarmingPhase[i].Sub(&fieldOne, &isWarmElem)
}*/
filterArith := smartvectors.Mul(
svSelectorMinDeplBlock,
smc.PeekAtStorage.GetColAssignment(run),
smc.FirstKOCBlock.GetColAssignment(run),
smartvectors.NewRegular(isNotExceptionalOperation),
//smartvectors.NewRegular(isNotPrewarmingPhase),
)
run.AssignColumn(
"FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_INITIAL_ARITHMETIZATION",
filterArith,
)
/*
When looking up with including = {arithmetization} and included = {State summary}, we remove the MinDeplBlock filter selector
(arithmetization keys might be read after the first deployment in the block)
*/
filterArithReversed := smartvectors.Mul(smc.PeekAtStorage.GetColAssignment(run), smc.FirstKOCBlock.GetColAssignment(run))
filterArithReversed := smartvectors.Mul(
smc.PeekAtStorage.GetColAssignment(run),
smc.FirstKOCBlock.GetColAssignment(run),
//smartvectors.NewRegular(isNotPrewarmingPhase),
)
run.AssignColumn("FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_INITIAL_ARITHMETIZATION_REVERSED", filterArithReversed)
}
@@ -432,10 +646,20 @@ func storageIntegrationDefineFinal(comp *wizard.CompiledIOP, ss Module, smc HubC
smc.AddressHI.Size(),
)
filterArithReversed = comp.InsertCommit(0,
"FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_FINAL_ARITHMETIZATION_REVERSED",
smc.AddressHI.Size(),
)
filterSummary = comp.InsertCommit(0,
"FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_FINAL_SUMMARY",
ss.Account.Address.Size(),
)
filterAccountInsert = comp.InsertCommit(0,
"FILTER_CONNECTOR_HUB_STATE_SUMMARY_ACCOUNT_INSERT_FILTER",
smc.AddressHI.Size(),
)
)
comp.InsertInclusionDoubleConditional(0,
@@ -450,10 +674,14 @@ func storageIntegrationDefineFinal(comp *wizard.CompiledIOP, ss Module, smc HubC
"LOOKUP_STATE_MGR_ARITH_TO_STATE_SUMMARY_FINAL_STORAGE_REVERSED",
arithTable,
summaryTable,
filterArith,
filterArithReversed,
filterSummary,
)
isSLoad := comp.Columns.GetHandle("hub.scp_SLOAD_OPERATION")
isExceptionalOperation := comp.Columns.GetHandle("hub.scp_EXCEPTIONAL_OPERATION")
//isWarmNew := comp.Columns.GetHandle("hub.scp_WARMTH_NEW")
comp.InsertGlobal(
0,
ifaces.QueryIDf("CONSTRAINT_FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_FINAL_ARITHMETIZATION"),
@@ -463,6 +691,42 @@ func storageIntegrationDefineFinal(comp *wizard.CompiledIOP, ss Module, smc HubC
sc.SelectorMaxDeplBlock,
smc.PeekAtStorage,
smc.LastKOCBlock,
filterAccountInsert,
sym.Sub(
1,
sym.Mul(
// in this parenthesis, we have a filter for SLOADs that generate exceptions,
// and will not appear on Shomei's side. Currently, the SSTORE behavior seems to match
// between the HUB and Shomei, so we do not filter or create separate lookups for SSTOREs
isSLoad,
isExceptionalOperation,
),
),
/*
sym.Sub(
// require that we are not in a prewarming phase
1,
isWarmNew,
),*/
),
),
)
comp.InsertGlobal(
0,
ifaces.QueryIDf("CONSTRAINT_FILTER_REVERSED_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_FINAL_ARITHMETIZATION"),
sym.Sub(
filterArithReversed,
sym.Mul(
sc.SelectorMaxDeplBlock,
smc.PeekAtStorage,
smc.LastKOCBlock,
/*
sym.Sub(
// require that we are not in a prewarming phase
1,
isWarmNew,
),*/
),
),
)
@@ -478,6 +742,83 @@ func storageIntegrationDefineFinal(comp *wizard.CompiledIOP, ss Module, smc HubC
),
),
)
// constrain the insertion selector filter
existsFirstInBlock := comp.Columns.GetHandle("hub.scp_EXISTS_FIRST_IN_BLOCK")
existsFinalInBlock := comp.Columns.GetHandle("hub.scp_EXISTS_FINAL_IN_BLOCK")
// on storage rows, we enforce that if filterAccountInsert is 0, then (existsFirstInBlock = 0 and existsFinalInBlock = 1)
// security of the following constraint relies on the fact that the underlying marker columns are binary
comp.InsertGlobal(
0,
ifaces.QueryIDf("GLOBAL_CONSTRAINT_FILTER_CONNECTOR_HUB_STATE_SUMMARY_ACCOUNT_INSERT_FILTER"),
sym.Mul(
smc.PeekAtStorage, // when we are dealing with storage segments
sym.Mul(
sym.Sub(
1,
filterAccountInsert,
), // if filterAccountInsert = 0 it must be that the conditions of the filter are both satisfied
sym.Add(
existsFirstInBlock,
sym.Sub(
1,
existsFinalInBlock,
),
),
),
),
)
// if the filter is set to 0, then all the empty value selectors must be 1.
comp.InsertGlobal(
0,
ifaces.QueryIDf("GLOBAL_CONSTRAINT_FILTER_CONNECTOR_HUB_STATE_SUMMARY_ACCOUNT_INSERT_FILTER_VALUE_ZEROIZATION"),
sym.Mul(
smc.PeekAtStorage,
sym.Sub(
1,
filterAccountInsert,
),
sym.Sub(
1,
sym.Mul(
sc.SelectorEmptySTValueHi,
sc.SelectorEmptySTValueLo,
sc.SelectorEmptySTValueNextHi,
sc.SelectorEmptySTValueNextLo,
),
),
),
)
// filter must be constant as long as the storage key does not change
comp.InsertGlobal(
0,
ifaces.QueryIDf("GLOBAL_CONSTRAINT_HUB_STATE_SUMMARY__ACCOUNT_INSERT_FILTER_CONSTANCY"),
sym.Mul(
sc.SelectorSTKeyDiffHi, // 1 if ST key HI is the same as in the previous index
sc.SelectorSTKeyDiffLo, // 1 if ST key LO is the same as in the previous index
sc.SelectorBlockNoDiff, // 1 if the block number is the same, meaning that we are in the same storage key segment
sym.Sub(
filterAccountInsert,
column.Shift(filterAccountInsert, -1), // the filter remains constant if the ST key is the same, and block is the same
),
),
)
comp.InsertGlobal(
0,
ifaces.QueryIDf("GLOBAL_CONSTRAINT_FILTER_CONNECTOR_HUB_STATE_SUMMARY_ACCOUNT_INSERT_FILTER_NON_ZEROIZATION"),
sym.Mul(
sym.Sub(
1,
smc.PeekAtStorage,
), // when we are not dealing with storage segments
sym.Sub(
1,
filterAccountInsert,
), // filterAccountInsert must be 1
),
)
// constrain the filter to be binary
mustBeBinary(comp, filterAccountInsert)
}
/*
@@ -494,9 +835,99 @@ func storageIntegrationAssignFinal(run *wizard.ProverRuntime, ss Module, smc Hub
}
svSelectorMaxDeplBlock := smartvectors.NewRegular(selectorMaxDeplBlock)
filterSummary := smartvectors.Mul(ss.IsStorage.GetColAssignment(run), ss.IsFinalDeployment.GetColAssignment(run))
filterSummary := smartvectors.Mul(
ss.IsStorage.GetColAssignment(run),
ss.IsFinalDeployment.GetColAssignment(run),
)
run.AssignColumn("FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_FINAL_SUMMARY", filterSummary)
filterArith := smartvectors.Mul(svSelectorMaxDeplBlock, smc.PeekAtStorage.GetColAssignment(run), smc.LastKOCBlock.GetColAssignment(run))
// compute the filter that detects account inserts in order to exclude those key reads from the
// arithmetization to state summary lookups.
existsFirstInBlock := run.Spec.Columns.GetHandle("hub.scp_EXISTS_FIRST_IN_BLOCK")
existsFinalInBlock := run.Spec.Columns.GetHandle("hub.scp_EXISTS_FINAL_IN_BLOCK")
filterAccountInsert := make([]field.Element, smc.AddressHI.Size())
lastSegmentStart := 0
for index := range filterAccountInsert {
filterAccountInsert[index].SetOne() // always set the filter as one, unless we detect an insertion segment
isStorage := smc.PeekAtStorage.GetColAssignmentAt(run, index)
if isStorage.IsOne() {
firstKOCBlock := smc.FirstKOCBlock.GetColAssignmentAt(run, index)
lastKOCBlock := smc.LastKOCBlock.GetColAssignmentAt(run, index)
existsAtBlockEnd := existsFinalInBlock.GetColAssignmentAt(run, index)
if firstKOCBlock.IsOne() {
// remember when the segment starts
lastSegmentStart = index
}
if lastKOCBlock.IsOne() && existsAtBlockEnd.IsOne() {
existsAtBlockStart := existsFirstInBlock.GetColAssignmentAt(run, lastSegmentStart)
if existsAtBlockStart.IsZero() {
// we are indeed dealing with an insertion segment, check if indeed all the storage values are 0
allStorageIsZero := true
for j := lastSegmentStart; j <= index; j++ {
valueCurrentHi := smc.ValueHICurr.GetColAssignmentAt(run, j)
valueCurrentLo := smc.ValueLOCurr.GetColAssignmentAt(run, j)
valueNextHi := smc.ValueHINext.GetColAssignmentAt(run, j)
valueNextLo := smc.ValueLONext.GetColAssignmentAt(run, j)
if !valueCurrentHi.IsZero() || !valueCurrentLo.IsZero() || !valueNextHi.IsZero() || !valueNextLo.IsZero() {
allStorageIsZero = false
}
}
if allStorageIsZero {
// indeed we are dealing with a zeroed insertion segment
for j := lastSegmentStart; j <= index; j++ {
// set the filter to zeros on the insertion segment
filterAccountInsert[j].SetZero()
}
}
}
}
}
}
svfilterAccountInsert := smartvectors.NewRegular(filterAccountInsert)
run.AssignColumn("FILTER_CONNECTOR_HUB_STATE_SUMMARY_ACCOUNT_INSERT_FILTER", svfilterAccountInsert)
//filterTxExec := run.Spec.Columns.GetHandle("hub.scp_TX_EXEC")
isSLoad := run.Spec.Columns.GetHandle("hub.scp_SLOAD_OPERATION")
isExceptionalOperation := run.Spec.Columns.GetHandle("hub.scp_EXCEPTIONAL_OPERATION")
isNotExceptionalSLoad := make([]field.Element, smc.AddressHI.Size())
for i := range isNotExceptionalSLoad {
fieldOne := field.One()
isExceptionElem := isExceptionalOperation.GetColAssignmentAt(run, i)
isSLoadElem := isSLoad.GetColAssignmentAt(run, i)
multiplied := new(field.Element).Mul(&isSLoadElem, &isExceptionElem)
isNotExceptionalSLoad[i].Sub(&fieldOne, multiplied)
}
// hub.scp_TX_WARM is 1 iff the storage row is associated with pre-warming
/*
isWarmNew := run.Spec.Columns.GetHandle("hub.scp_WARMTH_NEW")
isNotPrewarmingPhase := make([]field.Element, smc.AddressHI.Size())
for i := range isNotPrewarmingPhase {
fieldOne := field.One()
isWarmNewElem := isWarmNew.GetColAssignmentAt(run, i)
isNotPrewarmingPhase[i].Sub(&fieldOne, &isWarmNewElem)
}*/
filterArith := smartvectors.Mul(
svSelectorMaxDeplBlock,
smc.PeekAtStorage.GetColAssignment(run),
smc.LastKOCBlock.GetColAssignment(run),
svfilterAccountInsert,
smartvectors.NewRegular(isNotExceptionalSLoad),
//smartvectors.NewRegular(isNotPrewarmingPhase),
)
run.AssignColumn("FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_FINAL_ARITHMETIZATION", filterArith)
filterArithReversed := smartvectors.Mul(
svSelectorMaxDeplBlock,
smc.PeekAtStorage.GetColAssignment(run),
smc.LastKOCBlock.GetColAssignment(run),
//smartvectors.NewRegular(isNotPrewarmingPhase),
)
run.AssignColumn("FILTER_CONNECTOR_SUMMARY_ARITHMETIZATION_STORAGE_FINAL_ARITHMETIZATION_REVERSED", filterArithReversed)
}

View File

@@ -95,8 +95,6 @@ func defineStateManagerColumns(comp *wizard.CompiledIOP, sampleType int, size in
AddressLO: createCol("ADDRESS_LO"),
Nonce: createCol("NONCE"),
NonceNew: createCol("NONCE_NEW"),
MimcCodeHash: createCol("MimcCodeHash"),
MimcCodeHashNew: createCol("MimcCodeHashNew"),
CodeHashHI: createCol("CodeHashHI"),
CodeHashLO: createCol("CodeHashLO"),
CodeHashHINew: createCol("CodeHashHINew"),
@@ -151,8 +149,6 @@ func (smc *HubColumnSet) assignForTest(run *wizard.ProverRuntime, smVectors *moc
assign(smc.AddressLO, smVectors.AddressLO)
assign(smc.Nonce, smVectors.Nonce)
assign(smc.NonceNew, smVectors.NonceNew)
assign(smc.MimcCodeHash, smVectors.MimcCodeHash)
assign(smc.MimcCodeHashNew, smVectors.MimcCodeHashNew)
assign(smc.CodeHashHI, smVectors.CodeHashHI)
assign(smc.CodeHashLO, smVectors.CodeHashLO)
assign(smc.CodeHashHINew, smVectors.CodeHashHINew)

View File

@@ -42,7 +42,8 @@ func (ss *Module) Assign(run *wizard.ProverRuntime, traces [][]statemanager.Deco
}
for batchNumber, ts := range traces {
assignmentBuilder.pushBlockTraces(batchNumber, ts)
// +1 is to harmonize with the HUB block numbering, which starts from 1
assignmentBuilder.pushBlockTraces(batchNumber+1, ts)
}
assignmentBuilder.finalize(run)
@@ -123,7 +124,7 @@ func (ss *stateSummaryAssignmentBuilder) pushBlockTraces(batchNumber int, traces
// that situation, the account trace is at the beginning of the
// segment. When that happens, we want to be sure that the
// storage rows and the account segment arise in the same position.
if len(subSegment.storageTraces) > 0 {
if actualUnskippedLength(subSegment.storageTraces) > 0 {
curSegment[len(curSegment)-1].storageTraces = subSegment.storageTraces
subSegment = accountSubSegmentWitness{}
}
@@ -144,7 +145,7 @@ func (ss *stateSummaryAssignmentBuilder) pushBlockTraces(batchNumber int, traces
subSegment.storageTraces = append(subSegment.storageTraces, trace)
}
if len(subSegment.storageTraces) > 0 {
if actualUnskippedLength(subSegment.storageTraces) > 0 {
curSegment[len(curSegment)-1].storageTraces = subSegment.storageTraces
}
@@ -168,6 +169,7 @@ func (ss *stateSummaryAssignmentBuilder) pushAccountSegment(batchNumber int, seg
panic("could not get the account address")
}
noOfSkippedStorageTraces := 0
for i := range seg.storageTraces {
var (
@@ -177,77 +179,85 @@ func (ss *stateSummaryAssignmentBuilder) pushAccountSegment(batchNumber int, seg
_ = oldRoot // TODO golangci-lint thinks oldRoot is otherwise unused, even though it's clearly used in the switch case
ss.batchNumber.PushInt(batchNumber)
ss.account.address.PushAddr(accountAddress)
ss.isInitialDeployment.PushBoolean(segID == 0)
ss.isFinalDeployment.PushBoolean(segID == len(segment)-1)
ss.IsDeleteSegment.PushBoolean(isDeleteSegment)
ss.isActive.PushOne()
ss.isStorage.PushOne()
ss.isEndOfAccountSegment.PushZero()
ss.isBeginningOfAccountSegment.PushBoolean(segID == 0 && i == 0)
ss.account.initial.pushAll(initialAccount)
ss.account.final.pushOverrideStorageRoot(finalAccount, newRoot)
ss.worldStateRoot.PushBytes32(initWsRoot)
if !seg.storageTraces[i].IsSkipped {
// the storage trace is to be kept, and not skipped
ss.batchNumber.PushInt(batchNumber)
ss.account.address.PushAddr(accountAddress)
ss.isInitialDeployment.PushBoolean(segID == 0)
ss.isFinalDeployment.PushBoolean(segID == len(segment)-1)
ss.IsDeleteSegment.PushBoolean(isDeleteSegment)
ss.isActive.PushOne()
ss.isStorage.PushOne()
ss.isEndOfAccountSegment.PushZero()
ss.isBeginningOfAccountSegment.PushBoolean(
segID == 0 && i == firstUnskippedIndex(seg.storageTraces),
)
ss.account.initial.pushAll(initialAccount)
ss.account.final.pushOverrideStorageRoot(finalAccount, newRoot)
ss.worldStateRoot.PushBytes32(initWsRoot)
switch t := stoTrace.(type) {
case statemanager.ReadZeroTraceST:
if isDeleteSegment {
/*
Special case: the Shomei compactification process automatically sets storage values to zero if the account later gets deleted
which might not be the case in the arithmetization
in this particular case, for the consistency lookups to work,
we fetch and use the last corresponding storage value/block from the arithmetization columns using
an ArithmetizationStorageParser
*/
x := *(&field.Element{}).SetBytes(accountAddress[:])
keysAndBlock := KeysAndBlock{
address: x.Bytes(),
storageKey: t.Key,
block: batchNumber,
switch t := stoTrace.(type) {
case statemanager.ReadZeroTraceST:
if isDeleteSegment {
/*
Special case: the Shomei compactification process automatically sets storage values to zero if the account later gets deleted
which might not be the case in the arithmetization
in this particular case, for the consistency lookups to work,
we fetch and use the last corresponding storage value/block from the arithmetization columns using
an ArithmetizationStorageParser
*/
x := *(&field.Element{}).SetBytes(accountAddress[:])
keysAndBlock := KeysAndBlock{
address: x.Bytes(),
storageKey: t.Key,
block: batchNumber,
}
arithStorage := ss.arithmetizationStorage.Values[keysAndBlock]
ss.storage.push(t.Key, types.FullBytes32{}, arithStorage)
ss.accumulatorStatement.PushReadZero(oldRoot, hash(t.Key))
} else {
ss.storage.pushOnlyKey(t.Key)
ss.accumulatorStatement.PushReadZero(oldRoot, hash(t.Key))
}
arithStorage := ss.arithmetizationStorage.Values[keysAndBlock]
case statemanager.ReadNonZeroTraceST:
if isDeleteSegment {
/*
Special case, same motivation and fix as in the case of ReadZeroTraceST
*/
x := *(&field.Element{}).SetBytes(accountAddress[:])
keysAndBlock := KeysAndBlock{
address: x.Bytes(),
storageKey: t.Key,
block: batchNumber,
}
arithStorage := ss.arithmetizationStorage.Values[keysAndBlock]
ss.storage.push(t.Key, types.FullBytes32{}, arithStorage)
ss.accumulatorStatement.PushReadZero(oldRoot, hash(t.Key))
} else {
ss.storage.pushOnlyKey(t.Key)
ss.accumulatorStatement.PushReadZero(oldRoot, hash(t.Key))
}
case statemanager.ReadNonZeroTraceST:
if isDeleteSegment {
/*
Special case, same motivation and fix as in the case of ReadZeroTraceST
*/
x := *(&field.Element{}).SetBytes(accountAddress[:])
keysAndBlock := KeysAndBlock{
address: x.Bytes(),
storageKey: t.Key,
block: batchNumber,
ss.storage.push(t.Key, t.Value, arithStorage)
ss.accumulatorStatement.PushReadNonZero(oldRoot, hash(t.Key), hash(t.Value))
} else {
ss.storage.push(t.Key, t.Value, t.Value)
ss.accumulatorStatement.PushReadNonZero(oldRoot, hash(t.Key), hash(t.Value))
}
arithStorage := ss.arithmetizationStorage.Values[keysAndBlock]
ss.storage.push(t.Key, t.Value, arithStorage)
ss.accumulatorStatement.PushReadNonZero(oldRoot, hash(t.Key), hash(t.Value))
case statemanager.InsertionTraceST:
ss.storage.pushOnlyNew(t.Key, t.Val)
ss.accumulatorStatement.PushInsert(oldRoot, newRoot, hash(t.Key), hash(t.Val))
} else {
ss.storage.push(t.Key, t.Value, t.Value)
ss.accumulatorStatement.PushReadNonZero(oldRoot, hash(t.Key), hash(t.Value))
case statemanager.UpdateTraceST:
ss.storage.push(t.Key, t.OldValue, t.NewValue)
ss.accumulatorStatement.PushUpdate(oldRoot, newRoot, hash(t.Key), hash(t.OldValue), hash(t.NewValue))
case statemanager.DeletionTraceST:
ss.storage.pushOnlyOld(t.Key, t.DeletedValue)
ss.accumulatorStatement.PushDelete(oldRoot, newRoot, hash(t.Key), hash(t.DeletedValue))
default:
panic("unknown trace type")
}
case statemanager.InsertionTraceST:
ss.storage.pushOnlyNew(t.Key, t.Val)
ss.accumulatorStatement.PushInsert(oldRoot, newRoot, hash(t.Key), hash(t.Val))
case statemanager.UpdateTraceST:
ss.storage.push(t.Key, t.OldValue, t.NewValue)
ss.accumulatorStatement.PushUpdate(oldRoot, newRoot, hash(t.Key), hash(t.OldValue), hash(t.NewValue))
case statemanager.DeletionTraceST:
ss.storage.pushOnlyOld(t.Key, t.DeletedValue)
ss.accumulatorStatement.PushDelete(oldRoot, newRoot, hash(t.Key), hash(t.DeletedValue))
default:
panic("unknown trace type")
} else {
// the storage trace is skipped
noOfSkippedStorageTraces++
}
}
@@ -259,7 +269,7 @@ func (ss *stateSummaryAssignmentBuilder) pushAccountSegment(batchNumber int, seg
ss.isActive.PushOne()
ss.isStorage.PushZero()
ss.isEndOfAccountSegment.PushBoolean(segID == len(segment)-1)
ss.isBeginningOfAccountSegment.PushBoolean(segID == 0 && len(seg.storageTraces) == 0)
ss.isBeginningOfAccountSegment.PushBoolean(segID == 0 && actualUnskippedLength(seg.storageTraces) == 0)
ss.account.initial.pushAll(initialAccount)
ss.account.final.pushAll(finalAccount)
ss.worldStateRoot.PushBytes32(finalWsRoot)
@@ -435,3 +445,25 @@ func hash(x io.WriterTo) types.Bytes32 {
x.WriteTo(hasher)
return types.AsBytes32(hasher.Sum(nil))
}
// actualUnskippedLength reports how many of the provided traces are not
// flagged as skipped, i.e. the number of rows that will actually
// materialize in the assignment for the segment.
func actualUnskippedLength(traces []statemanager.DecodedTrace) int {
	count := 0
	for i := range traces {
		if traces[i].IsSkipped {
			continue
		}
		count++
	}
	return count
}
// firstUnskippedIndex returns the position of the first trace whose
// IsSkipped flag is unset. It panics if every trace is skipped, as Shomei
// is expected to always emit at least one unskipped storage trace.
func firstUnskippedIndex(traces []statemanager.DecodedTrace) int {
	for i := 0; i < len(traces); i++ {
		if traces[i].IsSkipped {
			continue
		}
		return i
	}
	panic("There are no unskipped storage traces, but that is out of Shomei's expected specifications")
}

View File

@@ -1,6 +1,7 @@
package statesummary
import (
"github.com/consensys/linea-monorepo/prover/maths/field"
"github.com/consensys/linea-monorepo/prover/protocol/column"
"github.com/consensys/linea-monorepo/prover/protocol/ifaces"
"github.com/consensys/linea-monorepo/prover/protocol/wizard"
@@ -8,6 +9,11 @@ import (
"github.com/consensys/linea-monorepo/prover/utils"
)
const (
EMPTYKECCAKCODEHASH_HI = "0xc5d2460186f7233c927e7db2dcc703c0"
EMPTYKECCAKCODEHASH_LO = "0xe500b653ca82273b7bfad8045d85a470"
)
// Module represents the state-summary module. It defines all the columns
// constraints and assigment methods for this module. The state-summary module
// is tasked with:
@@ -125,7 +131,7 @@ func NewModule(comp *wizard.CompiledIOP, size int) Module {
res.csStoragePeek(comp)
res.csWorldStateRoot(comp)
res.csIsDeletionSegment(comp)
res.constrainExpectedHubCodeHash(comp)
return res
}
@@ -396,8 +402,11 @@ func (ss *Module) csBatchNumber(comp *wizard.CompiledIOP) {
comp.InsertLocal(
0,
"STATE_SUMMARY_BATCH_NUMER_START_FROM_ZERO",
sym.NewVariable(ss.BatchNumber),
"STATE_SUMMARY_BATCH_NUMBER_START_FROM_ONE",
sym.Sub(
ss.BatchNumber,
1,
),
)
isZeroWhenInactive(comp, ss.BatchNumber, ss.IsActive)
@@ -930,6 +939,126 @@ func (ss *Module) csAccumulatorRoots(comp *wizard.CompiledIOP) {
)
}
// constrainExpectedHubCodeHash constrains the ExpectedHubCodeHash columns
// using the KeccakCodeHash information from the state summary.
//
// For both the initial and the final view of the account:
//   - if the account exists, ExpectedHubCodeHash must equal KeccakCodeHash;
//   - if the account does not exist, ExpectedHubCodeHash must equal the
//     Keccak hash of the empty code (EMPTYKECCAKCODEHASH_HI/LO), which is
//     what the HUB is expected to report for nonexistent accounts.
//
// The nonexistent-account constraints are explicitly gated by ss.IsActive so
// they only apply to the active part of the module. The existent-account
// constraints rely on Exists being zero on inactive rows — NOTE(review):
// presumably enforced elsewhere in the module; confirm.
func (ss *Module) constrainExpectedHubCodeHash(comp *wizard.CompiledIOP) {
	// initial case Hi, existent accounts
	comp.InsertGlobal(
		0,
		"GLOBAL_CONSTRAINT_EXPECTED_HUB_CODEHASH_INITIAL_CASE_EXISTENT_HI",
		sym.Mul(
			ss.Account.Initial.Exists,
			sym.Sub(
				ss.Account.Initial.KeccakCodeHash.Hi,
				ss.Account.Initial.ExpectedHubCodeHash.Hi,
			),
		),
	)
	// initial case Lo, existent accounts
	comp.InsertGlobal(
		0,
		"GLOBAL_CONSTRAINT_EXPECTED_HUB_CODEHASH_INITIAL_CASE_EXISTENT_LO",
		sym.Mul(
			ss.Account.Initial.Exists,
			sym.Sub(
				ss.Account.Initial.KeccakCodeHash.Lo,
				ss.Account.Initial.ExpectedHubCodeHash.Lo,
			),
		),
	)
	// initial case Hi, nonexistent accounts
	comp.InsertGlobal(
		0,
		"GLOBAL_CONSTRAINT_EXPECTED_HUB_CODEHASH_INITIAL_CASE_NON_EXISTENT_HI",
		sym.Mul(
			ss.IsActive, // only on the active part of the module
			sym.Sub(
				1,
				ss.Account.Initial.Exists,
			),
			sym.Sub(
				ss.Account.Initial.ExpectedHubCodeHash.Hi,
				field.NewFromString(EMPTYKECCAKCODEHASH_HI),
			),
		),
	)
	// initial case Lo, nonexistent accounts
	comp.InsertGlobal(
		0,
		"GLOBAL_CONSTRAINT_EXPECTED_HUB_CODEHASH_INITIAL_CASE_NON_EXISTENT_LO",
		sym.Mul(
			ss.IsActive, // only on the active part of the module
			sym.Sub(
				1,
				ss.Account.Initial.Exists,
			),
			sym.Sub(
				ss.Account.Initial.ExpectedHubCodeHash.Lo,
				field.NewFromString(EMPTYKECCAKCODEHASH_LO),
			),
		),
	)
	// final case Hi, existent accounts
	comp.InsertGlobal(
		0,
		// fixed label typo: was "…FINALL_CASE_EXISTENT_HI", now consistent
		// with the seven sibling constraint names
		"GLOBAL_CONSTRAINT_EXPECTED_HUB_CODEHASH_FINAL_CASE_EXISTENT_HI",
		sym.Mul(
			ss.Account.Final.Exists,
			sym.Sub(
				ss.Account.Final.KeccakCodeHash.Hi,
				ss.Account.Final.ExpectedHubCodeHash.Hi,
			),
		),
	)
	// final case Lo, existent accounts
	comp.InsertGlobal(
		0,
		"GLOBAL_CONSTRAINT_EXPECTED_HUB_CODEHASH_FINAL_CASE_EXISTENT_LO",
		sym.Mul(
			ss.Account.Final.Exists,
			sym.Sub(
				ss.Account.Final.KeccakCodeHash.Lo,
				ss.Account.Final.ExpectedHubCodeHash.Lo,
			),
		),
	)
	// final case Hi, nonexistent accounts
	comp.InsertGlobal(
		0,
		"GLOBAL_CONSTRAINT_EXPECTED_HUB_CODEHASH_FINAL_CASE_NON_EXISTENT_HI",
		sym.Mul(
			ss.IsActive, // only on the active part of the module
			sym.Sub(
				1,
				ss.Account.Final.Exists,
			),
			sym.Sub(
				ss.Account.Final.ExpectedHubCodeHash.Hi,
				field.NewFromString(EMPTYKECCAKCODEHASH_HI),
			),
		),
	)
	// final case Lo, nonexistent accounts
	comp.InsertGlobal(
		0,
		"GLOBAL_CONSTRAINT_EXPECTED_HUB_CODEHASH_FINAL_CASE_NON_EXISTENT_LO",
		sym.Mul(
			ss.IsActive, // only on the active part of the module
			sym.Sub(
				1,
				ss.Account.Final.Exists,
			),
			sym.Sub(
				ss.Account.Final.ExpectedHubCodeHash.Lo,
				field.NewFromString(EMPTYKECCAKCODEHASH_LO),
			),
		),
	)
}
// ternary is a small utility to construct ternaries is constraints
func ternary(cond, if1, if0 any) *sym.Expression {
return sym.Add(

View File

@@ -95,7 +95,7 @@ func newZkEVM(b *wizard.Builder, s *Settings) *ZkEvm {
comp = b.CompiledIOP
arith = arithmetization.NewArithmetization(b, s.Arithmetization)
ecdsa = ecdsa.NewEcdsaZkEvm(comp, &s.Ecdsa)
stateManager = statemanager.NewStateManagerNoHub(comp, s.Statemanager)
stateManager = statemanager.NewStateManager(comp, s.Statemanager)
keccak = keccak.NewKeccakZkEVM(comp, s.Keccak, ecdsa.GetProviders())
modexp = modexp.NewModuleZkEvm(comp, s.Modexp)
ecadd = ecarith.NewEcAddZkEvm(comp, &s.Ecadd)