Compare commits


31 Commits

Author SHA1 Message Date
colinlyguo
e929d53b0b add a new script 2024-04-15 01:16:23 +08:00
colin
e90aa04e8c Merge branch 'update-zkverifier-deployment-script' into codecv1-script 2024-04-13 01:38:34 +08:00
colin
32f0011d74 Merge branch 'develop' into codecv1-script 2024-04-13 01:37:59 +08:00
colinlyguo
f0fc344303 update-rollup-verifier-deployment-script 2024-04-12 23:30:42 +08:00
HAOYUatHZ
5b827c3c18 feat(db): add batch_data_hash & blob metadata (#1221)
Co-authored-by: HAOYUatHZ <HAOYUatHZ@users.noreply.github.com>
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2024-04-12 15:45:06 +08:00
georgehao
6b2eb80aa5 feat: print version info on service startup (#1268)
Co-authored-by: Péter Garamvölgyi <peter@scroll.io>
2024-04-12 15:09:26 +08:00
Péter Garamvölgyi
71f88b04f5 fix: add blobHash to challenge and piHash (#1264)
Co-authored-by: Thegaram <Thegaram@users.noreply.github.com>
2024-04-12 11:00:37 +08:00
Zhang Zhuo
bcd9764bcd chore(libzkp): upgrade to v0.10.3 (#1267)
Co-authored-by: Thegaram <Thegaram@users.noreply.github.com>
2024-04-12 10:51:39 +08:00
georgehao
b4f8377a08 fix(coordinator): fix coordinator recover public key inconsistent (#1265)
Co-authored-by: georgehao <georgehao@users.noreply.github.com>
2024-04-12 07:19:31 +08:00
colinlyguo
be5c58a427 update scripts 2024-04-06 22:04:58 +08:00
colinlyguo
97c85209c5 update tx payload length 2024-04-06 22:00:22 +08:00
colinlyguo
48534f8698 fix 2024-04-06 17:46:55 +08:00
colinlyguo
378ec79d14 fix 2024-04-06 03:26:03 +08:00
colinlyguo
1054023dd5 fix script 2024-04-06 03:21:27 +08:00
colinlyguo
65481212d5 update script 2024-04-06 03:13:22 +08:00
colinlyguo
ba289fc651 update contract 2024-04-05 18:36:36 +08:00
colinlyguo
64ed273a1d fix 2024-04-05 03:03:24 +08:00
colinlyguo
c1059a3f51 fix 2024-04-05 00:20:01 +08:00
colin
70aa557968 Merge branch 'develop' into codecv1-script 2024-04-04 19:03:44 +08:00
colinlyguo
94a3c0a571 update sending script 2024-04-04 18:52:19 +08:00
HAOYUatHZ
4c6016f852 add relay-skipped-tx.ts 2024-04-04 17:29:15 +08:00
HAOYUatHZ
e53e150341 add ecc.sol foundry deployment scripts 2024-04-04 16:30:36 +08:00
HAOYUatHZ
5b6b170db1 add hash.sol foundry deployment scripts 2024-04-04 16:24:40 +08:00
HAOYUatHZ
44c77bcc87 add sha256 & ecc contracts 2024-04-04 16:19:39 +08:00
colinlyguo
364173a8d6 add finalizeBatch calldata script 2024-04-03 16:30:29 +08:00
colinlyguo
2afba3e394 fix scripts 2024-04-03 10:24:02 +08:00
colinlyguo
53b24f75f8 fix a typo 2024-04-03 02:51:01 +08:00
colinlyguo
de60ea8f55 add dump calldata scripts 2024-04-03 02:35:41 +08:00
colinlyguo
9ade976ce5 add large transaction payload script 2024-04-02 19:50:59 +08:00
colinlyguo
4bb9cf89aa dump full blob 2024-04-01 17:11:39 +08:00
colinlyguo
efe0d7d2fe feat: codecv1 script 2024-04-01 15:41:56 +08:00
36 changed files with 1385 additions and 177 deletions

View File

@@ -31,7 +31,7 @@ dependencies = [
[[package]]
name = "aggregator"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"ark-std 0.3.0",
"c-kzg",
@@ -521,7 +521,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
[[package]]
name = "bus-mapping"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"eth-types",
"ethers-core",
@@ -1139,7 +1139,7 @@ dependencies = [
[[package]]
name = "eth-types"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"base64 0.13.1",
"ethers-core",
@@ -1293,7 +1293,7 @@ dependencies = [
[[package]]
name = "external-tracer"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"eth-types",
"geth-utils",
@@ -1485,7 +1485,7 @@ dependencies = [
[[package]]
name = "gadgets"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"eth-types",
"halo2_proofs",
@@ -1507,7 +1507,7 @@ dependencies = [
[[package]]
name = "geth-utils"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"env_logger 0.10.0",
"gobuild",
@@ -2142,7 +2142,7 @@ dependencies = [
[[package]]
name = "keccak256"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"env_logger 0.10.0",
"eth-types",
@@ -2292,7 +2292,7 @@ dependencies = [
[[package]]
name = "mock"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"eth-types",
"ethers-core",
@@ -2307,7 +2307,7 @@ dependencies = [
[[package]]
name = "mpt-zktrie"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"eth-types",
"halo2-mpt-circuits",
@@ -2769,7 +2769,7 @@ dependencies = [
[[package]]
name = "prover"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"aggregator",
"anyhow",
@@ -4456,7 +4456,7 @@ dependencies = [
[[package]]
name = "zkevm-circuits"
version = "0.1.0"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.2#31ca6a0068d2c21f6d179780823e47b54403dba8"
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.10.3#5776400eca902bf9a69306a07ea62ca6300dff76"
dependencies = [
"array-init",
"bus-mapping",

View File

@@ -25,7 +25,7 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
[dependencies]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.10.2", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.10.3", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
base64 = "0.13.0"
env_logger = "0.9.0"

View File

@@ -0,0 +1,133 @@
package main
import (
"context"
"fmt"
"log"
"math/big"
"time"
"github.com/scroll-tech/go-ethereum/accounts/abi/bind"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/scroll-tech/go-ethereum/rlp"
)
const targetTxSize = 126914
func main() {
privateKeyHex := "0000000000000000000000000000000000000000000000000000000000000042"
privateKey, err := crypto.HexToECDSA(privateKeyHex)
if err != nil {
log.Fatalf("Invalid private key: %v", err)
}
client, err := ethclient.Dial("http://localhost:9999")
if err != nil {
log.Fatalf("Failed to connect to the Ethereum client: %v", err)
}
auth, err := bind.NewKeyedTransactorWithChainID(privateKey, big.NewInt(222222))
if err != nil {
log.Fatalf("Failed to create transactor with chain ID 222222: %v", err)
}
nonce, err := client.PendingNonceAt(context.Background(), auth.From)
if err != nil {
log.Fatalf("Failed to retrieve account nonce: %v", err)
}
totalTxNum := []uint64{2, 3, 4, 5, 6}
for _, num := range totalTxNum {
prepareAndSendTransactions(client, auth, nonce, num)
nonce += num
}
}
func prepareAndSendTransactions(client *ethclient.Client, auth *bind.TransactOpts, initialNonce uint64, totalTxNum uint64) error {
gasLimit := uint64(5000000)
gasPrice := big.NewInt(1000000000)
var signedTxs []*types.Transaction
payloadSum := 0
dataPayload := make([]byte, targetTxSize/totalTxNum)
for i := range dataPayload {
dataPayload[i] = 0xff
}
for i := uint64(0); i < totalTxNum-1; i++ {
txData := &types.LegacyTx{
Nonce: initialNonce + i,
GasPrice: gasPrice,
Gas: gasLimit,
To: &auth.From,
Data: dataPayload,
}
signedTx, err := auth.Signer(auth.From, types.NewTx(txData))
if err != nil {
log.Fatalf("Failed to sign tx: %v", err)
}
rlpTxData, err := rlp.EncodeToBytes(signedTx)
if err != nil {
log.Fatalf("Failed to RLP encode the tx: %v", err)
}
payloadSum += len(rlpTxData)
signedTxs = append(signedTxs, signedTx)
}
fmt.Println("payload sum", payloadSum)
lowerBound := 0
upperBound := targetTxSize
for lowerBound <= upperBound {
mid := (lowerBound + upperBound) / 2
data := make([]byte, mid)
for i := range data {
data[i] = 0xff
}
txData := &types.LegacyTx{
Nonce: initialNonce + totalTxNum - 1,
GasPrice: gasPrice,
Gas: gasLimit,
To: &auth.From,
Data: data,
}
signedTx, err := auth.Signer(auth.From, types.NewTx(txData))
if err != nil {
log.Fatalf("Failed to sign tx: %v", err)
}
rlpTxData, err := rlp.EncodeToBytes(signedTx)
if err != nil {
log.Fatalf("Failed to RLP encode the tx: %v", err)
}
txSize := len(rlpTxData)
if payloadSum+txSize < targetTxSize {
lowerBound = mid + 1
} else if payloadSum+txSize > targetTxSize {
upperBound = mid - 1
} else {
fmt.Println("payloadSum+txSize", payloadSum+txSize)
signedTxs = append(signedTxs, signedTx)
break
}
}
for _, signedTx := range signedTxs {
if err := client.SendTransaction(context.Background(), signedTx); err != nil {
return fmt.Errorf("failed to send transaction: %v", err)
}
fmt.Printf("Transaction with nonce %d sent\n", signedTx.Nonce())
time.Sleep(10 * time.Second)
}
return nil
}
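Both sizing scripts share the same core idea: every transaction except the last carries a fixed 0xff payload of targetTxSize/totalTxNum bytes, and the last transaction's data length is binary-searched so that the summed RLP-encoded sizes hit targetTxSize exactly. A minimal sketch of that search, with the sign-and-RLP-encode step above abstracted into a sizeOf callback:

    // fitLastPayload binary-searches a data length in [0, target] such that
    // payloadSum plus the encoded size of a transaction carrying that many
    // bytes equals target exactly. ok is false when no length matches.
    func fitLastPayload(payloadSum, target int, sizeOf func(dataLen int) int) (dataLen int, ok bool) {
    	lo, hi := 0, target
    	for lo <= hi {
    		mid := (lo + hi) / 2
    		switch total := payloadSum + sizeOf(mid); {
    		case total < target:
    			lo = mid + 1
    		case total > target:
    			hi = mid - 1
    		default:
    			return mid, true
    		}
    	}
    	return 0, false
    }

Note that the loop above appends the final signed transaction only on an exact match; because RLP length prefixes (and signature encodings) can grow by more than one byte at a time, some totals are unreachable, in which case the script silently sends one transaction fewer than intended.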

View File

@@ -0,0 +1,131 @@
package main
import (
"context"
"fmt"
"log"
"math/big"
"github.com/scroll-tech/go-ethereum/accounts/abi/bind"
"github.com/scroll-tech/go-ethereum/core/types"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/ethclient"
"github.com/scroll-tech/go-ethereum/rlp"
)
const targetTxSize = 120568
func main() {
privateKeyHex := "0000000000000000000000000000000000000000000000000000000000000042"
privateKey, err := crypto.HexToECDSA(privateKeyHex)
if err != nil {
log.Fatalf("Invalid private key: %v", err)
}
client, err := ethclient.Dial("http://localhost:9999")
if err != nil {
log.Fatalf("Failed to connect to the Ethereum client: %v", err)
}
auth, err := bind.NewKeyedTransactorWithChainID(privateKey, big.NewInt(222222))
if err != nil {
log.Fatalf("Failed to create transactor with chain ID 222222: %v", err)
}
nonce, err := client.PendingNonceAt(context.Background(), auth.From)
if err != nil {
log.Fatalf("Failed to retrieve account nonce: %v", err)
}
prepareAndSendTransactions(client, auth, nonce, 1)
prepareAndSendTransactions(client, auth, nonce+1, 2)
prepareAndSendTransactions(client, auth, nonce+1+2, 3)
prepareAndSendTransactions(client, auth, nonce+1+2+3, 4)
prepareAndSendTransactions(client, auth, nonce+1+2+3+4, 5)
prepareAndSendTransactions(client, auth, nonce+1+2+3+4+5, 6)
}
func prepareAndSendTransactions(client *ethclient.Client, auth *bind.TransactOpts, initialNonce uint64, totalTxNum uint64) error {
gasLimit := uint64(5000000)
gasPrice := big.NewInt(1000000000)
var signedTxs []*types.Transaction
payloadSum := 0
dataPayload := make([]byte, targetTxSize/totalTxNum)
for i := range dataPayload {
dataPayload[i] = 0xff
}
for i := uint64(0); i < totalTxNum-1; i++ {
txData := &types.LegacyTx{
Nonce: initialNonce + i,
GasPrice: gasPrice,
Gas: gasLimit,
To: &auth.From,
Data: dataPayload,
}
signedTx, err := auth.Signer(auth.From, types.NewTx(txData))
if err != nil {
log.Fatalf("Failed to sign tx: %v", err)
}
rlpTxData, err := rlp.EncodeToBytes(signedTx)
if err != nil {
log.Fatalf("Failed to RLP encode the tx: %v", err)
}
payloadSum += len(rlpTxData)
signedTxs = append(signedTxs, signedTx)
}
fmt.Println("payload sum", payloadSum)
lowerBound := 0
upperBound := targetTxSize
for lowerBound <= upperBound {
mid := (lowerBound + upperBound) / 2
data := make([]byte, mid)
for i := range data {
data[i] = 0xff
}
txData := &types.LegacyTx{
Nonce: initialNonce + totalTxNum - 1,
GasPrice: gasPrice,
Gas: gasLimit,
To: &auth.From,
Data: data,
}
signedTx, err := auth.Signer(auth.From, types.NewTx(txData))
if err != nil {
log.Fatalf("Failed to sign tx: %v", err)
}
rlpTxData, err := rlp.EncodeToBytes(signedTx)
if err != nil {
log.Fatalf("Failed to RLP encode the tx: %v", err)
}
txSize := len(rlpTxData)
if payloadSum+txSize < targetTxSize {
lowerBound = mid + 1
} else if payloadSum+txSize > targetTxSize {
upperBound = mid - 1
} else {
fmt.Println("payloadSum+txSize", payloadSum+txSize)
signedTxs = append(signedTxs, signedTx)
break
}
}
for i := len(signedTxs) - 1; i >= 0; i-- {
if err := client.SendTransaction(context.Background(), signedTxs[i]); err != nil {
return fmt.Errorf("failed to send transaction: %v", err)
}
fmt.Printf("Transaction with nonce %d sent\n", signedTxs[i].Nonce())
}
return nil
}
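This variant targets 120,568 bytes and chains the calls manually with triangular nonce offsets (0, 1, 3, 6, 10, 15); it also submits each group in reverse nonce order, presumably relying on the node to queue the higher-nonce transactions until the lowest nonce arrives. A sketch of a drop-in replacement for the call chain in main, which also surfaces the error that the manual calls discard:

    offset := uint64(0)
    for _, n := range []uint64{1, 2, 3, 4, 5, 6} {
    	if err := prepareAndSendTransactions(client, auth, nonce+offset, n); err != nil {
    		log.Fatalf("failed to send group of %d txs: %v", n, err)
    	}
    	offset += n
    }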

View File

@@ -227,18 +227,11 @@ func NewDABatch(batch *encoding.Batch) (*DABatch, error) {
}
// blob payload
blob, z, err := constructBlobPayload(batch.Chunks)
blob, blobVersionedHash, z, err := constructBlobPayload(batch.Chunks)
if err != nil {
return nil, err
}
// blob versioned hash
c, err := kzg4844.BlobToCommitment(*blob)
if err != nil {
return nil, fmt.Errorf("failed to create blob commitment")
}
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
daBatch := DABatch{
Version: CodecV1Version,
BatchIndex: batch.Index,
@@ -281,7 +274,7 @@ func computeBatchDataHash(chunks []*encoding.Chunk, totalL1MessagePoppedBefore u
}
// constructBlobPayload constructs the 4844 blob payload.
func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Point, error) {
func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, common.Hash, *kzg4844.Point, error) {
// metadata consists of num_chunks (2 bytes) and chunki_size (4 bytes per chunk)
metadataLength := 2 + MaxNumChunks*4
@@ -289,8 +282,8 @@ func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Poi
blobBytes := make([]byte, metadataLength)
// challenge digest preimage
// 1 hash for metadata and 1 for each chunk
challengePreimage := make([]byte, (1+MaxNumChunks)*32)
// 1 hash for metadata, 1 hash for each chunk, 1 hash for blob versioned hash
challengePreimage := make([]byte, (1+MaxNumChunks+1)*32)
// the chunk data hash used for calculating the challenge preimage
var chunkDataHash common.Hash
@@ -309,7 +302,7 @@ func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Poi
// encode L2 txs into blob payload
rlpTxData, err := encoding.ConvertTxDataToRLPEncoding(tx)
if err != nil {
return nil, nil, err
return nil, common.Hash{}, nil, err
}
blobBytes = append(blobBytes, rlpTxData...)
}
@@ -341,9 +334,19 @@ func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Poi
// convert raw data to BLSFieldElements
blob, err := makeBlobCanonical(blobBytes)
if err != nil {
return nil, nil, err
return nil, common.Hash{}, nil, err
}
// compute blob versioned hash
c, err := kzg4844.BlobToCommitment(*blob)
if err != nil {
return nil, common.Hash{}, nil, fmt.Errorf("failed to create blob commitment")
}
blobVersionedHash := kzg4844.CalcBlobHashV1(sha256.New(), &c)
// challenge: append blob versioned hash
copy(challengePreimage[(1+MaxNumChunks)*32:], blobVersionedHash[:])
// compute z = challenge_digest % BLS_MODULUS
challengeDigest := crypto.Keccak256Hash(challengePreimage)
pointBigInt := new(big.Int).Mod(new(big.Int).SetBytes(challengeDigest[:]), BLSModulus)
@@ -354,7 +357,7 @@ func constructBlobPayload(chunks []*encoding.Chunk) (*kzg4844.Blob, *kzg4844.Poi
start := 32 - len(pointBytes)
copy(z[start:], pointBytes)
return blob, &z, nil
return blob, blobVersionedHash, &z, nil
}
// makeBlobCanonical converts the raw blob data into the canonical blob representation of 4096 BLSFieldElements.
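The refactor moves the KZG commitment and versioned-hash computation into constructBlobPayload and widens the challenge preimage from (1 + MaxNumChunks) to (1 + MaxNumChunks + 1) 32-byte limbs, so the blob versioned hash is now bound into the challenge. The evaluation point z is the keccak digest of that preimage reduced modulo the BLS12-381 scalar field order, left-padded to 32 bytes. A self-contained sketch of that final step (the modulus constant is the standard BLS12-381 scalar order; the all-zero preimage and MaxNumChunks = 15 are illustrative assumptions):

    package main

    import (
    	"fmt"
    	"math/big"

    	"github.com/scroll-tech/go-ethereum/crypto"
    )

    // standard BLS12-381 scalar field order
    var blsModulus, _ = new(big.Int).SetString("73eda753299d7d483339d80809a1d80553bda402fffe5bfeffffffff00000001", 16)

    // challengePoint reproduces z = keccak256(preimage) mod BLS_MODULUS,
    // big-endian and left-padded to 32 bytes, as in constructBlobPayload above.
    func challengePoint(challengePreimage []byte) [32]byte {
    	digest := crypto.Keccak256Hash(challengePreimage)
    	point := new(big.Int).Mod(new(big.Int).SetBytes(digest[:]), blsModulus)
    	var z [32]byte
    	b := point.Bytes()
    	copy(z[32-len(b):], b)
    	return z
    }

    func main() {
    	z := challengePoint(make([]byte, (1+15+1)*32)) // hypothetical all-zero preimage
    	fmt.Printf("z = %x\n", z)
    }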

View File

@@ -479,55 +479,55 @@ func TestCodecV1BatchChallenge(t *testing.T) {
originalBatch := &encoding.Batch{Chunks: []*encoding.Chunk{chunk2}}
batch, err := NewDABatch(originalBatch)
assert.NoError(t, err)
assert.Equal(t, "06138a688f328d13cb9caf0e2046d65bbcf766eab00196fb05e43806c7b26b36", hex.EncodeToString(batch.z[:]))
assert.Equal(t, "0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef", hex.EncodeToString(batch.z[:]))
trace3 := readBlockFromJSON(t, "../../../testdata/blockTrace_03.json")
chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk3}}
batch, err = NewDABatch(originalBatch)
assert.NoError(t, err)
assert.Equal(t, "1e3f41f46941b3d30bbc482942026b09224636ed63a160738d7ae57a00c99294", hex.EncodeToString(batch.z[:]))
assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e58", hex.EncodeToString(batch.z[:]))
trace4 := readBlockFromJSON(t, "../../../testdata/blockTrace_04.json")
chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk4}}
batch, err = NewDABatch(originalBatch)
assert.NoError(t, err)
assert.Equal(t, "37c3ab6ad48e99fc0ce8e9de5f9b2c9be832699b293e4243b85d4e42bad0db7a", hex.EncodeToString(batch.z[:]))
assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8", hex.EncodeToString(batch.z[:]))
trace5 := readBlockFromJSON(t, "../../../testdata/blockTrace_05.json")
chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk5}}
batch, err = NewDABatch(originalBatch)
assert.NoError(t, err)
assert.Equal(t, "1fa77f72d924ed6efdc399cf7a3de45fd3b50538d368d80d94840d30fdb606ec", hex.EncodeToString(batch.z[:]))
assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
trace6 := readBlockFromJSON(t, "../../../testdata/blockTrace_06.json")
chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk6}}
batch, err = NewDABatch(originalBatch)
assert.NoError(t, err)
assert.Equal(t, "1fa77f72d924ed6efdc399cf7a3de45fd3b50538d368d80d94840d30fdb606ec", hex.EncodeToString(batch.z[:]))
assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
trace7 := readBlockFromJSON(t, "../../../testdata/blockTrace_07.json")
chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk7}}
batch, err = NewDABatch(originalBatch)
assert.NoError(t, err)
assert.Equal(t, "1fa77f72d924ed6efdc399cf7a3de45fd3b50538d368d80d94840d30fdb606ec", hex.EncodeToString(batch.z[:]))
assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", hex.EncodeToString(batch.z[:]))
// 15 chunks
originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
batch, err = NewDABatch(originalBatch)
assert.NoError(t, err)
assert.Equal(t, "0244c987922db21694e8eb0184c4a5e6f3785fb688224822f1f826874ed5aae2", hex.EncodeToString(batch.z[:]))
assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b", hex.EncodeToString(batch.z[:]))
chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk8, chunk9}}
batch, err = NewDABatch(originalBatch)
assert.NoError(t, err)
assert.Equal(t, "03523cd88a7227826e093305cbe4ce237e8df38e2157566fb3742cc39dbc9c43", hex.EncodeToString(batch.z[:]))
assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab", hex.EncodeToString(batch.z[:]))
}
func repeat(element byte, count int) string {
@@ -547,31 +547,31 @@ func TestCodecV1BatchChallengeWithStandardTestCases(t *testing.T) {
expectedy string
}{
// single empty chunk
{chunks: [][]string{{}}, expectedz: "1fa77f72d924ed6efdc399cf7a3de45fd3b50538d368d80d94840d30fdb606ec", expectedy: "28bda8f1836f60a3879f4253c4f51b3e41a905449b60a83a594f9f2487e8df51"},
{chunks: [][]string{{}}, expectedz: "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925", expectedy: "304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd08"},
// single non-empty chunk
{chunks: [][]string{{"0x010203"}}, expectedz: "30a9d6cfc2b87fb00d80e7fea28ebb9eff0bd526dbf1da32acfe8c5fd49632ff", expectedy: "723515444cb320fe437b9cea3b51293f5fbcb5913739ad35eab28b1863f7c312"},
{chunks: [][]string{{"0x010203"}}, expectedz: "1c1d4bd5153f877d799853080aba243f2c186dd6d6064eaefacfe715c92b6354", expectedy: "24e80ed99526b0d15ba46f7ec682f517576ddae68d5131e5d351f8bae06ea7d3"},
// multiple empty chunks
{chunks: [][]string{{}, {}}, expectedz: "17772348f946a4e4adfcaf5c1690d078933b6b090ca9a52fab6c7e545b1007ae", expectedy: "05ba9abbc81a1c97f4cdaa683a7e0c731d9dfd88feef8f7b2fcfd79e593662b5"},
{chunks: [][]string{{}, {}}, expectedz: "152c9ccfcc2884f9891f7adce2de110cf9f85bfd0e21f0933ae0636390a84d41", expectedy: "5f6f532676e25b49e2eae77513fbeca173a300b434c0a5e24fa554b68e27d582"},
// multiple non-empty chunks
{chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "60376321eea0886c29bd97d95851c7b5fbdb064c8adfdadd7678617b32b3ebf2", expectedy: "50cfbcece01cadb4eade40649e17b140b31f96088097e38f020e31dfe6551604"},
{chunks: [][]string{{"0x010203"}, {"0x070809"}}, expectedz: "62100f5381179ea7db7aa8fdedb0f7fc7b82730b75432d50ab41f80aeebe45a3", expectedy: "5b1f6e7a54907ddc06871853cf1f5d53bf2de0df7b61d0df84bc2c3fb80320cd"},
// empty chunk followed by non-empty chunk
{chunks: [][]string{{}, {"0x010203"}}, expectedz: "054539f03564eda9462d582703cde0788e4e27c311582ddfb19835358273a7ca", expectedy: "1fba03580b5908c4c66b48e79c10e7a34e4b27ed37a1a049b3e17e017cad5245"},
{chunks: [][]string{{}, {"0x010203"}}, expectedz: "2d94d241c4a2a8d8f02845ca40cfba344f3b42384af2045a75c82e725a184232", expectedy: "302416c177e9e7fe40c3bc4315066c117e27d246b0a33ef68cdda6dd333c485c"},
// non-empty chunk followed by empty chunk
{chunks: [][]string{{"0x070809"}, {}}, expectedz: "0b82dceaa6ca4b5d704590c921accfd991b56b5ad0212e6a4e63e54915a2053b", expectedy: "2362f3a0c87f0ea11eb898ed608c7f09a42926a058d4c5d111a0f54cad10ebbd"},
{chunks: [][]string{{"0x070809"}, {}}, expectedz: "7227567e3b1dbacb48a32bb85e4e99f73e4bd5620ea8cd4f5ac00a364c86af9c", expectedy: "2eb3dfd28362f35f562f779e749a555d2f1f87ddc716e95f04133d25189a391c"},
// max number of chunks all empty
{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "174cd3ba9b2ae8ab789ec0b5b8e0b27ee122256ec1756c383dbf2b5b96903f1b", expectedy: "225cab9658904181671eb7abc342ffc36a6836048b64a67f0fb758439da2567b"},
{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}}, expectedz: "1128ac3e22ced6af85be4335e0d03a266946a7cade8047e7fc59d6c8be642321", expectedy: "2d9b16422ce17f328fd00c99349768f0cb0c8648115eb3bd9b7864617ba88059"},
// max number of chunks all non-empty
{chunks: [][]string{{"0x0a"}, {"0x0a0b"}, {"0x0a0b0c"}, {"0x0a0b0c0d"}, {"0x0a0b0c0d0e"}, {"0x0a0b0c0d0e0f"}, {"0x0a0b0c0d0e0f10"}, {"0x0a0b0c0d0e0f1011"}, {"0x0a0b0c0d0e0f101112"}, {"0x0a0b0c0d0e0f10111213"}, {"0x0a0b0c0d0e0f1011121314"}, {"0x0a0b0c0d0e0f101112131415"}, {"0x0a0b0c0d0e0f10111213141516"}, {"0x0a0b0c0d0e0f1011121314151617"}, {"0x0a0b0c0d0e0f101112131415161718"}}, expectedz: "1e93e961cdfb4bd26a5be48f23af4f1aa8c6bebe57a089d3250f8afb1e988bf8", expectedy: "24ed4791a70b28a6bad21c22d58f82a5ea5f9f9d2bcfc07428b494e9ae93de6e"},
{chunks: [][]string{{"0x0a"}, {"0x0a0b"}, {"0x0a0b0c"}, {"0x0a0b0c0d"}, {"0x0a0b0c0d0e"}, {"0x0a0b0c0d0e0f"}, {"0x0a0b0c0d0e0f10"}, {"0x0a0b0c0d0e0f1011"}, {"0x0a0b0c0d0e0f101112"}, {"0x0a0b0c0d0e0f10111213"}, {"0x0a0b0c0d0e0f1011121314"}, {"0x0a0b0c0d0e0f101112131415"}, {"0x0a0b0c0d0e0f10111213141516"}, {"0x0a0b0c0d0e0f1011121314151617"}, {"0x0a0b0c0d0e0f101112131415161718"}}, expectedz: "1a4025a3d74e70b511007dd55a2e252478c48054c6383285e8a176f33d99853b", expectedy: "12071ac2571c11220432a27b8be549392892e9baf4c654748ca206def3843940"},
// single chunk blob full
{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "61405cb0b114dfb4d611be84bedba0fcd2e55615e193e424f1cc7b1af0df3d31", expectedy: "58609bbca10e50489b630ecb5b9347378579ed784d6a10749fd505055d35c3c0"},
{chunks: [][]string{{repeat(123, nRowsData)}}, expectedz: "72714cc4a0ca75cee2d543b1f958e3d3dd59ac7df0d9d5617d8117b65295a5f2", expectedy: "4ebb690362bcbc42321309c210c99f2ebdb53b3fcf7cf3b17b78f6cfd1203ed3"},
// multiple chunks blob full
{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "22533c3ea99536b4b83a89835aa91e6f0d2fc3866c201e18d7ca4b3af92fad61", expectedy: "40d4b71492e1a06ee3c273ef9003c7cb05aed021208871e13fa33302fa0f4dcc"},
{chunks: [][]string{{repeat(123, 1111)}, {repeat(231, nRowsData-1111)}}, expectedz: "70eb5b4db503e59413238eef451871c5d12f2bb96c8b96ceca012f4ca0114727", expectedy: "568d0aaf280ec83f9c81ed2d80ecbdf199bd72dafb8a350007d37ea82997e455"},
// max number of chunks only last one non-empty not full blob
{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "0e6525c0dd261e8f62342b1139062bb23bc2b8b460163364598fb29e82a4eed5", expectedy: "1db984d6deb5e84bc67d0755aa2da8fe687233147603b4ecba94d0c8463c3836"},
{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData-1111)}}, expectedz: "03db68ae16ee88489d52db19e6111b25630c5f23ad7cd14530aacf0cd231d476", expectedy: "24527d0b0e93b3dec0060c7b128975a8088b3104d3a297dc807ab43862a77a1a"},
// max number of chunks only last one non-empty full blob
{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "3a638eac98f22f817b84e3d81ccaa3de080f83dc80a5823a3f19320ef3cb6fc8", expectedy: "73ab100278822144e2ed8c9d986e92f7a2662fd18a51bdf96ec55848578b227a"},
{chunks: [][]string{{}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {}, {repeat(132, nRowsData)}}, expectedz: "677670193f73db499cede572bcb55677f0d2f13d690f9a820bd00bf584c3c241", expectedy: "1d85677f172dbdf4ad3094a17deeb1df4d7d2b7f35ecea44aebffa757811a268"},
// max number of chunks but last is empty
{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "02ef442d99f450559647a7823f1be0e148c75481cc5c703c02a116e8ac531fa8", expectedy: "31743538cfc3ac43d1378a5c497ebc9462c20b4cb4470e0e7a9f7342ea948333"},
{chunks: [][]string{{repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {repeat(111, 100)}, {}}, expectedz: "22935042dfe7df771b02c1f5cababfe508869e8f6339dabe25a8a32e37728bb0", expectedy: "48ca66fb5a094401728c3a6a517ffbd72c4d4d9a8c907e2d2f1320812f4d856f"},
} {
chunks := []*encoding.Chunk{}
@@ -587,7 +587,7 @@ func TestCodecV1BatchChallengeWithStandardTestCases(t *testing.T) {
chunks = append(chunks, chunk)
}
b, z, err := constructBlobPayload(chunks)
b, _, z, err := constructBlobPayload(chunks)
assert.NoError(t, err)
actualZ := hex.EncodeToString(z[:])
assert.Equal(t, tc.expectedz, actualZ)
@@ -608,7 +608,7 @@ func TestCodecV1BatchBlobDataProof(t *testing.T) {
assert.NoError(t, err)
verifyData, err := batch.BlobDataProof()
assert.NoError(t, err)
assert.Equal(t, "06138a688f328d13cb9caf0e2046d65bbcf766eab00196fb05e43806c7b26b363d27683f7aab53cf071e2c8c8f3abfe750d206c048489450d120679cdc823f7db44a38af1f9a6c70cd3ccfbf71968f447aa566bbafb0bbc566fc9eeb42973484802635a1bbd8305d34a46693331bf607a30dad96431f70551dd950c1426131d73ccea6d050d38dea123aad90aa8c0b734c98e8e04bd8ea8f19b415f2d85156d8", hex.EncodeToString(verifyData))
assert.Equal(t, "0d8e67f882c61159aa99b04ec4f6f3d90cb95cbfba6efd56cefc55ca15b290ef423dc493f1dd7c9fbecdffa021ca4649b13e8d72231487034ec6b27e155ecfd7b44a38af1f9a6c70cd3ccfbf71968f447aa566bbafb0bbc566fc9eeb42973484802635a1bbd8305d34a46693331bf607b38542ec811c92d86ff6f3319de06ee60c42655278ccf874f3615f450de730895276828b73db03c553b0bc7e5474a5e0", hex.EncodeToString(verifyData))
trace3 := readBlockFromJSON(t, "../../../testdata/blockTrace_03.json")
chunk3 := &encoding.Chunk{Blocks: []*encoding.Block{trace3}}
@@ -617,7 +617,7 @@ func TestCodecV1BatchBlobDataProof(t *testing.T) {
assert.NoError(t, err)
verifyData, err = batch.BlobDataProof()
assert.NoError(t, err)
assert.Equal(t, "1e3f41f46941b3d30bbc482942026b09224636ed63a160738d7ae57a00c992946dc7e51a42a31f429bc1f321dcf020b9a661225259522dba186fcfe5dc012191b8aab265dc352e352807a298f7bb99d432c7cd543e63158cbdb8fbf99f3182a71af35ccbed2693c5e0bc5be38d565e86a0b3c76e33edb24eb07faeaa5d3f2b15a55df6ab99abf828b5803f5681dc634602eb7469ee0556563b2eccebf16ec822", hex.EncodeToString(verifyData))
assert.Equal(t, "32da228f4945de828954675f9396debb169bbf336ba93f849a8fc7fee1bc9e5821975f318babe50be728f9b52754d5ce2caa2ba82ba35b5888af1c5f28d23206b8aab265dc352e352807a298f7bb99d432c7cd543e63158cbdb8fbf99f3182a71af35ccbed2693c5e0bc5be38d565e868e0c6fe7bd39baa5ee6339cd334a18af7c680d24e825262499e83b31633b13a9ee89813fae8441630c82bc9dce3f1e07", hex.EncodeToString(verifyData))
trace4 := readBlockFromJSON(t, "../../../testdata/blockTrace_04.json")
chunk4 := &encoding.Chunk{Blocks: []*encoding.Block{trace4}}
@@ -626,7 +626,7 @@ func TestCodecV1BatchBlobDataProof(t *testing.T) {
assert.NoError(t, err)
verifyData, err = batch.BlobDataProof()
assert.NoError(t, err)
assert.Equal(t, "37c3ab6ad48e99fc0ce8e9de5f9b2c9be832699b293e4243b85d4e42bad0db7a24164e6ea8b7946ce5e40d2baa4f6aa0d030076f6074295288133c00e75dafa2afd4e1c55a17dbdf8390b5736158afe238d82f8b696669ba47015fcdfd4d1becd0ff7a47f8f379a4ac8d1741e2d676248f5ca4a9f0d9b7fa48f5f649dc84e928161fd99ad1bd9a9879b05d29c5f718bfb3b0a696a5f3ed50b5b8c6a9d530b3ee", hex.EncodeToString(verifyData))
assert.Equal(t, "09a37ab43d41bcae3000c090a341e4661a8dc705b3c93d01b9eda3a0b3f8d4a8088a01e54e3565d2e91ce6afbadf479330847d9106737875303ce17f17c48722afd4e1c55a17dbdf8390b5736158afe238d82f8b696669ba47015fcdfd4d1becd0ff7a47f8f379a4ac8d1741e2d67624aee03a0f7cdb7807bc7e0b9fb20bc299af2a35e38cda816708b40f2f18db491e14a0f5d9cfe2f4c12e4ca1a219484f17", hex.EncodeToString(verifyData))
trace5 := readBlockFromJSON(t, "../../../testdata/blockTrace_05.json")
chunk5 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
@@ -635,7 +635,7 @@ func TestCodecV1BatchBlobDataProof(t *testing.T) {
assert.NoError(t, err)
verifyData, err = batch.BlobDataProof()
assert.NoError(t, err)
assert.Equal(t, "1fa77f72d924ed6efdc399cf7a3de45fd3b50538d368d80d94840d30fdb606ec28bda8f1836f60a3879f4253c4f51b3e41a905449b60a83a594f9f2487e8df518f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea389598d958507378f8212199c51c059f8c419fd809dcc7de5750f76220c9c54cd57ad18cb3c38c127559a133df250f66b7", hex.EncodeToString(verifyData))
assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData))
trace6 := readBlockFromJSON(t, "../../../testdata/blockTrace_06.json")
chunk6 := &encoding.Chunk{Blocks: []*encoding.Block{trace6}}
@@ -644,7 +644,7 @@ func TestCodecV1BatchBlobDataProof(t *testing.T) {
assert.NoError(t, err)
verifyData, err = batch.BlobDataProof()
assert.NoError(t, err)
assert.Equal(t, "1fa77f72d924ed6efdc399cf7a3de45fd3b50538d368d80d94840d30fdb606ec28bda8f1836f60a3879f4253c4f51b3e41a905449b60a83a594f9f2487e8df518f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea389598d958507378f8212199c51c059f8c419fd809dcc7de5750f76220c9c54cd57ad18cb3c38c127559a133df250f66b7", hex.EncodeToString(verifyData))
assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData))
trace7 := readBlockFromJSON(t, "../../../testdata/blockTrace_07.json")
chunk7 := &encoding.Chunk{Blocks: []*encoding.Block{trace7}}
@@ -653,7 +653,7 @@ func TestCodecV1BatchBlobDataProof(t *testing.T) {
assert.NoError(t, err)
verifyData, err = batch.BlobDataProof()
assert.NoError(t, err)
assert.Equal(t, "1fa77f72d924ed6efdc399cf7a3de45fd3b50538d368d80d94840d30fdb606ec28bda8f1836f60a3879f4253c4f51b3e41a905449b60a83a594f9f2487e8df518f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea389598d958507378f8212199c51c059f8c419fd809dcc7de5750f76220c9c54cd57ad18cb3c38c127559a133df250f66b7", hex.EncodeToString(verifyData))
assert.Equal(t, "17c71700d949f82963d3bd6af3994ecc383a3d58007f2f27702758fefa34a925304817c2a9ec97b4cfdfc7a646f4bd5ac309e967465bb49059d397094e57cd088f26f349339c68b33ce856aa2c05b8f89e7c23db0c00817550679998efcbd8f2464f9e1ea6c3172b0b750603d1e4ea38979341a25ec6b613f9f32b23fc0e1a11342bc84d4af0705c666e7813de790d0e63b0a9bc56dc484590728aaaafa6b7a4", hex.EncodeToString(verifyData))
// 15 chunks
originalBatch = &encoding.Batch{Chunks: []*encoding.Chunk{chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2, chunk2}}
@@ -661,7 +661,7 @@ func TestCodecV1BatchBlobDataProof(t *testing.T) {
assert.NoError(t, err)
verifyData, err = batch.BlobDataProof()
assert.NoError(t, err)
assert.Equal(t, "0244c987922db21694e8eb0184c4a5e6f3785fb688224822f1f826874ed5aae2613ca15d051a539e3b239027f9bdbd03bd3c66c98afafb674e2a7441912cbe099743324c70e20042de6480f115b215fbba3472a8b994303a99576c1244aa4aec22fdfe6c74ec728aa28a9eb3812bc93291fbc65cfa558e4df12bcde442483d31072000c56f94fe012285bc5832eaee5fe1d47f1e8655539c4500f66207d8edc6", hex.EncodeToString(verifyData))
assert.Equal(t, "55dac3baa818133cfdce0f97ddbb950e341399756d7b49bc34107dd65ecd3a4b54d28f1479467d8b97fb99f5257d3e5d63a81cb2d60e3564fe6ec6066a311c119743324c70e20042de6480f115b215fbba3472a8b994303a99576c1244aa4aec22fdfe6c74ec728aa28a9eb3812bc932a0b603cc94be2007d4b3b17af06b4fb30caf0e574d5abcfc5654079e65154679afad75844396082a7200a4e82462aeed", hex.EncodeToString(verifyData))
chunk8 := &encoding.Chunk{Blocks: []*encoding.Block{trace2, trace3, trace4}}
chunk9 := &encoding.Chunk{Blocks: []*encoding.Block{trace5}}
@@ -670,7 +670,7 @@ func TestCodecV1BatchBlobDataProof(t *testing.T) {
assert.NoError(t, err)
verifyData, err = batch.BlobDataProof()
assert.NoError(t, err)
assert.Equal(t, "03523cd88a7227826e093305cbe4ce237e8df38e2157566fb3742cc39dbc9c4330b3863672052b3d6c6552d121b0b13f97659f49bbfb6d7fed6e4b7076e4a43383bee97f95fbf2d789a8e0fb365c26e141d6a31e43403b4a469d1723128f6d5de5c54e913e143feede32d0af9b6fd6fdae9cb71d402cfe8bc4d659f228c41f0b9d195c5074278a2346204cfaa336f5de2244a3d53e0effa2f49c81924720e84e", hex.EncodeToString(verifyData))
assert.Equal(t, "0b14dce4abfdeb3a69a341f7db6b1e16162c20826e6d964a829e20f671030cab35b73ddb4a78fc4a8540f1d8259512c46e606a701e7ef7742e38cc4562ef53b983bee97f95fbf2d789a8e0fb365c26e141d6a31e43403b4a469d1723128f6d5de5c54e913e143feede32d0af9b6fd6fda28e5610ca6b185d6ac30b53bd83d6366fccb1956daafa90ff6b504a966b119ebb45cb3f7085b7c1d622ee1ad27fcff9", hex.EncodeToString(verifyData))
}
func TestCodecV1BatchSkipBitmap(t *testing.T) {
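Every expected z and BlobDataProof value changes because the challenge preimage now commits to the blob versioned hash. The proof hex strings above decode as z (32 bytes) followed by y (32 bytes), then KZG material: each new expected string begins with the matching expectedz, and for the single-chunk cases the next 32 bytes match the expectedy values from the standard test cases above. A decoding sketch:

    // splitBlobDataProof splits the verifyData bytes checked above; the first
    // 64 bytes (z || y) are also what ScrollChain folds into its public-input
    // hash via _blobDataProof[0:64].
    func splitBlobDataProof(proof []byte) (z, y, rest []byte) {
    	return proof[0:32], proof[32:64], proof[64:] // rest: KZG commitment and opening proof (assumption)
    }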

View File

@@ -0,0 +1,91 @@
package message
import (
"crypto/ecdsa"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/rlp"
)
// AuthMsg is the first message exchanged from the Prover to the Sequencer.
// It effectively acts as a registration, and makes the Prover identification
// known to the Sequencer.
type AuthMsg struct {
// Message fields
Identity *Identity `json:"message"`
// Prover signature
Signature string `json:"signature"`
}
// Identity contains all the fields to be signed by the prover.
type Identity struct {
// ProverName the prover name
ProverName string `json:"prover_name"`
// ProverVersion the prover version
ProverVersion string `json:"prover_version"`
// Challenge unique challenge generated by manager
Challenge string `json:"challenge"`
// HardForkName the hard fork name
HardForkName string `json:"hard_fork_name"`
}
// SignWithKey signs the auth message with the given private key and stores the hex-encoded signature
func (a *AuthMsg) SignWithKey(priv *ecdsa.PrivateKey) error {
// Hash identity content
hash, err := a.Identity.Hash()
if err != nil {
return err
}
// Sign register message
sig, err := crypto.Sign(hash, priv)
if err != nil {
return err
}
a.Signature = hexutil.Encode(sig)
return nil
}
// Verify verifies the message of auth.
func (a *AuthMsg) Verify() (bool, error) {
hash, err := a.Identity.Hash()
if err != nil {
return false, err
}
sig := common.FromHex(a.Signature)
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return false, err
}
return crypto.VerifySignature(crypto.CompressPubkey(pk), hash, sig[:len(sig)-1]), nil
}
// PublicKey recovers the public key from the signature
func (a *AuthMsg) PublicKey() (string, error) {
hash, err := a.Identity.Hash()
if err != nil {
return "", err
}
sig := common.FromHex(a.Signature)
// recover public key
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return "", err
}
return common.Bytes2Hex(crypto.CompressPubkey(pk)), nil
}
// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func (i *Identity) Hash() ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}
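A sign/verify round trip with the new type, as a minimal sketch (the import path for this package and the Identity field values are assumptions):

    package main

    import (
    	"fmt"
    	"log"

    	"github.com/scroll-tech/go-ethereum/crypto"

    	"scroll-tech/common/types/message" // assumed import path for the package above
    )

    func main() {
    	priv, err := crypto.GenerateKey()
    	if err != nil {
    		log.Fatal(err)
    	}
    	msg := &message.AuthMsg{Identity: &message.Identity{
    		ProverName:    "prover-0", // hypothetical values
    		ProverVersion: "v4.3.92",
    		Challenge:     "example-challenge",
    		HardForkName:  "bernoulli",
    	}}
    	if err := msg.SignWithKey(priv); err != nil {
    		log.Fatal(err)
    	}
    	ok, err := msg.Verify()
    	fmt.Println("verified:", ok, err)
    	pk, _ := msg.PublicKey()
    	fmt.Println("recovered public key:", pk)
    }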

View File

@@ -0,0 +1,89 @@
package message
import (
"crypto/ecdsa"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/common/hexutil"
"github.com/scroll-tech/go-ethereum/crypto"
"github.com/scroll-tech/go-ethereum/rlp"
)
// LegacyAuthMsg is the old auth message exchanged from the Prover to the Sequencer.
// It effectively acts as a registration, and makes the Prover identification
// known to the Sequencer.
type LegacyAuthMsg struct {
// Message fields
Identity *LegacyIdentity `json:"message"`
// Prover signature
Signature string `json:"signature"`
}
// LegacyIdentity contains all the fields to be signed by the prover.
type LegacyIdentity struct {
// ProverName the prover name
ProverName string `json:"prover_name"`
// ProverVersion the prover version
ProverVersion string `json:"prover_version"`
// Challenge unique challenge generated by manager
Challenge string `json:"challenge"`
}
// SignWithKey signs the auth message with the given private key and stores the hex-encoded signature
func (a *LegacyAuthMsg) SignWithKey(priv *ecdsa.PrivateKey) error {
// Hash identity content
hash, err := a.Identity.Hash()
if err != nil {
return err
}
// Sign register message
sig, err := crypto.Sign(hash, priv)
if err != nil {
return err
}
a.Signature = hexutil.Encode(sig)
return nil
}
// Verify verifies the message of auth.
func (a *LegacyAuthMsg) Verify() (bool, error) {
hash, err := a.Identity.Hash()
if err != nil {
return false, err
}
sig := common.FromHex(a.Signature)
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return false, err
}
return crypto.VerifySignature(crypto.CompressPubkey(pk), hash, sig[:len(sig)-1]), nil
}
// PublicKey recovers the public key from the signature
func (a *LegacyAuthMsg) PublicKey() (string, error) {
hash, err := a.Identity.Hash()
if err != nil {
return "", err
}
sig := common.FromHex(a.Signature)
// recover public key
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return "", err
}
return common.Bytes2Hex(crypto.CompressPubkey(pk)), nil
}
// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func (i *LegacyIdentity) Hash() ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}

View File

@@ -58,28 +58,6 @@ const (
ProofTypeBatch
)
// AuthMsg is the first message exchanged from the Prover to the Sequencer.
// It effectively acts as a registration, and makes the Prover identification
// known to the Sequencer.
type AuthMsg struct {
// Message fields
Identity *Identity `json:"message"`
// Prover signature
Signature string `json:"signature"`
}
// Identity contains all the fields to be signed by the prover.
type Identity struct {
// ProverName the prover name
ProverName string `json:"prover_name"`
// ProverVersion the prover version
ProverVersion string `json:"prover_version"`
// Challenge unique challenge generated by manager
Challenge string `json:"challenge"`
// HardForkName the hard fork name
HardForkName string `json:"hard_fork_name"`
}
// GenerateToken generates token
func GenerateToken() (string, error) {
b := make([]byte, 16)
@@ -89,65 +67,6 @@ func GenerateToken() (string, error) {
return hex.EncodeToString(b), nil
}
// SignWithKey auth message with private key and set public key in auth message's Identity
func (a *AuthMsg) SignWithKey(priv *ecdsa.PrivateKey) error {
// Hash identity content
hash, err := a.Identity.Hash()
if err != nil {
return err
}
// Sign register message
sig, err := crypto.Sign(hash, priv)
if err != nil {
return err
}
a.Signature = hexutil.Encode(sig)
return nil
}
// Verify verifies the message of auth.
func (a *AuthMsg) Verify() (bool, error) {
hash, err := a.Identity.Hash()
if err != nil {
return false, err
}
sig := common.FromHex(a.Signature)
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return false, err
}
return crypto.VerifySignature(crypto.CompressPubkey(pk), hash, sig[:len(sig)-1]), nil
}
// PublicKey return public key from signature
func (a *AuthMsg) PublicKey() (string, error) {
hash, err := a.Identity.Hash()
if err != nil {
return "", err
}
sig := common.FromHex(a.Signature)
// recover public key
pk, err := crypto.SigToPub(hash, sig)
if err != nil {
return "", err
}
return common.Bytes2Hex(crypto.CompressPubkey(pk)), nil
}
// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func (i *Identity) Hash() ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}
// ProofMsg is the data structure sent to the coordinator.
type ProofMsg struct {
*ProofDetail `json:"zkProof"`

View File

@@ -5,7 +5,7 @@ import (
"runtime/debug"
)
var tag = "v4.3.88"
var tag = "v4.3.92"
var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {

View File

@@ -5,7 +5,7 @@
"license": "MIT",
"scripts": {
"test:hardhat": "npx hardhat test",
"test:forge": "forge test -vvv",
"test:forge": "forge test -vvv --evm-version cancun",
"test": "yarn test:hardhat && yarn test:forge",
"solhint": "./node_modules/.bin/solhint -f table 'src/**/*.sol'",
"lint:sol": "./node_modules/.bin/prettier --write 'src/**/*.sol'",

View File

@@ -0,0 +1,23 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {Ecc} from "../../src/misc/ecc.sol";
contract DeployEcc is Script {
function run() external {
uint256 L2_DEPLOYER_PRIVATE_KEY = vm.envUint("L2_DEPLOYER_PRIVATE_KEY");
vm.startBroadcast(L2_DEPLOYER_PRIVATE_KEY);
Ecc ecc = new Ecc();
address L2_ECC_ADDR = address(ecc);
vm.stopBroadcast();
logAddress("L2_ECC_ADDR", L2_ECC_ADDR);
}
function logAddress(string memory name, address addr) internal view {
console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
}
}

View File

@@ -0,0 +1,23 @@
// SPDX-License-Identifier: UNLICENSED
pragma solidity =0.8.24;
import {Script} from "forge-std/Script.sol";
import {console} from "forge-std/console.sol";
import {Hash} from "../../src/misc/hash.sol";
contract DeployHash is Script {
function run() external {
uint256 L2_DEPLOYER_PRIVATE_KEY = vm.envUint("L2_DEPLOYER_PRIVATE_KEY");
vm.startBroadcast(L2_DEPLOYER_PRIVATE_KEY);
Hash hash = new Hash();
address L2_HASH_ADDR = address(hash);
vm.stopBroadcast();
logAddress("L2_HASH_ADDR", L2_HASH_ADDR);
}
function logAddress(string memory name, address addr) internal view {
console.log(string(abi.encodePacked(name, "=", vm.toString(address(addr)))));
}
}

View File

@@ -92,10 +92,12 @@ contract DeployL1BridgeContracts is Script {
}
function deployMultipleVersionRollupVerifier() internal {
uint256[] memory _versions = new uint256[](1);
address[] memory _verifiers = new address[](1);
uint256[] memory _versions = new uint256[](2);
address[] memory _verifiers = new address[](2);
_versions[0] = 0;
_verifiers[0] = address(zkEvmVerifierV1);
_versions[1] = 1;
_verifiers[1] = address(zkEvmVerifierV1);
rollupVerifier = new MultipleVersionRollupVerifier(L1_SCROLL_CHAIN_PROXY_ADDR, _versions, _verifiers);
logAddress("L1_MULTIPLE_VERSION_ROLLUP_VERIFIER_ADDR", address(rollupVerifier));

View File

@@ -0,0 +1,34 @@
/* eslint-disable node/no-missing-import */
import * as dotenv from "dotenv";
import { ethers } from "hardhat";
dotenv.config();
async function main() {
const [deployer] = await ethers.getSigners();
const l1ScrollMessengerAddress = process.env.L1_SCROLL_MESSENGER_PROXY_ADDR!;
const l2EccContractAddress = process.env.L2_ECC_ADDR!;
const payload = process.env.SKIPPED_TX_PAYLOAD!; // TODO: calc the payload, parse as bytes
const L1ScrollMessenger = await ethers.getContractAt("L1ScrollMessenger", l1ScrollMessengerAddress, deployer);
const tx = await L1ScrollMessenger.sendMessage(
l2EccContractAddress, // address _to
0, // uint256 _value
payload, // bytes memory _message
100000000 // uint256 _gasLimit
);
console.log(`calling ${l2EccContractAddress} with payload from l1, hash:`, tx.hash);
const receipt = await tx.wait();
console.log(`✅ Done, gas used: ${receipt.gasUsed}`);
}
// We recommend this pattern to be able to use async/await everywhere
// and properly handle errors.
main().catch((error) => {
console.error(error);
process.exitCode = 1;
});

View File

@@ -477,7 +477,8 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
_postStateRoot,
_withdrawRoot,
_dataHash,
_blobDataProof[0:64]
_blobDataProof[0:64],
_blobVersionedHash
)
);
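Only the tail of the public-input encoding is visible in this hunk: the batch data hash and the first 64 bytes of the blob data proof (z || y) were already part of the preimage, and the blob versioned hash is now appended. An off-chain sketch of the visible tail, using go-ethereum's common and crypto packages, with prefix standing in for the leading fields the hunk cuts off (an assumption, not the contract's full layout):

    // piHashTail mirrors the visible tail of the keccak preimage above.
    func piHashTail(prefix []byte, postStateRoot, withdrawRoot, dataHash common.Hash, blobDataProof [64]byte, blobVersionedHash common.Hash) common.Hash {
    	return crypto.Keccak256Hash(prefix, postStateRoot[:], withdrawRoot[:], dataHash[:], blobDataProof[:], blobVersionedHash[:])
    }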

contracts/src/misc/ecc.sol (new file, 127 lines)
View File

@@ -0,0 +1,127 @@
// SPDX-License-Identifier: GPL-3.0
pragma solidity =0.8.24;
contract Ecc {
/* ECC Functions */
// https://etherscan.io/address/0x41bf00f080ed41fa86201eac56b8afb170d9e36d#code
function ecAdd(uint256[2] memory p0, uint256[2] memory p1) public view returns (uint256[2] memory retP) {
uint256[4] memory i = [p0[0], p0[1], p1[0], p1[1]];
assembly {
// call ecadd precompile
// inputs are: x1, y1, x2, y2
if iszero(staticcall(not(0), 0x06, i, 0x80, retP, 0x40)) {
revert(0, 0)
}
}
}
// https://etherscan.io/address/0x41bf00f080ed41fa86201eac56b8afb170d9e36d#code
function ecMul(uint256[2] memory p, uint256 s) public view returns (uint256[2] memory retP) {
// With a public key (x, y), this computes p = scalar * (x, y).
uint256[3] memory i = [p[0], p[1], s];
assembly {
// call ecmul precompile
// inputs are: x, y, scalar
if iszero(staticcall(not(0), 0x07, i, 0x60, retP, 0x40)) {
revert(0, 0)
}
}
}
// scroll-tech/scroll/contracts/src/libraries/verifier/RollupVerifier.sol
struct G1Point {
uint256 x;
uint256 y;
}
struct G2Point {
uint256[2] x;
uint256[2] y;
}
function ecPairing(G1Point[] memory p1, G2Point[] memory p2) internal view returns (bool) {
uint256 length = p1.length * 6;
uint256[] memory input = new uint256[](length);
uint256[1] memory result;
bool ret;
require(p1.length == p2.length);
for (uint256 i = 0; i < p1.length; i++) {
input[0 + i * 6] = p1[i].x;
input[1 + i * 6] = p1[i].y;
input[2 + i * 6] = p2[i].x[0];
input[3 + i * 6] = p2[i].x[1];
input[4 + i * 6] = p2[i].y[0];
input[5 + i * 6] = p2[i].y[1];
}
assembly {
ret := staticcall(gas(), 8, add(input, 0x20), mul(length, 0x20), result, 0x20)
}
require(ret);
return result[0] != 0;
}
/* Bench */
function ecAdds(uint256 n) public {
uint256[2] memory p0;
p0[0] = 1;
p0[1] = 2;
uint256[2] memory p1;
p1[0] = 1;
p1[1] = 2;
for (uint256 i = 0; i < n; i++) {
ecAdd(p0, p1);
}
}
function ecMuls(uint256 n) public {
uint256[2] memory p0;
p0[0] = 1;
p0[1] = 2;
for (uint256 i = 0; i < n; i++) {
ecMul(p0, 3);
}
}
function ecPairings(uint256 n) public {
G1Point[] memory g1_points = new G1Point[](2);
G2Point[] memory g2_points = new G2Point[](2);
g1_points[0].x = 0x0000000000000000000000000000000000000000000000000000000000000001;
g1_points[0].y = 0x0000000000000000000000000000000000000000000000000000000000000002;
g2_points[0].x[1] = 0x1800deef121f1e76426a00665e5c4479674322d4f75edadd46debd5cd992f6ed;
g2_points[0].x[0] = 0x198e9393920d483a7260bfb731fb5d25f1aa493335a9e71297e485b7aef312c2;
g2_points[0].y[1] = 0x12c85ea5db8c6deb4aab71808dcb408fe3d1e7690c43d37b4ce6cc0166fa7daa;
g2_points[0].y[0] = 0x090689d0585ff075ec9e99ad690c3395bc4b313370b38ef355acdadcd122975b;
g1_points[1].x = 0x1aa125a22bd902874034e67868aed40267e5575d5919677987e3bc6dd42a32fe;
g1_points[1].y = 0x1bacc186725464068956d9a191455c2d6f6db282d83645c610510d8d4efbaee0;
g2_points[1].x[1] = 0x1b7734c80605f71f1e2de61e998ce5854ff2abebb76537c3d67e50d71422a852;
g2_points[1].x[0] = 0x10d5a1e34b2388a5ebe266033a5e0e63c89084203784da0c6bd9b052a78a2cac;
g2_points[1].y[1] = 0x275739c5c2cdbc72e37c689e2ab441ea76c1d284b9c46ae8f5c42ead937819e1;
g2_points[1].y[0] = 0x018de34c5b7c3d3d75428bbe050f1449ea3d9961d563291f307a1874f7332e65;
for (uint256 i = 0; i < n; i++) {
ecPairing(g1_points, g2_points);
// bool checked = false;
// checked = ecPairing(g1_points, g2_points);
// require(checked);
}
}
// https://github.com/OpenZeppelin/openzeppelin-contracts/blob/8a0b7bed82d6b8053872c3fd40703efd58f5699d/test/utils/cryptography/ECDSA.test.js#L230
function ecRecovers(uint256 n) public {
bytes32 hash = 0xb94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9;
bytes32 r = 0xe742ff452d41413616a5bf43fe15dd88294e983d3d36206c2712f39083d638bd;
uint8 v = 0x1b;
bytes32 s = 0xe0a0fc89be718fbc1033e1d30d78be1c68081562ed2e97af876f286f3453231d;
for (uint256 i = 0; i < n; i++) {
ecrecover(hash, v, r, s);
}
}
}

View File

@@ -0,0 +1,34 @@
// SPDX-License-Identifier: GPL-3.0
pragma solidity =0.8.24;
contract Hash {
function sha256(bytes memory input) public view returns (bytes memory out) {
bool ok;
// call the SHA-256 precompile at address 0x02; assign to the named return
// value rather than a shadowing local so the digest is actually returned
(ok, out) = address(2).staticcall(input);
require(ok);
}
function sha256Yul(bytes memory input) public view returns (bytes32 out) {
assembly {
// hash the payload of `input`, skipping its 32-byte length prefix, and
// load the 32-byte digest the precompile writes to scratch space
if iszero(staticcall(gas(), 2, add(input, 0x20), mload(input), 0x00, 0x20)) {
revert(0, 0)
}
out := mload(0x00)
}
}
function sha256s(bytes memory input, uint256 n) public {
for (uint256 i = 0; i < n; i++) {
sha256(input);
}
}
function keccak256s(uint256 n) public {
bytes32[] memory output = new bytes32[](n);
for (uint256 i = 0; i < n; i++) {
bytes memory input = abi.encode(i);
output[i] = keccak256(input);
}
}
}
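Both wrappers exercise the SHA-256 precompile at address 0x02, which computes plain SHA-256 over the call data, so contract output can be cross-checked off-chain against a reference implementation:

    package main

    import (
    	"crypto/sha256"
    	"fmt"
    )

    func main() {
    	// address(0x02) returns exactly SHA-256 of the input bytes
    	sum := sha256.Sum256([]byte("abc"))
    	fmt.Printf("%x\n", sum) // ba7816bf8f01cfea414140de5dae2223b00361a396177a9cb410ff61f20015ad
    }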

View File

@@ -83,6 +83,8 @@ contract L2USDCGatewayTest is L2GatewayTestBase {
}
function testTransferUSDCRoles(address owner) external {
hevm.assume(owner != address(0));
// a call from a non-whitelisted caller should revert
hevm.expectRevert("only circle caller");
gateway.transferUSDCRoles(owner);

View File

@@ -53,18 +53,31 @@ func (a *AuthController) PayloadFunc(data interface{}) jwt.MapClaims {
return jwt.MapClaims{}
}
// recover the public key
authMsg := message.AuthMsg{
Identity: &message.Identity{
Challenge: v.Message.Challenge,
ProverName: v.Message.ProverName,
ProverVersion: v.Message.ProverVersion,
HardForkName: v.Message.HardForkName,
},
Signature: v.Signature,
var publicKey string
var err error
if v.Message.HardForkName != "" {
authMsg := message.AuthMsg{
Identity: &message.Identity{
Challenge: v.Message.Challenge,
ProverName: v.Message.ProverName,
ProverVersion: v.Message.ProverVersion,
HardForkName: v.Message.HardForkName,
},
Signature: v.Signature,
}
publicKey, err = authMsg.PublicKey()
} else {
authMsg := message.LegacyAuthMsg{
Identity: &message.LegacyIdentity{
Challenge: v.Message.Challenge,
ProverName: v.Message.ProverName,
ProverVersion: v.Message.ProverVersion,
},
Signature: v.Signature,
}
publicKey, err = authMsg.PublicKey()
}
publicKey, err := authMsg.PublicKey()
if err != nil {
return jwt.MapClaims{}
}
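The fallback keys off an empty hard_fork_name: a prover that signed the four-field Identity must be verified against that RLP layout, while an older prover signed the three-field LegacyIdentity, and mixing the two layouts would recover the wrong key. The branching condenses to a helper like this sketch (recoverPublicKey is a hypothetical name, not part of the codebase):

    func recoverPublicKey(challenge, proverName, proverVersion, hardForkName, signature string) (string, error) {
    	if hardForkName != "" {
    		msg := message.AuthMsg{
    			Identity: &message.Identity{
    				Challenge:     challenge,
    				ProverName:    proverName,
    				ProverVersion: proverVersion,
    				HardForkName:  hardForkName,
    			},
    			Signature: signature,
    		}
    		return msg.PublicKey()
    	}
    	msg := message.LegacyAuthMsg{
    		Identity: &message.LegacyIdentity{
    			Challenge:     challenge,
    			ProverName:    proverName,
    			ProverVersion: proverVersion,
    		},
    		Signature: signature,
    	}
    	return msg.PublicKey()
    }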

View File

@@ -24,6 +24,7 @@ type Batch struct {
// batch
Index uint64 `json:"index" gorm:"column:index"`
Hash string `json:"hash" gorm:"column:hash"`
DataHash string `json:"data_hash" gorm:"column:data_hash"`
StartChunkIndex uint64 `json:"start_chunk_index" gorm:"column:start_chunk_index"`
StartChunkHash string `json:"start_chunk_hash" gorm:"column:start_chunk_hash"`
EndChunkIndex uint64 `json:"end_chunk_index" gorm:"column:end_chunk_index"`
@@ -54,6 +55,10 @@ type Batch struct {
OracleStatus int16 `json:"oracle_status" gorm:"column:oracle_status;default:1"`
OracleTxHash string `json:"oracle_tx_hash" gorm:"column:oracle_tx_hash;default:NULL"`
// blob
BlobDataProof []byte `json:"blob_data_proof" gorm:"column:blob_data_proof"`
BlobSize uint64 `json:"blob_size" gorm:"column:blob_size"`
// metadata
CreatedAt time.Time `json:"created_at" gorm:"column:created_at"`
UpdatedAt time.Time `json:"updated_at" gorm:"column:updated_at"`
@@ -248,6 +253,7 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...
newBatch := Batch{
Index: batch.Index,
Hash: daBatch.Hash().Hex(),
DataHash: daBatch.DataHash.Hex(),
StartChunkHash: startDAChunkHash.Hex(),
StartChunkIndex: startChunkIndex,
EndChunkHash: endDAChunkHash.Hex(),
@@ -262,6 +268,8 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...
ActiveAttempts: 0,
RollupStatus: int16(types.RollupPending),
OracleStatus: int16(types.GasOraclePending),
BlobDataProof: nil, // mock value; this code path is only exercised in unit tests
BlobSize: 0, // mock value; this code path is only exercised in unit tests
}
db := o.db

View File

@@ -48,6 +48,10 @@ type Chunk struct {
// batch
BatchHash string `json:"batch_hash" gorm:"column:batch_hash;default:NULL"`
// blob
CrcMax uint64 `json:"crc_max" gorm:"column:crc_max"`
BlobSize uint64 `json:"blob_size" gorm:"column:blob_size"`
// metadata
TotalL2TxGas uint64 `json:"total_l2_tx_gas" gorm:"column:total_l2_tx_gas"`
TotalL2TxNum uint64 `json:"total_l2_tx_num" gorm:"column:total_l2_tx_num"`
@@ -300,6 +304,8 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *encoding.Chunk, dbTX ...
ProvingStatus: int16(types.ProvingTaskUnassigned),
TotalAttempts: 0,
ActiveAttempts: 0,
CrcMax: 0, // mock value; this code path is only exercised in unit tests
BlobSize: 0, // mock value; this code path is only exercised in unit tests
}
db := o.db

View File

@@ -77,18 +77,31 @@ func (r *mockProver) challenge(t *testing.T) string {
}
func (r *mockProver) login(t *testing.T, challengeString string, forkName string) string {
authMsg := message.AuthMsg{
Identity: &message.Identity{
Challenge: challengeString,
ProverName: r.proverName,
ProverVersion: r.proverVersion,
HardForkName: forkName,
},
var body string
if forkName != "" {
authMsg := message.AuthMsg{
Identity: &message.Identity{
Challenge: challengeString,
ProverName: r.proverName,
ProverVersion: r.proverVersion,
HardForkName: forkName,
},
}
assert.NoError(t, authMsg.SignWithKey(r.privKey))
body = fmt.Sprintf("{\"message\":{\"challenge\":\"%s\",\"prover_name\":\"%s\", \"prover_version\":\"%s\", \"hard_fork_name\":\"%s\"},\"signature\":\"%s\"}",
authMsg.Identity.Challenge, authMsg.Identity.ProverName, authMsg.Identity.ProverVersion, authMsg.Identity.HardForkName, authMsg.Signature)
} else {
authMsg := message.LegacyAuthMsg{
Identity: &message.LegacyIdentity{
Challenge: challengeString,
ProverName: r.proverName,
ProverVersion: r.proverVersion,
},
}
assert.NoError(t, authMsg.SignWithKey(r.privKey))
body = fmt.Sprintf("{\"message\":{\"challenge\":\"%s\",\"prover_name\":\"%s\", \"prover_version\":\"%s\"},\"signature\":\"%s\"}",
authMsg.Identity.Challenge, authMsg.Identity.ProverName, authMsg.Identity.ProverVersion, authMsg.Signature)
}
var result ctypes.Response
client := resty.New()
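The hand-built fmt.Sprintf bodies above are easy to desynchronize from the wire format. A hedged alternative (loginBody is a hypothetical local type, not part of the coordinator API) marshals a struct and lets omitempty drop hard_fork_name for legacy provers, collapsing both branches into one:

// loginBody mirrors the JSON bodies built above.
type loginBody struct {
	Message struct {
		Challenge     string `json:"challenge"`
		ProverName    string `json:"prover_name"`
		ProverVersion string `json:"prover_version"`
		HardForkName  string `json:"hard_fork_name,omitempty"`
	} `json:"message"`
	Signature string `json:"signature"`
}

// buf, err := json.Marshal(lb); then POST buf with the resty client as before.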

View File

@@ -59,20 +59,20 @@ func testResetDB(t *testing.T) {
cur, err := Current(pgDB)
assert.NoError(t, err)
// total number of tables.
assert.Equal(t, int64(17), cur)
}
func testMigrate(t *testing.T) {
assert.NoError(t, Migrate(pgDB))
cur, err := Current(pgDB)
assert.NoError(t, err)
assert.Equal(t, int64(17), cur)
}
func testRollback(t *testing.T) {
version, err := Current(pgDB)
assert.NoError(t, err)
assert.Equal(t, int64(17), version)
assert.NoError(t, Rollback(pgDB, nil))
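Despite the "total number of tables" comment, Current reports the goose migration version, so adding the migration file below bumps the expected value from 16 to 17. A sketch, assuming these helpers wrap pressly/goose:

// Sketch: the asserted number is a migration version, not a table count;
// every new .sql migration increments it by one.
v, err := goose.GetDBVersion(sqlDB) // sqlDB is a *sql.DB placeholder
if err != nil {
	log.Fatalf("failed to read migration version: %v", err)
}
fmt.Println("current migration version:", v) // 17 after the blob-metadata migration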

View File

@@ -0,0 +1,27 @@
-- +goose Up
-- +goose StatementBegin
ALTER TABLE chunk
ADD COLUMN crc_max INTEGER DEFAULT 0,
ADD COLUMN blob_size INTEGER DEFAULT 0;
ALTER TABLE batch
ADD COLUMN data_hash VARCHAR DEFAULT '',
ADD COLUMN blob_data_proof BYTEA DEFAULT NULL,
ADD COLUMN blob_size INTEGER DEFAULT 0;
-- +goose StatementEnd
-- +goose Down
-- +goose StatementBegin
ALTER TABLE IF EXISTS batch
DROP COLUMN data_hash,
DROP COLUMN blob_data_proof,
DROP COLUMN blob_size;
ALTER TABLE IF EXISTS chunk
DROP COLUMN crc_max,
DROP COLUMN blob_size;
-- +goose StatementEnd
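A quick post-goose-up sanity check, sketched with database/sql (table and column names taken from the migration above; sqlDB is a *sql.DB placeholder):

// Sketch: confirm the three new batch columns landed.
var n int
err := sqlDB.QueryRow(`
	SELECT COUNT(*) FROM information_schema.columns
	WHERE table_name = 'batch'
	  AND column_name IN ('data_hash', 'blob_data_proof', 'blob_size')`).Scan(&n)
if err != nil {
	log.Fatalf("column check failed: %v", err)
}
if n != 3 {
	log.Fatalf("expected 3 new batch columns, found %d", n)
}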

View File

@@ -76,7 +76,7 @@ func action(ctx *cli.Context) error {
}
})
log.Info("Start event-watcher successfully")
log.Info("Start event-watcher successfully", "version", version.Version)
// Catch CTRL-C to ensure a graceful shutdown.
interrupt := make(chan os.Signal, 1)

View File

@@ -109,7 +109,7 @@ func action(ctx *cli.Context) error {
go utils.Loop(subCtx, 2*time.Second, l2relayer.ProcessGasPriceOracle)
// Finish start all message relayer functions
log.Info("Start gas-oracle successfully")
log.Info("Start gas-oracle successfully", "version", version.Version)
// Catch CTRL-C to ensure a graceful shutdown.
interrupt := make(chan os.Signal, 1)

View File

@@ -115,7 +115,7 @@ func action(ctx *cli.Context) error {
go utils.Loop(subCtx, 15*time.Second, l2relayer.ProcessCommittedBatches)
// Finish start all rollup relayer functions.
log.Info("Start rollup-relayer successfully")
log.Info("Start rollup-relayer successfully", "version", version.Version)
// Catch CTRL-C to ensure a graceful shutdown.
interrupt := make(chan os.Signal, 1)
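All three services now log the same version string at startup. One common way to populate it, sketched under the assumption that common/version exposes a plain string variable (the exact package layout is an assumption):

// common/version/version.go (hypothetical layout)
package version

// Version defaults to "dev" and is stamped at build time, e.g.:
//   go build -ldflags "-X scroll-tech/common/version.Version=$(git describe --tags)"
var Version = "dev"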

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -0,0 +1,103 @@
package main
import (
"context"
"encoding/hex"
"os"
"strconv"
"github.com/scroll-tech/go-ethereum/common"
"github.com/scroll-tech/go-ethereum/log"
"scroll-tech/common/database"
"scroll-tech/common/types/encoding"
"scroll-tech/common/types/encoding/codecv1"
"scroll-tech/rollup/internal/orm"
)
func main() {
glogger := log.NewGlogHandler(log.StreamHandler(os.Stderr, log.LogfmtFormat()))
glogger.Verbosity(log.LvlInfo)
log.Root().SetHandler(glogger)
if len(os.Args) < 2 {
log.Crit("no batch index provided")
return
}
batchIndexStr := os.Args[1]
batchIndexInt, err := strconv.Atoi(batchIndexStr)
if err != nil || batchIndexInt <= 0 {
log.Crit("invalid batch index", "indexStr", batchIndexStr, "err", err)
return
}
batchIndex := uint64(batchIndexInt)
db, err := database.InitDB(&database.Config{
DriverName: "postgres",
DSN: os.Getenv("DB_DSN"),
MaxOpenNum: 200,
MaxIdleNum: 20,
})
if err != nil {
log.Crit("failed to init db", "err", err)
}
defer func() {
if deferErr := database.CloseDB(db); deferErr != nil {
log.Error("failed to close db", "err", err)
}
}()
l2BlockOrm := orm.NewL2Block(db)
chunkOrm := orm.NewChunk(db)
batchOrm := orm.NewBatch(db)
dbBatch, err := batchOrm.GetBatchByIndex(context.Background(), batchIndex)
if err != nil {
log.Crit("failed to get batch", "index", batchIndex, "err", err)
return
}
dbParentBatch, err := batchOrm.GetBatchByIndex(context.Background(), batchIndex-1)
if err != nil {
log.Crit("failed to get batch", "index", batchIndex-1, "err", err)
return
}
dbChunks, err := chunkOrm.GetChunksInRange(context.Background(), dbBatch.StartChunkIndex, dbBatch.EndChunkIndex)
if err != nil {
log.Crit("failed to fetch chunks", "err", err)
return
}
chunks := make([]*encoding.Chunk, len(dbChunks))
for i, c := range dbChunks {
blocks, err := l2BlockOrm.GetL2BlocksInRange(context.Background(), c.StartBlockNumber, c.EndBlockNumber)
if err != nil {
log.Crit("failed to fetch blocks", "err", err)
return
}
chunks[i] = &encoding.Chunk{Blocks: blocks}
}
batch := &encoding.Batch{
Index: dbBatch.Index,
TotalL1MessagePoppedBefore: dbChunks[0].TotalL1MessagesPoppedBefore,
ParentBatchHash: common.HexToHash(dbParentBatch.Hash),
Chunks: chunks,
}
daBatch, err := codecv1.NewDABatch(batch)
if err != nil {
log.Crit("failed to create DA batch", "err", err)
return
}
blobDataProof, err := daBatch.BlobDataProof()
if err != nil {
log.Crit("failed to get blob data proof", "err", err)
return
}
log.Info("batchMeta", "batchHash", daBatch.Hash().Hex(), "batchDataHash", daBatch.DataHash.Hex(), "blobDataProof", hex.EncodeToString(blobDataProof), "blobData", hex.EncodeToString(daBatch.Blob()[:]))
}
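The dumped blobDataProof can be checked offline. A sketch using go-ethereum's kzg4844 wrappers, assuming (the layout is an assumption here) the codecv1 proof is challenge z (32 bytes) || evaluation y (32) || commitment (48) || proof (48), 160 bytes in total:

import (
	"fmt"

	"github.com/scroll-tech/go-ethereum/crypto/kzg4844"
)

// verifyBlobDataProof checks a dumped proof against its embedded commitment.
// Assumed layout: z (32) || y (32) || commitment (48) || proof (48).
func verifyBlobDataProof(p []byte) error {
	if len(p) != 160 {
		return fmt.Errorf("unexpected proof length %d, want 160", len(p))
	}
	var (
		z     kzg4844.Point
		y     kzg4844.Claim
		c     kzg4844.Commitment
		proof kzg4844.Proof
	)
	copy(z[:], p[:32])
	copy(y[:], p[32:64])
	copy(c[:], p[64:112])
	copy(proof[:], p[112:160])
	return kzg4844.VerifyProof(c, z, y, proof)
}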

View File

@@ -25,6 +25,7 @@ type Batch struct {
// batch
Index uint64 `json:"index" gorm:"column:index"`
Hash string `json:"hash" gorm:"column:hash"`
DataHash string `json:"data_hash" gorm:"column:data_hash"`
StartChunkIndex uint64 `json:"start_chunk_index" gorm:"column:start_chunk_index"`
StartChunkHash string `json:"start_chunk_hash" gorm:"column:start_chunk_hash"`
EndChunkIndex uint64 `json:"end_chunk_index" gorm:"column:end_chunk_index"`
@@ -53,6 +54,10 @@ type Batch struct {
OracleStatus int16 `json:"oracle_status" gorm:"column:oracle_status;default:1"`
OracleTxHash string `json:"oracle_tx_hash" gorm:"column:oracle_tx_hash;default:NULL"`
// blob
BlobDataProof []byte `json:"blob_data_proof" gorm:"column:blob_data_proof"`
BlobSize uint64 `json:"blob_size" gorm:"column:blob_size"`
// metadata
TotalL1CommitGas uint64 `json:"total_l1_commit_gas" gorm:"column:total_l1_commit_gas;default:0"`
TotalL1CommitCalldataSize uint64 `json:"total_l1_commit_calldata_size" gorm:"column:total_l1_commit_calldata_size;default:0"`
@@ -257,6 +262,7 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, codecVer
newBatch := Batch{
Index: batch.Index,
Hash: batchMeta.BatchHash.Hex(),
DataHash: batchMeta.BatchDataHash.Hex(),
StartChunkHash: batchMeta.StartChunkHash.Hex(),
StartChunkIndex: startChunkIndex,
EndChunkHash: batchMeta.EndChunkHash.Hex(),
@@ -271,6 +277,8 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, codecVer
OracleStatus: int16(types.GasOraclePending),
TotalL1CommitGas: metrics.L1CommitGas,
TotalL1CommitCalldataSize: metrics.L1CommitCalldataSize,
BlobDataProof: batchMeta.BatchBlobDataProof,
BlobSize: metrics.L1CommitBlobSize,
}
db := o.db

View File

@@ -44,6 +44,10 @@ type Chunk struct {
// batch
BatchHash string `json:"batch_hash" gorm:"column:batch_hash;default:NULL"`
// blob
CrcMax uint64 `json:"crc_max" gorm:"column:crc_max"`
BlobSize uint64 `json:"blob_size" gorm:"column:blob_size"`
// metadata
TotalL2TxGas uint64 `json:"total_l2_tx_gas" gorm:"column:total_l2_tx_gas"`
TotalL2TxNum uint64 `json:"total_l2_tx_num" gorm:"column:total_l2_tx_num"`
@@ -212,6 +216,8 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *encoding.Chunk, codecVer
ParentChunkStateRoot: parentChunkStateRoot,
WithdrawRoot: chunk.Blocks[numBlocks-1].WithdrawRoot.Hex(),
ProvingStatus: int16(types.ProvingTaskUnassigned),
CrcMax: metrics.CrcMax,
BlobSize: metrics.L1CommitBlobSize,
}
db := o.db

View File

@@ -191,10 +191,12 @@ func GetChunkHash(chunk *encoding.Chunk, totalL1MessagePoppedBefore uint64, code
// BatchMetadata represents the metadata of a batch.
type BatchMetadata struct {
BatchHash common.Hash
BatchDataHash common.Hash
BatchBlobDataProof []byte
BatchBytes []byte
StartChunkHash common.Hash
EndChunkHash common.Hash
}
// GetBatchMetadata retrieves the metadata of a batch.
@@ -212,9 +214,11 @@ func GetBatchMetadata(batch *encoding.Batch, codecVersion encoding.CodecVersion)
return nil, fmt.Errorf("failed to create codecv0 DA batch: %w", err)
}
// BatchBlobDataProof is left empty for codecv0.
batchMeta := &BatchMetadata{
BatchHash: daBatch.Hash(),
BatchDataHash: daBatch.DataHash,
BatchBytes: daBatch.Encode(),
}
startDAChunk, err := codecv0.NewDAChunk(batch.Chunks[0], batch.TotalL1MessagePoppedBefore)
@@ -243,9 +247,16 @@ func GetBatchMetadata(batch *encoding.Batch, codecVersion encoding.CodecVersion)
return nil, fmt.Errorf("failed to create codecv1 DA batch: %w", err)
}
blobDataProof, err := daBatch.BlobDataProof()
if err != nil {
return nil, fmt.Errorf("failed to get codecv1 blob data proof: %w", err)
}
batchMeta := &BatchMetadata{
BatchHash: daBatch.Hash(),
BatchDataHash: daBatch.DataHash,
BatchBlobDataProof: blobDataProof,
BatchBytes: daBatch.Encode(),
}
startDAChunk, err := codecv1.NewDAChunk(batch.Chunks[0], batch.TotalL1MessagePoppedBefore)
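A call-site sketch for the extended metadata (variable names are placeholders). Since codecv0 leaves BatchBlobDataProof nil, consumers such as InsertBatch above have to treat the field as optional:

batchMeta, err := GetBatchMetadata(batch, encoding.CodecV1)
if err != nil {
	return fmt.Errorf("failed to get batch metadata: %w", err)
}
// nil for CodecV0, populated for CodecV1:
proof := batchMeta.BatchBlobDataProof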

View File

@@ -21,6 +21,7 @@ type Batch struct {
// batch
Index uint64 `json:"index" gorm:"column:index"`
Hash string `json:"hash" gorm:"column:hash"`
DataHash string `json:"data_hash" gorm:"column:data_hash"`
StartChunkIndex uint64 `json:"start_chunk_index" gorm:"column:start_chunk_index"`
StartChunkHash string `json:"start_chunk_hash" gorm:"column:start_chunk_hash"`
EndChunkIndex uint64 `json:"end_chunk_index" gorm:"column:end_chunk_index"`
@@ -49,6 +50,10 @@ type Batch struct {
OracleStatus int16 `json:"oracle_status" gorm:"column:oracle_status;default:1"`
OracleTxHash string `json:"oracle_tx_hash" gorm:"column:oracle_tx_hash;default:NULL"`
// blob
BlobDataProof []byte `json:"blob_data_proof" gorm:"column:blob_data_proof"`
BlobSize uint64 `json:"blob_size" gorm:"column:blob_size"`
// metadata
CreatedAt time.Time `json:"created_at" gorm:"column:created_at"`
UpdatedAt time.Time `json:"updated_at" gorm:"column:updated_at"`
@@ -151,6 +156,7 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...
newBatch := Batch{
Index: batch.Index,
Hash: daBatch.Hash().Hex(),
DataHash: daBatch.DataHash.Hex(),
StartChunkHash: startDAChunkHash.Hex(),
StartChunkIndex: startChunkIndex,
EndChunkHash: endDAChunkHash.Hex(),
@@ -163,6 +169,8 @@ func (o *Batch) InsertBatch(ctx context.Context, batch *encoding.Batch, dbTX ...
ProvingStatus: int16(types.ProvingTaskUnassigned),
RollupStatus: int16(types.RollupPending),
OracleStatus: int16(types.GasOraclePending),
BlobDataProof: nil, // using a mock value because this code path is only used in unit tests
BlobSize: 0, // using a mock value because this code path is only used in unit tests
}
db := o.db

View File

@@ -43,6 +43,10 @@ type Chunk struct {
// batch
BatchHash string `json:"batch_hash" gorm:"column:batch_hash;default:NULL"`
// blob
CrcMax uint64 `json:"crc_max" gorm:"column:crc_max"`
BlobSize uint64 `json:"blob_size" gorm:"column:blob_size"`
// metadata
TotalL2TxGas uint64 `json:"total_l2_tx_gas" gorm:"column:total_l2_tx_gas"`
TotalL2TxNum uint64 `json:"total_l2_tx_num" gorm:"column:total_l2_tx_num"`
@@ -150,6 +154,8 @@ func (o *Chunk) InsertChunk(ctx context.Context, chunk *encoding.Chunk, dbTX ...
ParentChunkStateRoot: parentChunkStateRoot,
WithdrawRoot: chunk.Blocks[numBlocks-1].WithdrawRoot.Hex(),
ProvingStatus: int16(types.ProvingTaskUnassigned),
CrcMax: 0, // using a mock value because this code path is only used in unit tests
BlobSize: 0, // using a mock value because this code path is only used in unit tests
}
db := o.db