Mirror of https://github.com/scroll-tech/scroll.git
synced 2026-01-11 23:18:07 -05:00
Compare commits
5 Commits
| Author | SHA1 | Date |
|---|---|---|
|  | af3913d607 |  |
|  | c586d2f223 |  |
|  | 0039ef7d5a |  |
|  | 14730d46a3 |  |
|  | 1cfecad426 |  |
@@ -147,7 +147,7 @@ func (b *EventUpdateLogic) updateL2WithdrawMessageInfos(ctx context.Context, bat
	}

	if withdrawTrie.NextMessageNonce != l2WithdrawMessages[0].MessageNonce {
		log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actual next message nonce", l2WithdrawMessages[0].MessageNonce)
		log.Error("nonce mismatch", "expected next message nonce", withdrawTrie.NextMessageNonce, "actuall next message nonce", l2WithdrawMessages[0].MessageNonce)
		return fmt.Errorf("nonce mismatch")
	}
20 common/libzkp/impl/Cargo.lock generated
@@ -31,7 +31,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "aggregator"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"ark-std 0.3.0",
|
||||
"bitstream-io",
|
||||
@@ -537,7 +537,7 @@ checksum = "a3e2c3daef883ecc1b5d58c15adae93470a91d425f3532ba1695849656af3fc1"
|
||||
[[package]]
|
||||
name = "bus-mapping"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types",
|
||||
"ethers-core",
|
||||
@@ -1126,7 +1126,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "eth-types"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"base64 0.13.1",
|
||||
"ethers-core",
|
||||
@@ -1283,7 +1283,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "external-tracer"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types",
|
||||
"geth-utils",
|
||||
@@ -1465,7 +1465,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "gadgets"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types",
|
||||
"halo2_proofs",
|
||||
@@ -1488,7 +1488,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "geth-utils"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"env_logger 0.10.0",
|
||||
"gobuild",
|
||||
@@ -2237,7 +2237,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "mock"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types",
|
||||
"ethers-core",
|
||||
@@ -2252,7 +2252,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "mpt-zktrie"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types",
|
||||
"halo2curves",
|
||||
@@ -2724,7 +2724,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "prover"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"aggregator",
|
||||
"anyhow",
|
||||
@@ -4361,7 +4361,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "zkevm-circuits"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"array-init",
|
||||
"bus-mapping",
|
||||
|
||||
@@ -25,7 +25,7 @@ bls12_381 = { git = "https://github.com/scroll-tech/bls12_381", branch = "feat/i
[dependencies]
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", default-features = false, features = ["parallel_syn", "scroll"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.1", default-features = false, features = ["parallel_syn", "scroll"] }

base64 = "0.13.0"
env_logger = "0.9.0"
@@ -5,7 +5,7 @@ import (
	"runtime/debug"
)

var tag = "v4.4.20"
var tag = "v4.4.17"

var commit = func() string {
	if info, ok := debug.ReadBuildInfo(); ok {
@@ -38,7 +38,7 @@ Mapping from L2 ERC20 token address to corresponding L2ERC20Gateway.
function defaultERC20Gateway() external view returns (address)
```

The address of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
The addess of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
@@ -83,6 +83,45 @@ function committedBatches(uint256) external view returns (bytes32)
|---|---|---|
| _0 | bytes32 | The batch hash of a committed batch. |

### finalizeBatch

```solidity
function finalizeBatch(bytes _batchHeader, bytes32 _prevStateRoot, bytes32 _postStateRoot, bytes32 _withdrawRoot) external nonpayable
```

Finalize a committed batch on layer 1 without providing proof.

#### Parameters

| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _prevStateRoot | bytes32 | undefined |
| _postStateRoot | bytes32 | undefined |
| _withdrawRoot | bytes32 | undefined |

### finalizeBatch4844

```solidity
function finalizeBatch4844(bytes _batchHeader, bytes32 _prevStateRoot, bytes32 _postStateRoot, bytes32 _withdrawRoot, bytes _blobDataProof) external nonpayable
```

Finalize a committed batch (with blob) on layer 1 without providing proof.

*Memory layout of `_blobDataProof`: ```text | z | y | kzg_commitment | kzg_proof | |---------|---------|----------------|-----------| | bytes32 | bytes32 | bytes48 | bytes48 | ```*

#### Parameters

| Name | Type | Description |
|---|---|---|
| _batchHeader | bytes | undefined |
| _prevStateRoot | bytes32 | undefined |
| _postStateRoot | bytes32 | undefined |
| _withdrawRoot | bytes32 | undefined |
| _blobDataProof | bytes | undefined |

### finalizeBatchWithProof

```solidity
@@ -945,7 +984,7 @@ error ErrorRevertFinalizedBatch()

*Thrown when reverting a finalized batch.*
*Thrown when reverting a finialized batch.*

### ErrorRevertNotStartFromEnd

@@ -956,7 +995,7 @@ error ErrorRevertNotStartFromEnd()

*Thrown when the reverted batches are not in the ending of committed batch chain.*
*Thrown when the reverted batches are not in the ending of commited batch chain.*

### ErrorRevertZeroBatches
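The `_blobDataProof` layout documented above is fixed width (32 + 32 + 48 + 48 = 160 bytes), so a caller can split it without any ABI decoding. The Rust sketch below is illustrative only and not part of this repository; `BlobDataProof` and `split_blob_data_proof` are hypothetical names for a client-side sanity check of that layout.

```rust
/// Fixed-width fields of the documented `_blobDataProof` layout:
/// z (32) | y (32) | kzg_commitment (48) | kzg_proof (48).
pub struct BlobDataProof<'a> {
    pub z: &'a [u8; 32],
    pub y: &'a [u8; 32],
    pub kzg_commitment: &'a [u8; 48],
    pub kzg_proof: &'a [u8; 48],
}

/// Splits a 160-byte proof blob into its four fields, returning None on a
/// length mismatch instead of panicking.
pub fn split_blob_data_proof(buf: &[u8]) -> Option<BlobDataProof<'_>> {
    if buf.len() != 32 + 32 + 48 + 48 {
        return None;
    }
    Some(BlobDataProof {
        z: buf[0..32].try_into().ok()?,
        y: buf[32..64].try_into().ok()?,
        kzg_commitment: buf[64..112].try_into().ok()?,
        kzg_proof: buf[112..160].try_into().ok()?,
    })
}
```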
@@ -44,7 +44,7 @@ contract L1ScrollMessenger is ScrollMessengerBase, IL1ScrollMessenger {
    struct ReplayState {
        // The number of replayed times.
        uint128 times;
        // The queue index of latest replayed one. If it is zero, it means the message has not been replayed.
        // The queue index of lastest replayed one. If it is zero, it means the message has not been replayed.
        uint128 lastIndex;
    }
@@ -167,7 +167,7 @@ abstract contract L1ERC20Gateway is IL1ERC20Gateway, IMessageDropCallback, Scrol
    /// @dev Internal function to do all the deposit operations.
    ///
    /// @param _token The token to deposit.
    /// @param _to The recipient address to receive the token in L2.
    /// @param _to The recipient address to recieve the token in L2.
    /// @param _amount The amount of token to deposit.
    /// @param _data Optional data to forward to recipient's account.
    /// @param _gasLimit Gas limit required to complete the deposit on L2.
@@ -25,7 +25,7 @@ contract L1GatewayRouter is OwnableUpgradeable, IL1GatewayRouter {
    /// @notice The address of L1ETHGateway.
    address public ethGateway;

    /// @notice The address of default ERC20 gateway, normally the L1StandardERC20Gateway contract.
    /// @notice The addess of default ERC20 gateway, normally the L1StandardERC20Gateway contract.
    address public defaultERC20Gateway;

    /// @notice Mapping from ERC20 token address to corresponding L1ERC20Gateway.
@@ -97,7 +97,7 @@ contract L1StandardERC20Gateway is L1ERC20Gateway {

    /// @inheritdoc IL1ERC20Gateway
    function getL2ERC20Address(address _l1Token) public view override returns (address) {
        // In StandardERC20Gateway, all corresponding l2 tokens are deployed by Create2 with salt,
        // In StandardERC20Gateway, all corresponding l2 tokens are depoyed by Create2 with salt,
        // we can calculate the l2 address directly.
        bytes32 _salt = keccak256(abi.encodePacked(counterpart, keccak256(abi.encodePacked(_l1Token))));
@@ -122,4 +122,36 @@ interface IScrollChain {
        bytes calldata blobDataProof,
        bytes calldata aggrProof
    ) external;

    /// @notice Finalize a committed batch on layer 1 without providing proof.
    /// @param batchHeader The header of current batch, see the encoding in comments of `commitBatch.
    /// @param prevStateRoot The state root of parent batch.
    /// @param postStateRoot The state root of current batch.
    /// @param withdrawRoot The withdraw trie root of current batch.
    function finalizeBatch(
        bytes calldata batchHeader,
        bytes32 prevStateRoot,
        bytes32 postStateRoot,
        bytes32 withdrawRoot
    ) external;

    /// @notice Finalize a committed batch (with blob) on layer 1 without providing proof.
    ///
    /// @dev Memory layout of `blobDataProof`:
    /// | z | y | kzg_commitment | kzg_proof |
    /// |---------|---------|----------------|-----------|
    /// | bytes32 | bytes32 | bytes48 | bytes48 |
    ///
    /// @param batchHeader The header of current batch, see the encoding in comments of `commitBatch.
    /// @param prevStateRoot The state root of parent batch.
    /// @param postStateRoot The state root of current batch.
    /// @param withdrawRoot The withdraw trie root of current batch.
    /// @param blobDataProof The proof for blob data.
    function finalizeBatch4844(
        bytes calldata batchHeader,
        bytes32 prevStateRoot,
        bytes32 postStateRoot,
        bytes32 withdrawRoot,
        bytes calldata blobDataProof
    ) external;
}
@@ -89,10 +89,10 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    /// @dev Thrown when the number of batches to revert is zero.
    error ErrorRevertZeroBatches();

    /// @dev Thrown when the reverted batches are not in the ending of committed batch chain.
    /// @dev Thrown when the reverted batches are not in the ending of commited batch chain.
    error ErrorRevertNotStartFromEnd();

    /// @dev Thrown when reverting a finalized batch.
    /// @dev Thrown when reverting a finialized batch.
    error ErrorRevertFinalizedBatch();

    /// @dev Thrown when the given state root is zero.
@@ -509,6 +509,104 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
        emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
    }

    /// @inheritdoc IScrollChain
    function finalizeBatch(
        bytes calldata _batchHeader,
        bytes32 _prevStateRoot,
        bytes32 _postStateRoot,
        bytes32 _withdrawRoot
    ) external override OnlyProver whenNotPaused {
        require(_prevStateRoot != bytes32(0), "previous state root is zero");
        require(_postStateRoot != bytes32(0), "new state root is zero");

        // compute batch hash and verify
        (uint256 memPtr, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);

        // verify previous state root.
        require(finalizedStateRoots[_batchIndex - 1] == _prevStateRoot, "incorrect previous state root");

        // avoid duplicated verification
        require(finalizedStateRoots[_batchIndex] == bytes32(0), "batch already verified");

        // check and update lastFinalizedBatchIndex
        unchecked {
            require(lastFinalizedBatchIndex + 1 == _batchIndex, "incorrect batch index");
            lastFinalizedBatchIndex = _batchIndex;
        }

        // record state root and withdraw root
        finalizedStateRoots[_batchIndex] = _postStateRoot;
        withdrawRoots[_batchIndex] = _withdrawRoot;

        // Pop finalized and non-skipped message from L1MessageQueue.
        _popL1Messages(
            BatchHeaderV0Codec.getSkippedBitmapPtr(memPtr),
            BatchHeaderV0Codec.getTotalL1MessagePopped(memPtr),
            BatchHeaderV0Codec.getL1MessagePopped(memPtr)
        );

        emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
    }

    /// @inheritdoc IScrollChain
    /// @dev Memory layout of `_blobDataProof`:
    /// ```text
    /// | z | y | kzg_commitment | kzg_proof |
    /// |---------|---------|----------------|-----------|
    /// | bytes32 | bytes32 | bytes48 | bytes48 |
    /// ```
    function finalizeBatch4844(
        bytes calldata _batchHeader,
        bytes32 _prevStateRoot,
        bytes32 _postStateRoot,
        bytes32 _withdrawRoot,
        bytes calldata _blobDataProof
    ) external override OnlyProver whenNotPaused {
        if (_prevStateRoot == bytes32(0)) revert ErrorPreviousStateRootIsZero();
        if (_postStateRoot == bytes32(0)) revert ErrorStateRootIsZero();

        // compute batch hash and verify
        (uint256 memPtr, bytes32 _batchHash, uint256 _batchIndex, ) = _loadBatchHeader(_batchHeader);
        bytes32 _blobVersionedHash = BatchHeaderV1Codec.getBlobVersionedHash(memPtr);

        // Calls the point evaluation precompile and verifies the output
        {
            (bool success, bytes memory data) = POINT_EVALUATION_PRECOMPILE_ADDR.staticcall(
                abi.encodePacked(_blobVersionedHash, _blobDataProof)
            );
            // We verify that the point evaluation precompile call was successful by testing the latter 32 bytes of the
            // response is equal to BLS_MODULUS as defined in https://eips.ethereum.org/EIPS/eip-4844#point-evaluation-precompile
            if (!success) revert ErrorCallPointEvaluationPrecompileFailed();
            (, uint256 result) = abi.decode(data, (uint256, uint256));
            if (result != BLS_MODULUS) revert ErrorUnexpectedPointEvaluationPrecompileOutput();
        }

        // verify previous state root.
        if (finalizedStateRoots[_batchIndex - 1] != _prevStateRoot) revert ErrorIncorrectPreviousStateRoot();

        // avoid duplicated verification
        if (finalizedStateRoots[_batchIndex] != bytes32(0)) revert ErrorBatchIsAlreadyVerified();

        // check and update lastFinalizedBatchIndex
        unchecked {
            if (lastFinalizedBatchIndex + 1 != _batchIndex) revert ErrorIncorrectBatchIndex();
            lastFinalizedBatchIndex = _batchIndex;
        }

        // record state root and withdraw root
        finalizedStateRoots[_batchIndex] = _postStateRoot;
        withdrawRoots[_batchIndex] = _withdrawRoot;

        // Pop finalized and non-skipped message from L1MessageQueue.
        _popL1Messages(
            BatchHeaderV1Codec.getSkippedBitmapPtr(memPtr),
            BatchHeaderV1Codec.getTotalL1MessagePopped(memPtr),
            BatchHeaderV1Codec.getL1MessagePopped(memPtr)
        );

        emit FinalizeBatch(_batchIndex, _batchHash, _postStateRoot, _withdrawRoot);
    }

    /************************
     * Restricted Functions *
     ************************/
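For context on the precompile check above: per EIP-4844, the point evaluation precompile takes a 192-byte input (the blob versioned hash followed by z, y, the KZG commitment, and the KZG proof) and, on success, returns two 32-byte words, FIELD_ELEMENTS_PER_BLOB and BLS_MODULUS, which is why the contract decodes the response as `(uint256, uint256)` and compares the second word with BLS_MODULUS. The Rust sketch below is a hypothetical prover-side helper, not code from this repository, showing how the same 192-byte payload could be assembled before submission.

```rust
/// Mirrors `abi.encodePacked(_blobVersionedHash, _blobDataProof)` in
/// finalizeBatch4844: versioned hash (32 bytes) followed by the 160-byte
/// blob data proof (z | y | kzg_commitment | kzg_proof).
pub fn point_evaluation_input(versioned_hash: &[u8; 32], blob_data_proof: &[u8]) -> Option<Vec<u8>> {
    if blob_data_proof.len() != 160 {
        return None; // reject anything that does not match the documented layout
    }
    let mut input = Vec::with_capacity(192);
    input.extend_from_slice(versioned_hash);
    input.extend_from_slice(blob_data_proof);
    Some(input)
}
```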
@@ -580,7 +678,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    /// @param _chunks The list of chunks to commit.
    /// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
    /// @return _batchDataHash The computed data hash for the list of chunks.
    /// @return _totalL1MessagesPoppedInBatch The total number of L1 messages popped in this batch, including skipped one.
    /// @return _totalL1MessagesPoppedInBatch The total number of L1 messages poped in this batch, including skipped one.
    function _commitChunksV0(
        uint256 _totalL1MessagesPoppedOverall,
        bytes[] memory _chunks,
@@ -627,7 +725,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {
    /// @param _skippedL1MessageBitmap The bitmap indicates whether each L1 message is skipped or not.
    /// @return _blobVersionedHash The blob versioned hash for the blob carried in this transaction.
    /// @return _batchDataHash The computed data hash for the list of chunks.
    /// @return _totalL1MessagesPoppedInBatch The total number of L1 messages popped in this batch, including skipped one.
    /// @return _totalL1MessagesPoppedInBatch The total number of L1 messages poped in this batch, including skipped one.
    function _commitChunksV1(
        uint256 _totalL1MessagesPoppedOverall,
        bytes[] memory _chunks,
@@ -950,7 +1048,7 @@ contract ScrollChain is OwnableUpgradeable, PausableUpgradeable, IScrollChain {

    /// @dev Internal function to pop finalized l1 messages.
    /// @param bitmapPtr The memory offset of `skippedL1MessageBitmap`.
    /// @param totalL1MessagePopped The total number of L1 messages popped in all batches including current batch.
    /// @param totalL1MessagePopped The total number of L1 messages poped in all batches including current batch.
    /// @param l1MessagePopped The number of L1 messages popped in current batch.
    function _popL1Messages(
        uint256 bitmapPtr,
@@ -58,11 +58,7 @@ contract L2CustomERC20Gateway is L2ERC20Gateway {
    /// @param _counterpart The address of `L1CustomERC20Gateway` contract in L1.
    /// @param _router The address of `L2GatewayRouter` contract in L2.
    /// @param _messenger The address of `L2ScrollMessenger` contract in L2.
    function initialize(
        address _counterpart,
        address _router,
        address _messenger
    ) external initializer {
    function initialize(address _counterpart, address _router, address _messenger) external initializer {
        ScrollGatewayBase._initialize(_counterpart, _router, _messenger);
    }
@@ -21,7 +21,7 @@ contract L2GatewayRouter is OwnableUpgradeable, IL2GatewayRouter {
    /// @notice The address of L2ETHGateway.
    address public ethGateway;

    /// @notice The address of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
    /// @notice The addess of default L2 ERC20 gateway, normally the L2StandardERC20Gateway contract.
    address public defaultERC20Gateway;

    /// @notice Mapping from L2 ERC20 token address to corresponding L2ERC20Gateway.
@@ -242,7 +242,7 @@ contract L1BlockContainer is OwnableBase, IL1BlockContainer {
            let _computedBlockHash := keccak256(memPtr, headerPayloadLength)
            require(eq(_blockHash, _computedBlockHash), "Block hash mismatch")

            // load 16 values
            // load 16 vaules
            for {
                let i := 0
            } lt(i, 16) {
@@ -169,12 +169,7 @@ contract L1BatchBridgeGateway is AccessControlEnumerableUpgradeable, ReentrancyG
    /// @param _router The address of `L1GatewayRouter` contract in L1.
    /// @param _messenger The address of `L1ScrollMessenger` contract in L1.
    /// @param _queue The address of `L1MessageQueue` contract in L1.
    constructor(
        address _counterpart,
        address _router,
        address _messenger,
        address _queue
    ) {
    constructor(address _counterpart, address _router, address _messenger, address _queue) {
        _disableInitializers();

        counterpart = _counterpart;
@@ -340,11 +335,7 @@ contract L1BatchBridgeGateway is AccessControlEnumerableUpgradeable, ReentrancyG
    /// @param token The address of token to deposit.
    /// @param sender The address of token sender.
    /// @param amount The amount of token to deposit.
    function _deposit(
        address token,
        address sender,
        uint96 amount
    ) internal {
    function _deposit(address token, address sender, uint96 amount) internal {
        BatchConfig memory cachedBatchConfig = configs[token];
        TokenState memory cachedTokenState = tokens[token];
        _tryFinalizeCurrentBatch(token, cachedBatchConfig, cachedTokenState);
@@ -409,11 +400,7 @@ contract L1BatchBridgeGateway is AccessControlEnumerableUpgradeable, ReentrancyG
    /// @param token The address of token.
    /// @param receiver The address of token receiver.
    /// @param amount The amount of token to transfer.
    function _transferToken(
        address token,
        address receiver,
        uint256 amount
    ) private {
    function _transferToken(address token, address receiver, uint256 amount) private {
        if (token == address(0)) {
            (bool success, ) = receiver.call{value: amount}("");
            if (!success) revert ErrorTransferETHFailed();
@@ -185,11 +185,7 @@ contract L2BatchBridgeGateway is AccessControlEnumerableUpgradeable {
    /// @param l2Token The address of L2 token.
    /// @param batchIndex The index of batch to distribute.
    /// @param nodes The list of encoded L1 deposits.
    function distribute(
        address l2Token,
        uint64 batchIndex,
        bytes32[] memory nodes
    ) external onlyRole(KEEPER_ROLE) {
    function distribute(address l2Token, uint64 batchIndex, bytes32[] memory nodes) external onlyRole(KEEPER_ROLE) {
        address l1Token = tokenMapping[l2Token];
        bytes32 hash = BatchBridgeCodec.encodeInitialNode(l1Token, batchIndex);
        for (uint256 i = 0; i < nodes.length; i++) {
@@ -225,11 +221,7 @@ contract L2BatchBridgeGateway is AccessControlEnumerableUpgradeable {
    /// @param receiver The address of token receiver.
    /// @param amount The amount of token to transfer.
    /// @return success Whether the transfer is successful.
    function _transferToken(
        address token,
        address receiver,
        uint256 amount
    ) private returns (bool success) {
    function _transferToken(address token, address receiver, uint256 amount) private returns (bool success) {
        if (token == address(0)) {
            // We add gas limit here to avoid DDOS from malicious receiver.
            (success, ) = receiver.call{value: amount, gas: SAFE_ETH_TRANSFER_GAS_LIMIT}("");
@@ -23,7 +23,7 @@ contract ScrollStandardERC20Factory is Ownable, IScrollStandardERC20Factory {

    /// @inheritdoc IScrollStandardERC20Factory
    function computeL2TokenAddress(address _gateway, address _l1Token) external view returns (address) {
        // In StandardERC20Gateway, all corresponding l2 tokens are deployed by Create2 with salt,
        // In StandardERC20Gateway, all corresponding l2 tokens are depoyed by Create2 with salt,
        // we can calculate the l2 address directly.
        bytes32 _salt = _getSalt(_gateway, _l1Token);
@@ -369,7 +369,7 @@ library PatriciaMerkleTrieVerifier {

                // first item is considered the root node.
                // Otherwise verifies that the hash of the current node
                // is the same as the previous chosen one.
                // is the same as the previous choosen one.
                switch i
                case 1 {
                    rootHash := hash
@@ -425,7 +425,7 @@ library PatriciaMerkleTrieVerifier {
                }
            }

            // lastly, derive the path of the chosen one (TM)
            // lastly, derive the path of the choosen one (TM)
            path := derivePath(key, depth)
        }
@@ -113,7 +113,7 @@ library ZkTrieVerifier {

                // first item is considered the root node.
                // Otherwise verifies that the hash of the current node
                // is the same as the previous chosen one.
                // is the same as the previous choosen one.
                switch depth
                case 1 {
                    rootHash := hash
@@ -262,7 +262,7 @@ library ZkTrieVerifier {
            ptr, storageValue := verifyStorageProof(poseidon, storageKey, storageRootHash, ptr)

            // the one and only boundary check
            // in case an attacker crafted a malicious payload
            // in case an attacker crafted a malicous payload
            // and succeeds in the prior verification steps
            // then this should catch any bogus accesses
            if iszero(eq(ptr, add(proof.offset, proof.length))) {
@@ -173,11 +173,7 @@ contract L1BatchBridgeGatewayTest is L1GatewayTestBase {
        assertEq(safeBridgeGasLimit, config.safeBridgeGasLimit);
    }

    function checkBatchState(
        address token,
        uint256 phase,
        L1BatchBridgeGateway.BatchState memory expected
    ) private {
    function checkBatchState(address token, uint256 phase, L1BatchBridgeGateway.BatchState memory expected) private {
        (uint128 amount, uint64 startTime, uint64 numDeposits, bytes32 hash) = batch.batches(token, phase);
        assertEq(amount, expected.amount);
        assertEq(startTime, expected.startTime);

@@ -10,11 +10,7 @@ contract RevertOnTransferToken is MockERC20 {
    bool private revertOnTransfer;
    bool private transferReturn;

    constructor(
        string memory _name,
        string memory _symbol,
        uint8 _decimals
    ) MockERC20(_name, _symbol, _decimals) {
    constructor(string memory _name, string memory _symbol, uint8 _decimals) MockERC20(_name, _symbol, _decimals) {
        transferReturn = true;
    }
@@ -309,7 +309,7 @@ func (o *Batch) UpdateProvingStatusFailed(ctx context.Context, hash string, maxA
	db = db.Model(&Batch{})
	db = db.Where("hash", hash)
	db = db.Where("total_attempts >= ?", maxAttempts)
	db = db.Where("proving_status != ?", int(types.ProvingTaskVerified))
	db = db.Where("proving_status != ?", int(types.ProverProofValid))
	if err := db.Update("proving_status", int(types.ProvingTaskFailed)).Error; err != nil {
		return fmt.Errorf("Batch.UpdateProvingStatus error: %w, batch hash: %v, status: %v", err, hash, types.ProvingTaskFailed.String())
	}

@@ -332,7 +332,7 @@ func (o *Chunk) UpdateProvingStatusFailed(ctx context.Context, hash string, maxA
	db = db.Model(&Chunk{})
	db = db.Where("hash", hash)
	db = db.Where("total_attempts >= ?", maxAttempts)
	db = db.Where("proving_status != ?", int(types.ProvingTaskVerified))
	db = db.Where("proving_status != ?", int(types.ProverProofValid))
	if err := db.Update("proving_status", int(types.ProvingTaskFailed)).Error; err != nil {
		return fmt.Errorf("Batch.UpdateProvingStatus error: %w, batch hash: %v, status: %v", err, hash, types.ProvingTaskFailed.String())
	}
33 prover/Cargo.lock generated
@@ -59,12 +59,12 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "aggregator"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"ark-std 0.3.0",
|
||||
"bitstream-io",
|
||||
"c-kzg",
|
||||
"ctor 0.1.26",
|
||||
"ctor",
|
||||
"encoder",
|
||||
"env_logger 0.10.2",
|
||||
"eth-types 0.11.0",
|
||||
@@ -661,7 +661,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "bus-mapping"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types 0.11.0",
|
||||
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
|
||||
@@ -1036,16 +1036,6 @@ dependencies = [
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ctor"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "edb49164822f3ee45b17acd4a208cfc1251410cf0cad9a833234c9890774dd9f"
|
||||
dependencies = [
|
||||
"quote",
|
||||
"syn 2.0.66",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ctr"
|
||||
version = "0.9.2"
|
||||
@@ -1391,7 +1381,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "eth-types"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"base64 0.13.1",
|
||||
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
|
||||
@@ -1625,7 +1615,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "external-tracer"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types 0.11.0",
|
||||
"geth-utils 0.11.0",
|
||||
@@ -1863,7 +1853,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "gadgets"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types 0.11.0",
|
||||
"halo2_proofs",
|
||||
@@ -1896,7 +1886,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "geth-utils"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"env_logger 0.10.2",
|
||||
"gobuild",
|
||||
@@ -2795,7 +2785,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "mock"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types 0.11.0",
|
||||
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
|
||||
@@ -2825,7 +2815,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "mpt-zktrie"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"eth-types 0.11.0",
|
||||
"halo2curves",
|
||||
@@ -3388,7 +3378,6 @@ dependencies = [
|
||||
"anyhow",
|
||||
"base64 0.13.1",
|
||||
"clap",
|
||||
"ctor 0.2.8",
|
||||
"env_logger 0.11.3",
|
||||
"eth-keystore",
|
||||
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
|
||||
@@ -3451,7 +3440,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "prover"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"aggregator 0.11.0",
|
||||
"anyhow",
|
||||
@@ -5595,7 +5584,7 @@ dependencies = [
|
||||
[[package]]
|
||||
name = "zkevm-circuits"
|
||||
version = "0.11.0"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.4#38a68e22d3d8449bd39a50c22da55b9e741de453"
|
||||
source = "git+https://github.com/scroll-tech/zkevm-circuits.git?tag=v0.11.1#512996f1bac1218c93d9d3de49d7b86f52726c27"
|
||||
dependencies = [
|
||||
"array-init",
|
||||
"bus-mapping 0.11.0",
|
||||
|
||||
@@ -30,7 +30,7 @@ ethers-providers = { git = "https://github.com/scroll-tech/ethers-rs.git", branc
halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.1" }
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
prover = { git = "https://github.com/scroll-tech/zkevm-circuits.git", branch = "v0.10", default-features = false, features = ["parallel_syn", "scroll", "shanghai"] }
prover_next = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.4", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
prover_next = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.11.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
base64 = "0.13.1"
reqwest = { version = "0.12.4", features = ["gzip"] }
reqwest-middleware = "0.3"
@@ -45,4 +45,3 @@ tokio = "1.37.0"
sled = "0.34.7"
http = "1.1.0"
clap = { version = "4.5", features = ["derive"] }
ctor = "0.2.8"
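The `package = "prover"` rename above is what lets two releases of the same crate link into one binary under different names. A minimal import fragment, assuming the `[dependencies]` listed in this Cargo.toml; the alias names are hypothetical:

```rust
// `prover` tracks branch v0.10, while `prover_next` is the same crate renamed
// via `package = "prover"` and pinned to the v0.11.x tag; the handler modules
// then pick a circuit version explicitly.
use prover::zkevm::Prover as LegacyChunkProver;
use prover_next::zkevm::Prover as NextChunkProver;
```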
@@ -1,4 +1,4 @@
.PHONY: prover lint tests_binary
.PHONY: prover

ifeq (4.3,$(firstword $(sort $(MAKE_VERSION) 4.3)))
HALO2_VERSION=$(shell grep -m 1 "halo2.git" ./Cargo.lock | cut -d "#" -f2 | cut -c-7)
@@ -38,15 +38,4 @@ endif
prover:
	GO_TAG=${GO_TAG} GIT_REV=${GIT_REV} ZK_VERSION=${ZK_VERSION} cargo build --release
	rm -rf ./lib && mkdir ./lib
	find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib

tests_binary:
	cargo clean && cargo test --release --no-run
	ls target/release/deps/prover* | grep -v "\.d" | xargs -I{} ln -sf {} ./prover.test
	rm -rf ./lib && mkdir ./lib
	find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib

lint:
	cargo check --all-features
	cargo clippy --all-features --all-targets -- -D warnings
	cargo fmt --all
	find target/ -name "libzktrie.so" | xargs -I{} cp {} ./lib
@@ -1,5 +1,3 @@
#![feature(lazy_cell)]

mod config;
mod coordinator_client;
mod geth_client;
@@ -6,8 +6,8 @@ use serde::Deserialize;

use crate::types::{CommonHash, Task};
use prover::{
    aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BatchProof, BlockTrace,
    ChunkHash, ChunkProof,
    aggregator::Prover as BatchProver, zkevm::Prover as ChunkProver, BlockTrace, ChunkHash,
    ChunkProof,
};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
@@ -60,7 +60,8 @@ impl BaseCircuitsHandler {
        }
    }

    fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
    fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_trace = self.gen_chunk_traces(task)?;
        if let Some(prover) = self.chunk_prover.as_ref() {
            let chunk_proof = prover.borrow_mut().gen_chunk_proof(
                chunk_trace,
@@ -69,50 +70,34 @@ impl BaseCircuitsHandler {
                self.get_output_dir(),
            )?;

            return Ok(chunk_proof);
            return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_trace = self.gen_chunk_traces(task)?;
        let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
        Ok(serde_json::to_string(&chunk_proof)?)
    }
    fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)> =
            self.gen_chunk_hashes_proofs(task)?;
        let chunk_proofs: Vec<ChunkProof> =
            chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

    fn gen_batch_proof_raw(
        &self,
        chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)>,
    ) -> Result<BatchProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let chunk_proofs: Vec<ChunkProof> =
                chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

            let is_valid = prover.borrow_mut().check_chunk_proofs(&chunk_proofs);

            if !is_valid {
                bail!("non-match chunk protocol")
                bail!("non-match chunk protocol, task-id: {}", &task.id)
            }

            let batch_proof = prover.borrow_mut().gen_agg_evm_proof(
                chunk_hashes_proofs,
                None,
                self.get_output_dir(),
            )?;

            return Ok(batch_proof);
            return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
        }
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_batch_proof for task {}", task.id);
        let chunk_hashes_proofs: Vec<(ChunkHash, ChunkProof)> =
            self.gen_chunk_hashes_proofs(task)?;
        let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
        Ok(serde_json::to_string(&batch_proof)?)
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }
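The refactor above splits each handler method into a `_raw` variant that returns the typed proof and a thin string-returning wrapper that only performs JSON serialization, which is what lets the test module below call `gen_chunk_proof_raw` and `gen_batch_proof_raw` directly. A minimal sketch of the pattern, using placeholder types instead of the real `prover` crate API and assuming `anyhow`, `serde`, and `serde_json` are available:

```rust
use anyhow::Result;
use serde::Serialize;

// Placeholder types standing in for prover::BlockTrace and prover::ChunkProof.
struct BlockTrace;
#[derive(Serialize)]
struct ChunkProof;

struct ChunkHandler;

impl ChunkHandler {
    // Returns the typed proof so tests can reuse it without a JSON round trip.
    fn gen_chunk_proof_raw(&self, _chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
        // ... drive the actual prover here ...
        Ok(ChunkProof)
    }

    // The string-returning wrapper only handles serialization for the coordinator.
    fn gen_chunk_proof(&self, chunk_trace: Vec<BlockTrace>) -> Result<String> {
        let proof = self.gen_chunk_proof_raw(chunk_trace)?;
        Ok(serde_json::to_string(&proof)?)
    }
}
```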
@@ -210,178 +195,3 @@ impl CircuitsHandler for BaseCircuitsHandler {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// =================================== tests module ========================================
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::zk_circuits_handler::utils::encode_vk;
|
||||
use prover::utils::chunk_trace_to_witness_block;
|
||||
use std::{path::PathBuf, sync::LazyLock};
|
||||
|
||||
#[ctor::ctor]
|
||||
fn init() {
|
||||
crate::utils::log_init(None);
|
||||
log::info!("logger initialized");
|
||||
}
|
||||
|
||||
static DEFAULT_WORK_DIR: &str = "/assets";
|
||||
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
|
||||
std::env::var("BERNOULLI_TEST_DIR")
|
||||
.unwrap_or(String::from(DEFAULT_WORK_DIR))
|
||||
.trim_end_matches('/')
|
||||
.to_string()
|
||||
});
|
||||
static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
|
||||
static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
|
||||
static PROOF_DUMP_PATH: LazyLock<String> =
|
||||
LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
|
||||
static BATCH_DIR_PATH: LazyLock<String> =
|
||||
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
|
||||
static BATCH_VK_PATH: LazyLock<String> =
|
||||
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
|
||||
static CHUNK_VK_PATH: LazyLock<String> =
|
||||
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
|
||||
|
||||
#[test]
|
||||
fn it_works() {
|
||||
let result = true;
|
||||
assert!(result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_circuits() -> Result<()> {
|
||||
let chunk_handler =
|
||||
BaseCircuitsHandler::new(ProofType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
|
||||
|
||||
let chunk_vk = chunk_handler.get_vk(ProofType::Chunk).unwrap();
|
||||
|
||||
check_vk(ProofType::Chunk, chunk_vk, "chunk vk must be available");
|
||||
let chunk_dir_paths = get_chunk_dir_paths()?;
|
||||
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
|
||||
let mut chunk_infos = vec![];
|
||||
let mut chunk_proofs = vec![];
|
||||
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
|
||||
let chunk_id = format!("chunk_proof{}", id + 1);
|
||||
log::info!("start to process {chunk_id}");
|
||||
let chunk_trace = read_chunk_trace(chunk_path)?;
|
||||
|
||||
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
|
||||
chunk_infos.push(chunk_info);
|
||||
|
||||
log::info!("start to prove {chunk_id}");
|
||||
let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
|
||||
let proof_data = serde_json::to_string(&chunk_proof)?;
|
||||
dump_proof(chunk_id, proof_data)?;
|
||||
chunk_proofs.push(chunk_proof);
|
||||
}
|
||||
|
||||
let batch_handler =
|
||||
BaseCircuitsHandler::new(ProofType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
|
||||
let batch_vk = batch_handler.get_vk(ProofType::Batch).unwrap();
|
||||
check_vk(ProofType::Batch, batch_vk, "batch vk must be available");
|
||||
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
|
||||
log::info!("start to prove batch");
|
||||
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
|
||||
let proof_data = serde_json::to_string(&batch_proof)?;
|
||||
dump_proof("batch_proof".to_string(), proof_data)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_vk(proof_type: ProofType, vk: Vec<u8>, info: &str) {
|
||||
log::info!("check_vk, {:?}", proof_type);
|
||||
let vk_from_file = read_vk(proof_type).unwrap();
|
||||
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
|
||||
}
|
||||
|
||||
fn read_vk(proof_type: ProofType) -> Result<String> {
|
||||
log::info!("read_vk, {:?}", proof_type);
|
||||
let vk_file = match proof_type {
|
||||
ProofType::Chunk => CHUNK_VK_PATH.clone(),
|
||||
ProofType::Batch => BATCH_VK_PATH.clone(),
|
||||
ProofType::Undefined => unreachable!(),
|
||||
};
|
||||
|
||||
let data = std::fs::read(vk_file)?;
|
||||
Ok(encode_vk(data))
|
||||
}
|
||||
|
||||
fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
|
||||
log::info!("read_chunk_trace, {:?}", path);
|
||||
let mut chunk_trace: Vec<BlockTrace> = vec![];
|
||||
|
||||
fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
|
||||
let f = std::fs::File::open(file)?;
|
||||
Ok(serde_json::from_reader(&f)?)
|
||||
}
|
||||
|
||||
if path.is_dir() {
|
||||
let entries = std::fs::read_dir(&path)?;
|
||||
let mut files: Vec<String> = entries
|
||||
.into_iter()
|
||||
.filter_map(|e| {
|
||||
if e.is_err() {
|
||||
return None;
|
||||
}
|
||||
let entry = e.unwrap();
|
||||
if entry.path().is_dir() {
|
||||
return None;
|
||||
}
|
||||
if let Result::Ok(file_name) = entry.file_name().into_string() {
|
||||
Some(file_name)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
files.sort();
|
||||
|
||||
log::info!("files in chunk {:?} is {:?}", path, files);
|
||||
for file in files {
|
||||
let block_trace = read_block_trace(&path.join(file))?;
|
||||
chunk_trace.push(block_trace);
|
||||
}
|
||||
} else {
|
||||
let block_trace = read_block_trace(&path)?;
|
||||
chunk_trace.push(block_trace);
|
||||
}
|
||||
Ok(chunk_trace)
|
||||
}
|
||||
|
||||
fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
|
||||
let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
|
||||
let entries = std::fs::read_dir(&batch_path)?;
|
||||
let mut files: Vec<String> = entries
|
||||
.filter_map(|e| {
|
||||
if e.is_err() {
|
||||
return None;
|
||||
}
|
||||
let entry = e.unwrap();
|
||||
if entry.path().is_dir() {
|
||||
if let Result::Ok(file_name) = entry.file_name().into_string() {
|
||||
Some(file_name)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
files.sort();
|
||||
log::info!("files in batch {:?} is {:?}", batch_path, files);
|
||||
Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
|
||||
}
|
||||
|
||||
fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkHash> {
|
||||
let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
|
||||
Ok(ChunkHash::from_witness_block(&witness_block, false))
|
||||
}
|
||||
|
||||
fn dump_proof(id: String, proof_data: String) -> Result<()> {
|
||||
let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
|
||||
Ok(std::fs::write(dump_path.join(id), proof_data)?)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -8,7 +8,7 @@ use std::{cell::RefCell, cmp::Ordering, rc::Rc};
|
||||
|
||||
use prover_next::{
|
||||
aggregator::Prover as BatchProver, check_chunk_hashes, zkevm::Prover as ChunkProver,
|
||||
BatchProof, BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
|
||||
BatchProvingTask, BlockTrace, ChunkInfo, ChunkProof, ChunkProvingTask,
|
||||
};
|
||||
|
||||
use super::bernoulli::OUTPUT_DIR;
|
||||
@@ -59,7 +59,8 @@ impl NextCircuitsHandler {
|
||||
}
|
||||
}
|
||||
|
||||
fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
|
||||
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
|
||||
let chunk_trace = self.gen_chunk_traces(task)?;
|
||||
if let Some(prover) = self.chunk_prover.as_ref() {
|
||||
let chunk = ChunkProvingTask::from(chunk_trace);
|
||||
|
||||
@@ -68,29 +69,22 @@ impl NextCircuitsHandler {
|
||||
.borrow_mut()
|
||||
.gen_chunk_proof(chunk, None, None, self.get_output_dir())?;
|
||||
|
||||
return Ok(chunk_proof);
|
||||
return serde_json::to_string(&chunk_proof).map_err(|e| anyhow::anyhow!(e));
|
||||
}
|
||||
unreachable!("please check errors in proof_type logic")
|
||||
}
|
||||
|
||||
fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
|
||||
let chunk_trace = self.gen_chunk_traces(task)?;
|
||||
let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
|
||||
Ok(serde_json::to_string(&chunk_proof)?)
|
||||
}
|
||||
|
||||
fn gen_batch_proof_raw(
|
||||
&self,
|
||||
chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)>,
|
||||
) -> Result<BatchProof> {
|
||||
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
|
||||
if let Some(prover) = self.batch_prover.as_ref() {
|
||||
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
|
||||
self.gen_chunk_hashes_proofs(task)?;
|
||||
let chunk_proofs: Vec<ChunkProof> =
|
||||
chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();
|
||||
|
||||
let is_valid = prover.borrow_mut().check_protocol_of_chunks(&chunk_proofs);
|
||||
|
||||
if !is_valid {
|
||||
bail!("non-match chunk protocol")
|
||||
bail!("non-match chunk protocol, task-id: {}", &task.id)
|
||||
}
|
||||
check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
|
||||
let batch = BatchProvingTask { chunk_proofs };
|
||||
@@ -99,19 +93,11 @@ impl NextCircuitsHandler {
|
||||
.borrow_mut()
|
||||
.gen_agg_evm_proof(batch, None, self.get_output_dir())?;
|
||||
|
||||
return Ok(batch_proof);
|
||||
return serde_json::to_string(&batch_proof).map_err(|e| anyhow::anyhow!(e));
|
||||
}
|
||||
unreachable!("please check errors in proof_type logic")
|
||||
}
|
||||
|
||||
fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
|
||||
log::info!("[circuit] gen_batch_proof for task {}", task.id);
|
||||
let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> =
|
||||
self.gen_chunk_hashes_proofs(task)?;
|
||||
let batch_proof = self.gen_batch_proof_raw(chunk_hashes_proofs)?;
|
||||
Ok(serde_json::to_string(&batch_proof)?)
|
||||
}
|
||||
|
||||
fn get_output_dir(&self) -> Option<&str> {
|
||||
OUTPUT_DIR.as_deref()
|
||||
}
|
||||
@@ -209,178 +195,3 @@ impl CircuitsHandler for NextCircuitsHandler {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// =================================== tests module ========================================
|
||||
|
||||
#[cfg(test)]
|
||||
mod tests {
|
||||
use super::*;
|
||||
use crate::zk_circuits_handler::utils::encode_vk;
|
||||
use prover_next::utils::chunk_trace_to_witness_block;
|
||||
use std::{path::PathBuf, sync::LazyLock};
|
||||
|
||||
#[ctor::ctor]
|
||||
fn init() {
|
||||
crate::utils::log_init(None);
|
||||
log::info!("logger initialized");
|
||||
}
|
||||
|
||||
static DEFAULT_WORK_DIR: &str = "/assets";
|
||||
static WORK_DIR: LazyLock<String> = LazyLock::new(|| {
|
||||
std::env::var("CURIE_TEST_DIR")
|
||||
.unwrap_or(String::from(DEFAULT_WORK_DIR))
|
||||
.trim_end_matches('/')
|
||||
.to_string()
|
||||
});
|
||||
static PARAMS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_params", *WORK_DIR));
|
||||
static ASSETS_PATH: LazyLock<String> = LazyLock::new(|| format!("{}/test_assets", *WORK_DIR));
|
||||
static PROOF_DUMP_PATH: LazyLock<String> =
|
||||
LazyLock::new(|| format!("{}/proof_data", *WORK_DIR));
|
||||
static BATCH_DIR_PATH: LazyLock<String> =
|
||||
LazyLock::new(|| format!("{}/traces/batch_24", *WORK_DIR));
|
||||
static BATCH_VK_PATH: LazyLock<String> =
|
||||
LazyLock::new(|| format!("{}/test_assets/agg_vk.vkey", *WORK_DIR));
|
||||
static CHUNK_VK_PATH: LazyLock<String> =
|
||||
LazyLock::new(|| format!("{}/test_assets/chunk_vk.vkey", *WORK_DIR));
|
||||
|
||||
#[test]
|
||||
fn it_works() {
|
||||
let result = true;
|
||||
assert!(result);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_circuits() -> Result<()> {
|
||||
let chunk_handler =
|
||||
NextCircuitsHandler::new(ProofType::Chunk, &PARAMS_PATH, &ASSETS_PATH, None)?;
|
||||
|
||||
let chunk_vk = chunk_handler.get_vk(ProofType::Chunk).unwrap();
|
||||
|
||||
check_vk(ProofType::Chunk, chunk_vk, "chunk vk must be available");
|
||||
let chunk_dir_paths = get_chunk_dir_paths()?;
|
||||
log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
|
||||
let mut chunk_infos = vec![];
|
||||
let mut chunk_proofs = vec![];
|
||||
for (id, chunk_path) in chunk_dir_paths.into_iter().enumerate() {
|
||||
let chunk_id = format!("chunk_proof{}", id + 1);
|
||||
log::info!("start to process {chunk_id}");
|
||||
let chunk_trace = read_chunk_trace(chunk_path)?;
|
||||
|
||||
let chunk_info = traces_to_chunk_info(chunk_trace.clone())?;
|
||||
chunk_infos.push(chunk_info);
|
||||
|
||||
log::info!("start to prove {chunk_id}");
|
||||
let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
|
||||
let proof_data = serde_json::to_string(&chunk_proof)?;
|
||||
dump_proof(chunk_id, proof_data)?;
|
||||
chunk_proofs.push(chunk_proof);
|
||||
}
|
||||
|
||||
let batch_handler =
|
||||
NextCircuitsHandler::new(ProofType::Batch, &PARAMS_PATH, &ASSETS_PATH, None)?;
|
||||
let batch_vk = batch_handler.get_vk(ProofType::Batch).unwrap();
|
||||
check_vk(ProofType::Batch, batch_vk, "batch vk must be available");
|
||||
let chunk_hashes_proofs = chunk_infos.into_iter().zip(chunk_proofs).collect();
|
||||
log::info!("start to prove batch");
|
||||
let batch_proof = batch_handler.gen_batch_proof_raw(chunk_hashes_proofs)?;
|
||||
let proof_data = serde_json::to_string(&batch_proof)?;
|
||||
dump_proof("batch_proof".to_string(), proof_data)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn check_vk(proof_type: ProofType, vk: Vec<u8>, info: &str) {
|
||||
log::info!("check_vk, {:?}", proof_type);
|
||||
let vk_from_file = read_vk(proof_type).unwrap();
|
||||
assert_eq!(vk_from_file, encode_vk(vk), "{info}")
|
||||
}
|
||||
|
||||
fn read_vk(proof_type: ProofType) -> Result<String> {
|
||||
log::info!("read_vk, {:?}", proof_type);
|
||||
let vk_file = match proof_type {
|
||||
ProofType::Chunk => CHUNK_VK_PATH.clone(),
|
||||
ProofType::Batch => BATCH_VK_PATH.clone(),
|
||||
ProofType::Undefined => unreachable!(),
|
||||
};
|
||||
|
||||
let data = std::fs::read(vk_file)?;
|
||||
Ok(encode_vk(data))
|
||||
}
|
||||
|
||||
fn read_chunk_trace(path: PathBuf) -> Result<Vec<BlockTrace>> {
|
||||
log::info!("read_chunk_trace, {:?}", path);
|
||||
let mut chunk_trace: Vec<BlockTrace> = vec![];
|
||||
|
||||
fn read_block_trace(file: &PathBuf) -> Result<BlockTrace> {
|
||||
let f = std::fs::File::open(file)?;
|
||||
Ok(serde_json::from_reader(&f)?)
|
||||
}
|
||||
|
||||
if path.is_dir() {
|
||||
let entries = std::fs::read_dir(&path)?;
|
||||
let mut files: Vec<String> = entries
|
||||
.into_iter()
|
||||
.filter_map(|e| {
|
||||
if e.is_err() {
|
||||
return None;
|
||||
}
|
||||
let entry = e.unwrap();
|
||||
if entry.path().is_dir() {
|
||||
return None;
|
||||
}
|
||||
if let Result::Ok(file_name) = entry.file_name().into_string() {
|
||||
Some(file_name)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
files.sort();
|
||||
|
||||
log::info!("files in chunk {:?} is {:?}", path, files);
|
||||
for file in files {
|
||||
let block_trace = read_block_trace(&path.join(file))?;
|
||||
chunk_trace.push(block_trace);
|
||||
}
|
||||
} else {
|
||||
let block_trace = read_block_trace(&path)?;
|
||||
chunk_trace.push(block_trace);
|
||||
}
|
||||
Ok(chunk_trace)
|
||||
}
|
||||
|
||||
fn get_chunk_dir_paths() -> Result<Vec<PathBuf>> {
|
||||
let batch_path = PathBuf::from(BATCH_DIR_PATH.clone());
|
||||
let entries = std::fs::read_dir(&batch_path)?;
|
||||
let mut files: Vec<String> = entries
|
||||
.filter_map(|e| {
|
||||
if e.is_err() {
|
||||
return None;
|
||||
}
|
||||
let entry = e.unwrap();
|
||||
if entry.path().is_dir() {
|
||||
if let Result::Ok(file_name) = entry.file_name().into_string() {
|
||||
Some(file_name)
|
||||
} else {
|
||||
None
|
||||
}
|
||||
} else {
|
||||
None
|
||||
}
|
||||
})
|
||||
.collect();
|
||||
files.sort();
|
||||
log::info!("files in batch {:?} is {:?}", batch_path, files);
|
||||
Ok(files.into_iter().map(|f| batch_path.join(f)).collect())
|
||||
}
|
||||
|
||||
fn traces_to_chunk_info(chunk_trace: Vec<BlockTrace>) -> Result<ChunkInfo> {
|
||||
let witness_block = chunk_trace_to_witness_block(chunk_trace)?;
|
||||
Ok(ChunkInfo::from_witness_block(&witness_block, false))
|
||||
}
|
||||
|
||||
fn dump_proof(id: String, proof_data: String) -> Result<()> {
|
||||
let dump_path = PathBuf::from(PROOF_DUMP_PATH.clone());
|
||||
Ok(std::fs::write(dump_path.join(id), proof_data)?)
|
||||
}
|
||||
}
|
||||
|
||||