fix: use explicit Solidity types

This commit is contained in:
Wanseob Lim
2020-06-04 12:23:37 +09:00
parent 75926ff403
commit c3b9bb2d06
21 changed files with 770 additions and 347 deletions

View File

@@ -31,6 +31,7 @@
"bn.js": "^5.1.1",
"circomlib": "^0.1.1",
"snarkjs": "^0.1.25",
"soltypes": "^1.1.2",
"web3-utils": "^1.2.6"
},
"publishConfig": {

View File

@@ -1,5 +1,6 @@
import BN from 'bn.js'
import bigInt, { BigInteger } from 'big-integer'
import { Bytes32, Uint256 } from 'soltypes'
export type F = number | string | number[] | Uint8Array | Buffer | BN
@@ -79,6 +80,14 @@ export class Field extends BN {
return `0x${this.toString('hex')}`
}
toBytes32(): Bytes32 {
return new Bytes32(`0x${this.toString(16, 64)}`)
}
toUint256(): Uint256 {
return this.toBytes32().toUint()
}
toIden3BigInt(): BigInteger {
return bigInt(this.toString())
}

View File

@@ -44,7 +44,9 @@
"bn.js": "^5.1.1",
"ffjavascript": "^0.1.3",
"node-fetch": "^2.6.0",
"node-schedule": "^1.3.2",
"snarkjs": "^0.1.27",
"soltypes": "^1.1.2",
"web3": "^1.2.6",
"web3-core": "^1.2.6",
"web3-eth-contract": "^1.2.6",

View File

@@ -1,4 +1,5 @@
/* eslint-disable @typescript-eslint/camelcase */
// eslint-disable-next-line max-classes-per-file
import {
ZkTx,
ZkInflow,
@@ -6,31 +7,32 @@ import {
PublicData,
SNARK,
} from '@zkopru/transaction'
import { BlockSql, BlockStatus } from '@zkopru/database'
import { BlockSql, BlockStatus, HeaderSql } from '@zkopru/database'
import * as Utils from '@zkopru/utils'
import { Field } from '@zkopru/babyjubjub'
import { Transaction } from 'web3-core'
import { soliditySha3 } from 'web3-utils'
import { Bytes32, Uint256, Address } from 'soltypes'
// import { soliditySha3 } from 'web3-utils'
export interface MassDeposit {
merged: string
fee: string
merged: Bytes32
fee: Uint256
}
export interface ERC20Migration {
addr: string
amount: string
addr: Address
amount: Uint256
}
export interface ERC721Migration {
addr: string
nfts: string[]
addr: Address
nfts: Uint256[]
}
export interface MassMigration {
destination: string
totalETH: string
destination: Address
totalETH: Uint256
migratingLeaves: MassDeposit
erc20: ERC20Migration[]
erc721: ERC721Migration[]
@@ -42,18 +44,18 @@ export interface Proposal {
}
export interface Header {
proposer: string
parentBlock: string
metadata: string
fee: string
utxoRoot: string
utxoIndex: string
nullifierRoot: string
withdrawalRoot: string
withdrawalIndex: string
txRoot: string
depositRoot: string
migrationRoot: string
proposer: Address
parentBlock: Bytes32
metadata: Bytes32
fee: Uint256
utxoRoot: Uint256
utxoIndex: Uint256
nullifierRoot: Bytes32
withdrawalRoot: Bytes32
withdrawalIndex: Uint256
txRoot: Bytes32
depositRoot: Bytes32
migrationRoot: Bytes32
}
export interface Body {
@@ -63,7 +65,7 @@ export interface Body {
}
export interface Finalization {
submissionId: string
proposalChecksum: Bytes32
header: Header
massDeposits: MassDeposit[]
massMigration: MassMigration[]
@@ -75,44 +77,65 @@ export enum VerifyResult {
FULLY_VERIFIED,
}
export function headerToSql(header: Header): HeaderSql {
const sql: HeaderSql = {} as HeaderSql
Object.keys(header).forEach(key => {
sql[key] = header[key].toString()
})
return sql
}
export function sqlToHeader(sql: HeaderSql): Header {
return {
proposer: Address.from(sql.proposer),
parentBlock: Bytes32.from(sql.parentBlock),
metadata: Bytes32.from(sql.metadata),
fee: Uint256.from(sql.fee),
utxoRoot: Uint256.from(sql.utxoRoot),
utxoIndex: Uint256.from(sql.utxoIndex),
nullifierRoot: Bytes32.from(sql.nullifierRoot),
withdrawalRoot: Bytes32.from(sql.withdrawalRoot),
withdrawalIndex: Uint256.from(sql.withdrawalIndex),
txRoot: Bytes32.from(sql.txRoot),
depositRoot: Bytes32.from(sql.depositRoot),
migrationRoot: Bytes32.from(sql.migrationRoot),
}
}
export function serializeHeader(header: Header): Buffer {
// Header
const headerBytes = Buffer.concat([
Utils.hexToBuffer(header.proposer, 20),
Utils.hexToBuffer(header.parentBlock, 32),
Utils.hexToBuffer(header.metadata, 32),
Utils.hexToBuffer(header.fee, 32),
Utils.hexToBuffer(header.utxoRoot, 32),
Utils.hexToBuffer(header.utxoIndex, 32),
Utils.hexToBuffer(header.nullifierRoot, 32),
Utils.hexToBuffer(header.withdrawalRoot, 32),
Utils.hexToBuffer(header.withdrawalIndex, 32),
Utils.hexToBuffer(header.txRoot, 32),
Utils.hexToBuffer(header.depositRoot, 32),
Utils.hexToBuffer(header.migrationRoot, 32),
])
const headerBytes = Buffer.concat(
[
header.proposer,
header.parentBlock,
header.metadata,
header.fee,
header.utxoRoot,
header.utxoIndex,
header.nullifierRoot,
header.withdrawalRoot,
header.withdrawalIndex,
header.txRoot,
header.depositRoot,
header.migrationRoot,
].map(val => val.toBuffer()),
)
return headerBytes
}
export function serializeBody(body: Body): Buffer {
const arr: Buffer[] = []
// Txs
const txLenBytes = Utils.hexToBuffer(body.txs.length.toString(16), 2)
const txLenBytes = Utils.numToBuffer(body.txs.length, 2)
arr.push(txLenBytes)
for (let i = 0; i < body.txs.length; i += 1) {
const numOfInflowByte = Utils.hexToBuffer(
body.txs[i].inflow.length.toString(16),
1,
)
const numOfInflowByte = Utils.numToBuffer(body.txs[i].inflow.length, 1)
arr.push(numOfInflowByte)
for (let j = 0; j < body.txs[i].inflow.length; j += 1) {
arr.push(body.txs[i].inflow[j].root.toBuffer('be', 32))
arr.push(body.txs[i].inflow[j].nullifier.toBuffer('be', 32))
}
const numOfOutflowByte = Utils.hexToBuffer(
body.txs[i].outflow.length.toString(16),
1,
)
const numOfOutflowByte = Utils.numToBuffer(body.txs[i].outflow.length, 1)
arr.push(numOfOutflowByte)
for (let j = 0; j < body.txs[i].outflow.length; j += 1) {
arr.push(body.txs[i].outflow[j].note.toBuffer('be', 32))
@@ -156,41 +179,33 @@ export function serializeBody(body: Body): Buffer {
}
}
// Mass deposits
const massDepositLenBytes = Utils.hexToBuffer(
body.massDeposits.length.toString(16),
1,
)
const massDepositLenBytes = Utils.numToBuffer(body.massDeposits.length, 1)
arr.push(massDepositLenBytes)
for (let i = 0; i < body.massDeposits.length; i += 1) {
arr.push(Utils.hexToBuffer(body.massDeposits[i].merged, 32))
arr.push(Utils.hexToBuffer(body.massDeposits[i].fee, 32))
arr.push(body.massDeposits[i].merged.toBuffer())
arr.push(body.massDeposits[i].fee.toBuffer())
}
// Mass migrations
const massMigrationLenBytes = Utils.hexToBuffer(
body.massMigrations.length.toString(16),
1,
)
const massMigrationLenBytes = Utils.numToBuffer(body.massMigrations.length, 1)
arr.push(massMigrationLenBytes)
for (let i = 0; i < body.massMigrations.length; i += 1) {
arr.push(Utils.hexToBuffer(body.massMigrations[i].destination, 20))
arr.push(Utils.hexToBuffer(body.massMigrations[i].totalETH, 32))
arr.push(
Utils.hexToBuffer(body.massMigrations[i].migratingLeaves.merged, 32),
)
arr.push(Utils.hexToBuffer(body.massMigrations[i].migratingLeaves.fee, 32))
arr.push(body.massMigrations[i].destination.toBuffer())
arr.push(body.massMigrations[i].totalETH.toBuffer())
arr.push(body.massMigrations[i].migratingLeaves.merged.toBuffer())
arr.push(body.massMigrations[i].migratingLeaves.fee.toBuffer())
const { erc20, erc721 } = body.massMigrations[i]
arr.push(Utils.hexToBuffer(erc20.length.toString(16), 1))
arr.push(Utils.numToBuffer(erc20.length, 1))
for (let j = 0; j < erc20.length; j += 1) {
arr.push(Utils.hexToBuffer(erc20[j].addr, 20))
arr.push(Utils.hexToBuffer(erc20[j].amount, 32))
arr.push(erc20[j].addr.toBuffer())
arr.push(erc20[j].amount.toBuffer())
}
arr.push(Utils.hexToBuffer(erc721.length.toString(16), 1))
arr.push(Utils.numToBuffer(erc721.length, 1))
for (let j = 0; j < erc721.length; j += 1) {
arr.push(Utils.hexToBuffer(erc721[j].addr, 20))
arr.push(erc721[j].addr.toBuffer())
const { nfts } = erc721[j]
arr.push(Utils.hexToBuffer(nfts.length.toString(16), 1))
arr.push(Utils.numToBuffer(nfts.length, 1))
for (let k = 0; k < nfts.length; k += 1) {
arr.push(Utils.hexToBuffer(nfts[k], 32))
arr.push(nfts[k].toBuffer())
}
}
}
@@ -202,18 +217,18 @@ function deserializeHeaderFrom(
): { header: Header; rest: string } {
const queue = new Utils.StringifiedHexQueue(rawData)
const header: Header = {
proposer: queue.dequeue(20),
parentBlock: queue.dequeue(32),
metadata: queue.dequeue(32),
fee: queue.dequeue(32),
utxoRoot: queue.dequeue(32),
utxoIndex: queue.dequeue(32),
nullifierRoot: queue.dequeue(32),
withdrawalRoot: queue.dequeue(32),
withdrawalIndex: queue.dequeue(32),
txRoot: queue.dequeue(32),
depositRoot: queue.dequeue(32),
migrationRoot: queue.dequeue(32),
proposer: queue.dequeueToAddress(),
parentBlock: queue.dequeueToBytes32(),
metadata: queue.dequeueToBytes32(),
fee: queue.dequeueToUint256(),
utxoRoot: queue.dequeueToUint256(),
utxoIndex: queue.dequeueToUint256(),
nullifierRoot: queue.dequeueToBytes32(),
withdrawalRoot: queue.dequeueToBytes32(),
withdrawalIndex: queue.dequeueToUint256(),
txRoot: queue.dequeueToBytes32(),
depositRoot: queue.dequeueToBytes32(),
migrationRoot: queue.dequeueToBytes32(),
}
return { header, rest: queue.dequeueAll() }
}
@@ -285,9 +300,10 @@ function deserializeMassDeposits(
const mdLength: number = queue.dequeueToNumber(1)
const massDeposits: MassDeposit[] = []
while (massDeposits.length < mdLength) {
console.log('deserializa mass deposit rest', queue.str.slice(0, 128))
massDeposits.push({
merged: queue.dequeue(32),
fee: queue.dequeue(32),
merged: queue.dequeueToBytes32(),
fee: queue.dequeueToUint256(),
})
}
return { massDeposits, rest: queue.dequeueAll() }
@@ -303,15 +319,15 @@ function deserializeMassMigrations(
const destination = queue.dequeue(20)
const totalETH = queue.dequeue(32)
const migratingLeaves: MassDeposit = {
merged: queue.dequeue(32),
fee: queue.dequeue(32),
merged: queue.dequeueToBytes32(),
fee: queue.dequeueToUint256(),
}
const erc20MigrationLength = queue.dequeueToNumber(1)
const erc20Migrations: ERC20Migration[] = []
while (erc20Migrations.length < erc20MigrationLength) {
erc20Migrations.push({
addr: queue.dequeue(20),
amount: queue.dequeue(32),
addr: queue.dequeueToAddress(),
amount: queue.dequeueToUint256(),
})
}
const erc721MigrationLength = queue.dequeueToNumber(1)
@@ -324,13 +340,13 @@ function deserializeMassMigrations(
nfts.push(queue.dequeue(32))
}
erc721Migrations.push({
addr,
nfts,
addr: Address.from(addr),
nfts: nfts.map(Uint256.from),
})
}
massMigrations.push({
destination,
totalETH,
destination: Address.from(destination),
totalETH: Uint256.from(totalETH),
migratingLeaves,
erc20: erc20Migrations,
erc721: erc721Migrations,
@@ -339,55 +355,58 @@ function deserializeMassMigrations(
return { massMigrations, rest: queue.dequeueAll() }
}
export function headerHash(header: Header): string {
const concatenated = Buffer.concat([
Utils.hexToBuffer(header.proposer, 20),
Utils.hexToBuffer(header.parentBlock, 32),
Utils.hexToBuffer(header.metadata, 32),
Utils.hexToBuffer(header.fee, 32),
Utils.hexToBuffer(header.utxoRoot, 32),
Utils.hexToBuffer(header.utxoIndex, 32),
Utils.hexToBuffer(header.nullifierRoot, 32),
Utils.hexToBuffer(header.withdrawalRoot, 32),
Utils.hexToBuffer(header.withdrawalIndex, 32),
Utils.hexToBuffer(header.txRoot, 32),
Utils.hexToBuffer(header.depositRoot, 32),
Utils.hexToBuffer(header.migrationRoot, 32),
])
export function headerHash(header: Header): Bytes32 {
const concatenated = Buffer.concat(
[
header.proposer,
header.parentBlock,
header.metadata,
header.fee,
header.utxoRoot,
header.utxoIndex,
header.nullifierRoot,
header.withdrawalRoot,
header.withdrawalIndex,
header.txRoot,
header.depositRoot,
header.migrationRoot,
].map(val => val.toBuffer()),
)
const result = soliditySha3(`0x${concatenated.toString('hex')}`)
if (!result) throw Error('Failed to get header hash')
return result
return Bytes32.from(result)
}
export function massDepositHash(massDeposit: MassDeposit): string {
const concatenated = Buffer.concat([
Utils.hexToBuffer(massDeposit.merged, 32),
Utils.hexToBuffer(massDeposit.fee, 32),
])
const concatenated = Buffer.concat(
[massDeposit.merged, massDeposit.fee].map(val => val.toBuffer()),
)
const result = soliditySha3(`0x${concatenated.toString('hex')}`)
if (!result) throw Error('Failed to get header hash')
return result
}
export function massMigrationHash(massMigration: MassMigration): string {
let concatenated = Buffer.concat([
Utils.hexToBuffer(massMigration.destination, 32),
Utils.hexToBuffer(massMigration.migratingLeaves.merged, 32),
Utils.hexToBuffer(massMigration.migratingLeaves.fee, 32),
])
let concatenated = Buffer.concat(
[
massMigration.destination,
massMigration.migratingLeaves.merged,
massMigration.migratingLeaves.fee,
].map(val => val.toBuffer()),
)
for (let i = 0; i < massMigration.erc20.length; i += 1) {
concatenated = Buffer.concat([
concatenated,
Utils.hexToBuffer(massMigration.erc20[i].addr, 20),
Utils.hexToBuffer(massMigration.erc20[i].amount, 20),
massMigration.erc20[i].addr.toBuffer(),
massMigration.erc20[i].amount.toBuffer(),
])
}
for (let i = 0; i < massMigration.erc721.length; i += 1) {
concatenated = Buffer.concat([
concatenated,
Utils.hexToBuffer(massMigration.erc721[i].addr, 20),
massMigration.erc721[i].addr.toBuffer(),
massMigration.erc721[i].nfts.reduce((buff, nft) => {
return Buffer.concat([buff, Utils.hexToBuffer(nft, 32)])
return Buffer.concat([buff, nft.toBuffer()])
}, Buffer.from([])),
])
}
@@ -397,15 +416,17 @@ export function massMigrationHash(massMigration: MassMigration): string {
}
export class Block {
hash: string
hash: Bytes32
status: BlockStatus
proposedAt?: number
proposalNum?: number
parent: string
proposedAt: number
proposalHash: string
parent: Bytes32
proposalTx: Bytes32
header: Header
@@ -414,43 +435,46 @@ export class Block {
proposalData?: Transaction
bootstrap?: {
utxoTreeIndex: number
utxoBootstrap: string[]
withdrawalTreeIndex: number
withdrawalBootstrap: string[]
utxoTreeIndex: Uint256
utxoBootstrap: Uint256[]
withdrawalTreeIndex: Uint256
withdrawalBootstrap: Bytes32[]
}
constructor({
hash,
status,
proposalNum,
proposedAt,
parent,
proposalHash,
proposalTx,
header,
body,
proposalData,
bootstrap,
}: {
hash: string
hash: Bytes32
status: BlockStatus
proposedAt: number
parent: string
proposalHash: string
parent: Bytes32
proposalTx: Bytes32
header: Header
body: Body
proposalNum?: number
proposalData?: Transaction
bootstrap?: {
utxoTreeIndex: number
utxoBootstrap: string[]
withdrawalTreeIndex: number
withdrawalBootstrap: string[]
utxoTreeIndex: Uint256
utxoBootstrap: Uint256[]
withdrawalTreeIndex: Uint256
withdrawalBootstrap: Bytes32[]
}
}) {
this.hash = hash
this.status = status
this.proposalNum = proposalNum
this.proposedAt = proposedAt
this.parent = parent
this.proposalHash = proposalHash
this.proposalTx = proposalTx
this.header = header
this.body = body
this.proposalData = proposalData
@@ -458,14 +482,36 @@ export class Block {
}
toSqlObj(): BlockSql {
const header = {} as HeaderSql
Object.keys(this.header).forEach(key => {
header[key] = this.header[key].toString()
})
return {
hash: this.hash,
hash: this.hash.toString(),
status: this.status,
proposalNum: this.proposalNum,
proposedAt: this.proposedAt || 0,
proposalHash: this.proposalHash,
header: this.header,
proposalTx: this.proposalTx.toString(),
header,
proposalData: this.proposalData ? this.proposalData : undefined,
bootstrap: this.bootstrap ? this.bootstrap : undefined,
bootstrap: this.bootstrap
? {
utxoTreeIndex: parseInt(
this.bootstrap.utxoTreeIndex.toString(),
10,
),
utxoBootstrap: this.bootstrap.utxoBootstrap.map(val =>
val.toString(),
),
withdrawalTreeIndex: parseInt(
this.bootstrap.withdrawalTreeIndex.toString(),
10,
),
withdrawalBootstrap: this.bootstrap.withdrawalBootstrap.map(val =>
val.toString(),
),
}
: undefined,
}
}
@@ -480,7 +526,17 @@ export class Block {
}
static fromTx(tx: Transaction): Block {
const deserializedHeader = deserializeHeaderFrom(tx.input)
const queue = new Utils.StringifiedHexQueue(tx.input)
// remove function selector
queue.dequeue(4)
// remove param position
queue.dequeue(32)
// remove bytes length
const length = queue.dequeue(32)
console.log('length is ', length)
const rawData = queue.dequeueAll()
console.log('raw data is...', rawData)
const deserializedHeader = deserializeHeaderFrom(rawData)
const deserializedTxs = deserializeTxsFrom(deserializedHeader.rest)
const deserializedMassDeposits = deserializeMassDeposits(
deserializedTxs.rest,
@@ -491,6 +547,7 @@ export class Block {
const { header } = deserializedHeader
const { txs } = deserializedTxs
const { massDeposits } = deserializedMassDeposits
console.log('mass deposits,...', massDeposits)
const { massMigrations } = deserializedMassMigrations
const body: Body = {
txs,
@@ -502,7 +559,7 @@ export class Block {
status: BlockStatus.FETCHED,
proposedAt: tx.blockNumber || 0,
parent: header.parentBlock,
proposalHash: tx.hash,
proposalTx: Bytes32.from(tx.hash),
proposalData: tx,
header,
body,

View File

@@ -4,7 +4,7 @@ import { Field } from '@zkopru/babyjubjub'
import BN from 'bn.js'
export interface BootstrapData {
proposalHash: string
proposalTx: string
blockHash: string
utxoTreeIndex: number
utxoStartingLeafProof: MerkleProof<Field>
@@ -30,7 +30,7 @@ export class HttpBootstrapHelper implements BootstrapHelper {
console.log('json', response.json())
console.log('body', response.body)
return {
proposalHash: body.proposalHash,
proposalTx: body.proposalTx,
blockHash: body.blockHash,
utxoTreeIndex: body.utxoTreeIndex,
utxoStartingLeafProof: {

View File

@@ -1,36 +1,38 @@
import { Hasher, genesisRoot } from '@zkopru/tree'
import { Field } from '@zkopru/babyjubjub'
import { hexify } from '@zkopru/utils'
import { L1Config } from '@zkopru/database'
import { genesisRoot, poseidonHasher, keccakHasher } from '@zkopru/tree'
import { bnToBytes32 } from '@zkopru/utils'
import { Address, Bytes32 } from 'soltypes'
import BN from 'bn.js'
import { Header } from './block'
export const genesis = ({
address,
hashers,
parent,
config,
}: {
address: string
hashers: {
utxo: Hasher<Field>
withdrawal: Hasher<BN>
nullifier: Hasher<BN>
}
address: Address
parent: Bytes32
config: L1Config
}): Header => {
const utxoRoot = genesisRoot(hashers.utxo).toHex()
const withdrawalRoot = hexify(genesisRoot(hashers.withdrawal))
const nullifierRoot = hexify(genesisRoot(hashers.nullifier))
const utxoHasher = poseidonHasher(config.utxoTreeDepth)
const withdrawalHasher = keccakHasher(config.withdrawalTreeDepth)
const nullifierHasher = keccakHasher(config.nullifierTreeDepth)
const utxoRoot = genesisRoot(utxoHasher).toUint256()
const withdrawalRoot = bnToBytes32(genesisRoot(withdrawalHasher))
const nullifierRoot = bnToBytes32(genesisRoot(nullifierHasher))
const zeroBytes = bnToBytes32(new BN(0))
return {
proposer: address,
parentBlock: '0x0000000000000000000000000000000000000000',
metadata:
'0x0000000000000000000000000000000000000000000000000000000000000000',
fee: '0',
parentBlock: parent,
metadata: zeroBytes,
fee: zeroBytes.toUint(),
utxoRoot,
utxoIndex: '0',
utxoIndex: zeroBytes.toUint(),
nullifierRoot,
withdrawalRoot,
withdrawalIndex: '0',
txRoot: '0x00',
depositRoot: '0x00',
migrationRoot: '0x00',
withdrawalIndex: zeroBytes.toUint(),
txRoot: zeroBytes,
depositRoot: zeroBytes,
migrationRoot: zeroBytes,
}
}

View File

@@ -1,19 +1,25 @@
/* eslint-disable @typescript-eslint/camelcase */
import ZkOPRUContract from '@zkopru/contracts'
import { L1Config } from '@zkopru/database'
import { verifyingKeyIdentifier } from '@zkopru/utils'
import { verifyingKeyIdentifier, logger } from '@zkopru/utils'
import Web3 from 'web3'
import { ContractOptions } from 'web3-eth-contract'
import bigInt from 'big-integer'
import * as ffjs from 'ffjavascript'
import { VerifyingKey } from './snark'
import { TransactionObject, Tx } from './types/contract'
export class L1Contract extends ZkOPRUContract {
web3: Web3
address: string
config?: L1Config
constructor(web3: Web3, address: string, option?: ContractOptions) {
super(web3, address, option)
this.web3 = web3
this.address = address
}
async getVKs(): Promise<{ [txSig: string]: VerifyingKey }> {
@@ -74,7 +80,7 @@ export class L1Contract extends ZkOPRUContract {
}
async getConfig(): Promise<L1Config> {
let genesisBlock!: string
if (this.config) return this.config
let utxoTreeDepth!: number
let withdrawalTreeDepth!: number
let nullifierTreeDepth!: number
@@ -87,11 +93,9 @@ export class L1Contract extends ZkOPRUContract {
let utxoSubTreeSize!: number
let withdrawalSubTreeDepth!: number
let withdrawalSubTreeSize!: number
// const utxoTreeDepth = await this.upstream.methods
/** test start */
/** test ends */
const tasks = [
async () => {
genesisBlock = await this.upstream.methods.latest().call()
},
async () => {
utxoTreeDepth = parseInt(
await this.upstream.methods.UTXO_TREE_DEPTH().call(),
@@ -159,8 +163,7 @@ export class L1Contract extends ZkOPRUContract {
},
]
await Promise.all(tasks.map(task => task()))
return {
genesisBlock,
this.config = {
utxoTreeDepth,
withdrawalTreeDepth,
nullifierTreeDepth,
@@ -174,5 +177,34 @@ export class L1Contract extends ZkOPRUContract {
maxUtxoPerTree,
maxWithdrawalPerTree,
}
return this.config
}
async sendTx(tx: TransactionObject<void>, option?: Tx) {
let gas!: number
let gasPrice!: string
await Promise.all(
[
async () => {
try {
gas = await tx.estimateGas({
...option,
})
} catch (err) {
logger.error(err)
throw Error('It may get reverted so did not send the transaction')
}
},
async () => {
gasPrice = await this.web3.eth.getGasPrice()
},
].map(fetchTask => fetchTask()),
)
const receipt = await tx.send({
gas,
gasPrice,
...option,
})
return receipt
}
}

View File

@@ -9,13 +9,15 @@ import {
L1Config,
} from '@zkopru/database'
import { Transaction } from 'web3-core'
import { Block, Header, VerifyResult, MassDeposit } from './block'
import { Bytes32 } from 'soltypes'
import { logger } from '@zkopru/utils'
import { Block, Header, VerifyResult, MassDeposit, sqlToHeader } from './block'
import { BootstrapData } from './bootstrap'
export interface Patch {
result: VerifyResult
block: string
massDeposits?: string[]
block: Bytes32
massDeposits?: Bytes32[]
treePatch?: GrovePatch
}
@@ -36,14 +38,9 @@ export class L2Chain implements ChainConfig {
db: InanoSQLInstance
latest: string
latest?: string
constructor(
db: InanoSQLInstance,
genesisBlock: string,
grove: Grove,
chainConfig: ChainConfig,
) {
constructor(db: InanoSQLInstance, grove: Grove, chainConfig: ChainConfig) {
this.db = db
this.grove = grove
this.id = chainConfig.id
@@ -51,39 +48,87 @@ export class L2Chain implements ChainConfig {
this.chainId = chainConfig.chainId
this.address = chainConfig.address
this.config = chainConfig.config
this.latest = genesisBlock
this.lock = new AsyncLock()
}
async getBlockSql(hash: string): Promise<BlockSql | null> {
async getBlockSql(hash: Bytes32): Promise<BlockSql | null> {
const queryResult = await this.db
.selectTable(schema.block.name)
.presetQuery('getBlockWithHash', { hash })
.presetQuery('getBlockWithHash', { hash: hash.toString() })
.exec()
if (queryResult.length === 0) return null
return queryResult[0] as BlockSql
}
async getBlock(hash: string): Promise<Block | null> {
async getLatestBlockHash(): Promise<Bytes32 | null> {
const lastVerified = await this.db
.selectTable(schema.block.name)
.presetQuery('getLastVerifiedBlock')
.exec()
const lastVerifiedBlock = lastVerified[0]
return lastVerifiedBlock.hash ? Bytes32.from(lastVerifiedBlock.hash) : null
}
async getBlock(hash: Bytes32): Promise<Block | null> {
const blockSql = await this.getBlockSql(hash)
if (!blockSql) return null
const txData = blockSql.proposalData as Transaction
if (!txData) return null
console.log('txData is', txData)
return Block.fromTx(txData)
}
async getDeposits(massDeposit: MassDeposit): Promise<DepositSql[]> {
const commitIndexArr = await this.db
.selectTable(schema.massDeposit.name)
.presetQuery('getCommitIndex', { ...massDeposit, zkopru: this.id })
.presetQuery('getCommitIndex', {
merged: massDeposit.merged.toString(),
fee: massDeposit.fee.toString(),
zkopru: this.id,
})
.exec()
const commitIndex = commitIndexArr[0]
const commitIndex = commitIndexArr[0].index
console.log(
'retrieved,',
await this.db
.selectTable(schema.massDeposit.name)
.query('select')
.exec(),
)
console.log(
'queried',
massDeposit.merged.toString(),
massDeposit.fee.toString(),
this.id,
)
console.log('commitIndex is', commitIndex)
if (!commitIndex) throw Error('Failed to find the mass deposit')
const deposits = await this.db
console.log(
'raw select deposit',
await this.db
.selectTable(schema.deposit.name)
.query('select')
// .where(['queuedAt', 'IN', ['0']])
.exec(),
)
const deposits = (await this.db
.selectTable(schema.deposit.name)
.presetQuery('getDeposits', { commitIndex, zkopru: this.id })
.exec()
return deposits as DepositSql[]
.presetQuery('getDeposits', {
commitIndexes: [commitIndex.toString()],
zkopru: this.id,
})
.exec()) as DepositSql[]
console.log('unsorted deposits', deposits)
deposits.sort((a, b) => {
if (a.blockNumber !== b.blockNumber) {
return a.blockNumber - b.blockNumber
}
if (a.transactionIndex !== b.transactionIndex) {
return a.transactionIndex - b.transactionIndex
}
return a.logIndex - b.logIndex
})
return deposits
}
async getOldestUnverifiedBlock(): Promise<{
@@ -94,26 +139,23 @@ export class L2Chain implements ChainConfig {
.selectTable(schema.block.name)
.presetQuery('getLastVerifiedBlock')
.exec()
if (lastVerified.length > 0) {
const lastVerifiedBlock = lastVerified[0]
const prevHeader = lastVerifiedBlock.header
const lastUnverified = await this.db
.selectTable(schema.block.name)
.query('select', ['header', 'proposalData', 'MIN(proposedAt)'])
.where(['header.parentBlock', '=', prevHeader.hash])
.exec()
const block = Block.fromTx(lastUnverified[0].proposalData)
if (lastUnverified.length > 0) {
return {
prevHeader,
block,
}
}
const lastVerifiedBlock = lastVerified[0] as BlockSql
const prevHeader = sqlToHeader(lastVerifiedBlock.header)
const lastUnverified = await this.db
.selectTable(schema.block.name)
.query('select', ['header', 'proposalData', 'MIN(proposalNum)'])
.where(['header.parentBlock', '=', lastVerifiedBlock.hash])
.exec()
if (!lastUnverified[0].proposalData) return {}
const block = Block.fromTx(lastUnverified[0].proposalData)
return {
prevHeader,
block,
}
return {}
}
async applyPatch(patch: Patch) {
logger.info('layer2.ts: applyPatch()')
const { result, block, treePatch, massDeposits } = patch
// Apply tree patch
if (treePatch) {
@@ -146,43 +188,43 @@ export class L2Chain implements ChainConfig {
.exec()
}
async finalize(hash: string) {
async finalize(hash: Bytes32) {
await this.markAsFinalized(hash)
}
private async markMassDepositsAsIncludedIn(ids: string[], block: string) {
private async markMassDepositsAsIncludedIn(ids: Bytes32[], block: Bytes32) {
this.db
.selectTable(schema.massDeposit.name)
.presetQuery('markAsIncludedIn', {
zkopru: this.id,
block,
ids,
block: block.toString(),
ids: ids.map(val => val.toString()),
})
.exec()
}
private async markAsPartiallyVerified(hash: string) {
private async markAsPartiallyVerified(hash: Bytes32) {
this.db
.selectTable(schema.block.name)
.presetQuery('markAsPartiallyVerified', { hash })
.presetQuery('markAsPartiallyVerified', { hash: hash.toString() })
.exec()
}
private async markAsFullyVerified(hash: string) {
private async markAsFullyVerified(hash: Bytes32) {
this.db
.selectTable(schema.block.name)
.presetQuery('markAsFullyVerified', { hash })
.presetQuery('markAsFullyVerified', { hash: hash.toString() })
}
private async markAsFinalized(hash: string) {
private async markAsFinalized(hash: Bytes32) {
this.db
.selectTable(schema.block.name)
.presetQuery('markAsFinalized', { hash })
.presetQuery('markAsFinalized', { hash: hash.toString() })
}
private async markAsInvalidated(hash: string) {
private async markAsInvalidated(hash: Bytes32) {
this.db
.selectTable(schema.block.name)
.presetQuery('markAsInvalidated', { hash })
.presetQuery('markAsInvalidated', { hash: hash.toString() })
}
}

View File

@@ -4,6 +4,7 @@ import { InanoSQLInstance } from '@nano-sql/core'
import Web3 from 'web3'
import { BlockStatus } from '@zkopru/database'
import { verifyProof } from '@zkopru/tree'
import { Bytes32 } from 'soltypes'
import { L1Contract } from './layer1'
import { Verifier, VerifyOption } from './verifier'
import { L2Chain } from './layer2'
@@ -57,7 +58,9 @@ export class LightNode extends ZkOPRUNode {
async bootstrap() {
if (!this.bootstrapHelper) return
const latest = await this.l1Contract.upstream.methods.latest().call()
const latestBlockFromDB = await this.l2Chain.getBlockSql(latest)
const latestBlockFromDB = await this.l2Chain.getBlockSql(
Bytes32.from(latest),
)
if (
latestBlockFromDB &&
latestBlockFromDB.status &&
@@ -67,10 +70,10 @@ export class LightNode extends ZkOPRUNode {
}
const bootstrapData = await this.bootstrapHelper.fetchBootstrapData(latest)
const proposalData = await this.l1Contract.web3.eth.getTransaction(
bootstrapData.proposalHash,
bootstrapData.proposalTx,
)
const block = Block.fromTx(proposalData)
const headerProof = headerHash(block.header) === latest
const headerProof = headerHash(block.header).eq(Bytes32.from(latest))
const utxoMerkleProof = verifyProof(
this.l2Chain.grove.config.utxoHasher,
bootstrapData.utxoStartingLeafProof,

View File

@@ -1,18 +1,27 @@
import { InanoSQLInstance } from '@nano-sql/core'
import { schema, DepositSql, MassDepositCommitSql } from '@zkopru/database'
import {
schema,
DepositSql,
MassDepositCommitSql,
BlockStatus,
HeaderSql,
} from '@zkopru/database'
import { logger } from '@zkopru/utils'
import { EventEmitter } from 'events'
import { InanoSQLObserverQuery } from '@nano-sql/core/lib/interfaces'
import { toBN } from 'web3-utils'
import { scheduleJob, Job } from 'node-schedule'
import { Bytes32, Address, Uint256 } from 'soltypes'
import { L1Contract } from './layer1'
import { Block } from './block'
import { Block, headerHash } from './block'
import { genesis } from './genesis'
export enum NetworkStatus {
STOPPED,
INITIALIZING,
ON_SYNCING,
LIVE,
FULLY_SYNCED,
ON_ERROR,
STOPPED = 'stopped',
ON_SYNCING = 'on syncing',
ON_PROCESSING = 'processing',
SYNCED = 'synced',
ON_ERROR = 'on error',
}
export class Synchronizer extends EventEmitter {
@@ -23,7 +32,7 @@ export class Synchronizer extends EventEmitter {
l1Contract!: L1Contract
fetching: {
[proposalHash: string]: boolean
[proposalTx: string]: boolean
}
depositSubscriber?: EventEmitter
@@ -36,13 +45,15 @@ export class Synchronizer extends EventEmitter {
private latestProposalObserver?: InanoSQLObserverQuery
private latestVerificationObserver?: InanoSQLObserverQuery
private latestProcessedObserver?: InanoSQLObserverQuery
private latestProposedHash?: string
private latestProposedAt?: number
private latestProposed?: number
private latestVerfied?: number
private latestProcessed?: number
private cronJob?: Job
status: NetworkStatus
@@ -59,6 +70,7 @@ export class Synchronizer extends EventEmitter {
if (this.status !== status) {
this.status = status
this.emit('status', status, this.latestProposedHash)
logger.info(`sync status: ${status}`)
}
}
@@ -68,12 +80,17 @@ export class Synchronizer extends EventEmitter {
) {
if (this.status === NetworkStatus.STOPPED) {
this.setStatus(NetworkStatus.ON_SYNCING)
this.listenGenesis()
this.listenBlockUpdate()
this.listenDeposits()
this.listenMassDepositCommit()
this.listenNewProposals(proposalCB)
this.listenFinalization(finalizationCB)
}
this.cronJob = scheduleJob('*/5 * * * * *', () => {
this.updateStatus()
this.checkUnfetched()
})
}
stop() {
@@ -92,68 +109,170 @@ export class Synchronizer extends EventEmitter {
if (this.latestProposalObserver) {
this.latestProposalObserver.unsubscribe()
}
if (this.cronJob) {
this.cronJob.cancel()
this.cronJob = undefined
}
this.setStatus(NetworkStatus.STOPPED)
}
listenBlockUpdate() {
this.latestProposalObserver = this.db
.selectTable(schema.block.name)
.query('select', ['hash', 'MAX(proposedAt)'])
.query('select', ['hash', 'MAX(proposalNum)'])
.listen({
debounce: 500,
unique: false,
compareFn: (rowsA, rowsB) => {
return rowsA[0]?.proposedAt !== rowsB[0]?.proposedAt
console.log('new block on comparing')
console.log('rows a', rowsA)
console.log('rows b', rowsB)
return rowsA[0]?.proposalNum !== rowsB[0]?.proposalNum
},
})
this.latestProposalObserver.exec(async (rows, err) => {
if (err) this.setStatus(NetworkStatus.ON_ERROR)
else {
this.latestProposedHash = rows[0]?.hash
this.setLatestProposed(rows[0]?.proposesAt)
this.setLatestProposed(rows[0]?.proposalNum)
}
})
this.latestVerificationObserver = this.db
this.latestProcessedObserver = this.db
.selectTable(schema.block.name)
.presetQuery('getLastVerifiedBlock')
.presetQuery('getLastProcessedBlock')
.listen({
debounce: 500,
unique: false,
compareFn: (rowsA, rowsB) => {
return rowsA[0]?.proposedAt !== rowsB[0]?.proposedAt
return rowsA[0]?.proposalNum !== rowsB[0]?.proposalNum
},
})
this.latestVerificationObserver.exec(async (rows, err) => {
this.latestProcessedObserver.exec(async (rows, err) => {
if (err) this.setStatus(NetworkStatus.ON_ERROR)
else {
this.setLatestVerified(rows[0]?.proposesAt)
this.setLatestProcessed(rows[0]?.proposalNum)
}
})
}
private setLatestProposed(blockNum: number) {
if (this.latestProposedAt !== blockNum) {
this.latestProposedAt = blockNum
this.updateStatus()
private setLatestProposed(proposalNum: number) {
if (proposalNum && this.latestProposed !== proposalNum) {
this.latestProposed = proposalNum
}
}
private setLatestVerified(blockNum: number) {
if (this.latestVerfied !== blockNum) {
this.latestVerfied = blockNum
this.updateStatus()
private setLatestProcessed(proposalNum: number) {
if (proposalNum && this.latestProcessed !== proposalNum) {
this.latestProcessed = proposalNum
}
}
async updateStatus() {
if (!this.latestProposedAt || !this.latestVerfied) {
this.setStatus(NetworkStatus.INITIALIZING)
} else if (this.latestProposedAt === this.latestVerfied) {
this.setStatus(NetworkStatus.FULLY_SYNCED)
} else if (this.latestProposedAt - this.latestVerfied < 5) {
this.setStatus(NetworkStatus.LIVE)
} else {
const queryResult = await this.db
.selectTable(schema.block.name)
.query('select', ['MAX(proposalNum) AS knownBlocks'])
.exec()
const lastProcessedBlock = (
await this.db
.selectTable(schema.block.name)
.presetQuery('getLastProcessedBlock')
.exec()
)[0]
const totalProposed = await this.l1Contract.upstream.methods
.proposedBlocks()
.call()
const knownBlocks = queryResult[0]?.knownBlocks + 1 || 0
const processedBlocks = lastProcessedBlock?.proposalNum + 1 || 0
logger.info(
`proposed: ${totalProposed} / known: ${knownBlocks} / processed: ${processedBlocks}`,
)
const haveFetchedAll = toBN(totalProposed).eqn(knownBlocks)
const haveProcessedAll = toBN(processedBlocks).eqn(knownBlocks)
if (!haveFetchedAll) {
this.setStatus(NetworkStatus.ON_SYNCING)
} else if (!haveProcessedAll) {
this.setStatus(NetworkStatus.ON_PROCESSING)
} else {
this.setStatus(NetworkStatus.SYNCED)
}
}
/**
 * Finds proposals whose block body has not been fetched yet and kicks off
 * fetch jobs for them, keeping at most MAX_FETCH_JOB fetches in flight.
 * Invoked periodically by the cron job registered in sync().
 */
async checkUnfetched() {
  const MAX_FETCH_JOB = 10
  // Leave room for fetches that are already in flight.
  const availableFetchJob = Math.max(
    MAX_FETCH_JOB - Object.keys(this.fetching).length,
    0,
  )
  if (availableFetchJob === 0) return
  // Oldest un-fetched proposals first, limited to the available job slots.
  const candidates = await this.db
    .selectTable(schema.block.name)
    .query('select', ['proposalTx'])
    .where(['status', '=', BlockStatus.NOT_FETCHED])
    .orderBy(['proposalNum ASC'])
    .limit(availableFetchJob)
    .exec()
  // NOTE(review): replaced leftover console.log debugging — one call dumped
  // the entire block table on every cron tick.
  logger.debug(`fetch candidates: ${candidates.length}`)
  candidates.forEach(candidate => this.fetch(candidate.proposalTx))
}
/**
 * Ensures the genesis block (proposalNum 0) exists in the local database.
 * If it is missing, subscribes to the layer-1 GenesisBlock event,
 * reconstructs the genesis header from the on-chain config, verifies its
 * hash against the event, persists it, and then unsubscribes.
 */
async listenGenesis() {
  const query = await this.db
    .selectTable(schema.block.name)
    .query('select')
    .where(['proposalNum', '=', 0])
    .exec()
  const genesisExist = query.length === 1
  if (!genesisExist) {
    logger.info('No genesis block. Trying to fetch')
    const genesisListener = this.l1Contract.upstream.events
      .GenesisBlock({ fromBlock: 0 })
      .on('data', async event => {
        const { returnValues, blockNumber, transactionHash } = event
        // WRITE DATABASE
        const { blockHash, proposer, parentBlock } = returnValues
        logger.debug(`genesis hash: ${blockHash}`)
        // GENESIS BLOCK
        const config = await this.l1Contract.getConfig()
        const genesisHeader = genesis({
          address: Address.from(proposer),
          parent: Bytes32.from(parentBlock),
          config,
        })
        // Stringify every header field for persistence.
        const header: HeaderSql = {} as HeaderSql
        Object.keys(genesisHeader).forEach(key => {
          header[key] = genesisHeader[key].toString()
        })
        // Sanity check: the locally computed header hash must match the
        // hash announced by the layer-1 event.
        if (!Bytes32.from(blockHash).eq(headerHash(genesisHeader))) {
          throw Error('Failed to set up the genesis block')
        }
        await this.db
          .selectTable(schema.block.name)
          .presetQuery('addGenesisBlock', {
            hash: Bytes32.from(blockHash).toString(),
            header,
            proposedAt: blockNumber,
            proposalTx: transactionHash,
          })
          .exec()
        // One-shot subscription: stop listening once genesis is stored.
        genesisListener.removeAllListeners()
        if (!this.latestProposed) {
          this.setLatestProposed(0)
        }
        if (!this.latestProcessed) {
          this.setLatestProcessed(0)
        }
      })
  }
  // TODO: layer1 REVERT handling & challenge handling
}
async listenDeposits(cb?: (deposit: DepositSql) => void) {
@@ -161,7 +280,8 @@ export class Synchronizer extends EventEmitter {
.selectTable(schema.deposit.name)
.presetQuery('getSyncStart', { zkopru: this.zkopruId })
.exec()
const fromBlock = query[0] ? query[0].proposedAt : 0
const fromBlock = query[0]?.proposedAt || 0
console.log('new deposit from block', fromBlock)
this.depositSubscriber = this.l1Contract.user.events
.Deposit({ fromBlock })
.on('connected', subId => {
@@ -170,17 +290,24 @@ export class Synchronizer extends EventEmitter {
)
})
.on('data', async event => {
const { returnValues, blockNumber } = event
const { returnValues, logIndex, transactionIndex, blockNumber } = event
const deposit: DepositSql = {
...returnValues,
note: Uint256.from(returnValues.note).toString(),
fee: Uint256.from(returnValues.fee).toString(),
queuedAt: Uint256.from(returnValues.queuedAt).toString(),
zkopru: this.zkopruId,
transactionIndex,
logIndex,
blockNumber,
}
logger.info(`synchronizer.js: NewDeposit(${deposit.note})`)
console.log('deposit detail', deposit)
await this.db
.selectTable(schema.deposit.name)
.presetQuery('writeNewDeposit', { deposit })
.exec()
if (cb) cb(deposit)
console.log('deposit succeeded')
})
.on('changed', event => {
// TODO
@@ -197,7 +324,8 @@ export class Synchronizer extends EventEmitter {
.selectTable(schema.massDeposit.name)
.presetQuery('getSyncStart', { zkopru: this.zkopruId })
.exec()
const fromBlock = query[0] ? query[0].proposedAt : 0
const fromBlock = query[0]?.proposedAt || 0
console.log('mass deposit from block', fromBlock)
this.massDepositCommitSubscriber = this.l1Contract.coordinator.events
.MassDepositCommit({ fromBlock })
.on('connected', subId => {
@@ -207,16 +335,25 @@ export class Synchronizer extends EventEmitter {
})
.on('data', async event => {
const { returnValues, blockNumber } = event
logger.info(
`MassDepositCommit ${(returnValues.index,
returnValues.merged,
returnValues.fee)}`,
)
const massDeposit: MassDepositCommitSql = {
...returnValues,
index: Uint256.from(returnValues.index).toString(),
merged: Bytes32.from(returnValues.merged).toString(),
fee: Uint256.from(returnValues.fee).toString(),
zkopru: this.zkopruId,
blockNumber,
}
console.log('massdeposit commit is', massDeposit)
await this.db
.selectTable(schema.deposit.name)
.selectTable(schema.massDeposit.name)
.presetQuery('writeMassDepositCommit', { massDeposit })
.exec()
if (cb) cb(massDeposit)
console.log('massdeposit commit succeeded')
})
.on('changed', event => {
// TODO
@@ -229,12 +366,12 @@ export class Synchronizer extends EventEmitter {
}
async listenNewProposals(cb?: (hash: string) => void) {
if (this.status !== NetworkStatus.STOPPED) return
const query = await this.db
.selectTable(schema.block.name)
.presetQuery('getProposalSyncStart')
.exec()
const fromBlock = query[0] ? query[0].proposedAt : 0
const fromBlock = query[0]?.proposedAt || 0
console.log('listenNewProposal fromBlock: ', fromBlock)
this.proposalSubscriber = this.l1Contract.coordinator.events
.NewProposal({ fromBlock })
.on('connected', subId => {
@@ -245,15 +382,22 @@ export class Synchronizer extends EventEmitter {
.on('data', async event => {
const { returnValues, blockNumber, transactionHash } = event
// WRITE DATABASE
const { proposalNum, blockHash } = returnValues
console.log('newProposal: ', returnValues)
console.log('blocknumber: ', blockNumber)
console.log('transactionHash: ', transactionHash)
const newProposal = {
blockHash: Bytes32.from(blockHash).toString(),
proposalNum: parseInt(proposalNum, 10),
proposedAt: blockNumber,
proposalTx: transactionHash,
}
console.log('newProposal', newProposal)
await this.db
.selectTable(schema.block.name)
.presetQuery('writeNewProposal', {
hash: returnValues,
proposedAt: blockNumber,
proposalHash: transactionHash,
})
.presetQuery('writeNewProposal', newProposal)
.exec()
if (cb) cb(returnValues)
if (cb) cb(blockHash)
// FETCH DETAILS
this.fetch(transactionHash)
})
@@ -273,7 +417,7 @@ export class Synchronizer extends EventEmitter {
.selectTable(schema.block.name)
.presetQuery('getFinalizationSyncStart')
.exec()
const startFrom = query[0] ? query[0].proposedAt : 0
const startFrom = query[0]?.proposedAt || 0
this.finalizationSubscriber = this.l1Contract.coordinator.events
.Finalized({ fromBlock: startFrom })
.on('connected', subId => {
@@ -296,22 +440,27 @@ export class Synchronizer extends EventEmitter {
})
}
/**
 * Fetches the full block proposal calldata for the given layer-1 proposal
 * transaction, decodes it into a Block, and persists it.
 *
 * Bug fix: `this.fetching[proposalTx]` was checked and deleted but never
 * SET, so the dedup guard was a no-op and the same proposal could be
 * fetched concurrently by overlapping cron ticks. The flag is now set on
 * entry and always released in a finally block, even when fetching or
 * saving throws.
 */
async fetch(proposalTx: string) {
  logger.info('fetched block proposal')
  if (this.fetching[proposalTx]) return
  this.fetching[proposalTx] = true
  try {
    const proposalData = await this.l1Contract.web3.eth.getTransaction(
      proposalTx,
    )
    const block = Block.fromTx(proposalData)
    // Stringify every header field for persistence.
    const header: HeaderSql = {} as HeaderSql
    Object.keys(block.header).forEach(key => {
      header[key] = block.header[key].toString()
    })
    const { hash } = block
    await this.db
      .selectTable(schema.block.name)
      .presetQuery('saveFetchedBlock', {
        hash: hash.toString(),
        header,
        proposalData,
      })
      .exec()
    this.emit('newBlock', block)
  } finally {
    // Always release the in-flight marker.
    delete this.fetching[proposalTx]
  }
}
}

59
packages/core/src/types/contract.d.ts vendored Normal file
View File

@@ -0,0 +1,59 @@
/* Generated by ts-generator ver. 0.0.8 */
// /* tslint:disable */
import BN from 'bn.js'
import { EventLog } from 'web3-core/types'
import { EventEmitter } from 'events'
// @ts-ignore
import PromiEvent from 'web3/promiEvent'
/** Options accepted by a contract method's gas estimation call. */
interface EstimateGasOptions {
from?: string
gas?: number
value?: number | string | BN
}
/** Filtering options for subscribing to a contract event. */
interface EventOptions {
filter?: object
fromBlock?: BlockType
topics?: string[]
}
/** Node-style callback: receives an error or a result of type T. */
export type Callback<T> = (error: Error, result: T) => void
/** An EventLog whose decoded return values are typed as T. */
export interface ContractEventLog<T> extends EventLog {
returnValues: T
}
/**
 * Typed event emitter for a contract event subscription.
 * 'connected' delivers the subscription id, 'data'/'changed' deliver
 * typed event logs, and 'error' delivers subscription errors.
 */
export interface ContractEventEmitter<T> extends EventEmitter {
on(event: 'connected', listener: (subscriptionId: string) => void): this
on(
event: 'data' | 'changed',
listener: (event: ContractEventLog<T>) => void,
): this
on(event: 'error', listener: (error: Error) => void): this
}
/** Factory signature for subscribing to a typed contract event. */
export type ContractEvent<T> = (
options?: EventOptions,
cb?: Callback<ContractEventLog<T>>,
) => ContractEventEmitter<T>
/** Transaction parameters for sending or calling a contract method. */
export interface Tx {
nonce?: string | number
chainId?: string | number
from?: string
to?: string
data?: string
value?: string | number
gas?: string | number
gasPrice?: string | number
}
/**
 * A bound contract method: can be called read-only, sent as a
 * transaction, gas-estimated, or ABI-encoded without sending.
 */
export interface TransactionObject<T> {
arguments: any[]
call(tx?: Tx): Promise<T>
send(tx?: Tx): PromiEvent<T>
estimateGas(tx?: Tx): Promise<number>
encodeABI(): string
}
/** Block selector accepted by web3-style APIs. */
export type BlockType = 'latest' | 'pending' | 'genesis' | number

View File

@@ -1,8 +1,9 @@
import { verifyingKeyIdentifier } from '@zkopru/utils'
import { verifyingKeyIdentifier, logger } from '@zkopru/utils'
// import { Point } from '@zkopru/babyjubjub'
import { DepositSql } from '@zkopru/database'
import { DepositSql, schema } from '@zkopru/database'
import { Bytes32, Uint256 } from 'soltypes'
import { soliditySha3 } from 'web3-utils'
import bigInt from 'big-integer'
import BN from 'bn.js'
import { Block, Header, VerifyResult } from './block'
import { VerifyingKey } from './snark'
import { L1Contract } from './layer1'
@@ -44,6 +45,7 @@ export class Verifier {
prevHeader: Header
block: Block
}): Promise<Patch> {
logger.info(`Verifying ${block.hash}`)
if (this.option.header) {
await this.verifyHeader(block)
}
@@ -51,13 +53,28 @@ export class Verifier {
// deposit verification
for (const massDeposit of block.body.massDeposits) {
const deposits: DepositSql[] = await layer2.getDeposits(massDeposit)
console.log('massdeposit...', massDeposit)
console.log('deposits...', deposits)
console.log(
'select...',
await layer2.db
.selectTable(schema.deposit.name)
.query('select')
.exec(),
)
let merged
let fee = bigInt.zero
let fee = new BN(0)
for (const deposit of deposits) {
merged = soliditySha3(merged || 0, deposit.note) || ''
fee = bigInt(deposit.fee).add(fee)
fee = fee.add(Uint256.from(deposit.fee).toBN())
}
if (merged !== massDeposit.merged || fee.neq(massDeposit.fee)) {
console.log('computed merged', merged)
console.log('committed merged', massDeposit.merged)
console.log('deposits', deposits)
if (
!Bytes32.from(merged).eq(massDeposit.merged) ||
!massDeposit.fee.toBN().eq(fee)
) {
throw Error('Failed to match the deposit leaves with the proposal.')
}
}

View File

@@ -4,13 +4,13 @@ import { uuid } from '@nano-sql/core/lib/utilities'
import { ChainConfig, schema, BlockStatus } from '@zkopru/database'
import { Grove, poseidonHasher, keccakHasher, verifyProof } from '@zkopru/tree'
import { logger } from '@zkopru/utils'
import { Bytes32 } from 'soltypes'
import { L1Contract } from './layer1'
import { Verifier, VerifyOption } from './verifier'
import { L2Chain } from './layer2'
import { BootstrapHelper } from './bootstrap'
import { headerHash, Block } from './block'
import { Synchronizer } from './synchronizer'
import { genesis } from './genesis'
export class ZkOPRUNode {
db: InanoSQLInstance
@@ -29,6 +29,10 @@ export class ZkOPRUNode {
verifyOption: VerifyOption
newBlockListner?: () => Promise<void>
finalizationListener?: (val: string) => Promise<void>
constructor({
db,
l1Contract,
@@ -60,21 +64,30 @@ export class ZkOPRUNode {
startSync() {
logger.info('start sync')
this.synchronizer.on('newBlock', this.processUnverifiedBlocks)
this.synchronizer.on('finalization', this.finalizeBlock)
this.newBlockListner = () => this.processUnverifiedBlocks()
this.finalizationListener = hash => this.finalizeBlock(Bytes32.from(hash))
this.synchronizer.on('newBlock', this.newBlockListner)
this.synchronizer.on('finalization', this.finalizationListener)
this.synchronizer.sync()
}
stopSync() {
logger.info('stop sync')
if (this.newBlockListner) {
this.synchronizer.off('newBlock', this.newBlockListner)
}
if (this.finalizationListener) {
this.synchronizer.off('finalization', this.finalizationListener)
}
this.synchronizer.stop()
this.synchronizer.off('newBlock', this.processUnverifiedBlocks)
}
async bootstrap() {
if (!this.bootstrapHelper) return
const latest = await this.l1Contract.upstream.methods.latest().call()
const latestBlockFromDB = await this.l2Chain.getBlockSql(latest)
const latestBlockFromDB = await this.l2Chain.getBlockSql(
Bytes32.from(latest),
)
if (
latestBlockFromDB &&
latestBlockFromDB.status &&
@@ -84,10 +97,10 @@ export class ZkOPRUNode {
}
const bootstrapData = await this.bootstrapHelper.fetchBootstrapData(latest)
const proposalData = await this.l1Contract.web3.eth.getTransaction(
bootstrapData.proposalHash,
bootstrapData.proposalTx,
)
const block = Block.fromTx(proposalData)
const headerProof = headerHash(block.header) === latest
const headerProof = headerHash(block.header).eq(Bytes32.from(latest))
const utxoMerkleProof = verifyProof(
this.l2Chain.grove.config.utxoHasher,
bootstrapData.utxoStartingLeafProof,
@@ -102,6 +115,7 @@ export class ZkOPRUNode {
}
async processUnverifiedBlocks() {
logger.info('processUnverifiedBlocks()')
// prevHeader should be a verified one
const { prevHeader, block } = await this.l2Chain.getOldestUnverifiedBlock()
if (!block) return
@@ -117,7 +131,7 @@ export class ZkOPRUNode {
await this.l2Chain.applyPatch(patch)
}
async finalizeBlock(hash: string) {
async finalizeBlock(hash: Bytes32) {
this.l2Chain.finalize(hash)
}
@@ -130,6 +144,8 @@ export class ZkOPRUNode {
fullSync: boolean,
accounts?: ZkAccount[],
): Promise<L2Chain> {
logger.info('Get or init chain')
console.log('get or init chain called')
const pubKeysToObserve = accounts
? accounts.map(account => account.pubKey)
: []
@@ -152,9 +168,13 @@ export class ZkOPRUNode {
withdrawal: keccakHasher(l1Config.withdrawalTreeDepth),
nullifier: keccakHasher(l1Config.nullifierTreeDepth),
}
const tables = await db.query('show tables').exec()
if (!tables.find(obj => obj.table === schema.block.name)) {
await db.query('create table', schema.block).exec()
}
if (l2Config) {
const grove = new Grove(l2Config.id, db, {
...l2Config.config,
...l1Config,
utxoHasher: hashers.utxo,
withdrawalHasher: hashers.withdrawal,
nullifierHasher: hashers.nullifier,
@@ -164,22 +184,9 @@ export class ZkOPRUNode {
addressesToObserve,
})
await grove.init()
return new L2Chain(db, l1Config.genesisBlock, grove, l2Config)
return new L2Chain(db, grove, l2Config)
}
const id = uuid()
const genesisBlock = genesis({ address, hashers })
const blockTable = schema.block
const tables = await db.query('show tables').exec()
if (!tables.find(obj => obj.table === blockTable.name)) {
await db.query('create table', blockTable).exec()
}
await db
.selectTable(blockTable.name)
.presetQuery('addGenesisBlock', {
hash: headerHash(genesisBlock),
header: genesisBlock,
})
.exec()
const grove = new Grove(id, db, {
...l1Config,
utxoHasher: hashers.utxo,
@@ -191,7 +198,7 @@ export class ZkOPRUNode {
addressesToObserve,
})
await grove.init()
return new L2Chain(db, l1Config.genesisBlock, grove, {
return new L2Chain(db, grove, {
id,
networkId,
chainId,

View File

@@ -15,6 +15,7 @@ describe('block.ts', () => {
serializeHeader(header),
serializeBody(body),
])
const dummySelector = 'aaaaaaaa'
const dummyTx: Transaction = {
hash: 'dummyhash',
nonce: 1,
@@ -26,7 +27,7 @@ describe('block.ts', () => {
value: 'dummyvalue',
gasPrice: 'dummygas',
gas: 11,
input: `0x${serializedBlock.toString('hex')}`,
input: `0x${dummySelector}${serializedBlock.toString('hex')}`,
}
const deserializedBlock = Block.fromTx(dummyTx)
expect(deserializedBlock).toBeDefined()

View File

@@ -38,6 +38,7 @@
"big-integer": "^1.6.48",
"fs-extra": "^9.0.0",
"node-docker-api": "^1.1.22",
"soltypes": "^1.1.2",
"tar": "^6.0.2",
"web3": "^1.2.6",
"web3-core": "^1.2.7",

View File

@@ -1,5 +1,6 @@
import { Header, Body } from '@zkopru/core'
import { Field } from '@zkopru/babyjubjub'
import { Address } from 'soltypes'
import { loadZkTxs } from './testset-zktxs'
function strToField(val: string): Field {
@@ -7,18 +8,18 @@ function strToField(val: string): Field {
}
export const dummyHeader: Header = {
proposer: strToField('proposer').toHex(20),
parentBlock: strToField('parentBlock').toHex(32),
metadata: strToField('metadata').toHex(32),
fee: strToField('totalFee').toHex(32),
utxoRoot: strToField('utxoRoot').toHex(32),
utxoIndex: strToField('utxoIndex').toHex(32),
nullifierRoot: strToField('nullifierRoot').toHex(32),
withdrawalRoot: strToField('withdrawalRoot').toHex(32),
withdrawalIndex: strToField('withdrawalIndex').toHex(32),
txRoot: strToField('txRoot').toHex(32),
depositRoot: strToField('depositRoot').toHex(32),
migrationRoot: strToField('migrationRoot').toHex(32),
proposer: Address.from(strToField('proposer').toHex(20)),
parentBlock: strToField('parentBlock').toBytes32(),
metadata: strToField('metadata').toBytes32(),
fee: strToField('totalFee').toUint256(),
utxoRoot: strToField('utxoRoot').toUint256(),
utxoIndex: strToField('utxoIndex').toUint256(),
nullifierRoot: strToField('nullifierRoot').toBytes32(),
withdrawalRoot: strToField('withdrawalRoot').toBytes32(),
withdrawalIndex: strToField('withdrawalIndex').toUint256(),
txRoot: strToField('txRoot').toBytes32(),
depositRoot: strToField('depositRoot').toBytes32(),
migrationRoot: strToField('migrationRoot').toBytes32(),
}
export async function getDummyBody(): Promise<Body> {
@@ -26,35 +27,35 @@ export async function getDummyBody(): Promise<Body> {
txs: await loadZkTxs(),
massDeposits: [
{
merged: strToField('md1/merged').toHex(32),
fee: strToField('md1/fee').toHex(32),
merged: strToField('md1/merged').toBytes32(),
fee: strToField('md1/fee').toUint256(),
},
{
merged: strToField('md2/merged').toHex(32),
fee: strToField('md2/fee').toHex(32),
merged: strToField('md2/merged').toBytes32(),
fee: strToField('md2/fee').toUint256(),
},
],
massMigrations: [
{
destination: strToField('mm1/dest').toHex(20),
totalETH: strToField('mm1/totalETH').toHex(32),
destination: Address.from(strToField('mm1/dest').toHex(20)),
totalETH: strToField('mm1/totalETH').toUint256(),
migratingLeaves: {
merged: strToField('mm1/md').toHex(32),
fee: strToField('mm1/fee').toHex(32),
merged: strToField('mm1/md').toBytes32(),
fee: strToField('mm1/fee').toUint256(),
},
erc20: [
{
addr: strToField('mm1/erc20').toHex(20),
amount: strToField('mm1/amount').toHex(32),
addr: Address.from(strToField('mm1/erc20').toHex(20)),
amount: strToField('mm1/amount').toUint256(),
},
],
erc721: [
{
addr: strToField('mm1/erc721').toHex(20),
addr: Address.from(strToField('mm1/erc721').toHex(20)),
nfts: [
strToField('mm1/erc721/nft1').toHex(32),
strToField('mm1/erc721/nft2').toHex(32),
strToField('mm1/erc721/nft3').toHex(32),
strToField('mm1/erc721/nft1').toUint256(),
strToField('mm1/erc721/nft2').toUint256(),
strToField('mm1/erc721/nft3').toUint256(),
],
},
],

View File

@@ -314,6 +314,8 @@ export class Grove {
.exec()
const utxo: UtxoSql = queryResult.pop() as UtxoSql
if (!utxo) throw Error('Failed to find the utxo')
if (!utxo.tree) throw Error('It is not included in a block yet')
if (!utxo.index) throw Error('It is not included in a block yet')
const cachedSiblings = (await this.db
.selectTable(schema.utxoTreeNode(utxo.tree).name)
@@ -353,6 +355,8 @@ export class Grove {
.exec()
const withdrawal: UtxoSql = queryResult.pop() as UtxoSql
if (!withdrawal) throw Error('Failed to find the withdrawal')
if (!withdrawal.tree) throw Error('It is not included in a block yet')
if (!withdrawal.index) throw Error('It is not included in a block yet')
const cachedSiblings = (await this.db
.selectTable(schema.withdrawalTreeNode(withdrawal.tree).name)

View File

@@ -13,17 +13,18 @@ function getPreHash<T extends Field | BN>(
parentOf: (left: T, right: T) => T,
depth: number,
): T[] {
const preHash = Array<T>(depth)
preHash[0] = zero
for (let level = 1; level < depth; level += 1) {
preHash[level] = parentOf(preHash[level - 1], preHash[level - 1])
const preHash: T[] = []
preHash.push(zero)
for (let level = 0; level < depth; level += 1) {
const topValue = preHash[preHash.length - 1]
preHash.push(parentOf(topValue, topValue))
}
return preHash
}
export function genesisRoot<T extends Field | BN>(hasher: Hasher<T>): T {
const lastSib = hasher.preHash[hasher.preHash.length - 1]
return hasher.parentOf(lastSib, lastSib)
return lastSib
}
export function keccakHasher(depth: number): Hasher<BN> {

View File

@@ -146,7 +146,7 @@ export abstract class LightRollUpTree<T extends Field | BN> {
start = this.latestLeafIndex()
latestSiblings = this.siblings()
})
let root!: T
let root: T = this.root()
let index = start
for (let i = 0; i < items.length; i += 1) {

View File

@@ -35,6 +35,7 @@
"pino-pretty": "^4.0.0",
"prompts": "^2.3.2",
"snarkjs": "^0.1.25",
"soltypes": "^1.1.2",
"tar": "^6.0.2",
"web3-utils": "^1.2.6"
},

View File

@@ -4,6 +4,7 @@ import { soliditySha3, padLeft } from 'web3-utils'
import pino from 'pino'
import { Container } from 'node-docker-api/lib/container'
import { ReadStream } from 'fs-extra'
import { Bytes32, Uint256, Address } from 'soltypes'
import tar from 'tar'
import BN from 'bn.js'
@@ -46,9 +47,12 @@ export function verifyingKeyIdentifier(nI: number, nO: number): string {
return identifier
}
export function hexify(n: BN | Buffer | string, length?: number): string {
export function hexify(
n: BN | Buffer | string | number,
length?: number,
): string {
let hex: string
if (n instanceof BN) {
if (n instanceof BN || typeof n === 'number') {
hex = n.toString(16)
} else if (typeof n === 'string') {
if (n.startsWith('0x')) {
@@ -72,6 +76,24 @@ export function hexify(n: BN | Buffer | string, length?: number): string {
return `0x${hex}`
}
/**
 * Converts a decimal number (BN, number, or decimal string) into a Buffer,
 * optionally left-padded to `len` bytes.
 * Rejects '0x'-prefixed strings — those are hex, not decimal.
 */
export function numToBuffer(
  decimal: BN | string | number,
  len?: number,
): Buffer {
  const looksLikeHex = typeof decimal === 'string' && decimal.startsWith('0x')
  if (looksLikeHex) {
    throw Error('It starts with 0x. This is not a number')
  }
  const hex = hexify(decimal)
  return hexToBuffer(hex, len)
}
/** Encodes a BN as a 0x-prefixed, 32-byte (64 hex char) Bytes32 value. */
export function bnToBytes32(n: BN): Bytes32 {
  const padded = n.toString(16, 64)
  return Bytes32.from(`0x${padded}`)
}
/** Converts a BN to a Uint256 via its 32-byte hex representation. */
export function bnToUint256(n: BN): Uint256 {
  const bytes = bnToBytes32(n)
  return bytes.toUint()
}
export class Queue {
buffer: Buffer
@@ -105,6 +127,18 @@ export class StringifiedHexQueue {
return `0x${dequeued}`
}
/** Dequeues the next 20 bytes and wraps them as an Address. */
dequeueToAddress(): Address {
  const hex = this.dequeue(20)
  return Address.from(hex)
}
/** Dequeues the next 32 bytes and wraps them as a Bytes32. */
dequeueToBytes32(): Bytes32 {
  const hex = this.dequeue(32)
  return Bytes32.from(hex)
}
/** Dequeues the next 32 bytes and interprets them as a Uint256. */
dequeueToUint256(): Uint256 {
  const bytes = this.dequeueToBytes32()
  return bytes.toUint()
}
dequeueToNumber(n: number): number {
const dequeued = this.str.slice(this.cursor, this.cursor + n * 2)
this.cursor += n * 2