Merge pull request #179 from JChanceHud/indexed-db-connector

Browser support
This commit is contained in:
Wanseob Lim
2021-04-06 16:41:20 +09:00
committed by GitHub
85 changed files with 2125 additions and 363 deletions

View File

@@ -54,12 +54,6 @@ jobs:
- run:
name: Build TS
command: yarn build:ts:serial
- run:
name: Pull Mockup Trusted Setup Artifacts
command: |
cd packages/circuits
yarn setup:pull
yarn update-contracts
- persist_to_workspace:
root: ~/
paths: project
@@ -148,6 +142,12 @@ jobs:
- run:
name: ZK Wizard Tests
command: |
docker pull zkoprunet/circuits:dev
cd packages/circuits
yarn setup
yarn build-keys
yarn postbuild-keys
cd ../..
yarn test --scope=@zkopru/zk-wizard
test_client:
machine:
@@ -185,9 +185,13 @@ jobs:
- run:
name: Pull Preset Images
command: |
cd packages/circuits
yarn setup:pull
yarn update-contracts
cd ../..
yarn images pull circuits
yarn images pull contracts-for-integration-test
- run:
name: Integration Tests
no_output_timeout: 30m
no_output_timeout: 45m
command: yarn test --scope=@zkopru/integration-test

View File

@@ -1,3 +1,4 @@
node_modules
dist
packages/contracts/src/contracts
packages/client/browser/*

1
.gitignore vendored
View File

@@ -12,6 +12,7 @@ temp
reports
junit.xml
dist
browser
.build-cache
keys.tgz
keys/

View File

@@ -98,7 +98,7 @@
"@typescript-eslint/eslint-plugin": "^2.34.0",
"@typescript-eslint/parser": "^2.34.0",
"bignumber.js": "^9.0.0",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"circom": "0.5.42",
"circomlib": "0.5.1",
"commitizen": "^4.0.3",

View File

@@ -10,7 +10,8 @@
"_moduleAliases": {
"~account": "dist",
"~utils": "../utils/dist",
"~database": "../database/dist"
"~database": "../database/dist",
"~database-node": "../database/dist/node.js"
},
"scripts": {
"prebuild": "shx mkdir -p dist",

View File

@@ -1,13 +1,12 @@
/* eslint-disable jest/no-hooks */
import Web3 from 'web3'
import { HDWallet, ZkAccount } from '~account'
import { DB, SQLiteConnector, schema } from '~database'
import { DB, SQLiteConnector, schema } from '~database-node'
describe('unit test', () => {
let mockup: DB
beforeAll(async () => {
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
})
afterAll(async () => {
await mockup.close()

View File

@@ -29,7 +29,7 @@
"@zkopru/utils": "file:../utils",
"big-integer": "^1.6.48",
"blake-hash": "^1.1.0",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"circomlib": "0.5.1",
"ffjavascript": "0.2.22",
"soltypes": "^1.3.5",

View File

@@ -67,6 +67,10 @@ export class Fp extends BN {
return n.lt(Fp.ORDER)
}
toBuffer(endian?: BN.Endianness, length?: number): Buffer {
return this.toArrayLike(Buffer, endian, length)
}
addPrefixBit(bitLength: number): BN {
const prefix = new BN(1).shln(bitLength)
if (this.gt(prefix)) throw Error('prefix bit is less than current value')

View File

@@ -67,6 +67,10 @@ export class Fr extends BN {
return n.lt(Fr.ORDER)
}
toBuffer(endian?: BN.Endianness, length?: number): Buffer {
return this.toArrayLike(Buffer, endian, length)
}
addPrefixBit(bitLength: number): BN {
const prefix = new BN(1).shln(bitLength)
if (this.gt(prefix)) throw Error('prefix bit is less than current value')

View File

@@ -48,7 +48,7 @@
"tar": "^6.0.2"
},
"devDependencies": {
"@zkopru/utils": "file:../utils",
"@zkopru/utils-docker": "file:../utils-docker",
"ffjavascript": "0.2.22",
"node-docker-api": "^1.1.22",
"shelljs": "^0.8.4",

View File

@@ -1,7 +1,7 @@
import path from 'path'
import fs from 'fs'
import tar from 'tar'
import * as utils from '~utils'
import * as utils from '@zkopru/utils-docker'
async function loadArtifacts(build?: boolean) {
// It may take about an hour. If you want to skip building image,

View File

@@ -1,6 +1,6 @@
import path from 'path'
import fs from 'fs'
import * as utils from '@zkopru/utils'
import * as utils from '@zkopru/utils-docker'
async function loadArtifacts() {
// It may take about an hour. If you want to skip building image,

View File

@@ -47,8 +47,7 @@ describe('inclusion_proof.test.circom', () => {
beforeAll(async () => {
checkPhase1Setup()
prepareArtifactsDirectory()
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
utxoTree = new UtxoTree({
db: mockup,
metadata: utxoTreeMetadata,

View File

@@ -52,8 +52,7 @@ describe('zk_transaction_1_2.test.circom', () => {
beforeAll(async () => {
checkPhase1Setup()
prepareArtifactsDirectory()
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
utxoTree = new UtxoTree({
db: mockup,
metadata: utxoTreeMetadata,

View File

@@ -7,6 +7,6 @@
},
"include": ["src"],
"references": [
{ "path": "../utils/tsconfig.build.json", "prepend": false },
{ "path": "../utils-docker/tsconfig.build.json", "prepend": false },
]
}

View File

@@ -46,7 +46,7 @@
"big-integer": "^1.6.48",
"bip39": "^3.0.2",
"blessed": "^0.1.81",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"chalk": "^4.0.0",
"cli-progress": "^3.8.2",
"figlet": "^1.4.0",

View File

@@ -1,5 +1,5 @@
export const DEFAULT = {
address: '0xcf59A7424E979969FF52982933BD0E33072552c4',
address: '0x94D37e8839A7c018328E865906702e9F7Edc3054',
bootstrap: true,
websocket: 'ws://goerli.zkopru.network:8546',
maxBytes: 131072,

View File

@@ -6,7 +6,7 @@ import {
PostgresConnector,
schema,
initDB,
} from '@zkopru/database'
} from '@zkopru/database/dist/node'
import { L1Contract } from '@zkopru/core'
import Configurator, { Context, Menu } from '../configurator'
@@ -20,10 +20,10 @@ export default class LoadDatabase extends Configurator {
}
let database: DB
if (this.base.postgres) {
database = await PostgresConnector.create(this.base.postgres)
database = await PostgresConnector.create(schema, this.base.postgres)
} else if (this.base.sqlite) {
const dbPath = this.base.sqlite
database = await SQLiteConnector.create(dbPath)
database = await SQLiteConnector.create(schema, dbPath)
} else {
// no configuration. try to create new one
const enum DBType {
@@ -84,6 +84,7 @@ export default class LoadDatabase extends Configurator {
initial: 'zkopru-coordinator',
})
database = await PostgresConnector.create(
schema,
`postgresql://${user}:${password}@${host}:${port}/${dbName}`,
)
} else {
@@ -109,10 +110,9 @@ export default class LoadDatabase extends Configurator {
fs.unlinkSync(dbName)
}
}
database = await SQLiteConnector.create(dbName)
database = await SQLiteConnector.create(schema, dbName)
}
}
await database.createTables(schema)
await initDB(
database,
context.web3,

View File

@@ -5,7 +5,7 @@ import { Menu, ExampleConfigContext } from '../menu'
const addressesByNetworkId = {
'1': undefined,
'5': '0xcf59A7424E979969FF52982933BD0E33072552c4',
'5': '0x94D37e8839A7c018328E865906702e9F7Edc3054',
}
export default class Wallet extends PromptApp<ExampleConfigContext, void> {

View File

@@ -1,10 +1,12 @@
export const DEFAULT = {
address: '0xcf59A7424E979969FF52982933BD0E33072552c4',
address: '0x94D37e8839A7c018328E865906702e9F7Edc3054',
networkId: 1,
chainId: 1,
websocket: 'ws://goerli.zkopru.network:8546',
coordinator: 'https://coordinator.zkopru.network',
snarkkeys: 'https://zkopru.azureedge.net/snarkkeys/burrito/040946c7/keys.tgz',
// snarkkeys: 'https://zkopru.azureedge.net/snarkkeys/burrito/040946c7/keys.tgz',
snarkkeys:
'https://ipfs.tubby.cloud/ipfs/QmPAKEsw24rKjBzojMgyxHEnd9FXrW3Kgive3MfiYT3r79',
maxBytes: 131072,
priceMultiplier: 48,
port: 8888,

View File

@@ -6,7 +6,7 @@ import {
PostgresConnector,
schema,
initDB,
} from '@zkopru/database'
} from '@zkopru/database/dist/node'
import { L1Contract } from '@zkopru/core'
import Configurator, { Context, Menu } from '../configurator'
@@ -20,10 +20,10 @@ export default class LoadDatabase extends Configurator {
}
let database: DB
if (this.base.postgres) {
database = await PostgresConnector.create(this.base.postgres)
database = await PostgresConnector.create(schema, this.base.postgres)
} else if (this.base.sqlite) {
const dbPath = this.base.sqlite
database = await SQLiteConnector.create(dbPath)
database = await SQLiteConnector.create(schema, dbPath)
} else {
// no configuration. try to create new one
const enum DBType {
@@ -84,6 +84,7 @@ export default class LoadDatabase extends Configurator {
initial: 'zkopru-wallet',
})
database = await PostgresConnector.create(
schema,
`postgresql://${user}:${password}@${host}:${port}/${dbName}`,
)
} else {
@@ -112,10 +113,9 @@ export default class LoadDatabase extends Configurator {
fs.unlinkSync(dbName)
}
}
database = await SQLiteConnector.create(dbName)
database = await SQLiteConnector.create(schema, dbName)
}
}
await database.createTables(schema)
await initDB(
database,
context.web3,

View File

@@ -1 +1,26 @@
{"_":[],"fullnode":true,"f":true,"develop":false,"d":false,"ws":"ws://localhost:5000","websocket":"ws://localhost:5000","coordinator":"http://localhost:8888","r":"http://localhost:8888","networkId":1,"n":1,"network-id":1,"chainId":1,"c":1,"chain-id":1,"address":"0x970e8f18ebfEa0B08810f33a5A40438b9530FBCF","a":"0x970e8f18ebfEa0B08810f33a5A40438b9530FBCF","keys":"keys","db":"db","$0":"zk-wizard","seedKeystore":{"ciphertext":"f34047785c14c5b1b87ce540c1bbd0bdb748f7ddf95fdf95d2d1e462de1dde98cf7815e280084111567fc3aac24225aa","iv":"188e2d6b2c7f7a6fecc9d850aa3a8914","algorithm":"aes-256-cbc","keylen":32,"kdf":"scrypt","kdfParams":{"N":16384,"r":8,"p":1},"salt":"9950f726377129f30ad64e5f56e5a4077765ecf433e677481f9ce141c474e011"},"accountNumber":3}
{
"_": [],
"fullnode": true,
"develop": false,
"websocket": "ws://192.168.1.199:9546",
"coordinator": "http://localhost:8888",
"networkId": 5,
"chainId": 5,
"address": "0x24A8072a8d2fde1e22b99398a104640f58C8462d",
"keys": "keys",
"db": "db",
"seedKeystore": {
"ciphertext": "f34047785c14c5b1b87ce540c1bbd0bdb748f7ddf95fdf95d2d1e462de1dde98cf7815e280084111567fc3aac24225aa",
"iv": "188e2d6b2c7f7a6fecc9d850aa3a8914",
"algorithm": "aes-256-cbc",
"keylen": 32,
"kdf": "scrypt",
"kdfParams": {
"N": 16384,
"r": 8,
"p": 1
},
"salt": "9950f726377129f30ad64e5f56e5a4077765ecf433e677481f9ce141c474e011"
},
"accountNumber": 3
}

View File

@@ -0,0 +1 @@
module.exports = require('buffer/').Buffer

View File

@@ -0,0 +1,7 @@
<html>
<body>
<script src="./browser/main.js">
</script>
</body>
</html>

View File

@@ -5,14 +5,16 @@
"main": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"dist"
"dist",
"browser"
],
"_moduleAliases": {
"~client": "dist",
"~coordinator": "../coordinator/dist"
},
"scripts": {
"build": "tsc --build tsconfig.build.json",
"build": "tsc --build tsconfig.build.json && webpack",
"webpack": "webpack",
"clean": "tsc --build tsconfig.build.json --clean && shx rm -rf coverage *.log junit.xml dist && jest --clearCache",
"link-modules": "link-module-alias",
"test": "jest"
@@ -20,8 +22,24 @@
"license": "GPL-3.0-or-later",
"dependencies": {
"@zkopru/coordinator": "file:../coordinator",
"@zkopru/core": "file:../core",
"@zkopru/database": "file:../database",
"assert": "^2.0.0",
"browserify-zlib": "^0.2.0",
"buffer": "^6.0.3",
"constants-browserify": "^1.0.0",
"crypto-browserify": "^3.12.0",
"https-browserify": "^1.0.0",
"node-fetch": "^2.6.1",
"web3": "^1.3.4"
"os-browserify": "^0.3.0",
"path-browserify": "^1.0.1",
"shebang-loader": "^0.0.1",
"stream-browserify": "^3.0.0",
"stream-http": "^3.1.1",
"url": "^0.11.0",
"web3": "^1.3.4",
"webpack": "^5.26.2",
"webpack-cli": "^4.5.0"
},
"publishConfig": {
"access": "public"

View File

@@ -1,7 +1,19 @@
import { RpcMethod } from '@zkopru/coordinator'
import { RpcType, RpcConfig, Block, Tx, Registry } from './types'
import fetch from './fetch'
enum RpcMethod {
address = 'l1_address',
vks = 'l1_getVKs',
syncing = 'l2_syncing',
blockCount = 'l2_blockCount',
blockNumber = 'l2_blockNumber',
blockByIndex = 'l2_getBlockByIndex',
blockByNumber = 'l2_getBlockByNumber',
blockByHash = 'l2_getBlockByHash',
transactionByHash = 'l2_getTransactionByHash',
registeredTokens = 'l2_getRegisteredTokens',
}
export default class RpcClient {
config: RpcConfig

View File

@@ -1,31 +1,100 @@
/* eslint-disable no-underscore-dangle */
import Web3 from 'web3'
import { IndexedDBConnector, DB, schema } from '@zkopru/database/dist/web'
import { FullNode } from '@zkopru/core'
import RpcClient from './rpc-client'
import { RpcConfig, RpcType } from './types'
/* eslint-disable no-underscore-dangle */
type Config = {
address?: string
bootstrap?: boolean
websocket?: string
rpcUrl?: string
}
const DEFAULT = {
address: '0x24A8072a8d2fde1e22b99398a104640f58C8462d',
bootstrap: true,
// websocket: 'wss://goerli.infura.io/ws/v3/5b122dbc87ed4260bf9a2031e8a0e2aa',
websocket: 'ws://192.168.1.199:9546',
}
export default class ZkopruClient {
config: RpcConfig
rpcConfig?: RpcConfig
config: Config
private _web3?: Web3
private _rpc?: RpcClient
constructor(rpcUrl: string) {
if (rpcUrl.indexOf('http') === 0) {
private _db?: DB
private node?: FullNode
constructor(_config: Config) {
this.config = {
...DEFAULT,
...(_config || {}),
}
if (this.config.rpcUrl && this.config.rpcUrl.indexOf('http') === 0) {
// http rpc
this.config = {
this.rpcConfig = {
type: RpcType.http,
url: rpcUrl,
url: this.config.rpcUrl,
}
} else {
} else if (this.config.rpcUrl) {
throw new Error(`Unsupported RPC protocol, only http(s) allowed`)
}
}
static async create(config: Config) {
const client = new this(config)
client._db = await IndexedDBConnector.create(schema)
if (!client.config.websocket) {
throw new Error('No websocket provided')
}
const provider = new Web3.providers.WebsocketProvider(
client.config.websocket,
{
reconnect: {
delay: 2000,
auto: true,
},
clientConfig: {
keepalive: true,
keepaliveInterval: 30000,
},
},
)
async function waitConnection(_provider) {
return new Promise<void>(res => {
if (_provider.connected) return res()
_provider.on('connect', res)
})
}
provider.connect()
await waitConnection(provider)
await new Promise(r => setTimeout(r, 1000))
client.node = await FullNode.new({
address: DEFAULT.address,
provider,
db: client._db,
})
return client
}
async start() {
if (!this.node) throw new Error('Node is not initialized')
this.node.start()
}
get rpc() {
if (!this.rpcConfig) {
throw new Error('No rpc config supplied')
}
if (!this._rpc) {
this._rpc = new RpcClient(this.config)
this._rpc = new RpcClient(this.rpcConfig)
}
return this._rpc
}
@@ -33,7 +102,9 @@ export default class ZkopruClient {
// Return a passthrough web3 instance
get web3() {
if (!this._web3) {
this._web3 = new Web3(this.config.url)
this._web3 = new Web3(
(this.config.websocket || this.config.rpcUrl) as any,
)
}
return this._web3
}

View File

@@ -14,7 +14,7 @@ describe('mocked fetch', () => {
})
it('should use mocked fetch implementation', async () => {
const zkopru = new Zkopru('http://localhost')
const zkopru = new Zkopru({ rpcUrl: 'http://localhost' })
assert.equal(await zkopru.rpc.getAddress(), 'mocked')
})
})

View File

@@ -3,7 +3,9 @@ import Zkopru from '../src'
describe('client tests', () => {
it('should use node-fetch implementation', async () => {
const zkopru = new Zkopru('http://localhost')
const zkopru = new Zkopru({
rpcUrl: 'http://localhost',
})
try {
await zkopru.rpc.getAddress()
assert(false)

View File

@@ -10,5 +10,7 @@
"include": ["src"],
"references": [
{ "path": "../coordinator/tsconfig.build.json", "prepend": false },
{ "path": "../core/tsconfig.build.json", "prepend": false },
{ "path": "../database/tsconfig.build.json", "prepend": false },
]
}

View File

@@ -0,0 +1,66 @@
const path = require('path')
const webpack = require('webpack')
module.exports = {
entry: './dist/index.js',
mode: 'development',
output: {
filename: 'index.js',
path: path.resolve(__dirname, 'browser'),
libraryTarget: 'commonjs2',
},
target: 'web',
module: {
rules: [
{
test: /JSONStream\/index\.js$/,
use: 'shebang-loader',
},
],
},
resolve: {
fallback: {
stream: require.resolve('stream-browserify'),
path: require.resolve('path-browserify'),
crypto: require.resolve('crypto-browserify'),
url: require.resolve('url/'),
child_process: false,
assert: require.resolve('assert/'),
fs: false,
npm: false,
zlib: require.resolve('browserify-zlib'),
https: require.resolve('https-browserify'),
http: require.resolve('stream-http'),
os: require.resolve('os-browserify/browser'),
constants: require.resolve('constants-browserify'),
worker_threads: false,
net: false,
tls: false,
'aws-sdk': false,
dns: false,
readline: false,
'node-docker-api': false,
prompts: false,
buffer: require.resolve('buffer/'),
},
},
plugins: [
new webpack.DefinePlugin({
'process.title': '"browser"',
}),
new webpack.DefinePlugin({
'process.env': {},
'process.argv': [],
'process.versions': {},
'process.versions.node': '"12"',
process: {
exit: '(() => {})',
browser: true,
versions: {},
},
}),
new webpack.ProvidePlugin({
Buffer: path.resolve(__dirname, 'buffer.js'),
}),
],
}

View File

@@ -17,7 +17,7 @@
},
"dependencies": {
"@openzeppelin/contracts": "3.4.1",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"ganache-time-traveler": "^1.0.15",
"soltypes": "^1.3.5",
"web3": "^1.2.6",

View File

@@ -5,6 +5,7 @@
const chai = require("chai");
const { UtxoTree, poseidonHasher } = require("~tree");
const { append, appendAsSubTrees } = require("~tree/utils/merkle-tree-sol");
const sample = require("~tree/sample");
const { Fp } = require("~babyjubjub");
const { expect } = chai;
@@ -50,7 +51,7 @@ contract("Utxo tree update tests", async accounts => {
let db;
const depth = 48;
before(async () => {
const { tree, db } = await UtxoTree.sample(depth);
const { tree, db } = await sample(depth);
tsTree = tree;
mockup = db;
solTree = await UtxoTreeTester.new();

View File

@@ -14,7 +14,9 @@
"~account": "../account/dist",
"~coordinator": "dist",
"~database": "../database/dist",
"~utils": "../utils/dist"
"~database-node": "../database/dist/node.js",
"~utils": "../utils/dist",
"~utils-docker": "../utils-docker/dist"
},
"scripts": {
"prebuild": "shx mkdir -p dist",
@@ -44,7 +46,7 @@
"big-integer": "^1.6.48",
"bip39": "^3.0.2",
"blessed": "^0.1.81",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"chalk": "^4.0.0",
"express": "^4.17.1",
"figlet": "^1.4.0",

View File

@@ -2,19 +2,6 @@ import { Bytes32 } from 'soltypes'
import { CoordinatorContext } from './context'
import EthMethods from './eth-rpc-methods'
export enum RpcMethod {
address = 'l1_address',
vks = 'l1_getVKs',
syncing = 'l2_syncing',
blockCount = 'l2_blockCount',
blockNumber = 'l2_blockNumber',
blockByIndex = 'l2_getBlockByIndex',
blockByNumber = 'l2_getBlockByNumber',
blockByHash = 'l2_getBlockByHash',
transactionByHash = 'l2_getTransactionByHash',
registeredTokens = 'l2_getRegisteredTokens',
}
export class ClientApi {
context: CoordinatorContext

View File

@@ -1,4 +1,16 @@
// export { Coordinator, CoordinatorInterface } from './coordinator'
export { TxMemPool, TxPoolInterface } from './tx-pool'
export { RpcMethod } from './client-api'
export { Coordinator } from './coordinator'
export enum RpcMethod {
address = 'l1_address',
vks = 'l1_getVKs',
syncing = 'l2_syncing',
blockCount = 'l2_blockCount',
blockNumber = 'l2_blockNumber',
blockByIndex = 'l2_getBlockByIndex',
blockByNumber = 'l2_getBlockByNumber',
blockByHash = 'l2_getBlockByHash',
transactionByHash = 'l2_getTransactionByHash',
registeredTokens = 'l2_getRegisteredTokens',
}

View File

@@ -10,8 +10,9 @@ import assert from 'assert'
import fetch from 'node-fetch'
import { Coordinator } from '~coordinator'
import { ZkAccount } from '~account'
import { readFromContainer, sleep, pullOrBuildAndGetContainer } from '~utils'
import { DB, SQLiteConnector, schema } from '~database'
import { sleep } from '~utils'
import { readFromContainer, pullOrBuildAndGetContainer } from '~utils-docker'
import { DB, SQLiteConnector, schema } from '~database-node'
async function callMethod(
_method:
@@ -68,8 +69,7 @@ describe('coordinator test to run testnet', () => {
const coordinators = [] as Coordinator[]
beforeAll(async () => {
// logStream.addStream(process.stdout)
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
// It may take about few minutes. If you want to skip building image,
// run `yarn pull:images` on the root directory
container = await pullOrBuildAndGetContainer({

View File

@@ -12,7 +12,8 @@
"~account": "../account/dist",
"~babyjubjub": "../babyjubjub/dist",
"~dataset": "../dataset/dist",
"~utils": "../utils/dist"
"~utils": "../utils/dist",
"~utils-docker": "../utils-docker/dist"
},
"scripts": {
"prebuild": "shx mkdir -p dist",
@@ -38,7 +39,7 @@
"@zkopru/utils": "file:../utils",
"async-lock": "^1.2.2",
"big-integer": "^1.6.48",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"ffjavascript": "0.2.22",
"node-fetch": "^2.6.0",
"node-schedule": "^1.3.2",

View File

@@ -77,8 +77,12 @@ export class FullValidator extends Validator {
) {
onchainResult.forEach((result, index) => {
if (result.slashable !== offchainResult[index].slashable) {
console.log('onchain', onchainResult[index])
console.log('offchain', offchainResult[index])
logger.info(
`onchain slashable: ${(onchainResult[index] as any).slashable}`,
)
logger.info(
`offchain slashable: ${(offchainResult[index] as any).slashable}`,
)
}
})
logger.error(

View File

@@ -552,12 +552,6 @@ export class Synchronizer extends EventEmitter {
const block = Block.fromTx(proposalData)
const header = block.getHeaderSql()
try {
await this.db.update('Proposal', {
where: { hash: header.hash },
update: {
proposalData: JSON.stringify(proposalData),
},
})
await this.db.upsert('Block', {
where: { hash: header.hash },
create: { hash: header.hash },
@@ -568,6 +562,12 @@ export class Synchronizer extends EventEmitter {
create: header,
update: header,
})
await this.db.update('Proposal', {
where: { hash: header.hash },
update: {
proposalData: JSON.stringify(proposalData),
},
})
} catch (err) {
logger.error(err)
process.exit()

View File

@@ -3,13 +3,15 @@ import { ZkAccount } from '@zkopru/account'
import { DB } from '@zkopru/database'
import { Grove, poseidonHasher, keccakHasher } from '@zkopru/tree'
import { logger } from '@zkopru/utils'
import AsyncLock from 'async-lock'
import { L1Contract } from '../context/layer1'
import { L2Chain } from '../context/layer2'
import { BootstrapHelper } from './bootstrap'
import { Synchronizer, NetworkStatus } from './synchronizer'
import { Synchronizer } from './synchronizer'
import { Tracker } from './tracker'
import { BlockProcessor } from './block-processor'
import { Watchdog } from './watchdog'
import { Block } from '../block'
export class ZkopruNode {
running: boolean
@@ -35,6 +37,8 @@ export class ZkopruNode {
bootstrapHelper?: BootstrapHelper
canonicalLock = new AsyncLock()
constructor({
db,
l1Contract,
@@ -85,14 +89,10 @@ export class ZkopruNode {
})
this.blockProcessor.on('processed', async proposal => {
this.synchronizer.setLatestProcessed(proposal.proposalNum)
if (this.synchronizer.status === NetworkStatus.FULLY_SYNCED) {
await this.calcCanonicalBlockHeights()
}
await this.calcCanonicalBlockHeights()
})
this.synchronizer.on('status', async status => {
if (status === NetworkStatus.FULLY_SYNCED) {
await this.calcCanonicalBlockHeights()
}
this.synchronizer.on('status', async () => {
await this.calcCanonicalBlockHeights()
})
} else {
logger.info('already on syncing')
@@ -157,32 +157,35 @@ export class ZkopruNode {
// idempotently calculate canonical numbers
private async calcCanonicalBlockHeights() {
// find earliest block with no canonical num
const startBlock = await this.db.findMany('Proposal', {
where: {
canonicalNum: null,
},
orderBy: { proposalNum: 'asc' },
limit: 1,
await this.canonicalLock.acquire('canon', async () => {
// find earliest block with no canonical num
const startBlock = await this.db.findMany('Proposal', {
where: {
canonicalNum: null,
OR: [{ proposalData: { ne: null } }, { proposalNum: 0 }],
},
orderBy: { proposalNum: 'asc' },
limit: 1,
})
if (startBlock.length === 0) {
// have canonical numbers for all blocks
return
}
// The proposal to start at
const [{ proposalNum }] = startBlock
if (proposalNum === null) {
throw new Error('Proposal number is null')
}
const blockHeight = await this.db.count('Proposal', {})
const latestProcessed = this.synchronizer.latestProcessed || 0
for (
let x = proposalNum;
x <= Math.min(blockHeight, latestProcessed);
x += 1
) {
await this.calcCanonicalBlockHeight(x)
}
})
if (startBlock.length === 0) {
// have canonical numbers for all blocks
return
}
// The proposal to start at
const [{ proposalNum }] = startBlock
if (proposalNum === null) {
throw new Error('Proposal number is null')
}
const blockHeight = await this.db.count('Proposal', {})
const latestProcessed = this.synchronizer.latestProcessed || 0
for (
let x = proposalNum;
x < Math.min(blockHeight, latestProcessed);
x += 1
) {
this.calcCanonicalBlockHeight(x)
}
}
private async calcCanonicalBlockHeight(proposalNum: number) {
@@ -199,15 +202,11 @@ export class ZkopruNode {
where: { hash },
update: { canonicalNum: 0 },
})
// eslint-disable-next-line no-continue
return
}
const header = await this.db.findOne('Header', {
where: { hash },
})
if (!header) {
throw new Error(`Unable to find header for proposal ${proposal.hash}`)
}
if (!proposal.proposalData) return
const block = Block.fromTx(JSON.parse(proposal.proposalData))
const header = block.getHeaderSql()
const parent = await this.db.findOne('Proposal', {
where: {
hash: header.parentBlock.toString(),
@@ -219,7 +218,6 @@ export class ZkopruNode {
if (parent.canonicalNum === null) {
throw new Error(`Expected canonicalNum to exist!`)
}
// console.log(`canonical num: ${parent.canonicalNum+1}`)
await this.db.update('Proposal', {
where: { hash },
update: { canonicalNum: (parent.canonicalNum as number) + 1 },

View File

@@ -5,6 +5,7 @@ import { join } from 'path'
import assert from 'assert'
import { Uint256 } from 'soltypes'
import * as ffjs from 'ffjavascript'
import * as snarkjs from 'snarkjs'
export interface VerifyingKey {
protocol: string
@@ -47,10 +48,37 @@ export class SNARKVerifier {
}
async verifyTx(tx: ZkTx): Promise<boolean> {
// run in main process if in browser
// TODO: use webworker
if (typeof window !== 'undefined') {
const registeredVk = this.vks[
verifyingKeyIdentifier(tx.inflow.length, tx.outflow.length)
]
const proof = ffjs.utils.stringifyBigInts(tx.circomProof())
const signals = ffjs.utils.stringifyBigInts(tx.signals())
const vk = ffjs.utils.stringifyBigInts(registeredVk)
let result!: boolean
try {
result = snarkjs.groth16.verify(
ffjs.utils.unstringifyBigInts(vk),
ffjs.utils.unstringifyBigInts(signals),
ffjs.utils.unstringifyBigInts(proof),
)
} catch (e) {
logger.error(e)
result = false
}
return result
}
return new Promise<boolean>(res => {
const registeredVk = this.vks[
verifyingKeyIdentifier(tx.inflow.length, tx.outflow.length)
]
logger.info(
`verifying key: ${JSON.stringify(registeredVk, (_, v) =>
typeof v === 'bigint' ? v.toString() : v,
)}`,
)
if (!registeredVk) {
res(false)
return
@@ -75,6 +103,13 @@ export class SNARKVerifier {
}
async verifyTxs(txs: ZkTx[]): Promise<{ result: boolean; index?: number }> {
if (typeof window !== 'undefined') {
for (const [index, tx] of Object.entries(txs)) {
const result = await this.verifyTx(tx)
if (!result) return Promise.resolve({ result: false, index: +index })
}
return Promise.resolve({ result: true })
}
return new Promise<{ result: boolean; index?: number }>(res => {
// 1. check vk existence
for (let index = 0; index < txs.length; index += 1) {

View File

@@ -7,7 +7,8 @@ import { WebsocketProvider } from 'web3-core'
import { Container } from 'node-docker-api/lib/container'
import { DB, SQLiteConnector, schema } from '@zkopru/database'
import { ZkAccount } from '~account'
import { sleep, readFromContainer, pullOrBuildAndGetContainer } from '~utils'
import { sleep } from '~utils'
import { readFromContainer, pullOrBuildAndGetContainer } from '~utils-docker'
import { FullNode } from '~core'
describe('integration test to run testnet', () => {
@@ -18,8 +19,7 @@ describe('integration test to run testnet', () => {
let wsProvider: WebsocketProvider
let mockup: DB
beforeAll(async () => {
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
// It may take about few minutes. If you want to skip building image,
// run `yarn pull:images` on the root directory
container = await pullOrBuildAndGetContainer({

View File

@@ -7,7 +7,8 @@ import { WebsocketProvider } from 'web3-core'
import { Container } from 'node-docker-api/lib/container'
import { DB, SQLiteConnector, schema } from '@zkopru/database'
import { ZkAccount } from '~account'
import { sleep, readFromContainer, pullOrBuildAndGetContainer } from '~utils'
import { sleep } from '~utils'
import { readFromContainer, pullOrBuildAndGetContainer } from '~utils-docker'
import { LightNode, HttpBootstrapHelper } from '~core'
describe('integration test to run testnet', () => {
@@ -18,8 +19,7 @@ describe('integration test to run testnet', () => {
let wsProvider: WebsocketProvider
let mockup: DB
beforeAll(async () => {
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
// It may take about few minutes. If you want to skip building image,
// run `yarn pull:images` on the root directory
container = await pullOrBuildAndGetContainer({

View File

@@ -157,15 +157,6 @@ Batch database operations to be executed at once. If any operation in the transa
Once the `operation` function finishes executing the transaction will be applied.
### Create Tables
Call this method to ensure tables (collections) exist. This _must_ be called each time a new DB instance is initialized.
`createTables(tableData: TableData[]): Promise<void>`
`TableData`: A [schema array](#Schema).
### Close
A function for closing and tearing down database instances. Call this when disposing of a `DB` reference.

View File

@@ -2,4 +2,5 @@ const baseConfig = require('../../jest.config.base.js')
module.exports = {
...baseConfig,
setupFiles: ['fake-indexeddb/auto'],
}

View File

@@ -5,10 +5,13 @@
"main": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"dist"
"dist",
"dist/web.js"
],
"_moduleAliases": {
"~database": "./src"
"~database": "./dist",
"~database-web": "./dist/web.js",
"~database-node": "./dist/node.js"
},
"keywords": [
"cli"
@@ -29,8 +32,11 @@
"@zkopru/babyjubjub": "file:../babyjubjub",
"@zkopru/utils": "file:../utils",
"async-lock": "^1.2.8",
"bn.js": "^4.12.0",
"bn.js": "^5.2.0",
"fake-indexeddb": "^3.1.2",
"idb": "^6.0.0",
"pg": "^8.5.1",
"sql.js": "^1.5.0",
"sqlite": "^4.0.19",
"sqlite3": "^5.0.2",
"uuid": "^8.3.2",

View File

@@ -0,0 +1,449 @@
/* eslint-disable class-methods-use-this, no-underscore-dangle */
import AsyncLock from 'async-lock'
import { openDB, IDBPDatabase, IDBPTransaction } from 'idb'
import {
DB,
FindOneOptions,
FindManyOptions,
WhereClause,
UpdateOptions,
UpsertOptions,
DeleteManyOptions,
TableData,
TransactionDB,
Schema,
Relation,
normalizeRowDef,
constructSchema,
} from '../types'
const DB_NAME = 'zkopru'

/**
 * Browser `DB` implementation backed by IndexedDB via the `idb` promise
 * wrapper. All public operations are serialized through an async lock
 * because overlapping IndexedDB transactions on the same stores can
 * auto-commit or conflict unexpectedly. The private `_`-prefixed
 * variants accept an optional outer transaction so `transaction()` can
 * compose them atomically.
 */
export class IndexedDBConnector extends DB {
  db?: IDBPDatabase<any>

  schema: Schema = {}

  // Serializes every public operation (see class doc).
  lock = new AsyncLock()

  constructor(schema: Schema) {
    super()
    this.schema = schema
  }

  /**
   * Open (and upgrade if needed) the shared database, creating one
   * object store per table. Unique rows, indexed rows, and primary key
   * components each get an IndexedDB index so ordered cursors can be
   * opened on them in `_findMany`.
   */
  static async create(tables: TableData[]) {
    const schema = constructSchema(tables)
    const connector = new this(schema)
    connector.db = await openDB(DB_NAME, 2, {
      upgrade(db /* oldVersion, newVersion, transaction */) {
        for (const table of tables) {
          const store = db.createObjectStore(table.name, {
            keyPath: table.primaryKey,
          })
          for (const row of table.rows) {
            const fullRow = normalizeRowDef(row)
            if (
              fullRow.unique ||
              fullRow.index ||
              [table.primaryKey].flat().indexOf(fullRow.name) !== -1
            ) {
              store.createIndex(fullRow.name, fullRow.name, {
                unique: !!fullRow.unique,
              })
            }
          }
        }
      },
    })
    return connector
  }

  async create(collection: string, _doc: any) {
    return this.lock.acquire('db', async () => this._create(collection, _doc))
  }

  /**
   * Insert one document or an array of documents.
   *
   * Applies schema defaults to missing fields, validates presence of
   * non-optional fields and primitive types, then adds everything in a
   * single readwrite transaction (unless an outer transaction `_tx` is
   * supplied, in which case the caller owns completion).
   *
   * Returns an array iff an array was passed in, matching the SQL
   * connectors' behavior.
   */
  async _create(
    collection: string,
    _doc: any,
    _tx?: IDBPTransaction<any, string[], 'readwrite'>,
  ) {
    const table = this.schema[collection]
    if (!table) throw new Error(`Invalid collection: "${collection}"`)
    const docs = [_doc].flat().map(doc => {
      // First pass: compute defaults for fields the caller omitted.
      const defaults = {}
      for (const key of Object.keys(table.rowsByName)) {
        const row = table.rowsByName[key]
        if (!row) throw new Error('Expected row to exist')
        // Compare against undefined (not truthiness) so falsy defaults
        // such as 0, false, or '' are still applied.
        if (
          row.default !== undefined &&
          (doc[row.name] === undefined || doc[row.name] === null)
        ) {
          Object.assign(defaults, {
            [row.name]:
              typeof row.default === 'function' ? row.default() : row.default,
          })
        }
      }
      const fullDoc = {
        ...defaults,
        ...doc,
      }
      // Second pass: validate presence and primitive types on the
      // fully-defaulted document.
      for (const key of Object.keys(table.rowsByName)) {
        const row = table.rowsByName[key]
        if (!row) throw new Error('Expected row to exist')
        const value = fullDoc[row.name]
        if (
          !row.optional &&
          !row.relation &&
          (value === undefined || value === null)
        ) {
          throw new Error(`NULL received for non-optional field "${row.name}"`)
        }
        if (typeof value !== 'undefined' && value !== null) {
          if (row.type === 'Bool' && typeof value !== 'boolean') {
            throw new Error(`Unrecognized value ${value} for type Bool`)
          } else if (row.type === 'Int' && typeof value !== 'number') {
            throw new Error(`Unrecognized value ${value} for type Int`)
          } else if (row.type === 'String' && typeof value !== 'string') {
            throw new Error(`Unrecognized value ${value} for type String`)
          } else if (row.type === 'Object' && typeof value !== 'object') {
            throw new Error(`Unrecognized value ${value} for type Object`)
          }
        }
      }
      return fullDoc
    })
    if (!this.db) throw new Error('DB is not initialized')
    const tx = _tx || this.db.transaction(collection, 'readwrite')
    const createPromises = docs.map(doc => {
      const store = tx.objectStore(collection)
      return store.add(doc)
    })
    if (!_tx) {
      // Only await completion when we own the transaction; otherwise the
      // outer transaction's `done` covers these operations.
      await Promise.all([...createPromises, tx.done])
    }
    // Mirror the input shape: array in -> array out, single doc in ->
    // single doc out (consistent with the SQL connectors).
    return Array.isArray(_doc) ? docs : docs[0]
  }

  async findOne(collection: string, options: FindOneOptions) {
    const [obj] = await this.findMany(collection, {
      ...options,
      limit: 1,
    })
    return obj === undefined ? null : obj
  }

  /**
   * Load related models for each entry of `options.include`, mutating
   * the supplied model objects in place.
   */
  async loadIncluded(
    collection: string,
    options: { models: any[]; include?: any },
  ) {
    const { models, include } = options
    if (!include) return
    const table = this.schema[collection]
    if (!table) throw new Error(`Unable to find table ${collection} in schema`)
    for (const key of Object.keys(include)) {
      const relation = table.relations[key]
      if (!relation) {
        throw new Error(`Unable to find relation ${key} in ${collection}`)
      }
      if (include[key]) {
        await this.loadIncludedModels(
          models,
          relation,
          typeof include[key] === 'object' ? include[key] : undefined,
        )
      }
    }
  }

  // Load and assign submodels for one relation; mutates `models`.
  private async loadIncludedModels(
    models: any[],
    relation: Relation & { name: string },
    include?: any,
  ) {
    const values = models.map(model => model[relation.localField])
    // load relevant submodels
    const submodels = await this._findMany(relation.foreignTable, {
      where: {
        [relation.foreignField]: values,
      },
      include: include as any, // load subrelations if needed
    })
    // key the submodels by their relation field
    const keyedSubmodels = {}
    for (const submodel of submodels) {
      keyedSubmodels[submodel[relation.foreignField]] = submodel
    }
    // Assign submodel onto model
    for (const model of models) {
      const submodel = keyedSubmodels[model[relation.localField]]
      Object.assign(model, {
        [relation.name]: submodel,
      })
    }
  }

  async findMany(collection: string, options: FindManyOptions) {
    return this.lock.acquire('db', async () =>
      this._findMany(collection, options),
    )
  }

  /**
   * Retrieve documents matching `options.where` by scanning a cursor.
   * If an `orderBy` is supplied the cursor is opened on the matching
   * index (the key must have been created as an index in the schema),
   * which yields results in sorted order.
   */
  async _findMany(
    collection: string,
    options: FindManyOptions,
    _tx?: IDBPTransaction<any, string[], 'readwrite' | 'readonly'>,
  ) {
    if (!this.db) throw new Error('DB is not initialized')
    const found = [] as any[]
    let cursor: any
    const tx = _tx || this.db.transaction(collection)
    if (Object.keys(options.orderBy || {}).length > 0) {
      // use a cursor over the index for the (single) orderBy key
      const key = Object.keys(options.orderBy || {})[0]
      const direction = (options.orderBy || {})[key] === 'asc' ? 'next' : 'prev'
      const index = tx.objectStore(collection).index(key)
      cursor = await index.openCursor(null, direction)
    } else {
      cursor = await tx.objectStore(collection).openCursor()
    }
    // TODO: index accelerated queries when possible
    // Evaluate a single where clause against a document. Object values
    // are operator maps (ne/lt/lte/gt/gte), arrays are IN-style matches
    // (null matching undefined too), plain values are strict equality.
    const matchDoc = (where: WhereClause, doc: any) => {
      for (const [key, val] of Object.entries(where)) {
        if (typeof val === 'object' && !Array.isArray(val) && val !== null) {
          if (typeof val.ne !== 'undefined' && doc[key] === val.ne) {
            return false
          }
          if (typeof val.lt !== 'undefined' && doc[key] >= val.lt) {
            return false
          }
          if (typeof val.lte !== 'undefined' && doc[key] > val.lte) {
            return false
          }
          if (typeof val.gt !== 'undefined' && doc[key] <= val.gt) {
            return false
          }
          if (typeof val.gte !== 'undefined' && doc[key] < val.gte) {
            return false
          }
        } else if (Array.isArray(val)) {
          let exists = false
          for (const v of val) {
            if (v === null && typeof doc[key] === 'undefined') {
              exists = true
              break
            }
            if (doc[key] === v) {
              exists = true
              break
            }
          }
          if (!exists) return false
        } else if (
          val === null &&
          typeof doc[key] !== 'undefined' &&
          doc[key] !== null
        ) {
          return false
        } else if (val !== null && doc[key] !== val) {
          return false
        }
      }
      return true
    }
    const { where = {}, limit } = options
    while (cursor) {
      if (typeof limit === 'number' && found.length >= limit) break
      const obj = cursor.value
      // A doc matches when it satisfies the top-level clause AND (if OR
      // clauses are present) at least one OR sub-clause.
      const topWhere = { ...where, OR: undefined }
      const or = where.OR || []
      if (or.length === 0 && matchDoc(topWhere, obj)) {
        found.push(obj)
      }
      for (const _where of or) {
        if (matchDoc(_where, obj) && matchDoc(topWhere, obj)) {
          found.push(obj)
          break
        }
      }
      cursor = await cursor.continue()
    }
    await this.loadIncluded(collection, {
      models: found,
      include: options.include,
    })
    return found
  }

  // O(n): counts by scanning matching documents.
  async count(collection: string, where: WhereClause) {
    return (await this.findMany(collection, { where })).length
  }

  async update(collection: string, options: UpdateOptions) {
    return this.lock.acquire('db', async () =>
      this._update(collection, options),
    )
  }

  /**
   * Update all documents matching `options.where` by shallow-merging
   * `options.update` over them. Returns the number of matched docs.
   */
  async _update(
    collection: string,
    options: UpdateOptions,
    _tx?: IDBPTransaction<any, string[], 'readwrite'>,
  ) {
    if (!this.db) throw new Error('DB is not initialized')
    const items = await this._findMany(
      collection,
      { where: options.where },
      _tx,
    )
    if (Object.keys(options.update).length === 0) return items.length
    const tx = _tx || this.db.transaction(collection, 'readwrite')
    const promises = [] as Promise<any>[]
    const table = this.schema[collection]
    if (!table) throw new Error('Table not found')
    for (const item of items) {
      const store = tx.objectStore(collection)
      // Collect the put promises so failures reject Promise.all below
      // instead of surfacing as unhandled rejections.
      promises.push(
        store.put({
          ...item,
          ...options.update,
        }),
      )
    }
    if (!_tx) {
      await Promise.all([...promises, tx.done])
    }
    return items.length
  }

  async upsert(collection: string, options: UpsertOptions) {
    return this.lock.acquire('db', async () =>
      this._upsert(collection, options),
    )
  }

  /**
   * Update matching documents, or create `options.create` when nothing
   * matched. Returns the number of documents affected (0 when the
   * update clause was empty but documents matched).
   */
  async _upsert(
    collection: string,
    options: UpsertOptions,
    _tx?: IDBPTransaction<any, string[], 'readwrite'>,
  ) {
    const updated = await this._update(collection, options, _tx)
    if (updated > 0) {
      return Object.keys(options.update).length === 0 ? 0 : updated
    }
    const created = await this._create(collection, options.create, _tx)
    return Array.isArray(created) ? created.length : 1
  }

  async delete(collection: string, options: DeleteManyOptions) {
    return this.lock.acquire('db', async () =>
      this._delete(collection, options),
    )
  }

  /**
   * Delete all documents matching `options.where`, keyed by the table's
   * primary key (compound keys are deleted by key array). Returns the
   * number of documents deleted.
   */
  async _delete(
    collection: string,
    options: DeleteManyOptions,
    _tx?: IDBPTransaction<any, string[], 'readwrite'>,
  ) {
    if (!this.db) throw new Error('DB is not initialized')
    const items = await this._findMany(
      collection,
      { where: options.where },
      _tx,
    )
    const tx = _tx || this.db.transaction(collection, 'readwrite')
    const promises = [] as Promise<any>[]
    const table = this.schema[collection]
    if (!table) throw new Error('Table not found')
    const store = tx.objectStore(collection)
    for (const item of items) {
      promises.push(
        store.delete(
          Array.isArray(table.primaryKey)
            ? table.primaryKey.map(k => item[k])
            : item[table.primaryKey],
        ),
      )
    }
    if (!_tx) {
      await Promise.all([...promises, tx.done])
    }
    return items.length
  }

  async transaction(operation: (db: TransactionDB) => void) {
    return this.lock.acquire('db', async () => this._transaction(operation))
  }

  /**
   * Execute several mutations atomically. `operation` is called
   * synchronously to register its mutations; the actual IndexedDB
   * transaction is only opened afterwards, once the full set of
   * involved stores is known (IndexedDB requires all stores up front).
   */
  async _transaction(operation: (db: TransactionDB) => void) {
    if (!this.db) throw new Error('DB is not initialized')
    // stores the operation will mutate
    const stores = [] as string[]
    let tx: IDBPTransaction<any, string[], 'readwrite'>
    // Don't start executing until we know what stores to involve and
    // have created and assigned the tx object.
    let start: Function | undefined
    let promise = new Promise(rs => {
      start = rs
    })
    const db = {
      delete: (collection: string, options: DeleteManyOptions) => {
        stores.push(collection)
        promise = promise.then(() => this._delete(collection, options, tx))
      },
      create: (collection: string, docs: any) => {
        stores.push(collection)
        promise = promise.then(() => this._create(collection, docs, tx))
      },
      update: (collection: string, options: UpdateOptions) => {
        stores.push(collection)
        promise = promise.then(() => this._update(collection, options, tx))
      },
      upsert: (collection: string, options: UpsertOptions) => {
        stores.push(collection)
        promise = promise.then(() => this._upsert(collection, options, tx))
      },
    } as TransactionDB
    // Call `operation` to collect the stores to be accessed, then open
    // the transaction and release the chained operations.
    operation(db)
    // get a unique list of stores
    const storeNames = {}
    const storesUnique = stores.filter(store => {
      if (storeNames[store]) return false
      storeNames[store] = true
      return true
    })
    tx = this.db.transaction(storesUnique, 'readwrite')
    // Explicitly cast `start`: the promise executor runs synchronously
    // so it is guaranteed to be assigned at this point.
    ;(start as Function)()
    await Promise.all([promise, tx.done])
  }

  async close() {
    if (!this.db) throw new Error('DB is not initialized')
    this.db.close()
  }
}

View File

@@ -25,7 +25,7 @@ import {
upsertSql,
} from '../helpers/sql'
export class PostgresConnector implements DB {
export class PostgresConnector extends DB {
db: Client
config: any | string
@@ -35,6 +35,7 @@ export class PostgresConnector implements DB {
lock = new AsyncLock()
constructor(config: any | string) {
super()
this.config = config
this.db = {} as any
}
@@ -50,9 +51,10 @@ export class PostgresConnector implements DB {
await this.db.connect()
}
static async create(config: any | string) {
static async create(tables: TableData[], config: any | string) {
const connector = new this(config)
await connector.init()
await connector.createTables(tables)
return connector
}
@@ -154,8 +156,8 @@ export class PostgresConnector implements DB {
if (!table) throw new Error(`Unable to find table ${collection}`)
const sql = findManySql(table, options)
const { rows } = await this.db.query(sql)
const objectKeys = Object.keys(table.rows).filter(key => {
return table.rows[key]?.type === 'Object'
const objectKeys = Object.keys(table.rowsByName).filter(key => {
return table.rowsByName[key]?.type === 'Object'
})
if (objectKeys.length > 0) {
// need to expand json objects

View File

@@ -0,0 +1,307 @@
/* eslint-disable no-underscore-dangle */
import initSqlJs from 'sql.js'
import AsyncLock from 'async-lock'
import {
DB,
WhereClause,
DeleteManyOptions,
FindManyOptions,
FindOneOptions,
UpdateOptions,
UpsertOptions,
TableData,
// normalizeRowDef,
constructSchema,
Schema,
Relation,
TransactionDB,
} from '../types'
import {
tableCreationSql,
createSql,
findManySql,
countSql,
updateSql,
deleteManySql,
upsertSql,
} from '../helpers/sql'
/**
 * In-memory SQLite `DB` implementation backed by sql.js (WASM). Useful
 * for tests and browser contexts where no native sqlite3 binding is
 * available and persistence is not required. All public operations are
 * serialized through an async lock.
 */
export class SQLiteMemoryConnector extends DB {
  db: any

  schema: Schema = {}

  lock = new AsyncLock()

  constructor() {
    super()
    this.db = {} as any
  }

  // Load the sql.js WASM module and open a fresh in-memory database.
  async init() {
    const SQL = await initSqlJs({
      // locateFile: (file: string) => `https://sql.js.org/dist/${file}`,
    })
    this.db = new SQL.Database()
  }

  /** Create a ready-to-use connector with all tables created. */
  static async create(tables: TableData[]) {
    const connector = new this()
    await connector.init()
    await connector.createTables(tables)
    return connector
  }

  async create(collection: string, _doc: any | any[]): Promise<any> {
    return this.lock.acquire('db', async () => this._create(collection, _doc))
  }

  /**
   * Insert one document or an array of documents, then read the created
   * rows back (SQL defaults may have been applied server-side).
   * Returns an array iff an array was passed in.
   */
  private async _create(collection: string, _doc: any | any[]): Promise<any> {
    const table = this.schema[collection]
    if (!table) throw new Error(`Unable to find table ${collection} in schema`)
    const docs = [_doc].flat()
    const { sql, query } = createSql(table, docs)
    await this.db.exec(sql)
    if (Array.isArray(_doc)) {
      return this._findMany(collection, {
        where: query,
      })
    }
    return this._findOne(collection, {
      where: query,
    })
  }

  async findOne(collection: string, options: FindOneOptions) {
    return this.lock.acquire('db', async () =>
      this._findOne(collection, options),
    )
  }

  async _findOne(collection: string, options: FindOneOptions) {
    const [obj] = await this._findMany(collection, {
      ...options,
      limit: 1,
    })
    return obj === undefined ? null : obj
  }

  // load related models
  async loadIncluded(
    collection: string,
    options: { models: any[]; include?: any },
  ) {
    const { models, include } = options
    if (!include) return
    const table = this.schema[collection]
    if (!table) throw new Error(`Unable to find table ${collection} in schema`)
    for (const key of Object.keys(include)) {
      // for each relation to include
      const relation = table.relations[key]
      if (!relation)
        throw new Error(`Unable to find relation ${key} in ${collection}`)
      if (include[key]) {
        await this.loadIncludedModels(
          models,
          relation,
          typeof include[key] === 'object' ? include[key] : undefined,
        )
      }
    }
  }

  // load and assign submodels, mutates the models array supplied
  private async loadIncludedModels(
    models: any[],
    relation: Relation & { name: string },
    include?: any,
  ) {
    const values = models.map(model => model[relation.localField])
    // load relevant submodels
    const submodels = await this._findMany(relation.foreignTable, {
      where: {
        [relation.foreignField]: values,
      },
      include: include as any, // load subrelations if needed
    })
    // key the submodels by their relation field
    const keyedSubmodels = {}
    for (const submodel of submodels) {
      // assign to the models
      keyedSubmodels[submodel[relation.foreignField]] = submodel
    }
    // Assign submodel onto model
    for (const model of models) {
      const submodel = keyedSubmodels[model[relation.localField]]
      Object.assign(model, {
        [relation.name]: submodel,
      })
    }
  }

  async findMany(collection: string, options: FindManyOptions) {
    return this.lock.acquire('db', async () =>
      this._findMany(collection, options),
    )
  }

  /**
   * Run a SELECT built from `options`, convert the positional sql.js
   * result rows into keyed objects, JSON-parse Object-typed columns,
   * and load any included relations.
   */
  async _findMany(collection: string, options: FindManyOptions) {
    const table = this.schema[collection]
    if (!table) throw new Error(`Unable to find table ${collection}`)
    const sql = findManySql(table, options)
    const result = await this.db.exec(sql)
    if (result.length === 0) return []
    const [{ columns, values }] = result
    const models = [] as any[]
    for (const value of values) {
      const obj = {}
      for (const [index, column] of Object.entries(columns)) {
        obj[column as string] = value[index]
      }
      models.push(obj)
    }
    const objectKeys = Object.keys(table.rowsByName).filter(key => {
      return table.rowsByName[key]?.type === 'Object'
    })
    if (objectKeys.length > 0) {
      // need to expand json objects
      // nested yuck!
      // TODO handle json parse errors
      for (const model of models) {
        for (const key of objectKeys) {
          // eslint-disable-next-line no-continue
          if (typeof model[key] !== 'string') continue
          Object.assign(model, {
            [key]: JSON.parse(model[key]),
          })
        }
      }
    }
    const { include } = options
    await this.loadIncluded(collection, {
      models,
      include,
    })
    return models
  }

  async count(collection: string, where: WhereClause) {
    return this.lock.acquire('db', async () => this._count(collection, where))
  }

  // COUNT(*) with a where clause; returns the single scalar result.
  async _count(collection: string, where: WhereClause) {
    const table = this.schema[collection]
    if (!table) throw new Error(`Unable to find table ${collection}`)
    const sql = countSql(table, where)
    const result = await this.db.exec(sql)
    return result[0].values[0][0]
  }

  async update(collection: string, options: UpdateOptions) {
    return this.lock.acquire('db', async () =>
      this._update(collection, options),
    )
  }

  // Returns the number of rows matched; an empty update clause only counts.
  private async _update(collection: string, options: UpdateOptions) {
    const { where, update } = options
    if (Object.keys(update).length === 0) return this._count(collection, where)
    const table = this.schema[collection]
    if (!table) throw new Error(`Unable to find table ${collection} in schema`)
    const sql = updateSql(table, options)
    await this.db.exec(sql)
    return this.db.getRowsModified()
  }

  async upsert(collection: string, options: UpsertOptions) {
    return this.lock.acquire('db', async () =>
      this._upsert(collection, options),
    )
  }

  // Update-or-insert via a single SQL statement; errors propagate to the
  // caller. (Previously this also console.log'ed the raw SQL — leftover
  // debug output, removed.)
  async _upsert(collection: string, options: UpsertOptions) {
    const table = this.schema[collection]
    if (!table) throw new Error(`Unable to find table ${collection} in schema`)
    const sql = upsertSql(table, options)
    await this.db.run(sql)
    return this.db.getRowsModified()
  }

  async delete(collection: string, options: DeleteManyOptions) {
    return this.lock.acquire('db', async () =>
      this._deleteMany(collection, options),
    )
  }

  private async _deleteMany(collection: string, options: DeleteManyOptions) {
    const table = this.schema[collection]
    if (!table) throw new Error(`Unable to find table "${collection}"`)
    const sql = deleteManySql(table, options)
    await this.db.run(sql)
    return this.db.getRowsModified()
  }

  async transaction(operation: (db: TransactionDB) => void) {
    return this.lock.acquire('db', async () => this._transaction(operation))
  }

  // Allow only updates, upserts, deletes, and creates
  /**
   * Collect the SQL for each mutation registered by `operation`, then
   * run everything inside a single BEGIN/COMMIT; on failure the
   * transaction is rolled back and the error rethrown.
   */
  private async _transaction(operation: (db: TransactionDB) => void) {
    if (typeof operation !== 'function') throw new Error('Invalid operation')
    const sqlOperations = [] as string[]
    const transactionDB = {
      create: (collection: string, _doc: any) => {
        const table = this.schema[collection]
        if (!table)
          throw new Error(`Unable to find table ${collection} in schema`)
        const docs = [_doc].flat()
        const { sql } = createSql(table, docs)
        sqlOperations.push(sql)
      },
      update: (collection: string, options: UpdateOptions) => {
        const table = this.schema[collection]
        if (!table)
          throw new Error(`Unable to find table ${collection} in schema`)
        sqlOperations.push(updateSql(table, options))
      },
      delete: (collection: string, options: DeleteManyOptions) => {
        const table = this.schema[collection]
        if (!table) throw new Error(`Unable to find table "${collection}"`)
        const sql = deleteManySql(table, options)
        sqlOperations.push(sql)
      },
      upsert: (collection: string, options: UpsertOptions) => {
        const table = this.schema[collection]
        if (!table) throw new Error(`Unable to find table "${collection}"`)
        const sql = upsertSql(table, options)
        sqlOperations.push(sql)
      },
    }
    await Promise.resolve(operation(transactionDB))
    // now apply the transaction
    const transactionSql = `BEGIN TRANSACTION;
    ${sqlOperations.join('\n')}
    COMMIT;`
    try {
      await this.db.exec(transactionSql)
    } catch (err) {
      await this.db.exec('ROLLBACK;')
      throw err
    }
  }

  async close() {
    await this.db.close()
  }

  // Build the schema lookup and create all tables up front.
  async createTables(tableData: TableData[]) {
    this.schema = constructSchema(tableData)
    const createTablesCommand = tableCreationSql(tableData)
    await this.db.exec(createTablesCommand)
  }
}

View File

@@ -27,7 +27,7 @@ import {
upsertSql,
} from '../helpers/sql'
export class SQLiteConnector implements DB {
export class SQLiteConnector extends DB {
db: any // Database<sqlite3.Database, sqlite3.Statement>
config: {
@@ -39,6 +39,7 @@ export class SQLiteConnector implements DB {
lock = new AsyncLock()
constructor(config: any /* ISqlite.Config */) {
super()
this.config = config
this.db = {} as any
}
@@ -47,7 +48,10 @@ export class SQLiteConnector implements DB {
this.db = await open(this.config)
}
static async create(_config: any /* ISqlite.Config */ | string) {
static async create(
tables: TableData[],
_config: any /* ISqlite.Config */ | string,
) {
const config =
typeof _config === 'string'
? {
@@ -57,6 +61,7 @@ export class SQLiteConnector implements DB {
: _config
const connector = new this(config)
await connector.init()
await connector.createTables(tables)
return connector
}
@@ -161,8 +166,8 @@ export class SQLiteConnector implements DB {
if (!table) throw new Error(`Unable to find table ${collection}`)
const sql = findManySql(table, options)
const models = await this.db.all(sql)
const objectKeys = Object.keys(table.rows).filter(key => {
return table.rows[key]?.type === 'Object'
const objectKeys = Object.keys(table.rowsByName).filter(key => {
return table.rowsByName[key]?.type === 'Object'
})
if (objectKeys.length > 0) {
// need to expand json objects

View File

@@ -33,12 +33,12 @@ export function parseType(type: string, value: any) {
throw new Error(`Unrecognized value "${value}" for type ${type}`)
}
export function whereToSql(table: TableData, doc: any = {}, sqlOnly = false) {
export function whereToSql(table: SchemaTable, doc: any = {}, sqlOnly = false) {
if (Object.keys(doc).length === 0) return ''
const sql = Object.keys(doc)
.map(key => {
if (key === 'OR') return
const rowDef = table.rows[key]
const rowDef = table.rowsByName[key]
if (!rowDef)
throw new Error(`Unable to find row definition for key: "${key}"`)
const val = doc[key]
@@ -173,7 +173,7 @@ export function createSql(
}
// query for retrieving the created documents, uses IN operator for all
// primary keys
const uniqueKeys = keys.filter(k => table.rows[k]?.unique)
const uniqueKeys = keys.filter(k => table.rowsByName[k]?.unique)
const query = [table.primaryKey, uniqueKeys].flat().reduce((acc, key) => {
if (key === undefined) return acc
return {
@@ -186,7 +186,7 @@ export function createSql(
for (const doc of docs) {
const values = keys
.map(k => {
const rowDef = table.rows[k]
const rowDef = table.rowsByName[k]
if (!rowDef)
throw new Error(`Unable to find row definition for key: "${k}"`)
if (query[k]) {
@@ -234,7 +234,7 @@ export function updateSql(table: SchemaTable, options: UpdateOptions): string {
const { where, update } = options
const setSql = Object.keys(update)
.map(key => {
const rowDef = table.rows[key]
const rowDef = table.rowsByName[key]
if (!rowDef)
throw new Error(`Unable to find row definition for key: "${key}"`)
return `"${key}" = ${parseType(rowDef.type, update[key])}`
@@ -291,7 +291,7 @@ export function upsertSql(table: SchemaTable, options: UpsertOptions): string {
.join(',')
const updateSqlCommand = Object.keys(options.update)
.map(key => {
const rowDef = table.rows[key]
const rowDef = table.rowsByName[key]
if (!rowDef)
throw new Error(`Unable to find row definition for key: "${key}"`)
return `"${key}" = ${parseType(rowDef.type, options.update[key])}`

View File

@@ -1,16 +1,9 @@
import Web3 from 'web3'
import schema from './schema'
import schema from './schema'
import { DB } from './types'
import { Config } from './schema.types'
export { SQLiteConnector } from './connectors/sqlite'
export { PostgresConnector } from './connectors/postgres'
export { DB, TableData } from './types'
export * from './schema.types'
export * from './preset'
export { schema }
interface L1Contract {
getConfig(): Promise<Config>
}
@@ -44,3 +37,8 @@ export enum TreeSpecies {
}
export const NULLIFIER_TREE_ID = 'nullifier-tree'
export { DB, TableData } from './types'
export * from './schema.types'
export * from './preset'
export { schema }

View File

@@ -0,0 +1,4 @@
// Node.js entry point for @zkopru/database: exposes the native sqlite3,
// in-memory sql.js, and postgres connectors alongside the shared
// exports (schema, types, presets) re-exported from the package index.
export { SQLiteConnector } from './connectors/sqlite'
export { SQLiteMemoryConnector } from './connectors/sqlite-memory'
export { PostgresConnector } from './connectors/postgres'
export * from '.'

View File

@@ -136,9 +136,9 @@ export default [
primaryKey: 'hash',
rows: [
['hash', 'String'],
['proposalNum', 'Int', { optional: true }],
['proposalNum', 'Int', { index: true, optional: true }],
['canonicalNum', 'Int', { optional: true }],
['proposedAt', 'Int', { optional: true }],
['proposedAt', 'Int', { index: true, optional: true }],
['proposalTx', 'String', { optional: true }],
['proposalData', 'String', { optional: true }],
['fetched', 'String', { optional: true }],
@@ -165,7 +165,7 @@ export default [
['proposer', 'String'],
['reason', 'String'],
['executionTx', 'String'],
['slashedAt', 'Int'],
['slashedAt', 'Int', { index: true }],
{
name: 'block',
relation: {
@@ -224,7 +224,7 @@ export default [
['index', 'String'],
['merged', 'String'],
['fee', 'String'],
['blockNumber', 'Int'],
['blockNumber', 'Int', { index: true }],
{
name: 'includedIn',
type: 'String',
@@ -240,7 +240,7 @@ export default [
['fee', 'String'],
['transactionIndex', 'Int'],
['logIndex', 'Int'],
['blockNumber', 'Int'],
['blockNumber', 'Int', { index: true }],
['queuedAt', 'String'],
],
},
@@ -358,12 +358,13 @@ export default [
},
{
name: 'LightTree',
primaryKey: 'id',
rows: [
{
name: 'id',
type: 'String',
default: () => uuid.v4(),
unique: true,
// unique: true,
},
['species', 'Int', { unique: true }],
['start', 'String'],
@@ -381,7 +382,7 @@ export default [
['isERC20', 'Bool'],
['isERC721', 'Bool'],
['identifier', 'Int'],
['blockNumber', 'Int'],
['blockNumber', 'Int', { index: true }],
],
},
] as TableData[]

View File

@@ -49,6 +49,7 @@ export type RowDef = {
name: string
unique?: boolean
optional?: boolean
index?: boolean
type: DataType
// relational fields should be virtual
relation?: Relation
@@ -58,33 +59,47 @@ export type RowDef = {
export type ShortRowDef = [
string,
DataType,
{ optional?: boolean; unique?: boolean } | undefined,
{ index?: boolean; optional?: boolean; unique?: boolean } | undefined,
]
export interface TableData {
name: string
primaryKey?: string | string[]
primaryKey: string | string[]
rows: (RowDef | ShortRowDef)[]
}
export interface DB {
create: (collection: string, doc: any | any[]) => Promise<any>
findOne: (collection: string, options: FindOneOptions) => Promise<any>
export abstract class DB {
static create: (tables: TableData[], ...args: any[]) => Promise<DB>
abstract create(collection: string, doc: any | any[]): Promise<any>
abstract findOne(collection: string, options: FindOneOptions): Promise<any>
// retrieve many documents matching a where clause
findMany: (collection: string, options: FindManyOptions) => Promise<any[]>
abstract findMany(
collection: string,
options: FindManyOptions,
): Promise<any[]>
// count document matching a where clause
count: (collection: string, where: WhereClause) => Promise<number>
abstract count(collection: string, where: WhereClause): Promise<number>
// update some documents returning the number updated
update: (collection: string, options: UpdateOptions) => Promise<number>
abstract update(collection: string, options: UpdateOptions): Promise<number>
// update or create some documents
upsert: (collection: string, options: UpsertOptions) => Promise<number>
// provide a schema to connectors that need schema info
createTables: (tableData: TableData[]) => Promise<void>
abstract upsert(collection: string, options: UpsertOptions): Promise<number>
// delete many documents, return the number of documents deleted
delete: (collection: string, options: DeleteManyOptions) => Promise<number>
transaction: (operation: (db: TransactionDB) => void) => Promise<void>
abstract delete(
collection: string,
options: DeleteManyOptions,
): Promise<number>
abstract transaction(operation: (db: TransactionDB) => void): Promise<void>
// close the db and cleanup
close: () => Promise<void>
abstract close(): Promise<void>
}
// The object available in a transaction context
@@ -109,10 +124,11 @@ export function normalizeRowDef(row: RowDef | ShortRowDef): RowDef {
}
export type SchemaTable = {
rows: { [rowKey: string]: RowDef | undefined }
rowsByName: { [rowKey: string]: RowDef | undefined }
relations: {
[relation: string]: (Relation & { name: string }) | undefined
}
rows: RowDef[]
} & TableData
export type Schema = {
@@ -124,11 +140,13 @@ export function constructSchema(tables: TableData[]): Schema {
for (const table of tables) {
schema[table.name] = {
relations: {},
rowsByName: {},
...table,
}
for (const row of table.rows) {
const fullRow = normalizeRowDef(row)
schema[table.name].rows[fullRow.name] = fullRow
schema[table.name].rowsByName[fullRow.name] = fullRow
schema[table.name].rows = schema[table.name].rows.map(normalizeRowDef)
if (fullRow.relation) {
schema[table.name].relations[fullRow.name] = {
name: fullRow.name,

View File

@@ -0,0 +1,2 @@
// Browser entry point for @zkopru/database: exposes only the IndexedDB
// connector (no native sqlite3/postgres bindings) alongside the shared
// exports re-exported from the package index.
export { IndexedDBConnector } from './connectors/indexed-db'
export * from '.'

View File

@@ -25,6 +25,12 @@ export default function(this: { db: DB }) {
})
assert.equal(row.id, 'test9')
}
{
const row = await this.db.findOne(table, {
where: { optionalField: 'nonexistent' },
})
assert.strictEqual(row, null)
}
})
test('should return null if not found', async () => {

View File

@@ -0,0 +1,30 @@
/* eslint-disable jest/no-hooks, jest/valid-describe */
import testSchema from './test-schema'
import { DB, IndexedDBConnector } from '~database-web'
import FindTests from './database/find'
import CreateTests from './database/create'
import UpdateTests from './database/update'
import DeleteTests from './database/delete'
import TransactionTests from './database/transaction'
// Runs the shared connector test suites against the IndexedDB connector.
// The suites read the connector from `this.db`, so a regular function
// (not an arrow) is required for the describe callback.
describe('indexedDB tests', function(this: any) {
  this.db = {} as DB
  beforeEach(async () => {
    this.db = await IndexedDBConnector.create(testSchema)
    // start every test from an empty database
    for (const { name } of testSchema) {
      await this.db.delete(name, { where: {} })
    }
  })
  afterEach(async () => {
    await this.db.close()
  })
  const suites = [
    FindTests,
    CreateTests,
    UpdateTests,
    DeleteTests,
    TransactionTests,
  ]
  for (const registerSuite of suites) {
    registerSuite.bind(this)()
  }
})

View File

@@ -1,6 +1,6 @@
/* eslint-disable jest/no-hooks, jest/valid-describe */
import testSchema from './test-schema'
import { DB, PostgresConnector } from '~database'
import { DB, PostgresConnector } from '~database-node'
import FindTests from './database/find'
import CreateTests from './database/create'
import UpdateTests from './database/update'
@@ -11,9 +11,9 @@ describe('postgres tests', function(this: any) {
this.db = {} as DB
beforeEach(async () => {
this.db = await PostgresConnector.create(
testSchema,
'postgres://postgres:password@localhost:5432',
)
await this.db.createTables(testSchema)
for (const { name } of testSchema) {
await this.db.delete(name, {
where: {},

View File

@@ -0,0 +1,29 @@
/* eslint-disable jest/no-hooks, jest/valid-describe */
import testSchema from './test-schema'
import { DB, SQLiteMemoryConnector } from '~database-node'
import FindTests from './database/find'
import CreateTests from './database/create'
import UpdateTests from './database/update'
import DeleteTests from './database/delete'
import TransactionTests from './database/transaction'
// Runs the shared connector test suites against the in-memory sql.js
// connector. The suites read the connector from `this.db`, so a regular
// function (not an arrow) is required for the describe callback.
describe('sqlite memory tests', function(this: { db: DB }) {
  beforeEach(async () => {
    this.db = await SQLiteMemoryConnector.create(testSchema)
    // start every test from an empty database
    for (const { name } of testSchema) {
      await this.db.delete(name, { where: {} })
    }
  })
  afterEach(async () => {
    await this.db.close()
  })
  const suites = [
    FindTests,
    CreateTests,
    UpdateTests,
    DeleteTests,
    TransactionTests,
  ]
  for (const registerSuite of suites) {
    registerSuite.bind(this)()
  }
})

View File

@@ -1,6 +1,6 @@
/* eslint-disable jest/no-hooks, jest/valid-describe */
import testSchema from './test-schema'
import { DB, SQLiteConnector } from '~database'
import { DB, SQLiteConnector } from '~database-node'
import FindTests from './database/find'
import CreateTests from './database/create'
import UpdateTests from './database/update'
@@ -9,8 +9,7 @@ import TransactionTests from './database/transaction'
describe('sqlite tests', function(this: { db: DB }) {
beforeEach(async () => {
this.db = await SQLiteConnector.create(':memory:')
await this.db.createTables(testSchema)
this.db = await SQLiteConnector.create(testSchema, ':memory:')
for (const { name } of testSchema) {
await this.db.delete(name, {
where: {},

View File

@@ -103,8 +103,9 @@ export default [
},
{
name: 'Table7',
primaryKey: 'id',
rows: [
['id', 'Int', { unique: true }],
['id', 'Int'],
['boolField', 'Bool'],
['stringField', 'String'],
['objectField', 'Object'],

View File

@@ -35,7 +35,7 @@
"@zkopru/database": "file:../database",
"@zkopru/transaction": "file:../transaction",
"@zkopru/tree": "file:../tree",
"@zkopru/utils": "file:../utils",
"@zkopru/utils-docker": "file:../utils-docker",
"@zkopru/zk-wizard": "file:../zk-wizard",
"big-integer": "^1.6.48",
"fs-extra": "^9.0.0",

View File

@@ -1,7 +1,7 @@
/* eslint-disable @typescript-eslint/camelcase */
import fs from 'fs-extra'
import path from 'path'
import * as utils from '@zkopru/utils'
import * as utils from '@zkopru/utils-docker'
import tar from 'tar'
export async function loadCircuits() {

View File

@@ -5,7 +5,12 @@ import { Fp } from '@zkopru/babyjubjub'
import { ZkTx, Utxo, UtxoStatus } from '@zkopru/transaction'
import { ZkWizard } from '@zkopru/zk-wizard'
import { keccakHasher, poseidonHasher, Grove } from '@zkopru/tree'
import { DB, SQLiteConnector, TreeSpecies, schema } from '@zkopru/database'
import {
DB,
SQLiteConnector,
TreeSpecies,
schema,
} from '@zkopru/database/dist/node'
import { accounts, address } from './testset-predefined'
import { utxos } from './testset-utxos'
import { txs } from './testset-txs'
@@ -89,8 +94,7 @@ export async function saveUtxos(db: DB, utxos: Utxo[]): Promise<DB> {
}
export async function loadZkTxs(): Promise<ZkTx[]> {
const mockupDB = await SQLiteConnector.create(':memory:')
await mockupDB.createTables(schema)
const mockupDB = await SQLiteConnector.create(schema, ':memory:')
const { grove } = await loadGrove(mockupDB)
await saveUtxos(mockupDB, [
utxos.utxo1_in_1,

View File

@@ -13,7 +13,7 @@
{ "path": "../database/tsconfig.build.json", "prepend": false },
{ "path": "../tree/tsconfig.build.json", "prepend": false },
{ "path": "../transaction/tsconfig.build.json", "prepend": false },
{ "path": "../utils/tsconfig.build.json", "prepend": false },
{ "path": "../utils-docker/tsconfig.build.json", "prepend": false },
{ "path": "../zk-wizard/tsconfig.build.json", "prepend": false },
]
}

View File

@@ -11,8 +11,10 @@
"~core": "../core/dist",
"~coordinator": "../coordinator/dist",
"~database": "../database/dist",
"~database-node": "../database/dist/node.js",
"~transaction": "../transaction/dist",
"~utils": "../utils/dist",
"~utils-docker": "../utils-docker/dist",
"~zk-wizard": "../zk-wizard/dist"
},
"scripts": {
@@ -39,7 +41,7 @@
"@zkopru/transaction": "file:../transaction",
"@zkopru/utils": "file:../utils",
"@zkopru/zk-wizard": "file:../zk-wizard",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"soltypes": "^1.3.5",
"node-docker-api": "^1.1.22",
"puppeteer": "^5.0.0",

View File

@@ -3,9 +3,10 @@ import Web3 from 'web3'
import path from 'path'
import { WebsocketProvider, Account } from 'web3-core'
import { Address } from 'soltypes'
import { DB, SQLiteConnector, schema } from '~database'
import { DB, SQLiteConnector, schema } from '~database-node'
import { ZkAccount, HDWallet } from '~account'
import { sleep, readFromContainer, pullAndGetContainer } from '~utils'
import { sleep } from '~utils'
import { readFromContainer, pullAndGetContainer } from '~utils-docker'
import { DEFAULT } from '~cli/apps/coordinator/config'
import { L1Contract, FullNode } from '~core'
import { Coordinator } from '~coordinator'
@@ -154,8 +155,7 @@ async function getAccounts(
carl: ZkAccount
coordinator: ZkAccount
}> {
const mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
const mockup = await SQLiteConnector.create(schema, ':memory:')
const hdWallet = new HDWallet(web3, mockup)
const mnemonic =
'myth like bonus scare over problem client lizard pioneer submit female collect'
@@ -204,8 +204,7 @@ async function getCoordinator(
address: string,
account: Account,
): Promise<{ coordinator: Coordinator; mockupDB: DB }> {
const mockupDB = await SQLiteConnector.create(':memory:')
await mockupDB.createTables(schema)
const mockupDB = await SQLiteConnector.create(schema, ':memory:')
const fullNode: FullNode = await FullNode.new({
address,
provider,
@@ -238,8 +237,7 @@ export async function getWallet({
erc20s: string[]
erc721s: string[]
}): Promise<{ zkWallet: ZkWallet; mockupDB: DB }> {
const mockupDB = await SQLiteConnector.create(':memory:')
await mockupDB.createTables(schema)
const mockupDB = await SQLiteConnector.create(schema, ':memory:')
const node: FullNode = await FullNode.new({
address,
provider,

View File

@@ -10,6 +10,7 @@
"_moduleAliases": {
"~babyjubjub": "../babyjubjub/dist",
"~database": "../database/dist",
"~database-node": "../database/dist/node.js",
"~dataset": "../dataset/dist",
"~tree": "dist"
},
@@ -35,7 +36,7 @@
"@zkopru/transaction": "file:../transaction",
"async-lock": "^1.2.2",
"big-integer": "^1.6.48",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"circomlib": "0.5.1",
"soltypes": "^1.3.5",
"uuid": "^8.1.0",

View File

@@ -0,0 +1,46 @@
import {
DB,
SQLiteMemoryConnector,
TreeSpecies,
schema,
} from '@zkopru/database/dist/node'
import { Fp } from '@zkopru/babyjubjub'
import { v4 } from 'uuid'
import { TreeConfig } from './light-rollup-tree'
import { UtxoTree } from './utxo-tree'
import { genesisRoot, poseidonHasher } from './hasher'
export default async function sample(
depth: number,
): Promise<{ tree: UtxoTree; db: DB }> {
const utxoTreeMetadata = {
id: v4(),
index: 1,
species: TreeSpecies.UTXO,
start: Fp.from(0),
end: Fp.from(0),
}
const utxoTreeConfig: TreeConfig<Fp> = {
hasher: poseidonHasher(depth),
forceUpdate: true,
fullSync: true,
}
const preHashes = poseidonHasher(depth).preHash
const utxoTreeInitialData = {
root: genesisRoot(poseidonHasher(depth)),
index: Fp.zero,
siblings: preHashes.slice(0, -1),
}
const mockupDB = await SQLiteMemoryConnector.create(schema)
const utxoTree = new UtxoTree({
db: mockupDB,
metadata: utxoTreeMetadata,
data: utxoTreeInitialData,
config: utxoTreeConfig,
})
await utxoTree.init()
return {
tree: utxoTree,
db: mockupDB,
}
}

View File

@@ -1,14 +1,6 @@
import { Fp } from '@zkopru/babyjubjub'
import {
DB,
LightTree,
TreeSpecies,
SQLiteConnector,
schema,
} from '@zkopru/database'
import { DB, LightTree, TreeSpecies } from '@zkopru/database'
import { ZkAddress } from '@zkopru/transaction'
import { v4 } from 'uuid'
import { genesisRoot, poseidonHasher } from './hasher'
import {
LightRollUpTree,
TreeMetadata,
@@ -88,38 +80,4 @@ export class UtxoTree extends LightRollUpTree<Fp> {
config,
})
}
static async sample(depth: number): Promise<{ tree: UtxoTree; db: DB }> {
const utxoTreeMetadata = {
id: v4(),
index: 1,
species: TreeSpecies.UTXO,
start: Fp.from(0),
end: Fp.from(0),
}
const utxoTreeConfig: TreeConfig<Fp> = {
hasher: poseidonHasher(depth),
forceUpdate: true,
fullSync: true,
}
const preHashes = poseidonHasher(depth).preHash
const utxoTreeInitialData = {
root: genesisRoot(poseidonHasher(depth)),
index: Fp.zero,
siblings: preHashes.slice(0, -1),
}
const mockupDB = await SQLiteConnector.create(':memory:')
await mockupDB.createTables(schema)
const utxoTree = new UtxoTree({
db: mockupDB,
metadata: utxoTreeMetadata,
data: utxoTreeInitialData,
config: utxoTreeConfig,
})
await utxoTree.init()
return {
tree: utxoTree,
db: mockupDB,
}
}
}

View File

@@ -2,7 +2,7 @@
import BN from 'bn.js'
import { toBN } from 'web3-utils'
import { DB, SQLiteConnector, schema } from '~database'
import { DB, SQLiteConnector, schema } from '~database-node'
import { Fp } from '~babyjubjub'
import { Grove, poseidonHasher, keccakHasher, Leaf } from '~tree'
import { utxos } from '~dataset/testset-utxos'
@@ -14,8 +14,7 @@ describe('grove full sync grove()', () => {
let lightSyncGrove: Grove
let mockup: DB
beforeAll(async () => {
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
fullSyncGrvoe = new Grove(mockup, {
utxoTreeDepth: 31,
withdrawalTreeDepth: 31,
@@ -155,8 +154,7 @@ describe('grove full sync grove()', () => {
},
}
const mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
const mockup = await SQLiteConnector.create(schema, ':memory:')
lightSyncGrove = new Grove(mockup, {
utxoTreeDepth: 31,
withdrawalTreeDepth: 31,

View File

@@ -3,7 +3,7 @@
/* eslint-disable jest/no-hooks */
import { toBN } from 'web3-utils'
import BN from 'bn.js'
import { DB, SQLiteConnector, schema } from '~database'
import { DB, SQLiteConnector, schema } from '~database-node'
import { NullifierTree, keccakHasher, genesisRoot } from '../../src'
describe('nullifier tree unit test', () => {
@@ -12,8 +12,7 @@ describe('nullifier tree unit test', () => {
const hasher = keccakHasher(depth)
let mockup: DB
beforeAll(async () => {
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
nullifierTree = new NullifierTree({
db: mockup,
hasher,

View File

@@ -2,6 +2,7 @@
/* eslint-disable jest/no-hooks */
import { Fp } from '~babyjubjub'
import { UtxoTree, poseidonHasher, Leaf, verifyProof } from '~tree'
import sample from '~tree/sample'
import { utxos } from '~dataset/testset-utxos'
import { accounts } from '~dataset/testset-predefined'
import { DB } from '~database'
@@ -12,7 +13,7 @@ describe('utxo tree unit test', () => {
const depth = 48
beforeAll(async () => {
// const db = nSQL()
const { tree, db } = await UtxoTree.sample(depth)
const { tree, db } = await sample(depth)
utxoTree = tree
mockup = db
})

View File

@@ -3,7 +3,7 @@
import BN from 'bn.js'
import { toBN } from 'web3-utils'
import { Fp } from '~babyjubjub'
import { DB, TreeSpecies, SQLiteConnector, schema } from '~database'
import { DB, TreeSpecies, SQLiteConnector, schema } from '~database-node'
import {
WithdrawalTree,
TreeConfig,
@@ -38,8 +38,7 @@ describe('withdrawal tree unit test', () => {
}
let mockup: DB
beforeAll(async () => {
mockup = await SQLiteConnector.create(':memory:')
await mockup.createTables(schema)
mockup = await SQLiteConnector.create(schema, ':memory:')
withdrawalTree = new WithdrawalTree({
db: mockup,
metadata: withdrawalTreeMetadata,

View File

@@ -0,0 +1,5 @@
const baseConfig = require('../../jest.config.base.js')
module.exports = {
...baseConfig,
}

View File

@@ -0,0 +1,28 @@
{
"name": "@zkopru/utils-docker",
"version": "1.0.0-beta.2",
"license": "GPL-3.0-or-later",
"main": "dist/index.js",
"types": "dist/index.d.ts",
"files": [
"dist"
],
"_moduleAliases": {
"~utils-docker": "dist"
},
"scripts": {
"prebuild": "shx mkdir -p dist",
"build": "tsc --build tsconfig.build.json",
"clean": "tsc --build tsconfig.build.json --clean && shx rm -rf coverage *.log junit.xml dist && jest --clearCache",
"link-modules": "link-module-alias"
},
"dependencies": {
"docker-compose": "^0.23.5",
"js-yaml": "^3.14.0",
"node-docker-api": "^1.1.22",
"tar": "^6.0.2"
},
"publishConfig": {
"access": "public"
}
}

View File

@@ -0,0 +1,11 @@
{
"extends": "../../tsconfig.build.json",
"compilerOptions": {
"rootDir": "./src",
"outDir": "./dist",
"allowJs": true,
"noImplicitAny": false,
"tsBuildInfoFile": "../../.build-cache/utils-docker.tsbuildinfo"
},
"include": ["src"]
}

View File

@@ -0,0 +1,3 @@
{
"extends": "../../tsconfig.json"
}

View File

@@ -27,19 +27,15 @@
},
"dependencies": {
"axios": "^0.21.1",
"bn.js": "^5.1.1",
"bn.js": "^5.2.0",
"circom_runtime": "0.1.13",
"docker-compose": "^0.23.5",
"ffjavascript": "0.2.22",
"fs-extra": "^9.0.0",
"js-yaml": "^3.14.0",
"node-docker-api": "^1.1.22",
"pino": "^6.2.0",
"pino-pretty": "^4.0.0",
"prompts": "^2.3.2",
"snarkjs": "0.3.33",
"soltypes": "^1.3.5",
"tar": "^6.0.2",
"web3-utils": "^1.2.6"
},
"publishConfig": {

View File

@@ -12,15 +12,6 @@ export { PromptApp } from './prompt'
export { Worker } from './worker'
export {
readFromContainer,
copyFromContainer,
getContainer,
buildAndGetContainer,
pullAndGetContainer,
pullOrBuildAndGetContainer,
} from './docker'
const units: Unit[] = [
'noether',
'wei',

View File

@@ -7,7 +7,7 @@ import { Fp } from '@zkopru/babyjubjub'
import { ZkTx, Utxo, UtxoStatus, TokenRegistry } from '@zkopru/transaction'
import { ZkWizard } from '@zkopru/zk-wizard'
import { keccakHasher, poseidonHasher, Grove } from '@zkopru/tree'
import { DB, SQLiteConnector, schema } from '@zkopru/database'
import { DB, SQLiteConnector, schema } from '@zkopru/database/dist/node'
import { accounts, address } from '~dataset/testset-predefined'
import { utxos } from '~dataset/testset-utxos'
import { txs } from '~dataset/testset-txs'
@@ -87,8 +87,7 @@ async function loadZkWizard(): Promise<{
zkWizard: ZkWizard
mockupDB: DB
}> {
const mockupDB = await SQLiteConnector.create(':memory:')
await mockupDB.createTables(schema)
const mockupDB = await SQLiteConnector.create(schema, ':memory:')
const { grove } = await loadGrove(mockupDB)
await saveUtxos(mockupDB, [
utxos.utxo1_in_1,

778
yarn.lock

File diff suppressed because it is too large Load Diff