Mirror of https://github.com/zkopru-network/zkopru.git (synced 2026-01-10 06:57:58 -05:00)
Merge branch 'develop' into main
.circleci/config.yml | 199 lines (new file)
@@ -0,0 +1,199 @@
version: 2.1
workflows:
  test_app:
    jobs:
      - build
      - lint:
          requires:
            - build
      - load_keys:
          requires:
            - build
      # - test_circuits:
      #     requires:
      #       - build_keys
      - test_coordinator:
          requires:
            - build
      - test_accounts:
          requires:
            - build
      - test_tree:
          requires:
            - build
      - test_babyjubjub:
          requires:
            - build
      - test_client:
          requires:
            - build
      - test_database:
          requires:
            - build
      - test_zkwizard:
          requires:
            - load_keys
      - test_integration:
          requires:
            - load_keys
      - test_utils:
          requires:
            - build
jobs:
  build:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - checkout
      - run:
          name: Install Yarn
          command: curl -o- -L https://yarnpkg.com/install.sh | bash
      - run:
          name: Install
          command: yarn
      - run:
          name: Build Contract
          command: yarn build:contract
      - run:
          name: Build TS
          command: yarn build:ts:serial
      - persist_to_workspace:
          root: ~/
          paths: project
  lint:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Lint
          command: yarn lint .
  load_keys:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Load Keys
          command: |
            cd packages/circuits
            yarn download-keys
      - persist_to_workspace:
          root: ~/
          paths: project
  test_circuits:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Circuit Tests
          command: yarn test --scope=@zkopru/circuits
          no_output_timeout: 120m
  test_coordinator:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Build Testnet
          command: docker-compose -f compose/docker-compose.yml build contracts
      - run:
          name: Coordinator Tests
          command: yarn test --scope=@zkopru/coordinator
  test_contracts:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Build Testnet
          command: docker-compose -f compose/docker-compose.yml build contracts-for-integration-test
      - run:
          name: Generate Block Data
          command: |
            cd packages/contracts
            DEBUG=1 yarn testblock:generate --scope=@zkopru/contracts
      - run:
          name: Contract Tests
          command: yarn test --scope=@zkopru/contracts
  test_accounts:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Account Tests
          command: yarn test --scope=@zkopru/account
  test_tree:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Tree Tests
          command: yarn test --scope=@zkopru/tree
  test_zkwizard:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: ZK Wizard Tests
          command: yarn test --scope=@zkopru/zk-wizard
  test_client:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Client Tests
          command: yarn test --scope=@zkopru/client
  test_database:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Database Tests
          command: yarn test --scope=@zkopru/database
  test_babyjubjub:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: BabyJubJub Tests
          command: yarn test --scope=@zkopru/babyjubjub
  test_utils:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Utils Tests
          command: yarn test --scope=@zkopru/utils
  test_integration:
    machine:
      image: ubuntu-2004:202010-01
    steps:
      - attach_workspace:
          at: ~/
      - run:
          name: Build Testnet
          command: docker-compose -f compose/docker-compose.yml build contracts-for-integration-test
      - run:
          name: Integration Tests
          command: DEBUG=1 yarn test --scope=@zkopru/integration-test
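The workflow above builds once, persists the workspace, and then fans out into lerna-scoped test jobs. As a rough local equivalent (a sketch assuming Node and Yarn are already installed, not an official script of this repo), the same commands can be chained in a shell:

```shell
# Mirror the CI pipeline locally (package scopes taken from the config above)
yarn                                           # Install
yarn build:contract                            # Build Contract
yarn build:ts:serial                           # Build TS
(cd packages/circuits && yarn download-keys)   # Load Keys
yarn test --scope=@zkopru/tree                 # any one of the scoped test jobs
```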
@@ -7,4 +7,4 @@
packages/**/keys
.git
.build-cache
!packages/prisma/mockup.db
!packages/circuits/keys
@@ -1,2 +1,4 @@
node_modules
dist
dist
packages/contracts/src/contracts
packages/client/browser/*
.eslintrc.js | 41 lines changed
@@ -1,10 +1,14 @@
const path = require('path')

const common = {
  env: {
    node: true,
    es6: true,
    es2020: true,
    'jest/globals': true,
  },
  parserOptions: {
    ecmaVersion: 2020,
  },
  plugins: ['prettier', 'jest', 'markdown'],
  extends: ['airbnb-base', 'prettier', 'plugin:jest/all'],
  rules: {
@@ -18,7 +22,10 @@ const common = {
    'jest/prefer-expect-assertions': 'off',
    'jest/no-test-return-statement': 'off',
    'import/prefer-default-export': 'off',
    'import/no-extraneous-dependencies': ['error', {"devDependencies": ["**/*.test.ts", "**/*.spec.ts"]}],
    'import/no-extraneous-dependencies': [
      'error',
      { devDependencies: ['**/*.test.ts', '**/*.spec.ts'] },
    ],
    'import/extensions': 'off',
    'no-console': 'off',
    'no-iterator': 'off',
@@ -58,17 +65,29 @@ module.exports = {
    rules: {
      ...common.rules,
      '@typescript-eslint/explicit-function-return-type': 'off',
      "@typescript-eslint/member-delimiter-style": ["error", {
        multiline: {
          delimiter: 'none', // 'none' or 'semi' or 'comma'
          requireLast: true,
      '@typescript-eslint/member-delimiter-style': [
        'error',
        {
          multiline: {
            delimiter: 'none', // 'none' or 'semi' or 'comma'
            requireLast: true,
          },
          singleline: {
            delimiter: 'semi', // 'semi' or 'comma'
            requireLast: false,
          },
        },
        singleline: {
          delimiter: 'semi', // 'semi' or 'comma'
          requireLast: false,
        },
      }]
      ],
    },
    overrides: [
      {
        files: '**/*.ts',
        rules: {
          'no-useless-constructor': 'off',
          '@typescript-eslint/no-useless-constructor': 'error',
        },
      },
    ],
    settings: {
      'import/resolver': {
        typescript: {},
.github/ISSUE_TEMPLATE/etc.md | 8 lines (new file, vendored)
@@ -0,0 +1,8 @@
---
name: Etc
about: 'Etc'
title: ''
labels: ''
assignees: ''

---
.gitignore | 9 lines changed (vendored)
@@ -4,12 +4,15 @@ db
zkopru.db
.data
**/*/.data
.mockup
**/*/.mockup
**/*/*.db
**/*/*.db-journal
temp
reports
junit.xml
dist
browser
.build-cache
keys.tgz
keys/
@@ -204,3 +207,9 @@ typings/
.history

# End of https://www.gitignore.io/api/node,intellij+all,visualstudiocode

*.sqlite
*.secret
config.json
*.swp
.DS_Store
.vscode/settings.json | 13 lines changed (vendored)
@@ -4,7 +4,12 @@
    "**/.git/subtree-cache/**": true,
    "**/.hg/store/**": true
  },
  "jest.runAllTestsFirst": false,
  "jest.autoEnable": false,
  "jest.showCoverageOnLoad": false
}
  "solidity.packageDefaultDependenciesDirectory": "node_modules",
  "solidity.packageDefaultDependenciesContractsDirectory": "",
  "solidity.compileUsingRemoteVersion": "v0.7.4+commit.3f05b770",
  "solidity.linter": "solium",
  "[solidity]": {
    "editor.defaultFormatter": "JuanBlanco.solidity"
  },
  "editor.formatOnSave": true
}
@@ -68,9 +68,9 @@ members of the project's leadership.

## Attribution

This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4,
available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html
available at <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>

[homepage]: https://www.contributor-covenant.org

For answers to common questions about this code of conduct, see
https://www.contributor-covenant.org/faq
<https://www.contributor-covenant.org/faq>
CONTRIBUTING.md | 143 lines changed
@@ -1,6 +1,6 @@
# Contributing to Zkopru👋

Thanks for taking a time to read this document. This document includes how to contribute to the project including testing and commits.
Thanks for taking a time to read this document. This document includes how to contribute to the project including testing and commits.

## Table of Content

@@ -13,7 +13,7 @@ Thanks for taking a time to read this document. This document includes how to co
## Security vulnerability

After the mainnet stage, you should not open up issues on Github to report bugs that can affect the network's security.
Mostly, it will be the case when you find some bugs in [`packages/contracts`](./packages/contracts) or [`packages/circuits`](./pacakges/circuits).
Mostly, it will be the case when you find some bugs in [`packages/contracts`](./packages/contracts) or [`packages/circuits`](./packages/circuits).
In this case, please report the bug via [security@zkopru.network](mailto:security@zkopru.network) instead of opening a public issue on Github.

## Commit rule
@@ -21,46 +21,50 @@ In this case, please report the bug via [security@zkopru.network](mailto:securit
This project follows the conventional commit rule.
To check the full specification, please see [https://www.conventionalcommits.org/](https://www.conventionalcommits.org/)
Here are the sample commits.

1. Commit message with description and breaking change footer

```
```text
feat: allow provided config object to extend other configs

BREAKING CHANGE: `extends` key in config file is now used for extending other config files
```

2. Commit message with ! to draw attention to breaking change

```
```text
refactor!: drop support for Node 6
```

3. Commit message with both ! and BREAKING CHANGE footer

```
```text
refactor!: drop support for Node 6

BREAKING CHANGE: refactor to use JavaScript features not available in Node 6.
```

4. Commit message with no body

```
```text
docs: correct spelling of CHANGELOG
```

5. Commit message with scope

```
```text
feat(lang): add polish language
```

6. Commit message with multi-paragraph body and multiple footers

```
```text
fix: correct minor typos in code

see the issue for details

see the issue for details

on typos fixed.

Reviewed-by: Z
Refs #133
```
@@ -82,11 +86,14 @@ This uses airbnb eslint, and husky will automatically prettify using commit-hook

* Get nvm [here](https://github.com/nvm-sh/nvm#installing-and-updating)
* Download node version 12 and set to use it.

```shell
nvm install 12
nvm use 12
```

If you want to make node 12 as the default option run && yarn build:keys

```shell
nvm alias default 12
```
@@ -102,7 +109,7 @@ This uses airbnb eslint, and husky will automatically prettify using commit-hook
1. Install & get initial setup for the project

```shell
yarn initialize
yarn bootstrap
```

2. Build packages
@@ -114,21 +121,29 @@ This uses airbnb eslint, and husky will automatically prettify using commit-hook
3. Run development env

```shell
make develop
yarn develop
```

This command will run the coordinator & cli wallet using docker and you can easily access the running programs via a web browser.
* coordinator: http://localhost:1234
* cli wallet: http://localhost:4321
* coordinator: <http://localhost:1234>
* cli wallet: <http://localhost:4321>

Or you can set up the environment without docker-compose. Please check the ["Manually setup Run cli applications"](#manually-setup-run-cli-applications) section.

### Integration test
### Integrated test

```
```shell
yarn test
```

If you don't have enough cpu cores, please use

```shell
yarn test:serial
```

Or you can test a specific package by just going into the sub package and running `yarn test` there.
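For example, a minimal sketch of testing only one package (any directory under `packages/` works the same way):

```shell
# run a single package's test suite from its own directory
cd packages/tree
yarn test
```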
### Manually setup Run cli applications

1. Prepare three terminals
@@ -144,22 +159,58 @@ yarn test
```shell
cd packages/cli && yarn dev:coordinator
```
This will give you a cli menu to operate the coordinator locally.

This will give you a cli menu to operate the coordinator locally.

4. Go to the cli package and run the wallet with a pre-configured test account.

```shell
cd packages/cli && yarn dev:wallet
```

This will give you a cli menu to run the wallet locally.

5. It stores the dev log in `packages/cli/WALLET_LOG` and `packages/cli/COORDINATOR_LOG`. You can beautify the logs using this command.

```shell
$ npm install -g pino-pretty
$ tail -f packages/cli/WALLET_LOG | pino-pretty
$ tail -f packages/cli/COORDINATOR_LOG | pino-pretty
npm install -g pino-pretty
tail -f packages/cli/WALLET_LOG | pino-pretty
tail -f packages/cli/COORDINATOR_LOG | pino-pretty
```

### Circuit, contract changes and its test

Currently, Zkopru uses prebuilt docker images for local testing to reduce the SNARK circuit build time. Therefore, if you're making any changes to the smart contracts or circuits, you need to follow these steps.

1. Go to `compose/docker-compose.yml`.
2. Modify the tag of the service you want to change. The tag name convention is the branch name with the issue number, e.g. `feat-6`, `refactor-913`.
3. Then run `yarn images build <service_name>` in the root directory. If you make changes to the 'circuit' image, this command can take about a day on common laptops.
4. After you have built the image, run `yarn test` in the sub package directory or in the root directory (see the sketch after this list).
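Putting the steps above together, a typical iteration on the contract image might look like the following sketch (the `contracts` service name comes from `compose/docker-compose.yml`; the tag edit itself happens in that file):

```shell
# 1-2. edit the image tag in compose/docker-compose.yml, e.g. zkoprunet/contracts:feat-6
# 3. rebuild only the service you changed
yarn images build contracts
# 4. run the affected package's tests (or `yarn test` at the root)
cd packages/contracts && yarn test
```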
### How to make changes of the circuit package

1. Add a test circuit in the directory `packages/circuits/tester/`
2. Write a testcase in the directory `packages/circuits/tests`
3. Run the test command

```shell
lerna run test --scope=@zkopru/circuits
```

or

```shell
cd packages/circuits
yarn test
```

4. After testing, build a docker image to use the compiled circuit and keys

```shell
# root directory of the project
yarn images build
# Or you can build only the zkoprunet/circuits image with this command
yarn images build circuits
```
### How to make changes of the circuit package.

@@ -189,24 +240,53 @@ yarn test
6. Tag the docker image and push it to Docker Hub.
7. (Optional) Specify the docker image tag in the test cases.

This command will compile and setup circuits in the `impls` directory.

5. (maintainer only) Update dockerfiles/docker-compose.yml to modify the tag and run the following:

```shell
yarn images build
docker-compose -f compose/docker-compose.yml push
```

### How to make changes of the prisma package

1. Because prisma currently does not support multiple sources gracefully, we have to update the following schemas at once
   * `packages/prisma/prisma/base.prisma`
   * `packages/prisma/prisma/postgres.prisma`
   * `packages/prisma/prisma/sqlite.prisma`
   * `packages/prisma/prisma/postgres-migrator.prisma`
   * `packages/prisma/prisma/sqlite-migrator.prisma`
2. Then run `yarn build:prisma`. This will update the prisma client typescript and the mockup sqlite db file (a condensed sketch follows this list).
3. To save the changes, run `yarn save:prisma`.
4. (optional) If you want to create a migration file for postgres manually, run `cd packages/prisma && yarn gen-migration:postgres`.
   You may have to run this command before running the integration test.
5. (optional) Migration may cause some problems; sometimes you need to clean up the `packages/prisma/prisma/migrations` directory.
6. (optional) To commit the change, clean up and squash the recent migrations into a single migration.
   And then, force stage the migration using `git add packages/prisma/prisma/migrations --force`.
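As a condensed sketch of steps 2-4 above (assuming the five schema files were already edited by hand):

```shell
# regenerate the prisma client typescript and the mockup sqlite db
yarn build:prisma
# persist the regenerated artifacts
yarn save:prisma
# (optional) create a postgres migration manually
cd packages/prisma && yarn gen-migration:postgres
```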
### Explore database

You can open the Prisma Studio to explore the database with the following steps:

1. Create `pacakges/prisma/prisma/.env`
1. Create `packages/prisma/prisma/.env`

2. Write up the database connection information.

   * for dev coordinator
```

```shell
# file packages/prisma/prisma/.env
DATABASE_URL="file:../../cli/zkopru-coordinator.db"
```

   * for dev wallet
```

```shell
# file packages/prisma/prisma/.env
DATABASE_URL="file:../../cli/zkopru-wallet.db"
```

3. Run `yarn studio`

```shell
@@ -218,10 +298,16 @@ You can open the Prisma Studio to explore the database with following steps:
1. Modify `packages/prisma/prisma/schema.prisma`

2. Running the following command will update the typescript automatically.

```shell
yarn build:prisma
```
3. Update mockup database (WIP)

3. Update mockup database.

```shell
yarn save:prisma
```

### Optional commands

@@ -237,7 +323,6 @@ yarn build:fresh
yarn build:ts
```

This command will re-build all of the packages by wiping away every artifact.

#### Setting up new snark keys
Makefile | 47 lines (deleted)
@@ -1,47 +0,0 @@
SHELL:=/bin/bash
DIR := ${CURDIR}

test-env: container-contract

# -------------------- Dev Containers -------------------- #
develop:
	$(info Make: yarn build:ts && docker-compose -f docker-compose.dev.yml up --build)
	@yarn build:ts
	@docker-compose -f docker-compose.dev.yml up --build --force-recreate -V

develop-instant:
	$(info Make: yarn build:ts && docker-compose -f docker-compose.dev.yml up --build)
	@yarn build:ts
	@docker-compose -f docker-compose.instant-block.yml up --build --force-recreate -V

playground-container:
	$(info Make: build container and compile circuits)
	@docker build -f containers/Playground.dockerfile ./ -t zkoprunet/playground --no-cache

contract-container:
	$(info Make: build container and compile circuits)
	@docker build -f containers/Contract.dockerfile ./packages/contracts -t wanseob/zkopru-contract

contract-container-for-integration-test:
	$(info Make: build container and compile circuits)
	@docker build -f containers/Contract.integration.dockerfile ./packages/contracts -t wanseob/zkopru-contract-integration-test

circuit-container:
	$(info Make: build container and compile circuits)
	@docker build -f containers/Circuits.dockerfile ./ -t wanseob/zkopru-circuits

circuit-testing-container:
	$(info Make: build container and compile circuits)
	@docker build -f containers/Circuits.test.dockerfile ./ -t wanseob/zkopru-circuits-test

coordinator-container:
	$(info Make: build container and compile circuits)
	@lerna run build --scope=@zkopru/coordinator
	@docker build -f containers/Coordinator.dockerfile ./ -t wanseob/zkopru-coordinator

# ------------ Pull containers fro docker hub ------------- #
pull-dev-images:
	@docker pull wanseob/zkopru-contract:0.0.1
	@docker pull wanseob/zkopru-contract-integration-test:0.0.1
	@docker pull wanseob/zkopru-circuits:0.0.1
	@docker pull wanseob/zkopru-circuits-test:0.0.1
README.md | 35 lines changed
@@ -2,11 +2,15 @@



Zkopru testnet, Arctic Roll, has launched! Please visit https://zkopru.network and try it now :)
An audit for the Zkopru testnet v2 is in progress!

| Branch | Status |
| --------- | ------ |
| `develop` | [](https://app.circleci.com/pipelines/github/zkopru-network/zkopru) |



## What is zkopru?
## What is zkopru

Zkopru(zk-optimistic-rollup) is a layer-2 scaling solution for private transactions using zk-SNARK and optimistic rollup. It supports private transfer and private atomic swap within the layer-2 network between ETH, ERC20, ERC721 at a low cost. Also, with the pay-in-advance feature, users can withdraw assets from the layer-2 before the finalization.

@@ -20,17 +24,34 @@ zkopru-wallet
## Get more information

<!-- * [Ethresear.ch](https://ethresear.ch) -->
* [Document](https://docs.zkopru.network) [(edit)](https://github.com/wanseob/docs.zkopru.network)

- [Document](https://docs.zkopru.network) [(edit)](https://github.com/wanseob/docs.zkopru.network)

## Trusted Setup

- [Trusted Setup Result](https://storage.googleapis.com/zkopru-mpc-files/index.html)
- [Keys](https://ipfs.io/ipfs/QmSQtbTnt5RWrP8uWJ3S5xUKntTx2DqcM7mM5vUg9uJGxq) (ipfs cid: QmSQtbTnt5RWrP8uWJ3S5xUKntTx2DqcM7mM5vUg9uJGxq)

## Contribution

* [Code of conduct](./CODE_OF_CONDUCT.md)
* [Contributing](./CONTRIBUTING.md)
- [Code of conduct](./CODE_OF_CONDUCT.md)
- [Contributing](./CONTRIBUTING.md)

## Authors

* Wanseob Lim
* Barry Whitehat
- Wanseob Lim ([@wanseob](https://github.com/wanseob))
- Barry Whitehat ([@barrywhitehat](https://github.com/barrywhitehat))

## Co-authors

- Chance Hudson ([@JChanceHud](https://github.com/jchancehud))

## Contributors

- Koh Wei Jie ([@weijiekoh](https://github.com/weijiekoh), weijie.koh@ethereum.org)
- Kobi Gurkan ([@kobigurk](https://github.com/kobigurk), kobigurk@gmail.com)
- Thore Hildebrandt ([@ETHorHIL](https://github.com/ETHorHIL), hildebrandtthore@gmail.com)
- Geoff Lamperd ([@glamperd](https://github.com/glamperd))

## License
compose/docker-compose.dev.yml | 37 lines (new file)
@@ -0,0 +1,37 @@
version: "3"

services:
  testnet:
    build:
      context: ../packages/contracts/
      dockerfile: ../../dockerfiles/Contract.dockerfile
    ports:
      - "5000:5000"
    command: ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0
  coordinator:
    build:
      context: ../
      dockerfile: ./dockerfiles/Coordinator.dockerfile
    ports:
      - "8888:8888"
    links:
      - 'testnet:testnet'
    depends_on:
      - 'testnet'
    volumes:
      - '../packages:/proj/packages'
    command: 'node /proj/packages/cli/dist/apps/coordinator/cli.js --ws ws://testnet:5000 --config /proj/packages/cli/coordinator.dev.json'
  wallet:
    build:
      context: ../
      dockerfile: ./dockerfiles/Cli.dockerfile
    ports:
      - "4321:4321"
    links:
      - 'testnet:testnet'
      - 'coordinator:coordinator'
    depends_on:
      - 'testnet'
    volumes:
      - '../packages:/proj/packages'
    command: sh -c "sleep 5s && gotty -w --port 4321 node /proj/packages/cli/dist/apps/wallet/cli.js --config /proj/packages/cli/wallet.dev.json"
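A sketch of bringing this development stack up directly with docker-compose (the `yarn develop` flow described in CONTRIBUTING.md wraps roughly this; the compose file path is assumed from the directory layout above):

```shell
# build TypeScript first so the mounted ../packages volumes contain dist/ output
yarn build:ts
docker-compose -f compose/docker-compose.dev.yml up --build --force-recreate -V
# coordinator API on http://localhost:8888, wallet terminal on http://localhost:4321
```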
compose/docker-compose.instant-block.yml | 34 lines (new file)
@@ -0,0 +1,34 @@
version: '3'

services:
  testnet:
    ports:
      - '5000:5000'
    build:
      context: ../packages/contracts/
      dockerfile: ../../dockerfiles/Contract.dockerfile
    command: ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0
  coordinator:
    build:
      context: ./
      dockerfile: ./dockerfiles/Cli.dockerfile
    ports:
      - '1234:1234'
      - '8888:8888'
    links:
      - 'testnet:testnet'
    depends_on:
      - 'testnet'
    command: sh -c "sleep 5s && gotty -w --port 1234 node /proj/packages/cli/dist/apps/coordinator/cli.js --config /proj/packages/cli/coordinator.dev.json"
  wallet:
    build:
      context: ../
      dockerfile: ./dockerfiles/Cli.dockerfile
    ports:
      - '4321:4321'
    links:
      - 'testnet:testnet'
      - 'coordinator:coordinator'
    depends_on:
      - 'testnet'
    command: sh -c "sleep 5s && gotty -w --port 4321 node /proj/packages/cli/dist/apps/wallet/cli.js --config /proj/packages/cli/wallet.dev.json"
compose/docker-compose.yml | 28 lines (new file)
@@ -0,0 +1,28 @@
version: '3.4'

services:
  contracts:
    image: zkoprunet/contracts:2.0.0-beta.0
    build:
      context: ../packages/contracts/
      dockerfile: ../../dockerfiles/Contract.dockerfile
  contracts-for-integration-test:
    image: zkoprunet/contracts-integration-test:2.0.0-beta.0
    build:
      context: ../packages/contracts/
      dockerfile: ../../dockerfiles/Contract.integration.dockerfile
  circuits:
    image: zkoprunet/circuits:2.0.0-beta.0
    build:
      context: ../packages/circuits/
      dockerfile: ../../dockerfiles/Circuits.light.dockerfile
  playground:
    image: zkoprunet/playground:2.0.0-beta.0
    build:
      context: ../
      dockerfile: dockerfiles/Playground.dockerfile
  coordinator:
    image: zkoprunet/coordinator:2.0.0-beta.0
    build:
      context: ../
      dockerfile: dockerfiles/Coordinator.dockerfile
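These image definitions back the root `yarn images` helper. A sketch of building one tagged image and pushing the set (the push step is for maintainers, as noted in CONTRIBUTING.md):

```shell
# build just the circuits image defined above
docker-compose -f compose/docker-compose.yml build circuits
# push the tagged images to Docker Hub
docker-compose -f compose/docker-compose.yml push
```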
@@ -1,14 +0,0 @@
|
||||
FROM node:13-alpine
|
||||
RUN apk update && apk add bash
|
||||
RUN npm install -g circom snarkjs
|
||||
WORKDIR /proj
|
||||
COPY ./packages/circuits/package.json /proj/package.json
|
||||
RUN npm install
|
||||
COPY ./packages/circuits/impls /proj/impls
|
||||
COPY ./packages/circuits/lib /proj/lib
|
||||
RUN mkdir /proj/script
|
||||
COPY ./packages/circuits/script/compile_circuits.sh /proj/script/compile_circuits.sh
|
||||
RUN /bin/bash /proj/script/compile_circuits.sh
|
||||
COPY ./packages/circuits/script/snark_setup.sh /proj/script/snark_setup.sh
|
||||
RUN /bin/bash /proj/script/snark_setup.sh
|
||||
CMD /bin/bash
|
||||
@@ -1,12 +0,0 @@
|
||||
FROM node:13-alpine
|
||||
RUN apk update && apk add bash
|
||||
RUN npm install -g circom snarkjs
|
||||
WORKDIR /proj
|
||||
COPY ./packages/circuits/package.json /proj/package.json
|
||||
RUN npm install
|
||||
COPY ./packages/circuits/script /proj/script
|
||||
COPY ./packages/circuits/tester /proj/impls
|
||||
COPY ./packages/circuits/lib /proj/lib
|
||||
RUN /bin/bash /proj/script/compile_circuits.sh
|
||||
RUN /bin/bash /proj/script/snark_setup.sh
|
||||
CMD /bin/bash
|
||||
@@ -1,19 +0,0 @@
|
||||
FROM node:12-alpine
|
||||
RUN apk add --no-cache --virtual .gyp \
|
||||
python \
|
||||
make \
|
||||
g++ \
|
||||
&& npm install -g truffle ganache-cli \
|
||||
&& apk del .gyp
|
||||
WORKDIR /proj
|
||||
COPY ./package.json /proj/package.json
|
||||
RUN yarn install
|
||||
COPY ./contracts /proj/contracts
|
||||
COPY ./utils /proj/utils
|
||||
COPY ./migrations /proj/migrations
|
||||
COPY ./truffle-config.js /proj/truffle-config.js
|
||||
RUN truffle compile
|
||||
EXPOSE 5000
|
||||
COPY ./keys /proj/keys
|
||||
RUN ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0 & sleep 5 && truffle migrate --network testnet
|
||||
CMD ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0
|
||||
@@ -1,18 +0,0 @@
|
||||
FROM node:12-alpine
|
||||
RUN apk add --no-cache --virtual .gyp \
|
||||
python \
|
||||
make \
|
||||
g++ \
|
||||
&& npm install -g truffle ganache-cli \
|
||||
&& apk del .gyp
|
||||
WORKDIR /proj
|
||||
COPY ./package.json /proj/package.json
|
||||
RUN yarn install
|
||||
COPY ./contracts /proj/contracts
|
||||
COPY ./utils /proj/utils
|
||||
COPY ./migrations /proj/migrations
|
||||
COPY ./truffle-config.js /proj/truffle-config.js
|
||||
RUN truffle compile
|
||||
EXPOSE 5000
|
||||
RUN ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0 & sleep 5 && truffle migrate --network integrationtest
|
||||
CMD ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0
|
||||
@@ -1,3 +0,0 @@
|
||||
FROM postgres
|
||||
|
||||
COPY ./packages/prisma/init.d/init.sql /docker-entrypoint-initdb.d/
|
||||
@@ -1,8 +0,0 @@
|
||||
FROM node:14-alpine
|
||||
RUN npm install -g @prisma/cli
|
||||
|
||||
WORKDIR /proj
|
||||
|
||||
# Copy package.json
|
||||
COPY ./packages/prisma/prisma/postgres-migrator.prisma /proj/prisma/postgres-migrator.prisma
|
||||
COPY ./packages/prisma/prisma/migrations /proj/prisma/migrations
|
||||
@@ -1,57 +0,0 @@
|
||||
version: "3"
|
||||
|
||||
services:
|
||||
testnet:
|
||||
ports:
|
||||
- "5000:5000"
|
||||
build:
|
||||
context: ./packages/contracts/
|
||||
dockerfile: ../../containers/Contract.dockerfile
|
||||
command: ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0 --secure --blockTime 10
|
||||
postgres:
|
||||
ports:
|
||||
- "5678:5432"
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/Postgres.dockerfile
|
||||
environment:
|
||||
POSTGRES_PASSWORD: helloworld
|
||||
postgres-setup:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/PostgresSetup.dockerfile
|
||||
depends_on:
|
||||
- 'postgres'
|
||||
links:
|
||||
- 'postgres:postgres'
|
||||
environment:
|
||||
POSTGRES_URL: postgresql://postgres:helloworld@postgres:5432/zkopru
|
||||
command: prisma migrate up --experimental --schema /proj/prisma/postgres-migrator.prisma --verbose
|
||||
coordinator:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/Cli.dockerfile
|
||||
ports:
|
||||
- "1234:1234"
|
||||
- "8888:8888"
|
||||
links:
|
||||
- 'testnet:testnet'
|
||||
- 'postgres:postgres'
|
||||
depends_on:
|
||||
- 'testnet'
|
||||
- 'postgres-setup'
|
||||
command: sh -c "sleep 5s && gotty -w --port 1234 node /proj/packages/cli/dist/apps/coordinator/cli.js --config /proj/packages/cli/coordinator.dev.json"
|
||||
wallet:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/Cli.dockerfile
|
||||
ports:
|
||||
- "4321:4321"
|
||||
links:
|
||||
- 'testnet:testnet'
|
||||
- 'postgres:postgres'
|
||||
- 'coordinator:coordinator'
|
||||
depends_on:
|
||||
- 'testnet'
|
||||
- 'postgres-setup'
|
||||
command: sh -c "sleep 5s && gotty -w --port 4321 node /proj/packages/cli/dist/apps/wallet/cli.js --config /proj/packages/cli/wallet.dev.json"
|
||||
@@ -1,57 +0,0 @@
|
||||
version: "3"
|
||||
|
||||
services:
|
||||
testnet:
|
||||
ports:
|
||||
- "5000:5000"
|
||||
build:
|
||||
context: ./packages/contracts/
|
||||
dockerfile: ../../containers/Contract.dockerfile
|
||||
command: ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0 --secure
|
||||
postgres:
|
||||
ports:
|
||||
- "5678:5432"
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/Postgres.dockerfile
|
||||
environment:
|
||||
POSTGRES_PASSWORD: helloworld
|
||||
postgres-setup:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/PostgresSetup.dockerfile
|
||||
depends_on:
|
||||
- 'postgres'
|
||||
links:
|
||||
- 'postgres:postgres'
|
||||
environment:
|
||||
POSTGRES_URL: postgresql://postgres:helloworld@postgres:5432/zkopru
|
||||
command: prisma migrate up --experimental --schema /proj/prisma/postgres-migrator.prisma --verbose
|
||||
coordinator:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/Cli.dockerfile
|
||||
ports:
|
||||
- "1234:1234"
|
||||
- "8888:8888"
|
||||
links:
|
||||
- 'testnet:testnet'
|
||||
- 'postgres:postgres'
|
||||
depends_on:
|
||||
- 'testnet'
|
||||
- 'postgres-setup'
|
||||
command: sh -c "sleep 5s && gotty -w --port 1234 node /proj/packages/cli/dist/apps/coordinator/cli.js --config /proj/packages/cli/coordinator.dev.json"
|
||||
wallet:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/Cli.dockerfile
|
||||
ports:
|
||||
- "4321:4321"
|
||||
links:
|
||||
- 'testnet:testnet'
|
||||
- 'postgres:postgres'
|
||||
- 'coordinator:coordinator'
|
||||
depends_on:
|
||||
- 'testnet'
|
||||
- 'postgres-setup'
|
||||
command: sh -c "sleep 5s && gotty -w --port 4321 node /proj/packages/cli/dist/apps/wallet/cli.js --config /proj/packages/cli/wallet.dev.json"
|
||||
@@ -4,26 +4,30 @@ services:
|
||||
testnet:
|
||||
build:
|
||||
context: ./packages/contracts/
|
||||
dockerfile: ../../containers/Contract.dockerfile
|
||||
dockerfile: ../../dockerfiles/Contract.dockerfile
|
||||
ports:
|
||||
- "5000:5000"
|
||||
command: ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0
|
||||
command: ganache-cli --db=/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0 --blockTime 13
|
||||
coordinator:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./containers/Coordinator.dockerfile
|
||||
dockerfile: ./dockerfiles/Coordinator.dockerfile
|
||||
ports:
|
||||
- "8888:8888"
|
||||
links:
|
||||
- 'testnet:testnet'
|
||||
depends_on:
|
||||
- 'testnet'
|
||||
- 'postgres'
|
||||
command: 'node /proj/packages/coordinator/dist/cli.js --ws ws://testnet:5000 --config /proj/packages/coordinator/coordinator.json'
|
||||
postgres:
|
||||
image: postgres
|
||||
restart: always
|
||||
environment:
|
||||
POSTGRES_PASSWORD: helloworld
|
||||
command: 'node /proj/packages/cli/dist/apps/coordinator/cli.js --ws ws://testnet:5000 --config /proj/packages/cli/coordinator.json'
|
||||
wallet:
|
||||
build:
|
||||
context: ./
|
||||
dockerfile: ./dockerfiles/Cli.dockerfile
|
||||
ports:
|
||||
- "5432:5432"
|
||||
- "4321:4321"
|
||||
links:
|
||||
- 'testnet:testnet'
|
||||
- 'coordinator:coordinator'
|
||||
depends_on:
|
||||
- 'testnet'
|
||||
command: sh -c "sleep 5s && gotty -w --port 4321 node /proj/packages/cli/dist/apps/wallet/cli.js --config /proj/packages/cli/wallet.dev.json"
|
||||
|
||||
20
dockerfiles/Circuits.dockerfile
Normal file
20
dockerfiles/Circuits.dockerfile
Normal file
@@ -0,0 +1,20 @@
|
||||
FROM node:13-alpine as phase1
|
||||
WORKDIR /proj
|
||||
RUN apk update && apk add bash git
|
||||
COPY package.json /proj/package.json
|
||||
RUN mkdir /utils-docker && echo '{"version": "0.0.0"}' > /utils-docker/package.json
|
||||
RUN npm install --only=prod
|
||||
RUN npm install -g circom ./node_modules/circom
|
||||
RUN npm install -g snarkjs ./node_modules/snarkjs
|
||||
COPY script/powers_of_tau_phase_1.sh /proj/script/powers_of_tau_phase_1.sh
|
||||
RUN /bin/bash /proj/script/powers_of_tau_phase_1.sh
|
||||
CMD /bin/bash
|
||||
|
||||
FROM phase1 as phase2
|
||||
COPY impls/*.circom /proj/impls/
|
||||
COPY lib /proj/lib
|
||||
COPY script/compile_circuits.sh /proj/script/compile_circuits.sh
|
||||
RUN /bin/bash /proj/script/compile_circuits.sh
|
||||
COPY script/powers_of_tau_phase_2.sh /proj/script/powers_of_tau_phase_2.sh
|
||||
RUN /bin/bash /proj/script/powers_of_tau_phase_2.sh
|
||||
CMD /bin/bash
|
||||
19
dockerfiles/Circuits.light.dockerfile
Normal file
19
dockerfiles/Circuits.light.dockerfile
Normal file
@@ -0,0 +1,19 @@
|
||||
FROM node:13-alpine
|
||||
WORKDIR /proj
|
||||
RUN apk update && apk add bash git curl
|
||||
COPY package.json /proj/package.json
|
||||
RUN mkdir /utils-docker && echo '{"version": "0.0.0"}' > /utils-docker/package.json
|
||||
RUN npm install --only=prod
|
||||
RUN npm install -g circom ./node_modules/circom
|
||||
RUN npm install -g snarkjs ./node_modules/snarkjs
|
||||
RUN curl -LJO https://github.com/zkopru-network/zkopru/releases/download/20201112/pot17_final.ptau
|
||||
RUN mkdir -p /proj/build/ptau
|
||||
RUN mv pot17_final.ptau /proj/build/ptau/pot17_final.ptau
|
||||
|
||||
COPY impls/*.circom /proj/impls/
|
||||
COPY lib /proj/lib
|
||||
COPY script/compile_circuits.sh /proj/script/compile_circuits.sh
|
||||
RUN /bin/bash /proj/script/compile_circuits.sh
|
||||
COPY script/powers_of_tau_phase_2.sh /proj/script/powers_of_tau_phase_2.sh
|
||||
RUN /bin/bash /proj/script/powers_of_tau_phase_2.sh
|
||||
CMD /bin/bash
|
||||
@@ -3,7 +3,7 @@ RUN apt update
|
||||
RUN apt install -y git make musl-dev golang-go sqlite g++ tmux
|
||||
RUN mkdir -p /usr/share/man/man1
|
||||
RUN mkdir -p /usr/share/man/man7
|
||||
RUN apt install -y postgresql-client
|
||||
RUN apt install -y postgresql-client netcat
|
||||
|
||||
# Configure Go
|
||||
ENV GOROOT /usr/lib/go
|
||||
@@ -25,7 +25,7 @@ RUN ln -s "$(which nodejs)" /usr/bin/node
|
||||
WORKDIR /proj
|
||||
|
||||
# Copy SNARK keys
|
||||
COPY ./keys /proj/keys
|
||||
COPY ./packages/circuits/keys /proj/keys
|
||||
|
||||
# Copy package.json
|
||||
COPY ./.package-dev.json /proj/package.json
|
||||
@@ -36,7 +36,7 @@ COPY ./packages/contracts/package.json /proj/packages/contracts/package.json
|
||||
COPY ./packages/coordinator/package.json /proj/packages/coordinator/package.json
|
||||
COPY ./packages/cli/package.json /proj/packages/cli/package.json
|
||||
COPY ./packages/core/package.json /proj/packages/core/package.json
|
||||
COPY ./packages/prisma/package.json /proj/packages/prisma/package.json
|
||||
COPY ./packages/database/package.json /proj/packages/database/package.json
|
||||
COPY ./packages/transaction/package.json /proj/packages/transaction/package.json
|
||||
COPY ./packages/tree/package.json /proj/packages/tree/package.json
|
||||
COPY ./packages/utils/package.json /proj/packages/utils/package.json
|
||||
@@ -46,15 +46,13 @@ COPY ./yarn.lock /proj/yarn.lock
|
||||
RUN yarn install
|
||||
|
||||
# Copy dist
|
||||
COPY ./packages/account/dist /proj/packages/account/dist
|
||||
COPY ./packages/account/dist /proj/packages/account/dist
|
||||
COPY ./packages/babyjubjub/dist /proj/packages/babyjubjub/dist
|
||||
COPY ./packages/contracts/dist /proj/packages/contracts/dist
|
||||
COPY ./packages/coordinator/dist /proj/packages/coordinator/dist
|
||||
COPY ./packages/core/dist /proj/packages/core/dist
|
||||
COPY ./packages/database/dist /proj/packages/database/dist
|
||||
COPY ./packages/cli/dist /proj/packages/cli/dist
|
||||
COPY ./packages/prisma/dist /proj/packages/prisma/dist
|
||||
COPY ./packages/prisma/generated /proj/packages/prisma/generated
|
||||
COPY ./packages/prisma/mockup.db /proj/packages/prisma/mockup.db
|
||||
COPY ./packages/transaction/dist /proj/packages/transaction/dist
|
||||
COPY ./packages/tree/dist /proj/packages/tree/dist
|
||||
COPY ./packages/utils/dist /proj/packages/utils/dist
|
||||
@@ -63,6 +61,8 @@ RUN lerna clean -y --loglevel silent && lerna bootstrap
|
||||
|
||||
COPY ./packages/cli/coordinator.*.json /proj/packages/cli/
|
||||
COPY ./packages/cli/wallet.*.json /proj/packages/cli/
|
||||
COPY ./packages/prisma/prisma /proj/packages/prisma/prisma
|
||||
|
||||
COPY ./scripts/dev_start.sh /dev_start.sh
|
||||
EXPOSE 8888
|
||||
CMD ["node", "/proj/packages/cli/dist/apps/coordinator/cli.js", "--ws ws://localhost:5000", "--config /proj/packages/cli/coordinator.json"]
|
||||
CMD ["/bin/sh", "/dev_start.sh"]
|
||||
# CMD ["node", "/proj/packages/cli/dist/apps/coordinator/cli.js", "--ws ws://localhost:5000", "--config /proj/packages/cli/coordinator.json"]
|
||||
22
dockerfiles/Contract.dockerfile
Normal file
22
dockerfiles/Contract.dockerfile
Normal file
@@ -0,0 +1,22 @@
|
||||
FROM node:12-alpine
|
||||
RUN apk add --no-cache --virtual .gyp \
|
||||
python \
|
||||
make \
|
||||
g++ \
|
||||
&& npm install -g truffle ganache-cli --unsafe-perm=true --allow-root \
|
||||
&& apk del .gyp
|
||||
RUN apk add git
|
||||
WORKDIR /proj
|
||||
COPY ./package.json /proj/package.json
|
||||
# Stub a package json for @zkopru/utils so yarn install works
|
||||
RUN mkdir /utils && echo '{"version": "0.0.0"}' > /utils/package.json
|
||||
RUN yarn install
|
||||
COPY ./contracts /proj/contracts
|
||||
COPY ./utils /proj/utils
|
||||
COPY ./migrations /proj/migrations
|
||||
COPY ./truffle-config.js /proj/truffle-config.js
|
||||
RUN truffle compile
|
||||
EXPOSE 5000
|
||||
COPY ./keys /proj/keys
|
||||
RUN ganache-cli --db=/data -i 20200406 -p 5000 --gasLimit 12000000 --deterministic --host 0.0.0.0 & sleep 5 && truffle migrate --network testnet
|
||||
CMD ganache-cli --db=/data -b 5 -i 20200406 -p 5000 --gasLimit 12000000 --deterministic --host 0.0.0.0 --gasPrice 2000000000
|
||||
22
dockerfiles/Contract.integration.dockerfile
Normal file
22
dockerfiles/Contract.integration.dockerfile
Normal file
@@ -0,0 +1,22 @@
|
||||
FROM node:12-alpine
|
||||
RUN apk add --no-cache --virtual .gyp \
|
||||
python \
|
||||
make \
|
||||
g++ \
|
||||
&& npm install -g truffle ganache-cli --unsafe-perm=true --allow-root \
|
||||
&& apk del .gyp
|
||||
RUN apk add git
|
||||
WORKDIR /proj
|
||||
COPY ./package.json /proj/package.json
|
||||
# Stub a package json for @zkopru/utils so yarn install works
|
||||
RUN mkdir /utils && echo '{"version": "0.0.0"}' > /utils/package.json
|
||||
RUN yarn install
|
||||
COPY ./contracts /proj/contracts
|
||||
COPY ./utils /proj/utils
|
||||
COPY ./migrations /proj/migrations
|
||||
COPY ./truffle-config.js /proj/truffle-config.js
|
||||
RUN truffle compile
|
||||
EXPOSE 5000
|
||||
COPY ./keys /proj/keys
|
||||
RUN ganache-cli --db=/data -i 20200406 --chainId 1337 -p 5000 --gasLimit 12000000 --deterministic --host 0.0.0.0 & sleep 5 && truffle migrate --network integrationtest
|
||||
CMD ganache-cli --db=/data -i 20200406 --chainId 1337 -p 5000 --gasLimit 12000000 --deterministic --host 0.0.0.0 --gasPrice 2000000000
|
||||
@@ -18,7 +18,7 @@ COPY ./packages/contracts/package.json /proj/packages/contracts/package.json
|
||||
COPY ./packages/coordinator/package.json /proj/packages/coordinator/package.json
|
||||
COPY ./packages/cli/package.json /proj/packages/cli/package.json
|
||||
COPY ./packages/core/package.json /proj/packages/core/package.json
|
||||
COPY ./packages/prisma/package.json /proj/packages/prisma/package.json
|
||||
COPY ./packages/database/package.json /proj/packages/database/package.json
|
||||
COPY ./packages/transaction/package.json /proj/packages/transaction/package.json
|
||||
COPY ./packages/tree/package.json /proj/packages/tree/package.json
|
||||
COPY ./packages/utils/package.json /proj/packages/utils/package.json
|
||||
@@ -27,14 +27,13 @@ COPY ./packages/zk-wizard/package.json /proj/packages/zk-wizard/package.json
|
||||
RUN yarn install
|
||||
|
||||
# Copy dist
|
||||
COPY ./packages/account/dist /proj/packages/account/dist
|
||||
COPY ./packages/account/dist /proj/packages/account/dist
|
||||
COPY ./packages/babyjubjub/dist /proj/packages/babyjubjub/dist
|
||||
COPY ./packages/contracts/dist /proj/packages/contracts/dist
|
||||
COPY ./packages/coordinator/dist /proj/packages/coordinator/dist
|
||||
COPY ./packages/core/dist /proj/packages/core/dist
|
||||
COPY ./packages/cli/dist /proj/packages/cli/dist
|
||||
COPY ./packages/prisma/dist /proj/packages/prisma/dist
|
||||
COPY ./packages/prisma/generated /proj/packages/prisma/generated
|
||||
COPY ./packages/database/dist /proj/packages/database/dist
|
||||
COPY ./packages/transaction/dist /proj/packages/transaction/dist
|
||||
COPY ./packages/tree/dist /proj/packages/tree/dist
|
||||
COPY ./packages/utils/dist /proj/packages/utils/dist
|
||||
@@ -5,7 +5,7 @@ WORKDIR /proj
|
||||
|
||||
RUN npm install -g node-gyp-build
|
||||
RUN ln -s "$(which nodejs)" /usr/bin/node
|
||||
RUN npm install -g truffle ganache-cli
|
||||
RUN npm install -g truffle ganache-cli --unsafe-perm=true --allow-root
|
||||
|
||||
# Install yarn
|
||||
RUN git clone --depth=1 https://github.com/zkopru-network/zkopru
|
||||
@@ -16,15 +16,15 @@ RUN yarn build
|
||||
|
||||
WORKDIR /proj/zkopru/packages/cli
|
||||
|
||||
RUN ganache-cli --db=/proj/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0 & \
|
||||
RUN ganache-cli --db=/proj/data -i 20200406 -p 5000 --gasLimit 12000000 --deterministic --host 0.0.0.0 & \
|
||||
sleep 5 && cd /proj/zkopru/packages/contracts && truffle migrate --network testnet
|
||||
|
||||
COPY keys /proj/zkopru/packages/cli/keys
|
||||
COPY ./packages/circuits/keys /proj/zkopru/packages/cli/keys
|
||||
|
||||
CMD ganache-cli --db=/proj/data -i 20200406 -p 5000 --deterministic --host 0.0.0.0 > /dev/null & \
|
||||
CMD ganache-cli --db=/proj/data -i 20200406 -p 5000 --gasLimit 12000000 --deterministic --host 0.0.0.0 > /dev/null & \
|
||||
sleep 1;\
|
||||
node /proj/zkopru/packages/cli/dist/apps/coordinator/cli.js \
|
||||
--nonInteractive --config /proj/zkopru/packages/cli/coordinator.playground.json > /dev/null & \
|
||||
--daemon --config /proj/zkopru/packages/cli/coordinator.playground.json > /dev/null & \
|
||||
sleep 2;\
|
||||
node /proj/zkopru/packages/cli/dist/apps/wallet/cli.js \
|
||||
--config /proj/zkopru/packages/cli/wallet.playground.json
|
||||
--config /proj/zkopru/packages/cli/wallet.playground.json
|
||||
1
dockerfiles/Postgres.dockerfile
Normal file
1
dockerfiles/Postgres.dockerfile
Normal file
@@ -0,0 +1 @@
|
||||
FROM postgres
|
||||
3
dockerfiles/PostgresSetup.dockerfile
Normal file
3
dockerfiles/PostgresSetup.dockerfile
Normal file
@@ -0,0 +1,3 @@
|
||||
FROM node:14-alpine
|
||||
|
||||
WORKDIR /proj
|
||||
@@ -8,4 +8,5 @@ module.exports = {
|
||||
collectCoverage: true,
|
||||
coveragePathIgnorePatterns: ['(tests/.*.mock).(jsx?|tsx?)$'],
|
||||
verbose: true,
|
||||
testEnvironment: 'node',
|
||||
}
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
// eslint-disable-next-line import/no-extraneous-dependencies
|
||||
const { pathsToModuleNameMapper } = require('ts-jest/utils')
|
||||
const { compilerOptions } = require('./tsconfig')
|
||||
const base = require('./jest.config.base.js')
|
||||
|
||||
@@ -3,6 +3,6 @@
|
||||
"packages": [
|
||||
"packages/**"
|
||||
],
|
||||
"version": "1.0.0-beta.2",
|
||||
"version": "2.0.0-beta.0",
|
||||
"useWorkspaces": true
|
||||
}
|
||||
|
||||
84
package.json
84
package.json
@@ -7,90 +7,106 @@
|
||||
"packages/*"
|
||||
],
|
||||
"scripts": {
|
||||
"initialize": "yarn && yarn prep-containers && yarn build && lerna run gendata",
|
||||
"build:prisma": "prisma generate",
|
||||
"bootstrap": "yarn && yarn images pull && yarn build && lerna run gendata",
|
||||
"postinstall": "lerna bootstrap && yarn clean && shx rm -rf packages/*/node_modules",
|
||||
"prep-containers": "make pull-dev-images",
|
||||
"bootstrap": "lerna bootstrap && lerna link --force-local",
|
||||
"build": "lerna run build:contract && lerna run build:prisma && yarn build:ts",
|
||||
"images": "docker-compose -f compose/docker-compose.yml",
|
||||
"images:pull": "docker-compose -f compose/docker-compose.yml pull",
|
||||
"images:build": "docker-compose -f compose/docker-compose.yml build",
|
||||
"images:push": "docker-compose -f compose/docker-compose.yml push",
|
||||
"predevelop": "yarn build:ts && docker-compose -f docker-compose.dev.yml build",
|
||||
"develop": "(sleep 10; open-cli http://localhost:4321) & (sleep 10; open-cli http://localhost:1234) & docker-compose -f docker-compose.dev.yml up --force-recreate -V",
|
||||
"develop:instant": "yarn build:ts && docker-compose -f docker-compose.instant-block.yml up --build --force-recreate -V",
|
||||
"build": "lerna run build:contract && yarn build:ts",
|
||||
"build:ts": "lerna run --parallel build && yarn link-modules",
|
||||
"build:ts:serial": "lerna run build && yarn link-modules",
|
||||
"circuit:setup": "lerna run setup:build",
|
||||
"build:contract": "lerna run --parallel build:contract",
|
||||
"build:keys": "lerna run build-keys --scope=@zkopru/dataset",
|
||||
"build:keys": "lerna run build-keys --scope=@zkopru/circuits",
|
||||
"build:fresh": "yarn clean && shx rm -rf packages/*/node_modules && yarn build",
|
||||
"testnet": "yarn build:ts && docker-compose build && docker-compose up",
|
||||
"clean": "lerna run --parallel clean && shx rm -rf packages/*/node_modules && shx rm -rf .build-cache *.log coverage junit.xml",
|
||||
"test": "lerna run test --parallel",
|
||||
"clean:db": "shx rm -rf **/*/.mockup",
|
||||
"test": "yarn test:parallel",
|
||||
"test:parallel": "yarn test:serial --parallel",
|
||||
"test:serial": "lerna run test --stream",
|
||||
"test:trace": "LOG_LEVEL=trace PRINT_LOG=true yarn test:serial",
|
||||
"posttest:all": "yarn clean:db",
|
||||
"pretest:integrate": "docker-compose up -d --build",
|
||||
"test:integrate": "lerna run test:integrate --parallel || true",
|
||||
"posttest:integrate": "docker-compose down",
|
||||
"test:ci": "jest --coverage --ci --reporters='jest-junit'",
|
||||
"test:unit": "jest packages/*/test/unit",
|
||||
"test:integration": "jest packages/*/test/integration",
|
||||
"coverage": "jest --coverage",
|
||||
"coverage:unit": "yarn test:unit --coverage",
|
||||
"coverage:integration": "yarn test:integration --coverage",
|
||||
"coverage:show": "live-server coverage",
|
||||
"link-modules": "lerna run --parallel link-modules",
|
||||
"studio": "cd packages/prisma && prisma studio --experimental",
|
||||
"lint": "eslint --ext js --ext ts --ext md",
|
||||
"lint:ci": "yarn lint . --format junit",
|
||||
"lint:md": "markdownlint --ignore node_modules --ignore .git",
|
||||
"format": "yarn lint --fix",
|
||||
"format:md": "yarn lint:md --fix",
|
||||
"husky-skip": "cross-env HUSKY_SKIP_HOOKS=1",
|
||||
"precommit": "lerna run --concurrency 1 --stream precommit --since HEAD",
|
||||
"precommit": "lint-staged && lerna run --concurrency 1 --stream precommit --since HEAD",
|
||||
"commit": "git cz"
|
||||
},
|
||||
"husky": {
|
||||
"hooks": {
|
||||
"pre-commit": "yarn precommit && yarn test",
|
||||
"pre-commit": "yarn precommit && yarn test:serial",
|
||||
"commit-msg": "commitlint -E HUSKY_GIT_PARAMS"
|
||||
}
|
||||
},
|
||||
"lint-staged": {
|
||||
"*.{js,jsx,ts,tsx}": [
|
||||
"eslint --fix",
|
||||
"git add"
|
||||
],
|
||||
"*.md": [
|
||||
"yarn format:md",
|
||||
"git add"
|
||||
"linters": {
|
||||
"*.{js,jsx,ts,tsx}": [
|
||||
"eslint --fix",
|
||||
"git add --force"
|
||||
],
|
||||
"*.md": [
|
||||
"yarn format:md",
|
||||
"git add --force"
|
||||
]
|
||||
},
|
||||
"ignore": [
|
||||
"packages/contracts/src"
|
||||
]
|
||||
},
|
||||
"devDependencies": {
|
||||
"@commitlint/cli": "^8.1.0",
|
||||
"@commitlint/config-conventional": "^8.1.0",
|
||||
"@prisma/cli": "2.1.3",
|
||||
"@truffle/artifactor": "^4.0.30",
|
||||
"@typechain/web3-v1": "^2.2.0",
|
||||
"@types/async-lock": "^1.1.1",
|
||||
"@types/blessed": "^0.1.17",
|
||||
"@types/cli-progress": "^3.7.0",
|
||||
"@types/bs58": "^4.0.1",
|
||||
"@types/express": "^4.17.4",
|
||||
"@types/figlet": "^1.2.0",
|
||||
"@types/follow-redirects": "^1.8.0",
|
||||
"@types/fs-extra": "^8.1.0",
|
||||
"@types/hdkey": "^0.7.1",
|
||||
"@types/jest": "^24.0.23",
|
||||
"@types/js-yaml": "^3.12.5",
|
||||
"@types/keccak": "^3.0.1",
|
||||
"@types/node-fetch": "^2.5.7",
|
||||
"@types/node-schedule": "^1.3.0",
|
||||
"@types/pino": "^6.0.0",
|
||||
"@types/pino-multi-stream": "^5.0.0",
|
||||
"@types/prompts": "^2.0.8",
|
||||
"@types/puppeteer": "^3.0.1",
|
||||
"@types/shelljs": "^0.8.8",
|
||||
"@types/tar": "^4.0.3",
|
||||
"@types/tar-fs": "^2.0.0",
|
||||
"@types/uuid": "^8.0.0",
|
||||
"@typescript-eslint/eslint-plugin": "^2.2.0",
|
||||
"@typescript-eslint/parser": "^2.2.0",
|
||||
"@typescript-eslint/eslint-plugin": "^2.34.0",
|
||||
"@typescript-eslint/parser": "^2.34.0",
|
||||
"bignumber.js": "^9.0.0",
|
||||
"bn.js": "^5.1.1",
|
||||
"circom": "^0.5.11",
|
||||
"circomlib": "^0.1.1",
|
||||
"bn.js": "^5.2.0",
|
||||
"circom": "0.5.42",
|
||||
"circomlib": "0.5.1",
|
||||
"commitizen": "^4.0.3",
|
||||
"copyfiles": "^2.3.0",
|
||||
"cross-env": "^6.0.3",
|
||||
"cz-conventional-changelog": "^3.0.2",
|
||||
"docker-compose": "^0.23.4",
|
||||
"eslint": "^6.1.0",
|
||||
"dotenv": "^10.0.0",
|
||||
"eslint": "^6.4.0",
|
||||
"eslint-config-airbnb-base": "^14.0.0",
|
||||
"eslint-config-prettier": "^6.0.0",
|
||||
"eslint-import-resolver-lerna": "^1.1.0",
|
||||
@@ -101,7 +117,7 @@
|
||||
"eslint-plugin-prettier": "^3.1.0",
|
||||
"eslint-plugin-truffle": "^0.3.1",
|
||||
"eth-gas-reporter": "^0.2.16",
|
||||
"ffjavascript": "^0.1.3",
|
||||
"ffjavascript": "0.2.22",
|
||||
"fs-extra": "^9.0.0",
|
||||
"husky": "^3.1.0",
|
||||
"jest": "^24.7.1",
|
||||
@@ -113,24 +129,24 @@
|
||||
"markdownlint-cli": "^0.22.0",
|
||||
"merkle-tree-rollup": "^1.1.4",
|
||||
"node-docker-api": "^1.1.22",
|
||||
"open-cli": "^6.0.1",
|
||||
"prettier": "^1.16.4",
|
||||
"puppeteer": "^5.0.0",
|
||||
"serve": "^11.2.0",
|
||||
"shx": "^0.3.2",
|
||||
"smt-rollup": "^0.6.3",
|
||||
"snarkjs": "0.1.31",
|
||||
"snarkjs": "0.3.33",
|
||||
"solc": "^0.6.4",
|
||||
"solc5": "npm:solc@0.5.15",
|
||||
"solium": "^1.2.5",
|
||||
"tar": "^6.0.2",
|
||||
"tar-fs": "^2.1.0",
|
||||
"truffle": "^5.1.19",
|
||||
"truffle": "5.2.3",
|
||||
"truffle-artifactor": "^4.0.30",
|
||||
"ts-jest": "^24.2.0",
|
||||
"ts-node-dev": "^1.0.0-pre.44",
|
||||
"tsconfig-paths": "^3.9.0",
|
||||
"typechain": "^1.0.5",
|
||||
"typechain-target-web3-v1": "^1.0.4",
|
||||
"typechain": "^4.0.3",
|
||||
"typescript": "3.8.3",
|
||||
"uuid": "^8.1.0"
|
||||
},
|
||||
|
||||
@@ -1,6 +1,6 @@
{
"name": "@zkopru/account",
"version": "1.0.0-beta.2",
"version": "2.0.0-beta.0",
"license": "GPL-3.0-or-later",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -10,13 +10,14 @@
"_moduleAliases": {
"~account": "dist",
"~utils": "../utils/dist",
"~prisma": "../prisma/dist"
"~database": "../database/dist"
},
"scripts": {
"prebuild": "shx mkdir -p dist",
"build": "tsc --build tsconfig.build.json",
"clean": "tsc --build tsconfig.build.json --clean && shx rm -rf coverage *.log junit.xml dist && jest --clearCache",
"test": "jest",
"test:trace": "LOG_LEVEL=trace PRINT_LOG=true jest",
"link-modules": "link-module-alias",
"test:unit": "jest test/unit",
"test:integration": "jest test/unit",
@@ -29,14 +30,18 @@
},
"dependencies": {
"@zkopru/babyjubjub": "file:../babyjubjub",
"@zkopru/prisma": "file:../prisma",
"@zkopru/database": "file:../database",
"@zkopru/transaction": "file:../transaction",
"@zkopru/utils": "file:../utils",
"bip39": "^3.0.2",
"circomlib": "0.5.1",
"hdkey": "^1.1.1",
"web3": "^1.2.6",
"web3-core": "^1.2.6",
"web3-eth-accounts": "^1.2.6"
"keccak": "^3.0.1",
"soltypes": "^1.3.5",
"web3": "1.2.11",
"web3-core": "1.2.11",
"web3-eth-accounts": "1.2.11",
"web3-utils": "1.2.11"
},
"devDependencies": {
"uuid": "^8.1.0"

||||
@@ -1,85 +1,75 @@
|
||||
import Web3 from 'web3'
|
||||
// import { Accounts } from 'web3-eth-accounts'
|
||||
import { Account, EncryptedKeystoreV3Json, AddAccount } from 'web3-core'
|
||||
import { Field, Point, EdDSA, signEdDSA, verifyEdDSA } from '@zkopru/babyjubjub'
|
||||
import { Keystore } from '@zkopru/prisma'
|
||||
import { ZkTx, Utxo } from '@zkopru/transaction'
|
||||
import { hexify } from '@zkopru/utils'
|
||||
import {
|
||||
Fr,
|
||||
Fp,
|
||||
Point,
|
||||
EdDSA,
|
||||
signEdDSA,
|
||||
verifyEdDSA,
|
||||
} from '@zkopru/babyjubjub'
|
||||
import { Keystore } from '@zkopru/database'
|
||||
import createKeccak from 'keccak'
|
||||
import assert from 'assert'
|
||||
import { ZkViewer } from './viewer'
|
||||
|
||||
export class ZkAccount {
|
||||
private snarkPK: Field
|
||||
export class ZkAccount extends ZkViewer {
|
||||
private privateKey: string // ECDSA private key
|
||||
|
||||
private ethPK: string
|
||||
|
||||
address: string
|
||||
|
||||
pubKey: Point
|
||||
ethAddress: string
|
||||
|
||||
ethAccount: Account
|
||||
|
||||
constructor(pk: Buffer | string | Account) {
|
||||
if (pk instanceof Buffer || typeof pk === 'string') {
|
||||
if (pk instanceof Buffer) {
|
||||
this.ethPK = hexify(pk, 32)
|
||||
this.snarkPK = Field.fromBuffer(pk)
|
||||
} else {
|
||||
this.ethPK = hexify(pk, 32)
|
||||
this.snarkPK = Field.from(pk)
|
||||
}
|
||||
const web3 = new Web3()
|
||||
this.ethAccount = web3.eth.accounts.privateKeyToAccount(this.ethPK)
|
||||
} else {
|
||||
this.ethPK = hexify(pk.privateKey, 32)
|
||||
this.snarkPK = Field.from(pk.privateKey)
|
||||
this.ethAccount = pk
|
||||
}
|
||||
this.address = this.ethAccount.address.toLowerCase()
|
||||
this.pubKey = Point.fromPrivKey(this.snarkPK.toHex(32))
|
||||
constructor(_privateKey: Buffer | string) {
|
||||
const web3 = new Web3()
|
||||
const privateKey =
|
||||
typeof _privateKey === 'string'
|
||||
? _privateKey
|
||||
: _privateKey.toString('hex')
|
||||
|
||||
const ethAccount = web3.eth.accounts.privateKeyToAccount(privateKey)
|
||||
|
||||
const A = Point.fromPrivKey(privateKey)
|
||||
// https://github.com/zkopru-network/zkopru/issues/34#issuecomment-666988505
|
||||
// Note: viewing key can be derived using another method. This is just for the convenience
|
||||
// to make it easy to restore spending key & viewing key together from a mnemonic source in
|
||||
// a deterministic way
|
||||
const v = Fr.from(
|
||||
createKeccak('keccak256')
|
||||
.update(privateKey)
|
||||
.digest(),
|
||||
)
|
||||
super(A, v)
|
||||
this.privateKey = privateKey
|
||||
this.ethAddress = ethAccount.address
|
||||
this.ethAccount = ethAccount
|
||||
}
|
||||
|
||||
static fromEthAccount(account: Account): ZkAccount {
|
||||
return new ZkAccount(account.privateKey)
|
||||
}
|
||||
|
||||
toKeystoreSqlObj(password: string): Keystore {
|
||||
return {
|
||||
pubKey: hexify(this.pubKey.encode()),
|
||||
address: this.address,
|
||||
zkAddress: this.zkAddress.toString(),
|
||||
address: this.ethAddress,
|
||||
encrypted: JSON.stringify(this.ethAccount.encrypt(password)),
|
||||
}
|
||||
}
|
||||
|
||||
signEdDSA(msg: Field): EdDSA {
|
||||
const signature = signEdDSA({ msg, privKey: this.snarkPK.toHex(32) })
|
||||
assert(verifyEdDSA(msg, signature, this.pubKey))
|
||||
signEdDSA(msg: Fp): EdDSA {
|
||||
const signature = signEdDSA({ msg, privKey: this.privateKey })
|
||||
assert(verifyEdDSA(msg, signature, this.getEdDSAPubKey()))
|
||||
return signature
|
||||
}
|
||||
|
||||
toAddAccount(): AddAccount {
|
||||
return {
|
||||
address: this.address,
|
||||
privateKey: this.ethPK,
|
||||
address: this.ethAddress,
|
||||
privateKey: this.privateKey,
|
||||
}
|
||||
}
|
||||
|
||||
decrypt(zkTx: ZkTx): Utxo | undefined {
|
||||
const { memo } = zkTx
|
||||
if (!memo) {
|
||||
return
|
||||
}
|
||||
let note: Utxo | undefined
|
||||
for (const outflow of zkTx.outflow) {
|
||||
try {
|
||||
note = Utxo.decrypt({
|
||||
utxoHash: outflow.note,
|
||||
memo,
|
||||
privKey: this.snarkPK.toHex(32),
|
||||
})
|
||||
} catch (err) {
|
||||
console.error(err)
|
||||
}
|
||||
if (note) break
|
||||
}
|
||||
return note ? Utxo.from(note) : undefined
|
||||
}
|
||||
|
||||
static fromEncryptedKeystoreV3Json(
|
||||
obj: EncryptedKeystoreV3Json,
|
||||
password: string,
|
||||
|
||||
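For reference, a minimal sketch of the viewing-key derivation used in the ZkAccount constructor above, assuming only the keccak package and the Fr class shown in this diff; the private key value is hypothetical:

import createKeccak from 'keccak'
import { Fr } from '@zkopru/babyjubjub'

// Hypothetical 32-byte spending key as a hex string.
const privateKey = '0x' + '11'.repeat(32)

// The viewing key is keccak256(privateKey) reduced into the babyjubjub
// subgroup order Fr, so the spending key and viewing key can both be
// restored from a single mnemonic-derived secret.
const viewingKey = Fr.from(
  createKeccak('keccak256')
    .update(privateKey)
    .digest(),
)
console.log(viewingKey.toHex(32))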
@@ -8,9 +8,8 @@ import {
|
||||
import crypto from 'crypto'
|
||||
import HDNode from 'hdkey'
|
||||
import Web3 from 'web3'
|
||||
import { Field } from '@zkopru/babyjubjub'
|
||||
import { hexify } from '@zkopru/utils'
|
||||
import { DB, EncryptedWallet, Keystore } from '@zkopru/prisma'
|
||||
import { Fp } from '@zkopru/babyjubjub'
|
||||
import { DB, EncryptedWallet, Keystore } from '@zkopru/database'
|
||||
import { ZkAccount } from './account'
|
||||
|
||||
export const PATH = (index: number) => `m/44'/60'/0'/0/${index}`
|
||||
@@ -55,10 +54,7 @@ export class HDWallet {
|
||||
}
|
||||
|
||||
async list(): Promise<EncryptedWallet[]> {
|
||||
const wallets = await this.db.read(prisma =>
|
||||
prisma.encryptedWallet.findMany(),
|
||||
)
|
||||
return wallets
|
||||
return this.db.findMany('EncryptedWallet', { where: {} })
|
||||
}
|
||||
|
||||
async load(wallet: EncryptedWallet, password: string) {
|
||||
@@ -97,16 +93,14 @@ export class HDWallet {
|
||||
|
||||
async retrieveAccounts(): Promise<ZkAccount[]> {
|
||||
if (!this.seed) throw Error('Not initialized')
|
||||
const keys: Keystore[] = await this.db.read(prisma =>
|
||||
prisma.keystore.findMany(),
|
||||
)
|
||||
const keys: Keystore[] = await this.db.findMany('Keystore', { where: {} })
|
||||
const accounts: ZkAccount[] = []
|
||||
for (let i = 0; i < keys.length; i += 1) {
|
||||
const ethAccount = this.web3.eth.accounts.decrypt(
|
||||
JSON.parse(keys[i].encrypted),
|
||||
this.password,
|
||||
)
|
||||
accounts.push(new ZkAccount(ethAccount))
|
||||
accounts.push(ZkAccount.fromEthAccount(ethAccount))
|
||||
}
|
||||
return accounts
|
||||
}
|
||||
@@ -115,20 +109,17 @@ export class HDWallet {
|
||||
if (!this.seed || !this.password) throw Error('Not initialized')
|
||||
const masterNode = HDNode.fromMasterSeed(this.seed)
|
||||
const derivedKey = masterNode.derive(PATH(deriveIndex))
|
||||
const { privateKey } = derivedKey
|
||||
try {
|
||||
Field.fromBuffer(derivedKey.privateKey)
|
||||
Fp.fromBuffer(privateKey)
|
||||
} catch (err) {
|
||||
throw Error('Jubjub does not support the derived key. Use another index')
|
||||
}
|
||||
const ethAccount = this.web3.eth.accounts.privateKeyToAccount(
|
||||
hexify(derivedKey.privateKey, 32),
|
||||
)
|
||||
const account = new ZkAccount(ethAccount)
|
||||
await this.db.write(prisma =>
|
||||
prisma.keystore.create({
|
||||
data: account.toKeystoreSqlObj(this.password),
|
||||
}),
|
||||
typeof privateKey === 'string' ? privateKey : privateKey.toString('hex'),
|
||||
)
|
||||
const account = ZkAccount.fromEthAccount(ethAccount)
|
||||
await this.db.create('Keystore', account.toKeystoreSqlObj(this.password))
|
||||
return account
|
||||
}
|
||||
|
||||
@@ -165,19 +156,16 @@ export class HDWallet {
|
||||
|
||||
async save(password: string): Promise<{ id: string }> {
|
||||
const hdwallet = this.export(password)
|
||||
let result: EncryptedWallet
|
||||
if (!this.id) {
|
||||
result = await this.db.write(prisma =>
|
||||
prisma.encryptedWallet.create({ data: hdwallet }),
|
||||
)
|
||||
await this.db.create('EncryptedWallet', hdwallet)
|
||||
} else {
|
||||
result = await this.db.write(prisma =>
|
||||
prisma.encryptedWallet.update({
|
||||
where: { id: this.id },
|
||||
data: hdwallet,
|
||||
}),
|
||||
)
|
||||
await this.db.update('EncryptedWallet', {
|
||||
where: {
|
||||
id: this.id,
|
||||
},
|
||||
update: hdwallet,
|
||||
})
|
||||
}
|
||||
return { id: result.id }
|
||||
return { id: hdwallet.id }
|
||||
}
|
||||
}
|
||||
|
||||
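As a quick reference, the derivation-path helper exported above follows the standard Ethereum BIP-44 layout; a minimal usage sketch, using nothing beyond the PATH function shown in the diff:

import { PATH } from '@zkopru/account'

// m/44'/60'/0'/0/<index>: purpose / Ethereum coin type / account / change / index
console.log(PATH(0)) // "m/44'/60'/0'/0/0"
console.log(PATH(7)) // "m/44'/60'/0'/0/7"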
@@ -1,3 +1,3 @@
export { ZkAccount } from './account'
export { ZkViewer } from './viewer'
export { HDWallet, PATH } from './hdwallet'

106
packages/account/src/viewer.ts
Normal file
@@ -0,0 +1,106 @@
|
||||
import { poseidon } from 'circomlib'
|
||||
import { Fp, Fr, Point } from '@zkopru/babyjubjub'
|
||||
import {
|
||||
ZkAddress,
|
||||
ZkTx,
|
||||
Utxo,
|
||||
TokenRegistry,
|
||||
ZkOutflow,
|
||||
} from '@zkopru/transaction'
|
||||
import { soliditySha3Raw } from 'web3-utils'
|
||||
|
||||
export class ZkViewer {
|
||||
private A: Point // EdDSA Public Key
|
||||
|
||||
private v: Fr // viewing key, nullifier seed
|
||||
|
||||
zkAddress: ZkAddress // https://github.com/zkopru-network/zkopru/issues/43
|
||||
|
||||
constructor(A: Point, v: Fr) {
|
||||
this.A = A
|
||||
this.v = v
|
||||
// Public viewing key, public nullifier seed
|
||||
const V = Point.BASE8.mul(v)
|
||||
// Public spending key
|
||||
const PubSK = Fp.from(
|
||||
poseidon([
|
||||
this.A.x.toBigInt(),
|
||||
this.A.y.toBigInt(),
|
||||
this.v.toBigInt(),
|
||||
]).toString(),
|
||||
)
|
||||
this.zkAddress = ZkAddress.from(PubSK, V)
|
||||
}
|
||||
|
||||
getEdDSAPubKey(): Point {
|
||||
return this.A
|
||||
}
|
||||
|
||||
decrypt(zkTx: ZkTx, tokenRegistry?: TokenRegistry): Utxo[] {
|
||||
const tryDecodeNote = (
|
||||
bytes: Buffer,
|
||||
outflows: ZkOutflow[],
|
||||
): Utxo | void => {
|
||||
if (bytes.length !== 81) {
|
||||
throw new Error('Expected a single encrypted note with 81 bytes')
|
||||
}
|
||||
for (const outflow of outflows) {
|
||||
try {
|
||||
const note = Utxo.decrypt({
|
||||
utxoHash: outflow.note,
|
||||
memo: bytes,
|
||||
spendingPubKey: this.zkAddress.spendingPubKey(),
|
||||
viewingKey: this.v,
|
||||
tokenRegistry,
|
||||
})
|
||||
if (note) return note
|
||||
} catch (err) {
|
||||
// console.error(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
const { encryptedNotes } = zkTx.parseMemo()
|
||||
const notes = [] as Utxo[]
|
||||
for (const encrypted of encryptedNotes) {
|
||||
const note = tryDecodeNote(encrypted, zkTx.outflow)
|
||||
if (note) notes.push(note)
|
||||
}
|
||||
return notes
|
||||
}
|
||||
|
||||
getNullifierSeed(): Fp {
|
||||
return this.v
|
||||
}
|
||||
|
||||
encodeViewingKey(): string {
|
||||
const concatenated = Buffer.concat([
|
||||
this.A.x.toBytes32().toBuffer(),
|
||||
this.A.y.toBytes32().toBuffer(),
|
||||
this.v.toBytes32().toBuffer(),
|
||||
Buffer.from(soliditySha3Raw(this.zkAddress.toString()).slice(-8), 'hex'),
|
||||
])
|
||||
return concatenated.toString('hex')
|
||||
}
|
||||
|
||||
static from(encoded: string): ZkViewer {
|
||||
const buff = Buffer.from(encoded, 'hex')
|
||||
const Ax = Fp.from(buff.slice(0, 32))
|
||||
const Ay = Fp.from(buff.slice(32, 64))
|
||||
const v = Fr.from(buff.slice(64, 92))
|
||||
const addressHash = buff.slice(92, 96)
|
||||
const A = Point.from(Ax, Ay)
|
||||
const viewer = new ZkViewer(A, v)
|
||||
const success = addressHash.equals(
|
||||
Buffer.from(
|
||||
soliditySha3Raw(viewer.zkAddress.toString()).slice(-8),
|
||||
'hex',
|
||||
),
|
||||
)
|
||||
if (!success) {
|
||||
throw Error(
|
||||
'Invalid encoding. The last 4 bytes should be the hash of the retrieved address',
|
||||
)
|
||||
}
|
||||
return viewer
|
||||
}
|
||||
}
|
||||
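A hedged round-trip sketch of the viewing-key encoding above: 32 bytes each for A.x, A.y and v, followed by a 4-byte checksum of the zk address, all hex-encoded. Here `viewer` is an assumed, pre-existing ZkViewer instance:

import { ZkViewer } from '@zkopru/account'

declare const viewer: ZkViewer // assumed to exist already

const encoded = viewer.encodeViewingKey() // Ax(32) || Ay(32) || v(32) || checksum(4), hex
const restored = ZkViewer.from(encoded)   // throws if the trailing checksum does not match
console.log(restored.zkAddress.toString() === viewer.zkAddress.toString()) // true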
@@ -1,18 +1,24 @@
|
||||
/* eslint-disable jest/no-hooks */
|
||||
import { Field, verifyEdDSA } from '@zkopru/babyjubjub'
|
||||
import { hexify } from '~utils'
|
||||
import { Fp, verifyEdDSA } from '@zkopru/babyjubjub'
|
||||
import { ZkAccount } from '~account'
|
||||
import { trimHexToLength } from '~utils'
|
||||
|
||||
describe('class ZkAccount', () => {
|
||||
it('should make eddsa signature and verify that', async () => {
|
||||
const alicePrivKey = hexify("I am Alice's private key")
|
||||
const bobPrivKey = hexify("I am Bob's private key")
|
||||
const alicePrivKey = trimHexToLength(
|
||||
Buffer.from("I am Alice's private key"),
|
||||
64,
|
||||
)
|
||||
const bobPrivKey = trimHexToLength(
|
||||
Buffer.from("I am Bob's private key"),
|
||||
64,
|
||||
)
|
||||
const alice = new ZkAccount(alicePrivKey)
|
||||
const bob = new ZkAccount(bobPrivKey)
|
||||
const msg = Field.from('0xabcd12344321d')
|
||||
const msg = Fp.from('0xabcd12344321d')
|
||||
const aliceSig = alice.signEdDSA(msg)
|
||||
const bobSig = bob.signEdDSA(msg)
|
||||
expect(verifyEdDSA(msg, aliceSig, alice.pubKey)).toBe(true)
|
||||
expect(verifyEdDSA(msg, bobSig, bob.pubKey)).toBe(true)
|
||||
expect(verifyEdDSA(msg, aliceSig, alice.getEdDSAPubKey())).toBe(true)
|
||||
expect(verifyEdDSA(msg, bobSig, bob.getEdDSAPubKey())).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,19 +1,19 @@
|
||||
/* eslint-disable jest/no-hooks */
|
||||
import Web3 from 'web3'
|
||||
import { HDWallet, ZkAccount } from '~account'
|
||||
import { DB, MockupDB } from '~prisma'
|
||||
import { DB, SQLiteConnector, schema } from '~database/node'
|
||||
|
||||
describe('unit test', () => {
|
||||
let mockup: MockupDB
|
||||
let mockup: DB
|
||||
beforeAll(async () => {
|
||||
mockup = await DB.mockup()
|
||||
mockup = await SQLiteConnector.create(schema, ':memory:')
|
||||
})
|
||||
afterAll(async () => {
|
||||
await mockup.terminate()
|
||||
await mockup.close()
|
||||
})
|
||||
it('has same private keys and eth address with ganache default accounts', async () => {
|
||||
const web3 = new Web3()
|
||||
const hdWallet = new HDWallet(web3, mockup.db)
|
||||
const hdWallet = new HDWallet(web3, mockup)
|
||||
await hdWallet.init(
|
||||
'myth like bonus scare over problem client lizard pioneer submit female collect',
|
||||
'samplepassword',
|
||||
@@ -39,8 +39,8 @@ describe('unit test', () => {
|
||||
'0xACa94ef8bD5ffEE41947b4585a84BdA5a3d3DA6E',
|
||||
'0x1dF62f291b2E969fB0849d99D9Ce41e2F137006e',
|
||||
]
|
||||
expect(ganacheAddress.map(a => a.toLowerCase())).toStrictEqual(
|
||||
accounts.map(account => account.address),
|
||||
expect(ganacheAddress).toStrictEqual(
|
||||
accounts.map(account => account.ethAddress),
|
||||
)
|
||||
}, 30000)
|
||||
}, 90000)
|
||||
})
|
||||
|
||||
@@ -10,7 +10,7 @@
"include": ["src"],
"references": [
{ "path": "../babyjubjub/tsconfig.build.json", "prepend": false },
{ "path": "../prisma/tsconfig.build.json", "prepend": false },
{ "path": "../database/tsconfig.build.json", "prepend": false },
{ "path": "../transaction/tsconfig.build.json", "prepend": false },
{ "path": "../utils/tsconfig.build.json", "prepend": false }
]

@@ -1,6 +1,6 @@
{
"name": "@zkopru/babyjubjub",
"version": "1.0.0-beta.2",
"version": "2.0.0-beta.0",
"license": "GPL-3.0-or-later",
"main": "dist/index.js",
"types": "dist/index.d.ts",
@@ -16,6 +16,7 @@
"clean": "tsc --build tsconfig.build.json --clean && shx rm -rf coverage *.log junit.xml dist && jest --clearCache",
"link-modules": "link-module-alias",
"test": "jest",
"test:trace": "LOG_LEVEL=trace PRINT_LOG=true jest",
"test:unit": "jest test/unit",
"test:integration": "jest test/unit",
"test:watch": "jest --watch",
@@ -29,11 +30,11 @@
"@zkopru/utils": "file:../utils",
"big-integer": "^1.6.48",
"blake-hash": "^1.1.0",
"bn.js": "^5.1.1",
"circomlib": "^0.1.1",
"ffjavascript": "^0.1.2",
"soltypes": "^1.2.2",
"web3-utils": "^1.2.6"
"bn.js": "^5.2.0",
"circomlib": "0.5.1",
"ffjavascript": "0.2.22",
"soltypes": "^1.3.5",
"web3-utils": "1.2.11"
},
"publishConfig": {
"access": "public"

38
packages/babyjubjub/src/eddsa.ts
Normal file
@@ -0,0 +1,38 @@
import { hexToBuffer } from '@zkopru/utils'
import * as circomlib from 'circomlib'
import { F } from './types/ff'
import { Fp } from './fp'
import { Point } from './point'

export interface EdDSA {
  R8: Point
  S: Fp
}

export function verifyEdDSA(msg: F, sig: EdDSA, pubKey: Point): boolean {
  const result = circomlib.eddsa.verifyPoseidon(
    Fp.from(msg).toBigInt(),
    {
      R8: [sig.R8.x.toBigInt(), sig.R8.y.toBigInt()],
      S: sig.S.toBigInt(),
    },
    [pubKey.x.toBigInt(), pubKey.y.toBigInt()],
  )
  return result
}

export function signEdDSA({
  msg,
  privKey,
}: {
  msg: F
  privKey: Buffer | string
}): EdDSA {
  const buff: Buffer =
    typeof privKey === 'string' ? hexToBuffer(privKey) : privKey
  const result = circomlib.eddsa.signPoseidon(buff, Fp.from(msg).toBigInt())
  return {
    R8: Point.from(result.R8[0].toString(), result.R8[1].toString()),
    S: Fp.from(result.S.toString()),
  }
}
||||
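A short usage sketch for the two helpers above, assuming a hypothetical 32-byte private key; signEdDSA signs the Poseidon-hashed message via circomlib and verifyEdDSA checks it against the corresponding public point:

import { Fp, Point, signEdDSA, verifyEdDSA } from '@zkopru/babyjubjub'

const privKey = '0x' + '22'.repeat(32) // hypothetical spending key
const msg = Fp.from('0xabcd')

const sig = signEdDSA({ msg, privKey })
const pubKey = Point.fromPrivKey(privKey)
console.log(verifyEdDSA(msg, sig, pubKey)) // true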
@@ -1,314 +0,0 @@
|
||||
import BN from 'bn.js'
|
||||
import bigInt, { BigInteger } from 'big-integer'
|
||||
import { Bytes32, Uint256, Address } from 'soltypes'
|
||||
|
||||
export type F = number | string | number[] | Uint8Array | Buffer | BN
|
||||
|
||||
const r = new BN(
|
||||
'21888242871839275222246405745257275088548364400416034343698204186575808495617',
|
||||
)
|
||||
export class Field extends BN {
|
||||
constructor(number: F, base?: number | 'hex', endian?: BN.Endianness) {
|
||||
if (number instanceof BN) {
|
||||
super(number.toString())
|
||||
} else if (typeof number === 'string' && number.startsWith('0x')) {
|
||||
super(number.substr(2), 16, endian)
|
||||
} else {
|
||||
super(number, base, endian)
|
||||
}
|
||||
if (super.gte(r)) {
|
||||
// console.warn('Exceeds babyjubjub field range')
|
||||
return Field.from(super.sub(r))
|
||||
}
|
||||
if (super.isNeg()) {
|
||||
return Field.from(super.add(r))
|
||||
}
|
||||
}
|
||||
|
||||
static zero = Field.from(0)
|
||||
|
||||
static one = Field.from(1)
|
||||
|
||||
static MAX = r
|
||||
|
||||
static from(x: F): Field {
|
||||
if (x === undefined) return new Field(0)
|
||||
if (x instanceof Field) {
|
||||
return x
|
||||
}
|
||||
return new Field(x)
|
||||
}
|
||||
|
||||
static strictFrom(x: F): Field {
|
||||
if (!Field.inRange(x)) throw Error('Not in range')
|
||||
return Field.from(x)
|
||||
}
|
||||
|
||||
static toBN(x: F): BN {
|
||||
if (typeof x === 'string' && x.startsWith('0x')) {
|
||||
return new BN(x.substr(2), 16)
|
||||
}
|
||||
return new BN(x)
|
||||
}
|
||||
|
||||
static fromBuffer(buff: Buffer): Field {
|
||||
return Field.from(`0x${buff.toString('hex')}`)
|
||||
}
|
||||
|
||||
static inRange(x: F): boolean {
|
||||
let n: BN
|
||||
if (x instanceof BN) {
|
||||
n = x
|
||||
} else if (typeof x === 'string' && x.startsWith('0x')) {
|
||||
n = new BN(x.substr(2), 16)
|
||||
} else {
|
||||
n = new BN(x)
|
||||
}
|
||||
return n.lt(r)
|
||||
}
|
||||
|
||||
addPrefixBit(bitLength: number): BN {
|
||||
const prefix = new BN(1).shln(bitLength)
|
||||
if (this.gt(prefix)) throw Error('prefix bit is less than current value')
|
||||
return prefix.or(this)
|
||||
}
|
||||
|
||||
toHex(byteLength?: number): string {
|
||||
if (byteLength) {
|
||||
return `0x${this.toBuffer('be', byteLength).toString('hex')}`
|
||||
}
|
||||
return `0x${this.toString('hex')}`
|
||||
}
|
||||
|
||||
toBytes32(): Bytes32 {
|
||||
return new Bytes32(`0x${this.toString(16, 64)}`)
|
||||
}
|
||||
|
||||
toUint256(): Uint256 {
|
||||
return this.toBytes32().toUint()
|
||||
}
|
||||
|
||||
toAddress(): Address {
|
||||
return new Address(`0x${this.toString(16, 40)}`)
|
||||
}
|
||||
|
||||
toIden3BigInt(): BigInteger {
|
||||
return bigInt(this.toString())
|
||||
}
|
||||
|
||||
toTwos(width: number): Field {
|
||||
return Field.from(super.toTwos(width))
|
||||
}
|
||||
|
||||
fromTwos(width: number): Field {
|
||||
return Field.from(super.fromTwos(width))
|
||||
}
|
||||
|
||||
neg(): Field {
|
||||
return Field.from(super.neg())
|
||||
}
|
||||
|
||||
ineg(): Field {
|
||||
return Field.from(super.ineg())
|
||||
}
|
||||
|
||||
abs(): Field {
|
||||
return Field.from(super.abs())
|
||||
}
|
||||
|
||||
iabs(): Field {
|
||||
return Field.from(super.iabs())
|
||||
}
|
||||
|
||||
add(f: F): Field {
|
||||
return Field.from(super.add(Field.from(f)))
|
||||
}
|
||||
|
||||
iadd(f: F): Field {
|
||||
return Field.from(super.iadd(Field.from(f)))
|
||||
}
|
||||
|
||||
addn(n: number): Field {
|
||||
return Field.from(super.addn(n))
|
||||
}
|
||||
|
||||
iaddn(n: number): Field {
|
||||
return Field.from(super.iaddn(n))
|
||||
}
|
||||
|
||||
sub(f: F): Field {
|
||||
return Field.from(super.sub(Field.from(f)))
|
||||
}
|
||||
|
||||
isub(f: F): Field {
|
||||
return Field.from(super.isub(Field.from(f)))
|
||||
}
|
||||
|
||||
subn(n: number): Field {
|
||||
return Field.from(super.subn(n))
|
||||
}
|
||||
|
||||
isubn(n: number): Field {
|
||||
return Field.from(super.isubn(n))
|
||||
}
|
||||
|
||||
mul(f: F): Field {
|
||||
return Field.from(super.mul(Field.from(f)))
|
||||
}
|
||||
|
||||
imul(f: F): Field {
|
||||
return Field.from(super.imul(Field.from(f)))
|
||||
}
|
||||
|
||||
muln(n: number): Field {
|
||||
return Field.from(super.muln(n))
|
||||
}
|
||||
|
||||
imuln(n: number): Field {
|
||||
return Field.from(super.imuln(n))
|
||||
}
|
||||
|
||||
sqr(): Field {
|
||||
return Field.from(super.sqr())
|
||||
}
|
||||
|
||||
isqr(): Field {
|
||||
return Field.from(super.isqr())
|
||||
}
|
||||
|
||||
pow(f: F): Field {
|
||||
return Field.from(super.pow(Field.from(f)))
|
||||
}
|
||||
|
||||
div(f: F): Field {
|
||||
return Field.from(super.div(Field.from(f)))
|
||||
}
|
||||
|
||||
divn(n: number): Field {
|
||||
return Field.from(super.divn(n))
|
||||
}
|
||||
|
||||
mod(f: F): Field {
|
||||
return Field.from(super.mod(Field.from(f)))
|
||||
}
|
||||
|
||||
umod(f: F): Field {
|
||||
return Field.from(super.umod(Field.from(f)))
|
||||
}
|
||||
|
||||
divRound(f: F): Field {
|
||||
return Field.from(super.divRound(Field.from(f)))
|
||||
}
|
||||
|
||||
or(f: F): Field {
|
||||
return Field.from(super.or(Field.from(f)))
|
||||
}
|
||||
|
||||
ior(f: F): Field {
|
||||
return Field.from(super.ior(Field.from(f)))
|
||||
}
|
||||
|
||||
uor(f: F): Field {
|
||||
return Field.from(super.uor(Field.from(f)))
|
||||
}
|
||||
|
||||
iuor(f: F): Field {
|
||||
return Field.from(super.iuor(Field.from(f)))
|
||||
}
|
||||
|
||||
and(f: F): Field {
|
||||
return Field.from(super.and(Field.from(f)))
|
||||
}
|
||||
|
||||
iand(f: F): Field {
|
||||
return Field.from(super.iand(Field.from(f)))
|
||||
}
|
||||
|
||||
uand(f: F): Field {
|
||||
return Field.from(super.uand(Field.from(f)))
|
||||
}
|
||||
|
||||
iuand(f: F): Field {
|
||||
return Field.from(super.iuand(Field.from(f)))
|
||||
}
|
||||
|
||||
andln(n: number): Field {
|
||||
return Field.from(super.andln(n))
|
||||
}
|
||||
|
||||
xor(f: F): Field {
|
||||
return Field.from(super.xor(Field.from(f)))
|
||||
}
|
||||
|
||||
ixor(f: F): Field {
|
||||
return Field.from(super.ixor(Field.from(f)))
|
||||
}
|
||||
|
||||
uxor(f: F): Field {
|
||||
return Field.from(super.uxor(Field.from(f)))
|
||||
}
|
||||
|
||||
iuxor(f: F): Field {
|
||||
return Field.from(super.iuxor(Field.from(f)))
|
||||
}
|
||||
|
||||
setn(n: number): Field {
|
||||
return Field.from(super.setn(n))
|
||||
}
|
||||
|
||||
shln(n: number): Field {
|
||||
return Field.from(super.shln(n))
|
||||
}
|
||||
|
||||
ishln(n: number): Field {
|
||||
return Field.from(super.ishln(n))
|
||||
}
|
||||
|
||||
ushln(n: number): Field {
|
||||
return Field.from(super.ushln(n))
|
||||
}
|
||||
|
||||
iushln(n: number): Field {
|
||||
return Field.from(super.iushln(n))
|
||||
}
|
||||
|
||||
shrn(n: number): Field {
|
||||
return Field.from(super.shrn(n))
|
||||
}
|
||||
|
||||
ishrn(n: number): Field {
|
||||
return Field.from(super.ishrn(n))
|
||||
}
|
||||
|
||||
ushrn(n: number): Field {
|
||||
return Field.from(super.ushrn(n))
|
||||
}
|
||||
|
||||
iushrn(n: number): Field {
|
||||
return Field.from(super.iushrn(n))
|
||||
}
|
||||
|
||||
maskn(n: number): Field {
|
||||
return Field.from(super.maskn(n))
|
||||
}
|
||||
|
||||
imaskn(n: number): Field {
|
||||
return Field.from(super.imaskn(n))
|
||||
}
|
||||
|
||||
bincn(n: number): Field {
|
||||
return Field.from(super.bincn(n))
|
||||
}
|
||||
|
||||
notn(w: number): Field {
|
||||
return Field.from(super.notn(w))
|
||||
}
|
||||
|
||||
inotn(w: number): Field {
|
||||
return Field.from(super.inotn(w))
|
||||
}
|
||||
|
||||
gcd(f: F): Field {
|
||||
return Field.from(super.gcd(Field.from(f)))
|
||||
}
|
||||
}
|
||||
328
packages/babyjubjub/src/fp.ts
Normal file
@@ -0,0 +1,328 @@
|
||||
import BN from 'bn.js'
|
||||
import { Bytes32, Uint256, Address } from 'soltypes'
|
||||
import RedBN from './types/redbn'
|
||||
|
||||
export type F = number | string | number[] | Uint8Array | Buffer | BN
|
||||
|
||||
export class Fp extends BN {
|
||||
static ORDER = new BN(
|
||||
'21888242871839275222246405745257275088548364400416034343698204186575808495617',
|
||||
)
|
||||
|
||||
static half = Fp.from(Fp.ORDER.shrn(1))
|
||||
|
||||
static zero = Fp.from(0)
|
||||
|
||||
static one = Fp.from(1)
|
||||
|
||||
static Red = BN.red(Fp.ORDER)
|
||||
|
||||
constructor(number: F, base?: number | 'hex', endian?: BN.Endianness) {
|
||||
let n: BN
|
||||
if (number instanceof BN) {
|
||||
n = new BN(number.toString())
|
||||
} else if (typeof number === 'string' && number.startsWith('0x')) {
|
||||
n = new BN(number.substr(2), 16, endian)
|
||||
} else {
|
||||
n = new BN(number, base, endian)
|
||||
}
|
||||
if (n.isNeg()) {
|
||||
super(n.mod(Fp.ORDER).add(Fp.ORDER), base, endian)
|
||||
} else {
|
||||
super(n.mod(Fp.ORDER), base, endian)
|
||||
}
|
||||
Object.setPrototypeOf(this, Fp.prototype)
|
||||
}
|
||||
|
||||
static from(x: F): Fp {
|
||||
if (x === undefined) return new Fp(0)
|
||||
return new Fp(x)
|
||||
}
|
||||
|
||||
static strictFrom(x: F): Fp {
|
||||
if (!Fp.inRange(x)) throw Error('Not in range')
|
||||
return Fp.from(x)
|
||||
}
|
||||
|
||||
static toBN(x: F): BN {
|
||||
if (typeof x === 'string' && x.startsWith('0x')) {
|
||||
return new BN(x.substr(2), 16)
|
||||
}
|
||||
return new BN(x)
|
||||
}
|
||||
|
||||
static fromBuffer(buff: Buffer): Fp {
|
||||
return Fp.from(`0x${buff.toString('hex')}`)
|
||||
}
|
||||
|
||||
static inRange(x: F): boolean {
|
||||
let n: BN
|
||||
if (x instanceof BN) {
|
||||
n = x
|
||||
} else if (typeof x === 'string' && x.startsWith('0x')) {
|
||||
n = new BN(x.substr(2), 16)
|
||||
} else {
|
||||
n = new BN(x)
|
||||
}
|
||||
return n.lt(Fp.ORDER)
|
||||
}
|
||||
|
||||
toBuffer(endian?: BN.Endianness, length?: number): Buffer {
|
||||
return this.toArrayLike(Buffer, endian, length)
|
||||
}
|
||||
|
||||
addPrefixBit(bitLength: number): BN {
|
||||
const prefix = new BN(1).shln(bitLength)
|
||||
if (this.gt(prefix)) throw Error('prefix bit is less than current value')
|
||||
return prefix.or(this)
|
||||
}
|
||||
|
||||
toJSON(): string {
|
||||
return `0x${super.toJSON()}`
|
||||
}
|
||||
|
||||
toHex(byteLength?: number): string {
|
||||
if (byteLength) {
|
||||
return `0x${this.toBuffer('be', byteLength).toString('hex')}`
|
||||
}
|
||||
return `0x${this.toString('hex')}`
|
||||
}
|
||||
|
||||
toBytes32(): Bytes32 {
|
||||
return new Bytes32(`0x${this.toString(16, 64)}`)
|
||||
}
|
||||
|
||||
toUint256(): Uint256 {
|
||||
return this.toBytes32().toUint()
|
||||
}
|
||||
|
||||
toAddress(): Address {
|
||||
return new Address(`0x${this.toString(16, 40)}`)
|
||||
}
|
||||
|
||||
toBigInt(): bigint {
|
||||
return BigInt(this.toString())
|
||||
// return ffjs.utils.stringifyBigInts(this.toString())
|
||||
}
|
||||
|
||||
toTwos(width: number): Fp {
|
||||
return Fp.from(super.toTwos(width))
|
||||
}
|
||||
|
||||
fromTwos(width: number): Fp {
|
||||
return Fp.from(super.fromTwos(width))
|
||||
}
|
||||
|
||||
neg(): Fp {
|
||||
return Fp.from(super.neg())
|
||||
}
|
||||
|
||||
ineg(): Fp {
|
||||
return Fp.from(super.ineg())
|
||||
}
|
||||
|
||||
abs(): Fp {
|
||||
return Fp.from(super.abs())
|
||||
}
|
||||
|
||||
iabs(): Fp {
|
||||
return Fp.from(super.iabs())
|
||||
}
|
||||
|
||||
add(f: F): Fp {
|
||||
return Fp.from(super.add(Fp.from(f)))
|
||||
}
|
||||
|
||||
iadd(f: F): Fp {
|
||||
return Fp.from(super.iadd(Fp.from(f)))
|
||||
}
|
||||
|
||||
addn(n: number): Fp {
|
||||
return Fp.from(super.addn(n))
|
||||
}
|
||||
|
||||
iaddn(n: number): Fp {
|
||||
return Fp.from(super.iaddn(n))
|
||||
}
|
||||
|
||||
sub(f: F): Fp {
|
||||
return Fp.from(super.sub(Fp.from(f)))
|
||||
}
|
||||
|
||||
isub(f: F): Fp {
|
||||
return Fp.from(super.isub(Fp.from(f)))
|
||||
}
|
||||
|
||||
subn(n: number): Fp {
|
||||
return Fp.from(super.subn(n))
|
||||
}
|
||||
|
||||
isubn(n: number): Fp {
|
||||
return Fp.from(super.isubn(n))
|
||||
}
|
||||
|
||||
mul(f: F): Fp {
|
||||
return Fp.from(super.mul(Fp.from(f)))
|
||||
}
|
||||
|
||||
imul(f: F): Fp {
|
||||
return Fp.from(super.imul(Fp.from(f)))
|
||||
}
|
||||
|
||||
muln(n: number): Fp {
|
||||
return Fp.from(super.muln(n))
|
||||
}
|
||||
|
||||
imuln(n: number): Fp {
|
||||
return Fp.from(super.imuln(n))
|
||||
}
|
||||
|
||||
sqr(): Fp {
|
||||
return Fp.from(super.sqr())
|
||||
}
|
||||
|
||||
isqr(): Fp {
|
||||
return Fp.from(super.isqr())
|
||||
}
|
||||
|
||||
pow(f: F): Fp {
|
||||
return Fp.from(super.pow(Fp.from(f)))
|
||||
}
|
||||
|
||||
div(f: F): Fp {
|
||||
return Fp.from(super.div(Fp.from(f)))
|
||||
}
|
||||
|
||||
divn(n: number): Fp {
|
||||
return Fp.from(super.divn(n))
|
||||
}
|
||||
|
||||
mod(f: F): Fp {
|
||||
return Fp.from(super.mod(Fp.from(f)))
|
||||
}
|
||||
|
||||
umod(f: F): Fp {
|
||||
return Fp.from(super.umod(Fp.from(f)))
|
||||
}
|
||||
|
||||
divRound(f: F): Fp {
|
||||
return Fp.from(super.divRound(Fp.from(f)))
|
||||
}
|
||||
|
||||
or(f: F): Fp {
|
||||
return Fp.from(super.or(Fp.from(f)))
|
||||
}
|
||||
|
||||
ior(f: F): Fp {
|
||||
return Fp.from(super.ior(Fp.from(f)))
|
||||
}
|
||||
|
||||
uor(f: F): Fp {
|
||||
return Fp.from(super.uor(Fp.from(f)))
|
||||
}
|
||||
|
||||
iuor(f: F): Fp {
|
||||
return Fp.from(super.iuor(Fp.from(f)))
|
||||
}
|
||||
|
||||
and(f: F): Fp {
|
||||
return Fp.from(super.and(Fp.from(f)))
|
||||
}
|
||||
|
||||
iand(f: F): Fp {
|
||||
return Fp.from(super.iand(Fp.from(f)))
|
||||
}
|
||||
|
||||
uand(f: F): Fp {
|
||||
return Fp.from(super.uand(Fp.from(f)))
|
||||
}
|
||||
|
||||
iuand(f: F): Fp {
|
||||
return Fp.from(super.iuand(Fp.from(f)))
|
||||
}
|
||||
|
||||
andln(n: number): Fp {
|
||||
return Fp.from(super.andln(n))
|
||||
}
|
||||
|
||||
xor(f: F): Fp {
|
||||
return Fp.from(super.xor(Fp.from(f)))
|
||||
}
|
||||
|
||||
ixor(f: F): Fp {
|
||||
return Fp.from(super.ixor(Fp.from(f)))
|
||||
}
|
||||
|
||||
uxor(f: F): Fp {
|
||||
return Fp.from(super.uxor(Fp.from(f)))
|
||||
}
|
||||
|
||||
iuxor(f: F): Fp {
|
||||
return Fp.from(super.iuxor(Fp.from(f)))
|
||||
}
|
||||
|
||||
setn(n: number): Fp {
|
||||
return Fp.from(super.setn(n))
|
||||
}
|
||||
|
||||
shln(n: number): Fp {
|
||||
return Fp.from(super.shln(n))
|
||||
}
|
||||
|
||||
ishln(n: number): Fp {
|
||||
return Fp.from(super.ishln(n))
|
||||
}
|
||||
|
||||
ushln(n: number): Fp {
|
||||
return Fp.from(super.ushln(n))
|
||||
}
|
||||
|
||||
iushln(n: number): Fp {
|
||||
return Fp.from(super.iushln(n))
|
||||
}
|
||||
|
||||
shrn(n: number): Fp {
|
||||
return Fp.from(super.shrn(n))
|
||||
}
|
||||
|
||||
ishrn(n: number): Fp {
|
||||
return Fp.from(super.ishrn(n))
|
||||
}
|
||||
|
||||
ushrn(n: number): Fp {
|
||||
return Fp.from(super.ushrn(n))
|
||||
}
|
||||
|
||||
iushrn(n: number): Fp {
|
||||
return Fp.from(super.iushrn(n))
|
||||
}
|
||||
|
||||
maskn(n: number): Fp {
|
||||
return Fp.from(super.maskn(n))
|
||||
}
|
||||
|
||||
imaskn(n: number): Fp {
|
||||
return Fp.from(super.imaskn(n))
|
||||
}
|
||||
|
||||
bincn(n: number): Fp {
|
||||
return Fp.from(super.bincn(n))
|
||||
}
|
||||
|
||||
notn(w: number): Fp {
|
||||
return Fp.from(super.notn(w))
|
||||
}
|
||||
|
||||
inotn(w: number): Fp {
|
||||
return Fp.from(super.inotn(w))
|
||||
}
|
||||
|
||||
gcd(f: F): Fp {
|
||||
return Fp.from(super.gcd(Fp.from(f)))
|
||||
}
|
||||
|
||||
toRed(): RedBN {
|
||||
const r = new BN(this.toString()).toRed(Fp.Red)
|
||||
return r
|
||||
}
|
||||
}
|
||||
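A minimal sketch of the wrap-around behaviour of Fp above (and of the Fr class that follows): inputs and results are reduced modulo the field order at construction, so negative or out-of-range values cycle back into the field:

import { Fp } from '@zkopru/babyjubjub'

const a = Fp.from(18)
const b = Fp.from(-18)                   // wraps to ORDER - 18
console.log(a.add(b).isZero())           // true: 18 + (ORDER - 18) = 0 (mod ORDER)
console.log(Fp.from(Fp.ORDER).isZero())  // true: the order itself reduces to zero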
328
packages/babyjubjub/src/fr.ts
Normal file
@@ -0,0 +1,328 @@
|
||||
import BN from 'bn.js'
|
||||
import { Bytes32, Uint256, Address } from 'soltypes'
|
||||
import RedBN from './types/redbn'
|
||||
|
||||
export type F = number | string | number[] | Uint8Array | Buffer | BN
|
||||
|
||||
export class Fr extends BN {
|
||||
static ORDER = new BN(
|
||||
'2736030358979909402780800718157159386076813972158567259200215660948447373041',
|
||||
)
|
||||
|
||||
static half = Fr.from(Fr.ORDER.shrn(1))
|
||||
|
||||
static zero = Fr.from(0)
|
||||
|
||||
static one = Fr.from(1)
|
||||
|
||||
static Red = BN.red(Fr.ORDER)
|
||||
|
||||
constructor(number: F, base?: number | 'hex', endian?: BN.Endianness) {
|
||||
let n: BN
|
||||
if (number instanceof BN) {
|
||||
n = new BN(number.toString())
|
||||
} else if (typeof number === 'string' && number.startsWith('0x')) {
|
||||
n = new BN(number.substr(2), 16, endian)
|
||||
} else {
|
||||
n = new BN(number, base, endian)
|
||||
}
|
||||
if (n.isNeg()) {
|
||||
super(n.mod(Fr.ORDER).add(Fr.ORDER), base, endian)
|
||||
} else {
|
||||
super(n.mod(Fr.ORDER), base, endian)
|
||||
}
|
||||
Object.setPrototypeOf(this, Fr.prototype)
|
||||
}
|
||||
|
||||
static from(x: F): Fr {
|
||||
if (x === undefined) return new Fr(0)
|
||||
return new Fr(x)
|
||||
}
|
||||
|
||||
static strictFrom(x: F): Fr {
|
||||
if (!Fr.inRange(x)) throw Error('Not in range')
|
||||
return Fr.from(x)
|
||||
}
|
||||
|
||||
static toBN(x: F): BN {
|
||||
if (typeof x === 'string' && x.startsWith('0x')) {
|
||||
return new BN(x.substr(2), 16)
|
||||
}
|
||||
return new BN(x)
|
||||
}
|
||||
|
||||
static fromBuffer(buff: Buffer): Fr {
|
||||
return Fr.from(`0x${buff.toString('hex')}`)
|
||||
}
|
||||
|
||||
static inRange(x: F): boolean {
|
||||
let n: BN
|
||||
if (x instanceof BN) {
|
||||
n = x
|
||||
} else if (typeof x === 'string' && x.startsWith('0x')) {
|
||||
n = new BN(x.substr(2), 16)
|
||||
} else {
|
||||
n = new BN(x)
|
||||
}
|
||||
return n.lt(Fr.ORDER)
|
||||
}
|
||||
|
||||
toBuffer(endian?: BN.Endianness, length?: number): Buffer {
|
||||
return this.toArrayLike(Buffer, endian, length)
|
||||
}
|
||||
|
||||
addPrefixBit(bitLength: number): BN {
|
||||
const prefix = new BN(1).shln(bitLength)
|
||||
if (this.gt(prefix)) throw Error('prefix bit is less than current value')
|
||||
return prefix.or(this)
|
||||
}
|
||||
|
||||
toJSON(): string {
|
||||
return `0x${super.toJSON()}`
|
||||
}
|
||||
|
||||
toHex(byteLength?: number): string {
|
||||
if (byteLength) {
|
||||
return `0x${this.toBuffer('be', byteLength).toString('hex')}`
|
||||
}
|
||||
return `0x${this.toString('hex')}`
|
||||
}
|
||||
|
||||
toBytes32(): Bytes32 {
|
||||
return new Bytes32(`0x${this.toString(16, 64)}`)
|
||||
}
|
||||
|
||||
toUint256(): Uint256 {
|
||||
return this.toBytes32().toUint()
|
||||
}
|
||||
|
||||
toAddress(): Address {
|
||||
return new Address(`0x${this.toString(16, 40)}`)
|
||||
}
|
||||
|
||||
toBigInt(): bigint {
|
||||
return BigInt(this.toString())
|
||||
// return ffjs.utils.stringifyBigInts(this.toString())
|
||||
}
|
||||
|
||||
toTwos(width: number): Fr {
|
||||
return Fr.from(super.toTwos(width))
|
||||
}
|
||||
|
||||
fromTwos(width: number): Fr {
|
||||
return Fr.from(super.fromTwos(width))
|
||||
}
|
||||
|
||||
neg(): Fr {
|
||||
return Fr.from(super.neg())
|
||||
}
|
||||
|
||||
ineg(): Fr {
|
||||
return Fr.from(super.ineg())
|
||||
}
|
||||
|
||||
abs(): Fr {
|
||||
return Fr.from(super.abs())
|
||||
}
|
||||
|
||||
iabs(): Fr {
|
||||
return Fr.from(super.iabs())
|
||||
}
|
||||
|
||||
add(f: F): Fr {
|
||||
return Fr.from(super.add(Fr.from(f)))
|
||||
}
|
||||
|
||||
iadd(f: F): Fr {
|
||||
return Fr.from(super.iadd(Fr.from(f)))
|
||||
}
|
||||
|
||||
addn(n: number): Fr {
|
||||
return Fr.from(super.addn(n))
|
||||
}
|
||||
|
||||
iaddn(n: number): Fr {
|
||||
return Fr.from(super.iaddn(n))
|
||||
}
|
||||
|
||||
sub(f: F): Fr {
|
||||
return Fr.from(super.sub(Fr.from(f)))
|
||||
}
|
||||
|
||||
isub(f: F): Fr {
|
||||
return Fr.from(super.isub(Fr.from(f)))
|
||||
}
|
||||
|
||||
subn(n: number): Fr {
|
||||
return Fr.from(super.subn(n))
|
||||
}
|
||||
|
||||
isubn(n: number): Fr {
|
||||
return Fr.from(super.isubn(n))
|
||||
}
|
||||
|
||||
mul(f: F): Fr {
|
||||
return Fr.from(super.mul(Fr.from(f)))
|
||||
}
|
||||
|
||||
imul(f: F): Fr {
|
||||
return Fr.from(super.imul(Fr.from(f)))
|
||||
}
|
||||
|
||||
muln(n: number): Fr {
|
||||
return Fr.from(super.muln(n))
|
||||
}
|
||||
|
||||
imuln(n: number): Fr {
|
||||
return Fr.from(super.imuln(n))
|
||||
}
|
||||
|
||||
sqr(): Fr {
|
||||
return Fr.from(super.sqr())
|
||||
}
|
||||
|
||||
isqr(): Fr {
|
||||
return Fr.from(super.isqr())
|
||||
}
|
||||
|
||||
pow(f: F): Fr {
|
||||
return Fr.from(super.pow(Fr.from(f)))
|
||||
}
|
||||
|
||||
div(f: F): Fr {
|
||||
return Fr.from(super.div(Fr.from(f)))
|
||||
}
|
||||
|
||||
divn(n: number): Fr {
|
||||
return Fr.from(super.divn(n))
|
||||
}
|
||||
|
||||
mod(f: F): Fr {
|
||||
return Fr.from(super.mod(Fr.from(f)))
|
||||
}
|
||||
|
||||
umod(f: F): Fr {
|
||||
return Fr.from(super.umod(Fr.from(f)))
|
||||
}
|
||||
|
||||
divRound(f: F): Fr {
|
||||
return Fr.from(super.divRound(Fr.from(f)))
|
||||
}
|
||||
|
||||
or(f: F): Fr {
|
||||
return Fr.from(super.or(Fr.from(f)))
|
||||
}
|
||||
|
||||
ior(f: F): Fr {
|
||||
return Fr.from(super.ior(Fr.from(f)))
|
||||
}
|
||||
|
||||
uor(f: F): Fr {
|
||||
return Fr.from(super.uor(Fr.from(f)))
|
||||
}
|
||||
|
||||
iuor(f: F): Fr {
|
||||
return Fr.from(super.iuor(Fr.from(f)))
|
||||
}
|
||||
|
||||
and(f: F): Fr {
|
||||
return Fr.from(super.and(Fr.from(f)))
|
||||
}
|
||||
|
||||
iand(f: F): Fr {
|
||||
return Fr.from(super.iand(Fr.from(f)))
|
||||
}
|
||||
|
||||
uand(f: F): Fr {
|
||||
return Fr.from(super.uand(Fr.from(f)))
|
||||
}
|
||||
|
||||
iuand(f: F): Fr {
|
||||
return Fr.from(super.iuand(Fr.from(f)))
|
||||
}
|
||||
|
||||
andln(n: number): Fr {
|
||||
return Fr.from(super.andln(n))
|
||||
}
|
||||
|
||||
xor(f: F): Fr {
|
||||
return Fr.from(super.xor(Fr.from(f)))
|
||||
}
|
||||
|
||||
ixor(f: F): Fr {
|
||||
return Fr.from(super.ixor(Fr.from(f)))
|
||||
}
|
||||
|
||||
uxor(f: F): Fr {
|
||||
return Fr.from(super.uxor(Fr.from(f)))
|
||||
}
|
||||
|
||||
iuxor(f: F): Fr {
|
||||
return Fr.from(super.iuxor(Fr.from(f)))
|
||||
}
|
||||
|
||||
setn(n: number): Fr {
|
||||
return Fr.from(super.setn(n))
|
||||
}
|
||||
|
||||
shln(n: number): Fr {
|
||||
return Fr.from(super.shln(n))
|
||||
}
|
||||
|
||||
ishln(n: number): Fr {
|
||||
return Fr.from(super.ishln(n))
|
||||
}
|
||||
|
||||
ushln(n: number): Fr {
|
||||
return Fr.from(super.ushln(n))
|
||||
}
|
||||
|
||||
iushln(n: number): Fr {
|
||||
return Fr.from(super.iushln(n))
|
||||
}
|
||||
|
||||
shrn(n: number): Fr {
|
||||
return Fr.from(super.shrn(n))
|
||||
}
|
||||
|
||||
ishrn(n: number): Fr {
|
||||
return Fr.from(super.ishrn(n))
|
||||
}
|
||||
|
||||
ushrn(n: number): Fr {
|
||||
return Fr.from(super.ushrn(n))
|
||||
}
|
||||
|
||||
iushrn(n: number): Fr {
|
||||
return Fr.from(super.iushrn(n))
|
||||
}
|
||||
|
||||
maskn(n: number): Fr {
|
||||
return Fr.from(super.maskn(n))
|
||||
}
|
||||
|
||||
imaskn(n: number): Fr {
|
||||
return Fr.from(super.imaskn(n))
|
||||
}
|
||||
|
||||
bincn(n: number): Fr {
|
||||
return Fr.from(super.bincn(n))
|
||||
}
|
||||
|
||||
notn(w: number): Fr {
|
||||
return Fr.from(super.notn(w))
|
||||
}
|
||||
|
||||
inotn(w: number): Fr {
|
||||
return Fr.from(super.inotn(w))
|
||||
}
|
||||
|
||||
gcd(f: F): Fr {
|
||||
return Fr.from(super.gcd(Fr.from(f)))
|
||||
}
|
||||
|
||||
toRed(): RedBN {
|
||||
const r = new BN(this.toString()).toRed(Fr.Red)
|
||||
return r
|
||||
}
|
||||
}
|
||||
@@ -1,2 +1,5 @@
export { Field, F } from './field'
export { Point, signEdDSA, verifyEdDSA, EdDSA } from './point'
export { F } from './types/ff'
export { Fp } from './fp'
export { Fr } from './fr'
export { Point } from './point'
export { signEdDSA, verifyEdDSA, EdDSA } from './eddsa'

@@ -1,24 +1,21 @@
|
||||
import { hexToBuffer, hexify } from '@zkopru/utils'
|
||||
import bigInt, { BigInteger } from 'big-integer'
|
||||
import { hexToBuffer } from '@zkopru/utils'
|
||||
import * as ffjs from 'ffjavascript'
|
||||
import * as circomlib from 'circomlib'
|
||||
import createBlakeHash from 'blake-hash'
|
||||
import { Field, F } from './field'
|
||||
import BN from 'bn.js'
|
||||
import { Fp } from './fp'
|
||||
import { Fr } from './fr'
|
||||
import { F } from './types/ff'
|
||||
|
||||
export class Point {
|
||||
x: Field
|
||||
x: Fp
|
||||
|
||||
y: Field
|
||||
y: Fp
|
||||
|
||||
constructor(x: Field, y: Field) {
|
||||
constructor(x: Fp, y: Fp) {
|
||||
this.x = x
|
||||
this.y = y
|
||||
if (
|
||||
!circomlib.babyJub.inCurve([
|
||||
this.x.toIden3BigInt(),
|
||||
this.y.toIden3BigInt(),
|
||||
])
|
||||
) {
|
||||
if (!circomlib.babyJub.inCurve([this.x.toBigInt(), this.y.toBigInt()])) {
|
||||
throw new Error('Given point is not on the Babyjubjub curve')
|
||||
}
|
||||
}
|
||||
@@ -26,7 +23,25 @@ export class Point {
|
||||
static zero = Point.from(0, 1)
|
||||
|
||||
static from(x: F, y: F) {
|
||||
return new Point(Field.from(x), Field.from(y))
|
||||
return new Point(Fp.from(x), Fp.from(y))
|
||||
}
|
||||
|
||||
static fromY(y: F, xOdd: boolean): Point {
|
||||
const redY = Fp.from(y).toRed()
|
||||
const y2 = redY.redSqr()
|
||||
const D = Point.D.toRed()
|
||||
const numerator = Fp.one.toRed().redSub(y2)
|
||||
const denominator = Point.A.toRed()
|
||||
.redSub(D.redMul(y2))
|
||||
.redInvm()
|
||||
const x = numerator
|
||||
.redMul(denominator)
|
||||
.redSqrt()
|
||||
.fromRed()
|
||||
if (x.isOdd() === xOdd) {
|
||||
return Point.from(x, y)
|
||||
}
|
||||
return Point.from(x.neg(), y)
|
||||
}
|
||||
|
||||
static fromHex(hex: string) {
|
||||
@@ -35,21 +50,28 @@ export class Point {
|
||||
}
|
||||
|
||||
static decode(packed: Buffer): Point {
|
||||
const point = circomlib.babyJub.unpackPoint(packed)
|
||||
return Point.from(point[0].toString(), point[1].toString())
|
||||
if (packed.length !== 32) throw Error('invalid length')
|
||||
const oddX = (packed[31] & 0x80) !== 0
|
||||
const yBuff = Buffer.from(packed)
|
||||
yBuff[31] &= 0x7f // clear the most significant bit
|
||||
const y = new BN(yBuff, 'le')
|
||||
return Point.fromY(y, oddX)
|
||||
}
|
||||
|
||||
static generate(n: F): Point {
|
||||
return Point.BASE8.mul(Field.from(n))
|
||||
return Point.BASE8.mul(Fr.from(n))
|
||||
}
|
||||
|
||||
/**
|
||||
* @returns getMultiplier(key)*G
|
||||
*/
|
||||
static fromPrivKey(key: string | Buffer): Point {
|
||||
const buff: Buffer = typeof key === 'string' ? hexToBuffer(key) : key
|
||||
const result = circomlib.eddsa.prv2pub(buff)
|
||||
return Point.from(result[0].toString(), result[1].toString())
|
||||
}
|
||||
|
||||
static getMultiplier(key: string | Buffer): Field {
|
||||
static getMultiplier(key: string | Buffer): Fr {
|
||||
const buff: Buffer = typeof key === 'string' ? hexToBuffer(key) : key
|
||||
const sBuff = Buffer.from(
|
||||
createBlakeHash('blake512')
|
||||
@@ -62,43 +84,49 @@ export class Point {
|
||||
sBuff[31] |= 0x40
|
||||
const s = ffjs.utils.leBuff2int(sBuff)
|
||||
const multiplier = ffjs.Scalar.shr(s, 3)
|
||||
return Field.from(multiplier)
|
||||
return Fr.from(multiplier)
|
||||
}
|
||||
|
||||
static isOnJubjub(x: F, y: F): boolean {
|
||||
return circomlib.babyJub.inCurve([
|
||||
Field.from(x).toIden3BigInt(),
|
||||
Field.from(y).toIden3BigInt(),
|
||||
Fp.from(x).toBigInt(),
|
||||
Fp.from(y).toBigInt(),
|
||||
])
|
||||
}
|
||||
|
||||
// https://tools.ietf.org/html/rfc8032#section-5.1.5
|
||||
encode(): Buffer {
|
||||
return circomlib.babyJub.packPoint([
|
||||
this.x.toIden3BigInt(),
|
||||
this.y.toIden3BigInt(),
|
||||
])
|
||||
const buff = this.y.toBuffer('le', 32)
|
||||
if ((buff[31] & 0x80) !== 0)
|
||||
throw Error('The MSB of the final octet should be zero')
|
||||
if (this.x.isOdd()) {
|
||||
buff[31] |= 0x80
|
||||
}
|
||||
return buff
|
||||
}
|
||||
|
||||
toHex(): string {
|
||||
return hexify(this.encode(), 32)
|
||||
const encoded = this.encode()
|
||||
if (encoded.length !== 32) throw new Error('Expected 32 bytes')
|
||||
return encoded.toString('hex')
|
||||
}
|
||||
|
||||
toBigIntArr(): BigInteger[] {
|
||||
return [this.x.toIden3BigInt(), this.y.toIden3BigInt(), bigInt(1)]
|
||||
toBigIntArr(): bigint[] {
|
||||
return [this.x.toBigInt(), this.y.toBigInt(), BigInt(1)]
|
||||
}
|
||||
|
||||
add(p: Point): Point {
|
||||
const result = circomlib.babyJub.addPoint(
|
||||
[this.x.toIden3BigInt(), this.y.toIden3BigInt()],
|
||||
[p.x.toIden3BigInt(), p.y.toIden3BigInt()],
|
||||
[this.x.toBigInt(), this.y.toBigInt()],
|
||||
[p.x.toBigInt(), p.y.toBigInt()],
|
||||
)
|
||||
return Point.from(result[0].toString(), result[1].toString())
|
||||
}
|
||||
|
||||
mul(n: F): Point {
|
||||
const result = circomlib.babyJub.mulPointEscalar(
|
||||
[this.x.toIden3BigInt(), this.y.toIden3BigInt()],
|
||||
Field.from(n).toIden3BigInt(),
|
||||
[this.x.toBigInt(), this.y.toBigInt()],
|
||||
Fr.from(n).toBigInt(),
|
||||
)
|
||||
return Point.from(result[0].toString(), result[1].toString())
|
||||
}
|
||||
@@ -123,43 +151,7 @@ export class Point {
|
||||
|
||||
static PRIME: bigint = circomlib.babyJub.p
|
||||
|
||||
static A = circomlib.babyJub
|
||||
static A = Fp.from(circomlib.babyJub.A)
|
||||
|
||||
static D = circomlib.babyJub
|
||||
}
|
||||
|
||||
export interface EdDSA {
|
||||
R8: Point
|
||||
S: Field
|
||||
}
|
||||
|
||||
export function verifyEdDSA(msg: F, sig: EdDSA, pubKey: Point): boolean {
|
||||
const result = circomlib.eddsa.verifyPoseidon(
|
||||
Field.from(msg).toIden3BigInt(),
|
||||
{
|
||||
R8: [sig.R8.x.toIden3BigInt(), sig.R8.y.toIden3BigInt()],
|
||||
S: sig.S.toIden3BigInt(),
|
||||
},
|
||||
[pubKey.x.toIden3BigInt(), pubKey.y.toIden3BigInt()],
|
||||
)
|
||||
return result
|
||||
}
|
||||
|
||||
export function signEdDSA({
|
||||
msg,
|
||||
privKey,
|
||||
}: {
|
||||
msg: F
|
||||
privKey: Buffer | string
|
||||
}): EdDSA {
|
||||
const buff: Buffer =
|
||||
typeof privKey === 'string' ? hexToBuffer(privKey) : privKey
|
||||
const result = circomlib.eddsa.signPoseidon(
|
||||
buff,
|
||||
Field.from(msg).toIden3BigInt(),
|
||||
)
|
||||
return {
|
||||
R8: Point.from(result.R8[0].toString(), result.R8[1].toString()),
|
||||
S: Field.from(result.S.toString()),
|
||||
}
|
||||
static D = Fp.from(circomlib.babyJub.D)
|
||||
}
|
||||
|
||||
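A hedged round-trip sketch of the RFC 8032 style point compression implemented by encode()/decode() above: the packed form is the 32-byte little-endian y coordinate with the parity of x stored in the top bit of the last byte:

import { Point } from '@zkopru/babyjubjub'

const p = Point.generate(7)     // 7 * BASE8, an arbitrary curve point
const packed = p.encode()       // 32-byte Buffer: y (LE) with the x-parity bit in byte 31
const q = Point.decode(packed)  // recovers x from y through the curve equation
console.log(q.x.eq(p.x) && q.y.eq(p.y)) // true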
3
packages/babyjubjub/src/types/ff.d.ts
vendored
Normal file
@@ -0,0 +1,3 @@
import BN from 'bn.js'

export type F = number | string | number[] | Uint8Array | Buffer | BN
75
packages/babyjubjub/src/types/redbn.d.ts
vendored
Normal file
@@ -0,0 +1,75 @@
|
||||
import BN from 'bn.js'
|
||||
|
||||
declare class RedBN extends BN {
|
||||
/**
|
||||
* @description Convert back a number using a reduction context
|
||||
*/
|
||||
fromRed(): BN
|
||||
|
||||
/**
|
||||
* @description modular addition
|
||||
*/
|
||||
redAdd(b: BN): RedBN
|
||||
|
||||
/**
|
||||
* @description in-place modular addition
|
||||
*/
|
||||
redIAdd(b: BN): RedBN
|
||||
|
||||
/**
|
||||
* @description modular subtraction
|
||||
*/
|
||||
redSub(b: BN): RedBN
|
||||
|
||||
/**
|
||||
* @description in-place modular subtraction
|
||||
*/
|
||||
redISub(b: BN): RedBN
|
||||
|
||||
/**
|
||||
* @description modular shift left
|
||||
*/
|
||||
redShl(num: number): RedBN
|
||||
|
||||
/**
|
||||
* @description modular multiplication
|
||||
*/
|
||||
redMul(b: BN): RedBN
|
||||
|
||||
/**
|
||||
* @description in-place modular multiplication
|
||||
*/
|
||||
redIMul(b: BN): RedBN
|
||||
|
||||
/**
|
||||
* @description modular square
|
||||
*/
|
||||
redSqr(): RedBN
|
||||
|
||||
/**
|
||||
* @description in-place modular square
|
||||
*/
|
||||
redISqr(): RedBN
|
||||
|
||||
/**
|
||||
* @description modular square root
|
||||
*/
|
||||
redSqrt(): RedBN
|
||||
|
||||
/**
|
||||
* @description modular inverse of the number
|
||||
*/
|
||||
redInvm(): RedBN
|
||||
|
||||
/**
|
||||
* @description modular negation
|
||||
*/
|
||||
redNeg(): RedBN
|
||||
|
||||
/**
|
||||
* @description modular exponentiation
|
||||
*/
|
||||
redPow(b: BN): RedBN
|
||||
}
|
||||
|
||||
export = RedBN
|
||||
1
packages/babyjubjub/src/types/typings.d.ts
vendored
@@ -1,4 +1,5 @@
/* eslint-disable max-classes-per-file */
declare module 'snarkjs'
declare module 'ffjavascript'
declare module 'circomlib'
declare module 'blake-hash'

@@ -1,48 +1,48 @@
|
||||
/* eslint-disable jest/no-hooks */
|
||||
import BN from 'bn.js'
|
||||
import { Field } from '~babyjubjub'
|
||||
import { Fp } from '~babyjubjub'
|
||||
|
||||
describe('finite field', () => {
|
||||
let constant: Field
|
||||
let constant: Fp
|
||||
beforeAll(() => {
|
||||
constant = new Field(18)
|
||||
constant = new Fp(18)
|
||||
})
|
||||
it('should accept number for its constructor parameter', () => {
|
||||
const a = new Field(18)
|
||||
const b = Field.from(18)
|
||||
const a = new Fp(18)
|
||||
const b = Fp.from(18)
|
||||
expect(a).toBeDefined()
|
||||
expect(b).toBeDefined()
|
||||
expect(a).toBeInstanceOf(Field)
|
||||
expect(b).toBeInstanceOf(Field)
|
||||
expect(a).toBeInstanceOf(Fp)
|
||||
expect(b).toBeInstanceOf(Fp)
|
||||
expect(constant.eq(a)).toBe(true)
|
||||
expect(constant.eq(b)).toBe(true)
|
||||
})
|
||||
it('should accept string string for its constructor parameter', () => {
|
||||
const a = new Field('18')
|
||||
const b = Field.from('18')
|
||||
const a = new Fp('18')
|
||||
const b = Fp.from('18')
|
||||
expect(a).toBeDefined()
|
||||
expect(b).toBeDefined()
|
||||
expect(a).toBeInstanceOf(Field)
|
||||
expect(b).toBeInstanceOf(Field)
|
||||
expect(a).toBeInstanceOf(Fp)
|
||||
expect(b).toBeInstanceOf(Fp)
|
||||
expect(constant.eq(a)).toBe(true)
|
||||
expect(constant.eq(b)).toBe(true)
|
||||
})
|
||||
it('should accept hex string with 0x prefix for its constructor parameter', () => {
|
||||
const a = new Field('0x12')
|
||||
const b = Field.from('0x12')
|
||||
const a = new Fp('0x12')
|
||||
const b = Fp.from('0x12')
|
||||
expect(a).toBeDefined()
|
||||
expect(b).toBeDefined()
|
||||
expect(a).toBeInstanceOf(Field)
|
||||
expect(b).toBeInstanceOf(Field)
|
||||
expect(a).toBeInstanceOf(Fp)
|
||||
expect(b).toBeInstanceOf(Fp)
|
||||
expect(constant.eq(a)).toBe(true)
|
||||
expect(constant.eq(b)).toBe(true)
|
||||
})
|
||||
it('should return same hex', () => {
|
||||
const f = new Field('0xabcd1234abcd1234')
|
||||
const f = new Fp('0xabcd1234abcd1234')
|
||||
expect(f.toHex(8)).toStrictEqual('0xabcd1234abcd1234')
|
||||
})
|
||||
it('should return cyclic hex for a number beyond the field range', () => {
|
||||
const f = new Field(
|
||||
const f = new Fp(
|
||||
'0xabcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234abcd1234',
|
||||
)
|
||||
expect(f.toHex(32)).not.toStrictEqual(
|
||||
@@ -50,54 +50,54 @@ describe('finite field', () => {
|
||||
)
|
||||
})
|
||||
it('should accept Buffer obj for its constructor parameter', () => {
|
||||
const a = new Field(Buffer.from('12', 'hex'))
|
||||
const b = Field.from(Buffer.from('12', 'hex'))
|
||||
expect(new Field(Buffer.from('12', 'hex'))).toBeDefined()
|
||||
const a = new Fp(Buffer.from('12', 'hex'))
|
||||
const b = Fp.from(Buffer.from('12', 'hex'))
|
||||
expect(new Fp(Buffer.from('12', 'hex'))).toBeDefined()
|
||||
expect(a).toBeDefined()
|
||||
expect(b).toBeDefined()
|
||||
expect(a).toBeInstanceOf(Field)
|
||||
expect(b).toBeInstanceOf(Field)
|
||||
expect(a).toBeInstanceOf(Fp)
|
||||
expect(b).toBeInstanceOf(Fp)
|
||||
expect(constant.eq(a)).toBe(true)
|
||||
expect(constant.eq(b)).toBe(true)
|
||||
})
|
||||
it('should accept BN object for its constructor parameter', () => {
|
||||
const a = new Field(new BN(18))
|
||||
const b = Field.from(new BN(18))
|
||||
expect(new Field(new BN(18))).toBeDefined()
|
||||
const a = new Fp(new BN(18))
|
||||
const b = Fp.from(new BN(18))
|
||||
expect(new Fp(new BN(18))).toBeDefined()
|
||||
expect(a).toBeDefined()
|
||||
expect(b).toBeDefined()
|
||||
expect(a).toBeInstanceOf(Field)
|
||||
expect(b).toBeInstanceOf(Field)
|
||||
expect(a).toBeInstanceOf(Fp)
|
||||
expect(b).toBeInstanceOf(Fp)
|
||||
expect(constant.eq(a)).toBe(true)
|
||||
expect(constant.eq(b)).toBe(true)
|
||||
})
|
||||
it('should accept itself for its constructor parameter', () => {
|
||||
const a = new Field(new Field(18))
|
||||
const b = Field.from(new Field(18))
|
||||
expect(new Field(new Field(18))).toBeDefined()
|
||||
const a = new Fp(new Fp(18))
|
||||
const b = Fp.from(new Fp(18))
|
||||
expect(new Fp(new Fp(18))).toBeDefined()
|
||||
expect(a).toBeDefined()
|
||||
expect(b).toBeDefined()
|
||||
expect(a).toBeInstanceOf(Field)
|
||||
expect(b).toBeInstanceOf(Field)
|
||||
expect(a).toBeInstanceOf(Fp)
|
||||
expect(b).toBeInstanceOf(Fp)
|
||||
expect(constant.eq(a)).toBe(true)
|
||||
expect(constant.eq(b)).toBe(true)
|
||||
})
|
||||
})
|
||||
describe('cyclic group', () => {
|
||||
it('a + (-a) = 0', () => {
|
||||
const a = new Field(18)
|
||||
const b = new Field(-18)
|
||||
const a = new Fp(18)
|
||||
const b = new Fp(-18)
|
||||
expect(a.add(b).isZero()).toBe(true)
|
||||
})
|
||||
it('a >= 0 and -a >= 0', () => {
|
||||
const a = new Field(18)
|
||||
const b = new Field(-18)
|
||||
const a = new Fp(18)
|
||||
const b = new Fp(-18)
|
||||
expect(a.gtn(0)).toBe(true)
|
||||
expect(b.gtn(0)).toBe(true)
|
||||
})
|
||||
it('a - b > a when b > a', () => {
|
||||
const a = new Field(18)
|
||||
const b = new Field(20)
|
||||
const a = new Fp(18)
|
||||
const b = new Fp(20)
|
||||
expect(a.sub(b).gt(a)).toBe(true)
|
||||
})
|
||||
})
|
||||
|
||||
@@ -1,5 +1,5 @@
/* eslint-disable jest/no-hooks */
import { Point, signEdDSA, verifyEdDSA, Field } from '~babyjubjub'
import { Point, signEdDSA, verifyEdDSA } from '~babyjubjub'

describe('baby jubjub point', () => {
it('should return generator', () => {
@@ -19,12 +19,25 @@ describe('baby jubjub point', () => {
})
describe('fromHex()', () => {
it('should return a Point instance from 0x prefixed hex string', () => {
const hex = Point.generate(2).toHex()
const point = Point.fromHex(hex)
const regeneratedHex = point.toHex()
const point = Point.generate(2)
const pointToHex = point.toHex()
const pointFromHex = Point.fromHex(pointToHex)
const regeneratedHex = pointFromHex.toHex()
expect(point).toBeDefined()
expect(regeneratedHex).toBe(hex)
expect(regeneratedHex).toBe(pointToHex)
})
Array(50)
.fill(null)
.forEach((_, i) =>
it(`should return a Point instance from hex string (${i + 1}G)`, () => {
const point = Point.generate(i + 1)
const pointToHex = point.toHex()
const pointFromHex = Point.fromHex(pointToHex)
const regeneratedHex = pointFromHex.toHex()
expect(point).toBeDefined()
expect(regeneratedHex).toBe(pointToHex)
}),
)
})
describe('isOnJubjub()', () => {
it('should return true for generated points', () => {
@@ -32,22 +45,17 @@ describe('baby jubjub point', () => {
expect(Point.isOnJubjub(point.x, point.y)).toBe(true)
})
it('should return true for decoded points', () => {
const snarkPk = Field.from(
'0x6cbed15c793ce57650b9877cf6fa156fbef513c4e6134f022a85b1ffdd59b2a1',
)
const pubKey = Point.fromPrivKey(snarkPk.toHex())
const secret =
'0x6cbed15c793ce57650b9877cf6fa156fbef513c4e6134f022a85b1ffdd59b2a1'
const pubKey = Point.fromPrivKey(secret)
expect(Point.isOnJubjub(pubKey.x, pubKey.y)).toBe(true)
})
it('should return true for points from pub key', () => {
const snarkPk = Field.from(
'0x4f3edf983ac636a65a842ce7c78d9aa706d3b113bce9c46f30d7d21715b23b1d',
)
const pubKey = Point.fromPrivKey(snarkPk.toHex())
const secret =
'0x4f3edf983ac636a65a842ce7c78d9aa706d3b113bce9c46f30d7d21715b23b1d'
const pubKey = Point.fromPrivKey(secret)
const pubKeyToHex = pubKey.toHex()
const retrievedPoint = Point.fromHex(pubKeyToHex)
expect(pubKeyToHex).toBe(
'0xa544f842c83b24ec53910f98ff0b22c2dab69bc329ffb81e29d3ed9638bfec28',
)
expect(Point.isOnJubjub(retrievedPoint.x, retrievedPoint.y)).toBe(true)
})
})

8
packages/circuits/.dockerignore
Normal file
@@ -0,0 +1,8 @@
**/build
**/node_modules
**/coverage
**/*.db
**/db
**/keys.tgz
.git
.build-cache
@@ -1,4 +1,5 @@
# @zkopru/circuits

## Testcases

[../packages/dataset/test/circuits](../packages/dataset/test/circuits)

@@ -1,19 +0,0 @@
version: "3"

services:
  test-circuits:
    image: zkopru-test-circuits
    build:
      context: ./packages/contracts/
      dockerfile: ../../containers/Contract.dockerfile
    ports:
      - "5000:5000"
  circuits:
    build:
      context: ./
      dockerfile: ./containers/Coordinator.dockerfile
    ports:
      - "8888:8888"
    links:
      - 'testnet:testnet'
    command: 'node /proj/packages/coordinator/dist/cli.js --ws ws://testnet:5000 --config /proj/packages/coordinator/coordinator.json'
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 1, 1);
component main = ZkTransaction(48, 1, 1);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 1, 2);
component main = ZkTransaction(48, 1, 2);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 1, 3);
component main = ZkTransaction(48, 1, 3);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 1, 4);
component main = ZkTransaction(48, 1, 4);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 2, 1);
component main = ZkTransaction(48, 2, 1);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 2, 2);
component main = ZkTransaction(48, 2, 2);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 2, 3);
component main = ZkTransaction(48, 2, 3);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 2, 4);
component main = ZkTransaction(48, 2, 4);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 3, 1);
component main = ZkTransaction(48, 3, 1);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 3, 2);
component main = ZkTransaction(48, 3, 2);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 3, 3);
component main = ZkTransaction(48, 3, 3);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 3, 4);
component main = ZkTransaction(48, 3, 4);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 4, 1);
component main = ZkTransaction(48, 4, 1);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 4, 2);
component main = ZkTransaction(48, 4, 2);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 4, 3);
component main = ZkTransaction(48, 4, 3);
@@ -1,3 +1,3 @@
include "../lib/zk_transaction.circom";

component main = ZkTransaction(31, 4, 4);
component main = ZkTransaction(48, 4, 4);
25
packages/circuits/lib/asset_hash.circom
Normal file
@@ -0,0 +1,25 @@
include "../node_modules/circomlib/circuits/poseidon.circom";

template AssetHash() {
  signal input eth;
  signal input token_addr;
  signal input erc20;
  signal input erc721;
  signal output out;
  // out = poseidon4(eth, token_addr, erc20, erc721)
  //
  // poseidon4 => {
  //   t: 5,
  //   nRoundsF: 8,
  //   nRoundsP: 60,
  // }
  // https://eprint.iacr.org/2019/458.pdf
  // https://github.com/iden3/circomlib/blob/86c6a2a6f5e8de4024a8d366eff9e35351bc1a2e/src/poseidon.js

  component hash = Poseidon(4);
  hash.inputs[0] <== eth;
  hash.inputs[1] <== token_addr;
  hash.inputs[2] <== erc20;
  hash.inputs[3] <== erc721;
  hash.out ==> out;
}
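For off-circuit tooling, the same asset hash can be recomputed in TypeScript. This is a hedged sketch, not part of the diff: it assumes circomlib's JS helper `poseidon(inputs)` returning a bigint (the circuit above remains the authoritative definition), and the interface and function names are illustrative.

```ts
// Sketch: recompute AssetHash off-circuit.
// Assumption: circomlib (0.5.x) exposes `poseidon(inputs: bigint[]): bigint` in JS.
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { poseidon } = require('circomlib')

interface Asset {
  eth: bigint
  tokenAddr: bigint
  erc20: bigint
  erc721: bigint
}

// Mirrors `out = poseidon4(eth, token_addr, erc20, erc721)` in asset_hash.circom
function assetHash(asset: Asset): bigint {
  return poseidon([asset.eth, asset.tokenAddr, asset.erc20, asset.erc721])
}

// Example: a pure-ETH note carries zero token fields
const hash = assetHash({ eth: 10n ** 18n, tokenAddr: 0n, erc20: 0n, erc721: 0n })
console.log(hash.toString())
```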
@@ -1,50 +0,0 @@
|
||||
include "./utils.circom";
|
||||
include "../node_modules/circomlib/circuits/comparators.circom";
|
||||
include "../node_modules/circomlib/circuits/escalarmul.circom";
|
||||
|
||||
template AtomicSwapMPC() {
|
||||
signal input my_mpc_salt;
|
||||
signal input order[3];
|
||||
signal input giving_token_type;
|
||||
signal input giving_token_addr;
|
||||
signal input giving_note_salt;
|
||||
signal input counterpart_pk[2];
|
||||
signal input counterpart_computation[2]; /// counterpart_computation = g^(counterpart_mpc_salt * receiving_token_type * receiving_token_addr * receiving_note_salt * my_pk)
|
||||
signal output out[2];
|
||||
|
||||
/// type 0: no-swap / 1: ETH / 2: ERC20 / 3: ERC721
|
||||
component correct_type = LessThan(3);
|
||||
correct_type.in[0] <== giving_token_type;
|
||||
correct_type.in[1] <== 4;
|
||||
correct_type.out === 1;
|
||||
|
||||
/// Order data or token addr can include some zero values.
|
||||
/// If then, multiply (JUBJUB prime - 1) instead of zero.
|
||||
component filter = ZeroToJubjubPrime(4);
|
||||
filter.in[0] <== order[0];
|
||||
filter.in[1] <== order[1];
|
||||
filter.in[2] <== order[2];
|
||||
filter.in[3] <== giving_token_addr;
|
||||
|
||||
/// Calculate scalar multiplication of the input values and the counterpart's public salt
|
||||
var BASE8 = [
|
||||
5299619240641551281634865583518297030282874472190772894086521144482721001553,
|
||||
16950150798460657717958625567821834550301663161624707787222815936182638968203
|
||||
];
|
||||
component mpc = EscalarMul(9, BASE8);
|
||||
mpc.inp[0] <== counterpart_computation[0];
|
||||
mpc.inp[1] <== counterpart_computation[1];
|
||||
mpc.in[0] <== my_mpc_salt;
|
||||
mpc.in[1] <== filter.out[0] /// order[0];
|
||||
mpc.in[2] <== filter.out[1] /// order[1];
|
||||
mpc.in[3] <== filter.out[2] /// order[2];
|
||||
mpc.in[4] <== giving_token_type;
|
||||
mpc.in[5] <== filter.out[3] /// giving_token_addr;
|
||||
mpc.in[6] <== giving_note_salt;
|
||||
mpc.in[7] <== counterpart_pk[0];
|
||||
mpc.in[8] <== counterpart_pk[1];
|
||||
|
||||
// Return outputs
|
||||
mpc.out[0] ==> out[0];
|
||||
mpc.out[1] ==> out[1];
|
||||
}
|
||||
@@ -1,11 +1,17 @@
include "../node_modules/circomlib/circuits/babyjub.circom";
include "./utils.circom";
include "./if_else_then.circom";

template ERC20Sum(n) {
  signal input addr;
  signal input note_addr[n];
  signal input note_amount[n];
  signal output out;
  // Filter with the given address and compute the sum of amount.
  // If we write the same logic in JS, that should be like below
  //
  // out = notes
  //   .filter(note => note.addr == addr)
  //   .reduce((acc, note) => acc + note.amount)

  component sum[n];
  signal intermediates[n+1];

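The comment inside ERC20Sum already spells out the intended filter-and-sum logic; the sketch below is simply that comment made runnable in TypeScript, handy as a reference when checking the circuit's output. The `Erc20Note` shape and the DAI address used in the example are illustrative only.

```ts
// Reference implementation of the comment inside ERC20Sum (illustrative only).
interface Erc20Note {
  addr: bigint   // token contract address as a field element
  amount: bigint // ERC20 amount carried by the note
}

// Sum of all note amounts whose token address matches `addr`:
// out = notes.filter(n => n.addr == addr).reduce((acc, n) => acc + n.amount, 0)
function erc20Sum(addr: bigint, notes: Erc20Note[]): bigint {
  return notes
    .filter(note => note.addr === addr)
    .reduce((acc, note) => acc + note.amount, 0n)
}

// Example: only the two notes with the matching token address are counted
const DAI = 0x6b175474e89094c44da98b954eedeac495271d0fn
console.log(erc20Sum(DAI, [
  { addr: DAI, amount: 5n },
  { addr: 1n, amount: 7n },
  { addr: DAI, amount: 3n },
])) // 8n
```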
27
packages/circuits/lib/if_else_then.circom
Normal file
@@ -0,0 +1,27 @@
include "../node_modules/circomlib/circuits/mux1.circom";
include "../node_modules/circomlib/circuits/comparators.circom";

template IfElseThen(n) {
  signal input obj1[n];
  signal input obj2[n];
  signal input if_v;
  signal input else_v;
  signal output out;
  // It returns `if_v` when obj1[i] == obj2[i] for every i.
  // Or it returns `else_v`

  component comparators[n];
  signal result[n + 1];
  result[0] <== 1;
  for(var i = 0; i < n; i++) {
    comparators[i] = IsEqual();
    comparators[i].in[0] <== obj1[i];
    comparators[i].in[1] <== obj2[i];
    result[i + 1] <== result[i] * comparators[i].out;
  }
  component mux = Mux1();
  mux.c[1] <== if_v;
  mux.c[0] <== else_v;
  mux.s <== result[n];
  out <== mux.out;
}
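To make the mux/comparator wiring easier to read, here is a plain TypeScript rendering of what IfElseThen(n) constrains (assuming both input arrays have length n); it is an explanatory sketch, not code from this repository.

```ts
// Illustrative TypeScript equivalent of the IfElseThen(n) template above:
// return `ifV` only when obj1[i] === obj2[i] for every i, otherwise `elseV`.
function ifElseThen(
  obj1: bigint[],
  obj2: bigint[],
  ifV: bigint,
  elseV: bigint,
): bigint {
  // In the circuit, result[i+1] = result[i] * (obj1[i] == obj2[i] ? 1 : 0)
  // starting from 1, so the final product is the Mux1 selector: 1 iff all pairs match.
  const allEqual = obj1.every((v, i) => v === obj2[i])
  return allEqual ? ifV : elseV
}

console.log(ifElseThen([1n, 2n], [1n, 2n], 10n, 20n)) // 10n
console.log(ifElseThen([1n, 2n], [1n, 3n], 10n, 20n)) // 20n
```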
@@ -6,8 +6,17 @@ template BranchNode() {
  signal input left;
  signal input right;
  signal output parent;
  // parent = poseidon2(left, right)
  //
  // poseidon2 => {
  //   t: 3,
  //   nRoundsF: 8,
  //   nRoundsP: 57,
  // }
  // https://eprint.iacr.org/2019/458.pdf
  // https://github.com/iden3/circomlib/blob/86c6a2a6f5e8de4024a8d366eff9e35351bc1a2e/src/poseidon.js

  component hasher = Poseidon(2, 3, 8, 57); // Constant
  component hasher = Poseidon(2); // Constant
  hasher.inputs[0] <== left;
  hasher.inputs[1] <== right;


@@ -1,6 +1,6 @@
include "../node_modules/circomlib/circuits/babyjub.circom";
include "../node_modules/circomlib/circuits/comparators.circom";
include "./utils.circom";
include "./if_else_then.circom";

template CountSameNFT(n) {
  signal input addr;
@@ -8,6 +8,12 @@ template CountSameNFT(n) {
  signal input comp_addr[n];
  signal input comp_nft[n];
  signal output out;
  // Filter with the given address and find the number of NFTs that have same ID
  // If we write the same logic in JS, that should be like below
  //
  // out = notes
  //   .filter(note => note.addr == addr)
  //   .reduce((acc, note) => acc + (note.nft == nft ? 1 : 0))

  component counter[n];
  component nft_exist[n];

@@ -1,24 +1,24 @@
include "../node_modules/circomlib/circuits/poseidon.circom";

template NoteHash() {
  signal input eth;
  signal input pubkey_x;
  signal input pubkey_y;
  signal input spending_pubkey;
  signal input salt;
  signal input token_addr;
  signal input erc20;
  signal input nft;
  signal input asset_hash;
  signal output out;
  // out = poseidon3(spending_pubkey, salt, asset_hash)
  // https://docs.zkopru.network/v/burrito/how-it-works/utxo
  //
  // poseidon3 => {
  //   t: 4,
  //   nRoundsF: 8,
  //   nRoundsP: 56,
  // }
  // https://eprint.iacr.org/2019/458.pdf
  // https://github.com/iden3/circomlib/blob/86c6a2a6f5e8de4024a8d366eff9e35351bc1a2e/src/poseidon.js

  component intermediate_hash = Poseidon(4, 6, 8, 57);
  intermediate_hash.inputs[0] <== eth;
  intermediate_hash.inputs[1] <== pubkey_x;
  intermediate_hash.inputs[2] <== pubkey_y;
  intermediate_hash.inputs[3] <== salt;
  component final_result = Poseidon(4, 6, 8, 57);
  final_result.inputs[0] <== intermediate_hash.out;
  final_result.inputs[1] <== token_addr;
  final_result.inputs[2] <== erc20;
  final_result.inputs[3] <== nft;
  final_result.out ==> out;
  component hash = Poseidon(3);
  hash.inputs[0] <== spending_pubkey;
  hash.inputs[1] <== salt;
  hash.inputs[2] <== asset_hash;
  hash.out ==> out;
}

@@ -1,12 +1,22 @@
include "../node_modules/circomlib/circuits/poseidon.circom";

template Nullifier() {
  signal input note_hash;
  signal input note_salt;
  signal input nullifier_seed;
  signal input leaf_index;
  signal output out;
  // out = poseidon2(nullifier_seed, leaf_index)
  // https://docs.zkopru.network/v/burrito/how-it-works/account
  //
  // poseidon2 => {
  //   t: 3,
  //   nRoundsF: 8,
  //   nRoundsP: 57,
  // }
  // https://eprint.iacr.org/2019/458.pdf
  // https://github.com/iden3/circomlib/blob/86c6a2a6f5e8de4024a8d366eff9e35351bc1a2e/src/poseidon.js

  component hash = Poseidon(2, 6, 8, 57); // Constant
  hash.inputs[0] <== note_hash;
  hash.inputs[1] <== note_salt;
  component hash = Poseidon(2); // Constant
  hash.inputs[0] <== nullifier_seed;
  hash.inputs[1] <== leaf_index;
  hash.out ==> out;
}

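The updated template derives the nullifier from the nullifier seed and the UTXO's leaf index rather than from the note hash and salt. A hedged off-circuit sketch of the same derivation, under the same assumption about circomlib's JS `poseidon` helper as in the asset-hash sketch above; the function name and example values are illustrative.

```ts
// Sketch: derive a nullifier the same way as the updated Nullifier() template,
// i.e. poseidon2(nullifier_seed, leaf_index).
// Assumption: circomlib's JS `poseidon(inputs: bigint[]): bigint`.
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { poseidon } = require('circomlib')

function nullifierOf(nullifierSeed: bigint, leafIndex: bigint): bigint {
  return poseidon([nullifierSeed, leafIndex])
}

// Each (seed, leaf index) pair maps to exactly one nullifier, so spending the
// same UTXO twice reveals the same nullifier and is rejected as a double spend.
console.log(nullifierOf(42n, 7n).toString())
```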
10
packages/circuits/lib/range_limit.circom
Normal file
@@ -0,0 +1,10 @@
include "../node_modules/circomlib/circuits/bitify.circom";

template RangeLimit(bitLength) {
  signal input in;
  // bitLength should be less than the SNARK field's bit length
  assert(bitLength < 254);
  // This automatically limits its max value to 2**bitLength - 1
  component bits = Num2Bits(bitLength);
  bits.in <== in;
}
24
packages/circuits/lib/spending_pubkey.circom
Normal file
@@ -0,0 +1,24 @@
include "../node_modules/circomlib/circuits/poseidon.circom";

template SpendingPubKey() {
  signal input pubkey_x;
  signal input pubkey_y;
  signal input nullifier_seed;
  signal output out;
  // out = poseidon3(pubkey_x, pubkey_y, nullifier_seed)
  // https://docs.zkopru.network/v/burrito/how-it-works/account
  //
  // poseidon3 => {
  //   t: 4,
  //   nRoundsF: 8,
  //   nRoundsP: 56,
  // }
  // https://eprint.iacr.org/2019/458.pdf
  // https://github.com/iden3/circomlib/blob/86c6a2a6f5e8de4024a8d366eff9e35351bc1a2e/src/poseidon.js

  component hash = Poseidon(3); // Constant
  hash.inputs[0] <== pubkey_x;
  hash.inputs[1] <== pubkey_y;
  hash.inputs[2] <== nullifier_seed;
  hash.out ==> out;
}
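Taken together, the new templates define the chain P = poseidon(A.x, A.y, n), asset_hash = poseidon(eth, token_addr, erc20, erc721), note_hash = poseidon(P, salt, asset_hash), which is also how the zk_transaction header comment later in this diff describes it. The sketch below strings those three Poseidon calls together off-circuit; it relies on the same circomlib JS `poseidon` assumption as the earlier sketches, and all concrete values are hypothetical.

```ts
// Sketch of the derivation the new templates implement, off-circuit:
//   P          = poseidon(A.x, A.y, nullifier_seed)       (spending_pubkey.circom)
//   asset_hash = poseidon(eth, token_addr, erc20, erc721)  (asset_hash.circom)
//   note_hash  = poseidon(P, salt, asset_hash)             (note_hash.circom)
// Assumption: circomlib's JS `poseidon(inputs: bigint[]): bigint`.
// eslint-disable-next-line @typescript-eslint/no-var-requires
const { poseidon } = require('circomlib')

function spendingPubKey(ax: bigint, ay: bigint, nullifierSeed: bigint): bigint {
  return poseidon([ax, ay, nullifierSeed])
}

function noteHash(p: bigint, salt: bigint, assetHash: bigint): bigint {
  return poseidon([p, salt, assetHash])
}

// Hypothetical values, only to show the call chain end to end
const P = spendingPubKey(1n, 2n, 3n)
const asset = poseidon([10n ** 18n, 0n, 0n, 0n])
console.log(noteHash(P, 99n, asset).toString())
```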
@@ -1,65 +0,0 @@
|
||||
include "../node_modules/circomlib/circuits/bitify.circom";
|
||||
include "../node_modules/circomlib/circuits/mux1.circom";
|
||||
include "../node_modules/circomlib/circuits/comparators.circom";
|
||||
|
||||
template NFTtoBits(size) {
|
||||
signal input nft;
|
||||
signal output out[size];
|
||||
component is_zero = IsZero();
|
||||
is_zero.in <== nft;
|
||||
component mux = Mux1();
|
||||
mux.s <== is_zero.out;
|
||||
mux.c[0] <== nft;
|
||||
mux.c[1] <== 1; /// means skipping the multiplication
|
||||
component bits = Num2Bits(size);
|
||||
bits.in <== mux.out;
|
||||
for (var i = 0; i < size; i++) {
|
||||
out[i] <== bits.out[i];
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
template IfElseThen(n) {
|
||||
signal input obj1[n];
|
||||
signal input obj2[n];
|
||||
signal input if_v;
|
||||
signal input else_v;
|
||||
signal output out;
|
||||
component comparators[n];
|
||||
signal result[n + 1];
|
||||
result[0] <== 1;
|
||||
for(var i = 0; i < n; i++) {
|
||||
comparators[i] = IsEqual();
|
||||
comparators[i].in[0] <== obj1[i];
|
||||
comparators[i].in[1] <== obj2[i];
|
||||
result[i + 1] <== result[i] * comparators[i].out;
|
||||
}
|
||||
component mux = Mux1();
|
||||
mux.c[1] <== if_v;
|
||||
mux.c[0] <== else_v;
|
||||
mux.s <== result[n];
|
||||
out <== mux.out;
|
||||
}
|
||||
|
||||
template ZeroToJubjubPrime(n) {
|
||||
signal input in[n];
|
||||
signal output out[n];
|
||||
|
||||
component filter[n];
|
||||
component in_range[n];
|
||||
var prime_field = 21888242871839275222246405745257275088548364400416034343698204186575808495617;
|
||||
|
||||
for(var i = 0; i < n; i++) {
|
||||
in_range[i] = LessThan(254);
|
||||
in_range[i].in[0] <== in[i];
|
||||
in_range[i].in[1] <== prime_field - 1;
|
||||
in_range[i].out === 1;
|
||||
|
||||
filter[i] = IfElseThen(1);
|
||||
filter[i].obj1[0] <== in[i];
|
||||
filter[i].obj2[0] <== 0;
|
||||
filter[i].if_v <== prime_field - 1;
|
||||
filter[i].else_v <== in[i];
|
||||
out[i] <== filter[i].out;
|
||||
}
|
||||
}
|
||||
@@ -1,17 +1,27 @@
|
||||
include "./utils.circom";
|
||||
include "./if_else_then.circom";
|
||||
include "./inclusion_proof.circom";
|
||||
include "./erc20_sum.circom";
|
||||
include "./non_fungible.circom";
|
||||
include "./note_hash.circom";
|
||||
include "./asset_hash.circom";
|
||||
include "./nullifier.circom";
|
||||
include "./ownership_proof.circom";
|
||||
//include "./atomic_swap_mpc.circom";
|
||||
include "./spending_pubkey.circom";
|
||||
include "./range_limit.circom";
|
||||
include "../node_modules/circomlib/circuits/eddsaposeidon.circom";
|
||||
include "../node_modules/circomlib/circuits/comparators.circom";
|
||||
include "../node_modules/circomlib/circuits/bitify.circom";
|
||||
|
||||
/**
|
||||
* Note properties
|
||||
* note[0]: ETH value
|
||||
* asset_hash = poseidon(eth, token_addr, erc20Amount, nftId)
|
||||
* note_hash = poseidon(P, salt, asset_hash)
|
||||
* P = poseidon(A.x, A.y, n)
|
||||
* A = from EdDSA
|
||||
*
|
||||
* nullifier_seed = n // nullifier_seed
|
||||
* spending_note_data[1]: salt
|
||||
* spending_note_data[1]: salt
|
||||
* spending_note_data[1]: salt
|
||||
* note[1]: Pub Key x
|
||||
* note[2]: Pub Key y
|
||||
* note[3]: salt
|
||||
@@ -22,71 +32,107 @@ include "../node_modules/circomlib/circuits/comparators.circom";
|
||||
* https://ethresear.ch/
|
||||
*/
|
||||
template ZkTransaction(tree_depth, n_i, n_o) {
|
||||
/** Private Signals */
|
||||
// Spending notes
|
||||
signal private input spending_note[7][n_i];
|
||||
signal private input signatures[3][n_i];
|
||||
/** Spending notes - private signals */
|
||||
signal private input spending_note_eddsa_point[2][n_i]; // A, when P = poseidon(A.x, A.y, n)
|
||||
signal private input spending_note_eddsa_sig[3][n_i]; // eddsa(p, A)
|
||||
signal private input spending_note_nullifier_seed[n_i]; // n, when P = poseidon(A.x, A.y, n)
|
||||
signal private input spending_note_salt[n_i];
|
||||
signal private input spending_note_eth[n_i];
|
||||
signal private input spending_note_token_addr[n_i];
|
||||
signal private input spending_note_erc20[n_i];
|
||||
signal private input spending_note_erc721[n_i];
|
||||
signal private input note_index[n_i];
|
||||
signal private input siblings[tree_depth][n_i];
|
||||
// New notes
|
||||
signal private input new_note[7][n_o];
|
||||
/** Spending notes - public signals */
|
||||
signal input inclusion_references[n_i];
|
||||
signal input nullifiers[n_i]; // prevents double-spending
|
||||
|
||||
/** New utxos - private signals */
|
||||
signal private input new_note_spending_pubkey[n_o];
|
||||
signal private input new_note_salt[n_o];
|
||||
signal private input new_note_eth[n_o];
|
||||
signal private input new_note_token_addr[n_o];
|
||||
signal private input new_note_erc20[n_o];
|
||||
signal private input new_note_erc721[n_o];
|
||||
/** New utxos - public signals */
|
||||
signal input new_note_hash[n_o];
|
||||
signal input typeof_new_note[n_o]; // 0: UTXO, 1: Withdrawal, 2: Migration
|
||||
|
||||
/**
|
||||
* public_data is Only for Withdrawal or Migration outflow.
|
||||
* Default values for UTXO are zero.
|
||||
*/
|
||||
signal input public_data_to[n_o];
|
||||
signal input public_data_eth[n_o];
|
||||
signal input public_data_token_addr[n_o];
|
||||
signal input public_data_erc20[n_o];
|
||||
signal input public_data_erc721[n_o];
|
||||
signal input public_data_fee[n_o];
|
||||
|
||||
/** Transaction metadata - public signals */
|
||||
signal input fee; // tx fee
|
||||
signal input swap; // for atomic swap
|
||||
|
||||
|
||||
/** MPC atomic swap binder TODO later
|
||||
signal private input binding_factors[9];
|
||||
*/
|
||||
|
||||
/** Public Signals */
|
||||
/// tx fee
|
||||
signal input fee;
|
||||
/// for atomic swap
|
||||
signal input swap;
|
||||
/// preventing double-spending
|
||||
signal input inclusion_references[n_i];
|
||||
signal input nullifiers[n_i];
|
||||
/// UTXO note hash
|
||||
signal input new_note_hash[n_o];
|
||||
signal input typeof_new_note[n_o]; // 0: UTXO, 1: Withdrawal, 2: Migration
|
||||
signal input public_data[6][n_o]; // to, eth_amount, token_addr, erc20_amount, erc721_id, fee @ layer1
|
||||
|
||||
|
||||
/** MPC atomic swap: TODO later
|
||||
signal input binder[2]; // default: (0, 1)
|
||||
signal input counterpart_computation[2]; // default: (0, 1)
|
||||
*/
|
||||
|
||||
/** Constraints */
|
||||
/// Calculate spending note hash
|
||||
|
||||
/// Calculate spending pubkey
|
||||
component spending_pubkeys[n_i];
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
spending_pubkeys[i] = SpendingPubKey();
|
||||
spending_pubkeys[i].pubkey_x <== spending_note_eddsa_point[0][i];
|
||||
spending_pubkeys[i].pubkey_y <== spending_note_eddsa_point[1][i];
|
||||
spending_pubkeys[i].nullifier_seed <== spending_note_nullifier_seed[i];
|
||||
}
|
||||
|
||||
/// Calculate asset hash
|
||||
component spending_note_asset[n_i];
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
spending_note_asset[i] = AssetHash();
|
||||
spending_note_asset[i].eth <== spending_note_eth[i];
|
||||
spending_note_asset[i].token_addr <== spending_note_token_addr[i];
|
||||
spending_note_asset[i].erc20 <== spending_note_erc20[i];
|
||||
spending_note_asset[i].erc721 <== spending_note_erc721[i];
|
||||
}
|
||||
|
||||
/// Calculate spending note hash using spending pubkey
|
||||
component note_hashes[n_i];
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
note_hashes[i] = NoteHash();
|
||||
note_hashes[i].eth <== spending_note[0][i];
|
||||
note_hashes[i].pubkey_x <== spending_note[1][i];
|
||||
note_hashes[i].pubkey_y <== spending_note[2][i];
|
||||
note_hashes[i].salt <== spending_note[3][i];
|
||||
note_hashes[i].token_addr <== spending_note[4][i];
|
||||
note_hashes[i].erc20 <== spending_note[5][i];
|
||||
note_hashes[i].nft <== spending_note[6][i];
|
||||
note_hashes[i].spending_pubkey <== spending_pubkeys[i].out;
|
||||
note_hashes[i].salt <== spending_note_salt[i];
|
||||
note_hashes[i].asset_hash <== spending_note_asset[i].out;
|
||||
}
|
||||
|
||||
/// Check the EdDSA signature
|
||||
component ownership_proof[n_i];
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
ownership_proof[i] = OwnershipProof();
|
||||
ownership_proof[i].note <== note_hashes[i].out;
|
||||
ownership_proof[i].pub_key[0] <== spending_note_eddsa_point[0][i];
|
||||
ownership_proof[i].pub_key[1] <== spending_note_eddsa_point[1][i];
|
||||
ownership_proof[i].sig[0] <== spending_note_eddsa_sig[0][i];
|
||||
ownership_proof[i].sig[1] <== spending_note_eddsa_sig[1][i];
|
||||
ownership_proof[i].sig[2] <== spending_note_eddsa_sig[2][i];
|
||||
}
|
||||
|
||||
/// Nullifier proof
|
||||
component spending_nullifier[n_i];
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
spending_nullifier[i] = Nullifier(); // Constant
|
||||
spending_nullifier[i].note_hash <== note_hashes[i].out; // note hash
|
||||
spending_nullifier[i].note_salt <== spending_note[3][i]; // note salt
|
||||
spending_nullifier[i].nullifier_seed <== spending_note_nullifier_seed[i]; // nullifier seed
|
||||
spending_nullifier[i].leaf_index <== note_index[i]; // leaf index
|
||||
spending_nullifier[i].out === nullifiers[i];
|
||||
}
|
||||
|
||||
/// Ownership proof
|
||||
component ownership_proof[n_i];
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
ownership_proof[i] = OwnershipProof();
|
||||
ownership_proof[i].note <== note_hashes[i].out;
|
||||
ownership_proof[i].pub_key[0] <== spending_note[1][i];
|
||||
ownership_proof[i].pub_key[1] <== spending_note[2][i];
|
||||
ownership_proof[i].sig[0] <== signatures[0][i];
|
||||
ownership_proof[i].sig[1] <== signatures[1][i];
|
||||
ownership_proof[i].sig[2] <== signatures[2][i];
|
||||
}
|
||||
}
|
||||
|
||||
/// Inclusion proof
|
||||
component inclusion_proof[n_i];
|
||||
@@ -100,18 +146,23 @@ template ZkTransaction(tree_depth, n_i, n_o) {
|
||||
}
|
||||
}
|
||||
|
||||
/// Calculate new notes' asset hash
|
||||
component new_note_asset[n_o];
|
||||
for(var i = 0; i < n_o; i ++) {
|
||||
new_note_asset[i] = AssetHash();
|
||||
new_note_asset[i].eth <== new_note_eth[i];
|
||||
new_note_asset[i].token_addr <== new_note_token_addr[i];
|
||||
new_note_asset[i].erc20 <== new_note_erc20[i];
|
||||
new_note_asset[i].erc721 <== new_note_erc721[i];
|
||||
}
|
||||
/// New note hash proof
|
||||
// component poseidon_new_note_int[n_o];
|
||||
component poseidon_new_note[n_o];
|
||||
for(var i = 0; i < n_o; i ++) {
|
||||
poseidon_new_note[i] = NoteHash();
|
||||
poseidon_new_note[i].eth <== new_note[0][i];
|
||||
poseidon_new_note[i].pubkey_x <== new_note[1][i];
|
||||
poseidon_new_note[i].pubkey_y <== new_note[2][i];
|
||||
poseidon_new_note[i].salt <== new_note[3][i];
|
||||
poseidon_new_note[i].token_addr <== new_note[4][i];
|
||||
poseidon_new_note[i].erc20 <== new_note[5][i];
|
||||
poseidon_new_note[i].nft <== new_note[6][i];
|
||||
poseidon_new_note[i].spending_pubkey <== new_note_spending_pubkey[i];
|
||||
poseidon_new_note[i].salt <== new_note_salt[i];
|
||||
poseidon_new_note[i].asset_hash <== new_note_asset[i].out;
|
||||
poseidon_new_note[i].out === new_note_hash[i];
|
||||
}
|
||||
|
||||
@@ -130,167 +181,122 @@ template ZkTransaction(tree_depth, n_i, n_o) {
|
||||
revealed_eth[i].obj1[0] <== typeof_new_note[i];
|
||||
revealed_eth[i].obj2[0] <== 0; // internal utxo type
|
||||
revealed_eth[i].if_v <== 0; // Do not reveal value
|
||||
revealed_eth[i].else_v <== new_note[0][i]; // eth amount
|
||||
revealed_eth[i].else_v <== new_note_eth[i]; // eth amount
|
||||
|
||||
revealed_token_addr[i] = IfElseThen(1);
|
||||
revealed_token_addr[i].obj1[0] <== typeof_new_note[i];
|
||||
revealed_token_addr[i].obj2[0] <== 0; // internal utxo type
|
||||
revealed_token_addr[i].if_v <== 0; // Do not reveal value
|
||||
revealed_token_addr[i].else_v <== new_note[4][i]; // token addr
|
||||
revealed_token_addr[i].else_v <== new_note_token_addr[i]; // token addr
|
||||
|
||||
revealed_erc20_amount[i] = IfElseThen(1);
|
||||
revealed_erc20_amount[i].obj1[0] <== typeof_new_note[i];
|
||||
revealed_erc20_amount[i].obj2[0] <== 0; // internal utxo type
|
||||
revealed_erc20_amount[i].if_v <== 0; // Do not reveal nothing
|
||||
revealed_erc20_amount[i].else_v <== new_note[5][i]; // erc20 amount
|
||||
revealed_erc20_amount[i].else_v <== new_note_erc20[i]; // erc20 amount
|
||||
|
||||
revealed_erc721_id[i] = IfElseThen(1);
|
||||
revealed_erc721_id[i].obj1[0] <== typeof_new_note[i];
|
||||
revealed_erc721_id[i].obj2[0] <== 0; // internal utxo type
|
||||
revealed_erc721_id[i].if_v <== 0; // Do not reveal nothing
|
||||
revealed_erc721_id[i].else_v <== new_note[6][i]; // erc721 id
|
||||
revealed_erc721_id[i].else_v <== new_note_erc721[i]; // erc721 id
|
||||
|
||||
public_data[1][i] === revealed_eth[i].out;
|
||||
public_data[2][i] === revealed_token_addr[i].out;
|
||||
public_data[3][i] === revealed_erc20_amount[i].out;
|
||||
public_data[4][i] === revealed_erc721_id[i].out;
|
||||
public_data_eth[i] === revealed_eth[i].out;
|
||||
public_data_token_addr[i] === revealed_token_addr[i].out;
|
||||
public_data_erc20[i] === revealed_erc20_amount[i].out;
|
||||
public_data_erc721[i] === revealed_erc721_id[i].out;
|
||||
}
|
||||
|
||||
/// Range limitation to prevent overflow. Techincal maximum of inputs: 256
|
||||
var range_limit = (0 - 1) >> 8;
|
||||
component inflow_eth_range[n_i];
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
inflow_eth_range[i] = LessThan(254);
|
||||
inflow_eth_range[i].in[0] <== spending_note[0][i];
|
||||
inflow_eth_range[i].in[1] <== range_limit;
|
||||
inflow_eth_range[i].out === 1;
|
||||
inflow_eth_range[i] = RangeLimit(245);
|
||||
inflow_eth_range[i].in <== spending_note_eth[i];
|
||||
}
|
||||
component inflow_erc20_range[n_i];
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
inflow_erc20_range[i] = LessThan(254);
|
||||
inflow_erc20_range[i].in[0] <== spending_note[5][i];
|
||||
inflow_erc20_range[i].in[1] <== range_limit;
|
||||
inflow_erc20_range[i].out === 1;
|
||||
inflow_erc20_range[i] = RangeLimit(245);
|
||||
inflow_erc20_range[i].in <== spending_note_erc20[i];
|
||||
}
|
||||
component outflow_eth_range[n_o];
|
||||
for(var i = 0; i < n_o; i ++) {
|
||||
outflow_eth_range[i] = LessThan(254);
|
||||
outflow_eth_range[i].in[0] <== new_note[0][i];
|
||||
outflow_eth_range[i].in[1] <== range_limit;
|
||||
outflow_eth_range[i].out === 1;
|
||||
outflow_eth_range[i] = RangeLimit(245);
|
||||
outflow_eth_range[i].in <== new_note_eth[i];
|
||||
}
|
||||
component outflow_erc20_range[n_o];
|
||||
for(var i = 0; i < n_o; i ++) {
|
||||
outflow_erc20_range[i] = LessThan(254);
|
||||
outflow_erc20_range[i].in[0] <== new_note[5][i];
|
||||
outflow_erc20_range[i].in[1] <== range_limit;
|
||||
outflow_erc20_range[i].out === 1;
|
||||
outflow_erc20_range[i] = RangeLimit(245);
|
||||
outflow_erc20_range[i].in <== new_note_erc20[i];
|
||||
}
|
||||
|
||||
/// Zero sum proof of ETH
|
||||
var eth_inflow = 0;
|
||||
var eth_outflow = 0;
|
||||
for ( var i = 0; i < n_i; i++) {
|
||||
eth_inflow += spending_note[0][i];
|
||||
eth_inflow += spending_note_eth[i];
|
||||
}
|
||||
for ( var i = 0; i < n_o; i++) {
|
||||
eth_outflow += new_note[0][i]; // eth
|
||||
eth_outflow += public_data[5][i]; // fee for withdrawal or migration, default = 0
|
||||
eth_outflow += new_note_eth[i]; // eth
|
||||
eth_outflow += public_data_fee[i]; // fee for withdrawal or migration, default = 0
|
||||
}
|
||||
eth_outflow += fee;
|
||||
eth_inflow === eth_outflow;
|
||||
|
||||
/// Only one of ERC20 and ERC721 exists.
|
||||
for(var i = 0; i < n_i; i ++) {
|
||||
spending_note[5][i]*spending_note[6][i] === 0;
|
||||
spending_note_erc20[i]*spending_note_erc721[i] === 0;
|
||||
}
|
||||
for(var i = 0; i < n_o; i ++) {
|
||||
new_note[5][i]*new_note[6][i] === 0;
|
||||
new_note_erc20[i]*new_note_erc721[i] === 0;
|
||||
}
|
||||
|
||||
|
||||
/// Zero sum proof of ERC20
|
||||
component inflow_erc20[n_i];
|
||||
component outflow_erc20[n_i];
|
||||
/// Zero sum proof of ERC20: round 1 - check every token addresses in input notes
|
||||
component inflow_erc20_1[n_i];
|
||||
component outflow_erc20_1[n_i];
|
||||
for (var i = 0; i <n_i; i++) {
|
||||
inflow_erc20[i] = ERC20Sum(n_i);
|
||||
outflow_erc20[i] = ERC20Sum(n_o);
|
||||
inflow_erc20[i].addr <== spending_note[4][i];
|
||||
outflow_erc20[i].addr <== spending_note[4][i];
|
||||
inflow_erc20_1[i] = ERC20Sum(n_i);
|
||||
outflow_erc20_1[i] = ERC20Sum(n_o);
|
||||
inflow_erc20_1[i].addr <== spending_note_token_addr[i];
|
||||
outflow_erc20_1[i].addr <== spending_note_token_addr[i];
|
||||
for (var j = 0; j <n_i; j++) {
|
||||
inflow_erc20[i].note_addr[j] <== spending_note[4][j];
|
||||
inflow_erc20[i].note_amount[j] <== spending_note[5][j];
|
||||
inflow_erc20_1[i].note_addr[j] <== spending_note_token_addr[j];
|
||||
inflow_erc20_1[i].note_amount[j] <== spending_note_erc20[j];
|
||||
}
|
||||
for (var j = 0; j <n_o; j++) {
|
||||
outflow_erc20[i].note_addr[j] <== new_note[4][j];
|
||||
outflow_erc20[i].note_amount[j] <== new_note[5][j];
|
||||
outflow_erc20_1[i].note_addr[j] <== new_note_token_addr[j];
|
||||
outflow_erc20_1[i].note_amount[j] <== new_note_erc20[j];
|
||||
}
|
||||
inflow_erc20[i].out === outflow_erc20[i].out;
|
||||
inflow_erc20_1[i].out === outflow_erc20_1[i].out;
|
||||
}
|
||||
/// Zero sum proof of ERC20: round 2- check every token addresses in output notes
|
||||
component inflow_erc20_2[n_o];
|
||||
component outflow_erc20_2[n_o];
|
||||
for (var i = 0; i <n_o; i++) {
|
||||
inflow_erc20_2[i] = ERC20Sum(n_i);
|
||||
outflow_erc20_2[i] = ERC20Sum(n_o);
|
||||
inflow_erc20_2[i].addr <== new_note_token_addr[i];
|
||||
outflow_erc20_2[i].addr <== new_note_token_addr[i];
|
||||
for (var j = 0; j <n_i; j++) {
|
||||
inflow_erc20_2[i].note_addr[j] <== spending_note_token_addr[j];
|
||||
inflow_erc20_2[i].note_amount[j] <== spending_note_erc20[j];
|
||||
}
|
||||
for (var j = 0; j <n_o; j++) {
|
||||
outflow_erc20_2[i].note_addr[j] <== new_note_token_addr[j];
|
||||
outflow_erc20_2[i].note_amount[j] <== new_note_erc20[j];
|
||||
}
|
||||
inflow_erc20_2[i].out === outflow_erc20_2[i].out;
|
||||
}
|
||||
|
||||
/// Non fungible proof of ERC721
|
||||
component non_fungible = NonFungible(n_i, n_o);
|
||||
for(var i = 0; i < n_i; i++) {
|
||||
non_fungible.prev_token_addr[i] <== spending_note[4][i];
|
||||
non_fungible.prev_token_nft[i] <== spending_note[6][i];
|
||||
non_fungible.prev_token_addr[i] <== spending_note_token_addr[i];
|
||||
non_fungible.prev_token_nft[i] <== spending_note_erc721[i];
|
||||
}
|
||||
for(var i = 0; i < n_o; i++) {
|
||||
non_fungible.post_token_addr[i] <== new_note[4][i];
|
||||
non_fungible.post_token_nft[i] <== new_note[6][i];
|
||||
non_fungible.post_token_addr[i] <== new_note_token_addr[i];
|
||||
non_fungible.post_token_nft[i] <== new_note_erc721[i];
|
||||
}
|
||||
|
||||
/** MPC atomic swap: TODO later
|
||||
/// MPC proof
|
||||
component mpc = AtomicSwapMPC();
|
||||
mpc.my_mpc_salt <== binding_factors[0];
|
||||
mpc.order[0] <== binding_factors[1];
|
||||
mpc.order[1] <== binding_factors[2];
|
||||
mpc.order[2] <== binding_factors[3];
|
||||
mpc.giving_token_type <== binding_factors[4];
|
||||
mpc.giving_token_addr <== binding_factors[5];
|
||||
mpc.giving_note_salt <== binding_factors[6];
|
||||
mpc.counterpart_pk[0] <== binding_factors[7];
|
||||
mpc.counterpart_pk[1] <== binding_factors[8];
|
||||
mpc.counterpart_computation[0] <== counterpart_computation[0];
|
||||
mpc.counterpart_computation[1] <== counterpart_computation[1];
|
||||
|
||||
binder[0] === mpc.out[0];
|
||||
binder[1] === mpc.out[1];
|
||||
|
||||
/// eth for swap note
|
||||
component eth_amount = IfElseThen(1);
|
||||
eth_amount.obj1[0] <== mpc.giving_token_type;
|
||||
eth_amount.obj2[0] <== 1;
|
||||
eth_amount.if_v <== mpc.order[0];
|
||||
eth_amount.else_v <== 0;
|
||||
/// erc20 for swap note
|
||||
component erc20_amount = IfElseThen(1);
|
||||
erc20_amount.obj1[0] <== mpc.giving_token_type;
|
||||
erc20_amount.obj2[0] <== 2;
|
||||
erc20_amount.if_v <== mpc.order[1];
|
||||
erc20_amount.else_v <== 0;
|
||||
/// erc721 for swap note
|
||||
component erc721_id = IfElseThen(1);
|
||||
erc721_id.obj1[0] <== mpc.giving_token_type;
|
||||
erc721_id.obj2[0] <== 3;
|
||||
erc721_id.if_v <== mpc.order[2];
|
||||
erc721_id.else_v <== 0;
|
||||
/// If binder is not zero, the last item of new_note[] is the note for the atomic swap.
|
||||
component bound_note[7];
|
||||
for (var i = 0; i < 7; i ++) {
|
||||
bound_note[i] = IfElseThen(1);
|
||||
bound_note[i].obj1[0] <== binder[0];
|
||||
bound_note[i].obj2[0] <== 0;
|
||||
bound_note[i].if_v <== 0;
|
||||
bound_note[i].else_v <== new_note[i][n_o - 1];
|
||||
}
|
||||
/// Bind the note properties to the mpc factors
|
||||
bound_note[0].out === eth_amount.out;
|
||||
bound_note[1].out === mpc.counterpart_pk[0];
|
||||
bound_note[2].out === mpc.counterpart_pk[1];
|
||||
bound_note[3].out === mpc.giving_note_salt;
|
||||
bound_note[4].out === mpc.giving_token_addr;
|
||||
bound_note[5].out === erc20_amount.out;
|
||||
bound_note[6].out === erc721_id.out;
|
||||
*/
|
||||
}
|
||||
|
||||
@@ -1,40 +1,63 @@
|
||||
{
|
||||
"name": "@zkopru/circuits",
|
||||
"version": "1.0.0-beta.2",
|
||||
"version": "2.0.0-beta.0",
|
||||
"license": "GPL-3.0-or-later",
|
||||
"_moduleAliases": {
|
||||
"circomlib": "../../node_modules/circomlib",
|
||||
"wasmsnark": "../../node_modules/wasmsnark",
|
||||
"~babyjubjub": "../babyjubjub/dist",
|
||||
"~dataset": "../dataset/dist",
|
||||
"~utils": "../utils/dist"
|
||||
"~database": "../database/dist",
|
||||
"~tree": "../tree/dist",
|
||||
"~utils": "../utils/dist",
|
||||
"~zk-wizard": "../zk-wizard/dist"
|
||||
},
|
||||
"files": [
|
||||
"impls",
|
||||
"lib"
|
||||
],
|
||||
"scripts": {
|
||||
"download-keys": "ts-node utils/download-keys.ts",
|
||||
"prebuild": "shx mkdir -p dist",
|
||||
"circuit": "./script/compile_circuits.sh",
|
||||
"setup": "./script/snark_setup.sh",
|
||||
"phase1:pull": "ts-node utils/pull-phase1.ts",
|
||||
"phase1:build": "./script/powers_of_tau_phase_1.sh",
|
||||
"phase2:pull": "ts-node utils/pull-keys.ts",
|
||||
"phase2:build": "./script/powers_of_tau_phase_2.sh",
|
||||
"build-keys": "ts-node utils/build-keys.ts",
|
||||
"postbuild-keys": "yarn update-contracts",
|
||||
"update-contracts": "copyfiles -f keys/vks/* ../contracts/keys/vks",
|
||||
"compile": "./script/compile_circuits.sh",
|
||||
"setup": "yarn setup:pull",
|
||||
"setup:pull": "yarn phase1:pull && yarn phase2:pull",
|
||||
"setup:build": "yarn phase1:pull && yarn compile && yarn phase2:build",
|
||||
"build": "tsc --build tsconfig.build.json",
|
||||
"clean": "tsc --build tsconfig.build.json --clean && shx rm -rf coverage *.log junit.xml dist && jest --clearCache",
|
||||
"link-modules": "link-module-alias",
|
||||
"test": "jest",
|
||||
"test": "jest --detectOpenHandles --forceExit",
|
||||
"test:trace": "LOG_LEVEL=trace PRINT_LOG=true jest",
|
||||
"test:unit": "jest test/unit",
|
||||
"test:watch": "jest --watch",
|
||||
"test:ci": "jest --coverage --ci --reporters='jest-junit'",
|
||||
"coverage": "jest --coverage",
|
||||
"coverage:show": "live-server coverage",
|
||||
"dev": "ts-node-dev -r tsconfig-paths/register src/index.ts",
|
||||
"start": "node dist/index.js"
|
||||
"start": "ts-node index.ts"
|
||||
},
|
||||
"dependencies": {
|
||||
"circomlib": "^0.1.1"
|
||||
"@zkopru/utils-docker": "file:../utils-docker",
|
||||
"circom": "0.5.42",
|
||||
"circomlib": "0.5.1",
|
||||
"snarkjs": "0.3.33",
|
||||
"tar": "^6.0.2"
|
||||
},
|
||||
"devDependencies": {
|
||||
"ffjavascript": "^0.1.2",
|
||||
"@types/cli-progress": "^3.9.2",
|
||||
"@types/follow-redirects": "^1.13.1",
|
||||
"cli-progress": "^3.9.1",
|
||||
"ffjavascript": "0.2.22",
|
||||
"follow-redirects": "^1.14.4",
|
||||
"node-docker-api": "^1.1.22",
|
||||
"snarkjs": "^0.1.25",
|
||||
"shelljs": "^0.8.4",
|
||||
"uuid": "^8.1.0",
|
||||
"wasmsnark": "^0.0.10"
|
||||
},
|
||||
"publishConfig": {
|
||||
|
||||
@@ -2,13 +2,13 @@
|
||||
|
||||
BASEDIR=$(dirname "$0")
|
||||
ARTIFACTS="build/circuits"
|
||||
MAX_JOB=32
|
||||
MAX_JOB=$(nproc)
|
||||
cd $BASEDIR/..
|
||||
mkdir -p $ARTIFACTS
|
||||
i=0
|
||||
for circuit in "impls"/*.circom;
|
||||
do
|
||||
i=$(($i+4))
|
||||
i=$(($i+1))
|
||||
prefix="$ARTIFACTS/$(basename "$circuit" ".circom")"
|
||||
node --stack-size=8192 $(which circom) "$circuit" -r "$prefix.r1cs" && \
|
||||
echo "Circuit compile result: $(basename "$circuit" ".circom")" && \
|
||||
@@ -19,3 +19,8 @@ do
|
||||
if (( $i % $MAX_JOB == 0 )); then wait; fi
|
||||
done
|
||||
wait
|
||||
for circuit in "impls"/*.circom;
|
||||
do
|
||||
prefix="$ARTIFACTS/$(basename "$circuit" ".circom")"
|
||||
snarkjs r1cs export json "$prefix.r1cs" "$prefix.json"
|
||||
done
|
||||
|
||||
23
packages/circuits/script/pack-artifacts.sh
Executable file
23
packages/circuits/script/pack-artifacts.sh
Executable file
@@ -0,0 +1,23 @@
|
||||
#!/bin/bash
|
||||
|
||||
BASEDIR=$(dirname "$0")
|
||||
WORK_DIR=$BASEDIR/..
|
||||
PTAU_ARTIFACTS="build/ptau"
|
||||
CIRCUIT_ARTIFACTS="build/circuits"
|
||||
VK_ARTIFACTS="build/vks"
|
||||
ZKEY_ARTIFACTS="build/zkeys"
|
||||
|
||||
KEYS_ARTIFACTS="keys"
|
||||
|
||||
cd $WORK_DIR
|
||||
|
||||
mkdir -p $KEYS_ARTIFACTS/circuits
|
||||
mkdir -p $KEYS_ARTIFACTS/vks
|
||||
mkdir -p $KEYS_ARTIFACTS/zkeys
|
||||
|
||||
cp $CIRCUIT_ARTIFACTS/*.wasm $KEYS_ARTIFACTS/circuits
|
||||
cp $ZKEY_ARTIFACTS/*.zkey $KEYS_ARTIFACTS/zkeys
|
||||
rm $KEYS_ARTIFACTS/zkeys/*_000*.zkey
|
||||
cp $VK_ARTIFACTS/*.json $KEYS_ARTIFACTS/vks
|
||||
|
||||
tar -czvf $KEYS_ARTIFACTS.tgz $KEYS_ARTIFACTS/*
|
||||
14
packages/circuits/script/powers_of_tau_phase_1.sh
Executable file
14
packages/circuits/script/powers_of_tau_phase_1.sh
Executable file
@@ -0,0 +1,14 @@
|
||||
#!/bin/bash
|
||||
|
||||
BASEDIR=$(dirname "$0")
|
||||
PTAU_ARTIFACTS="build/ptau"
|
||||
mkdir -p $PTAU_ARTIFACTS
|
||||
cd $BASEDIR/../$PTAU_ARTIFACTS
|
||||
|
||||
snarkjs powersoftau new bn128 17 pot17_0000.ptau -v
|
||||
snarkjs powersoftau contribute pot17_0000.ptau pot17_0001.ptau --name="Sample contribution 1" -v -e="some random text 1"
|
||||
snarkjs powersoftau contribute pot17_0001.ptau pot17_0002.ptau --name="Sample contribution 1" -v -e="some random text 2"
|
||||
# skip the 3rd party contribution
|
||||
snarkjs powersoftau verify pot17_0002.ptau
|
||||
snarkjs powersoftau beacon pot17_0002.ptau pot17_beacon.ptau 0102030405060708090a0b0c0d0e0f101112131415161717191a1b1c1d1e1f 10 -n="Final Beacon"
|
||||
snarkjs powersoftau prepare phase2 pot17_beacon.ptau pot17_final.ptau -v
|
||||
42
packages/circuits/script/powers_of_tau_phase_2.sh
Executable file
42
packages/circuits/script/powers_of_tau_phase_2.sh
Executable file
@@ -0,0 +1,42 @@
|
||||
#!/bin/bash
|
||||
|
||||
BASEDIR=$(dirname "$0")
|
||||
cd $BASEDIR/..
|
||||
WORK_DIR=$(pwd)
|
||||
PTAU_ARTIFACTS="build/ptau"
|
||||
CIRCUIT_ARTIFACTS="build/circuits"
|
||||
VK_ARTIFACTS="build/vks"
|
||||
ZKEY_ARTIFACTS="build/zkeys"
|
||||
PHASE_1_FINAL=$PTAU_ARTIFACTS/pot17_final.ptau
|
||||
mkdir -p $CIRCUIT_ARTIFACTS
|
||||
mkdir -p $VK_ARTIFACTS
|
||||
mkdir -p $ZKEY_ARTIFACTS
|
||||
MAX_JOB=$(nproc)
|
||||
i=0
|
||||
|
||||
phase2() {
|
||||
circuit="$(basename "$1" ".circom")"
|
||||
prefix="$CIRCUIT_ARTIFACTS/$circuit"
|
||||
snarkjs zkey new "$CIRCUIT_ARTIFACTS/$circuit.r1cs" "$PHASE_1_FINAL" "$ZKEY_ARTIFACTS/$circuit"_0000.zkey
|
||||
snarkjs zkey contribute "$ZKEY_ARTIFACTS/$circuit"_0000.zkey "$ZKEY_ARTIFACTS/$circuit"_0001.zkey --name="1st Contributor Name" -v -e="random entropy 1" # Testing purpose
|
||||
snarkjs zkey contribute "$ZKEY_ARTIFACTS/$circuit"_0001.zkey "$ZKEY_ARTIFACTS/$circuit"_0002.zkey --name="2nd Contributor Name" -v -e="random entropy 2" # Testing purpose
|
||||
snarkjs zkey verify "$CIRCUIT_ARTIFACTS/$circuit".r1cs $PHASE_1_FINAL "$ZKEY_ARTIFACTS/$circuit"_0002.zkey
|
||||
snarkjs zkey beacon "$ZKEY_ARTIFACTS/$circuit"_0002.zkey "$ZKEY_ARTIFACTS/$circuit".zkey 0102030405060708090a0b0c0d0e0f101112131415161717191a1b1c1d1e1f 10 -n="Final Beacon phase2 - circuit $i"
|
||||
snarkjs zkey verify "$CIRCUIT_ARTIFACTS/$circuit".r1cs $PHASE_1_FINAL "$ZKEY_ARTIFACTS/$circuit".zkey
|
||||
TMP_DIR="build/tmp/$circuit"
|
||||
mkdir -p $TMP_DIR
|
||||
cp "$ZKEY_ARTIFACTS/$circuit".zkey $TMP_DIR/
|
||||
ls $TMP_DIR
|
||||
cd $TMP_DIR && snarkjs zkey export verificationkey "$circuit".zkey
|
||||
cd $WORK_DIR
|
||||
mv $TMP_DIR/verification_key.json "$VK_ARTIFACTS/$circuit".vk.json
|
||||
rm -rf $TMP_DIR
|
||||
}
|
||||
|
||||
for circuit_file in "impls"/*.circom;
|
||||
do
|
||||
i=$(($i+1))
|
||||
phase2 $circuit_file &
|
||||
if (( $i % $MAX_JOB == 0 )); then wait; fi
|
||||
done
|
||||
wait
|
||||
@@ -1,22 +0,0 @@
|
||||
#!/bin/bash
|
||||
|
||||
BASEDIR=$(dirname "$0")
|
||||
VK_ARTIFACTS="build/vks"
|
||||
PK_ARTIFACTS="build/snarkjsPKs"
|
||||
PK_BIN_ARTIFACTS="build/pks"
|
||||
MAX_JOB=32
|
||||
cd $BASEDIR/..
|
||||
mkdir -p $VK_ARTIFACTS
|
||||
mkdir -p $PK_ARTIFACTS
|
||||
mkdir -p $PK_BIN_ARTIFACTS
|
||||
|
||||
i=0
|
||||
for r1cs in "build/circuits"/*.r1cs;
|
||||
do
|
||||
i=$(($i+1))
|
||||
r1cs_name="$(basename "$r1cs" ".r1cs")"
|
||||
snarkjs setup -r "$r1cs" --pk "$PK_ARTIFACTS/$r1cs_name.pk.json" --vk "$VK_ARTIFACTS/$r1cs_name.vk.json" --protocol groth && \
|
||||
node node_modules/wasmsnark/tools/buildpkey.js -i "$PK_ARTIFACTS/$r1cs_name.pk.json" -o "$PK_BIN_ARTIFACTS/$r1cs_name.pk.bin" &
|
||||
if (( $i % $MAX_JOB == 0 )); then wait; fi
|
||||
done
|
||||
wait
|
||||
3
packages/circuits/tester/asset_hash.test.circom
Normal file
3
packages/circuits/tester/asset_hash.test.circom
Normal file
@@ -0,0 +1,3 @@
|
||||
include "../lib/asset_hash.circom";
|
||||
|
||||
component main = AssetHash();
|
||||
@@ -1,3 +1,21 @@
|
||||
include "../lib/inclusion_proof.circom";
|
||||
|
||||
component main = InclusionProof(31);
|
||||
template InclusionProofTest(depth) {
|
||||
// Signal definitions
|
||||
signal input root;
|
||||
signal input leaf;
|
||||
signal input path;
|
||||
signal input siblings[depth];
|
||||
signal output result;
|
||||
component proof = InclusionProof(depth);
|
||||
|
||||
proof.root <== root;
|
||||
proof.leaf <== leaf;
|
||||
proof.path <== path;
|
||||
for (var level = 0; level < depth; level++) {
|
||||
proof.siblings[level] <== siblings[level];
|
||||
}
|
||||
result <== 1;
|
||||
}
|
||||
|
||||
component main = InclusionProofTest(3);
|
||||
|
||||
26
packages/circuits/tester/matrix.test.circom
Normal file
@@ -0,0 +1,26 @@
template Multiplier() {
  signal input a;
  signal input b;
  signal output c;
  c <== a*b;
}

template MatrixMultiplier(m, n, p) {
  signal input a[m][n];
  signal input b[n][p];
  signal input ab[m][p];
  component intermediates[m][p][n];
  for(var row = 0; row < m; row++) {
    for(var col = 0; col < p; col++) {
      var sum = 0;
      for(var i = 0; i < n; i++) {
        intermediates[row][col][i] = Multiplier();
        intermediates[row][col][i].a <== a[row][i];
        intermediates[row][col][i].b <== b[i][col];
        sum = sum + intermediates[row][col][i].c
      }
      ab[row][col] === sum;
    }
  }
}
component main = MatrixMultiplier(2, 3, 4);
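The MatrixMultiplier tester constrains ab[row][col] to equal the usual dot product of row and column. A plain TypeScript matrix product, useful for generating the expected `ab` witness for this circuit; the helper name and example matrices are illustrative, not part of the test suite.

```ts
// Reference matrix product for the MatrixMultiplier(m, n, p) tester above:
// ab[row][col] must equal sum_i a[row][i] * b[i][col].
function matMul(a: bigint[][], b: bigint[][]): bigint[][] {
  const m = a.length
  const n = b.length
  const p = b[0].length
  const ab: bigint[][] = []
  for (let row = 0; row < m; row += 1) {
    ab.push([])
    for (let col = 0; col < p; col += 1) {
      let sum = 0n
      for (let i = 0; i < n; i += 1) {
        sum += a[row][i] * b[i][col]
      }
      ab[row].push(sum)
    }
  }
  return ab
}

// Example input shaped like the instantiated MatrixMultiplier(2, 3, 4)
const a = [[1n, 2n, 3n], [4n, 5n, 6n]]
const b = [[1n, 0n, 0n, 1n], [0n, 1n, 0n, 1n], [0n, 0n, 1n, 1n]]
console.log(matMul(a, b)) // expected `ab` witness for the circuit
```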
16
packages/circuits/tester/multiplier.test.circom
Normal file
16
packages/circuits/tester/multiplier.test.circom
Normal file
@@ -0,0 +1,16 @@
|
||||
template Multiplier(n) {
|
||||
signal private input a;
|
||||
signal private input b;
|
||||
signal output c;
|
||||
|
||||
signal int[n];
|
||||
|
||||
int[0] <== a*a + b;
|
||||
for (var i=1; i<n; i++) {
|
||||
int[i] <== int[i-1]*int[i-1] + b;
|
||||
}
|
||||
|
||||
c <== int[n-1];
|
||||
}
|
||||
|
||||
component main = Multiplier(100);
|
||||
@@ -1,29 +0,0 @@
|
||||
include "../lib/nullifier.circom";
|
||||
include "../lib/note_hash.circom";
|
||||
|
||||
template NullifierFromNote() {
|
||||
signal private input eth;
|
||||
signal private input pubkey_x;
|
||||
signal private input pubkey_y;
|
||||
signal private input salt;
|
||||
signal private input token_addr;
|
||||
signal private input erc20;
|
||||
signal private input nft;
|
||||
signal private input note_hash;
|
||||
signal private input nullifier;
|
||||
component note = NoteHash();
|
||||
note.eth <== eth;
|
||||
note.pubkey_x <== pubkey_x;
|
||||
note.pubkey_y <== pubkey_y;
|
||||
note.salt <== salt;
|
||||
note.token_addr <== token_addr;
|
||||
note.erc20 <== erc20;
|
||||
note.nft <== nft;
|
||||
note.out === note_hash
|
||||
component n = Nullifier();
|
||||
n.note_hash <== note.out;
|
||||
n.note_salt <== salt;
|
||||
n.out === nullifier
|
||||
}
|
||||
|
||||
component main = NullifierFromNote();
|
||||
@@ -1,3 +1,22 @@
|
||||
include "../lib/ownership_proof.circom";
|
||||
|
||||
component main = OwnershipProof();
|
||||
template OwnershipProofTest() {
|
||||
signal input note;
|
||||
signal input Ax;
|
||||
signal input Ay;
|
||||
signal input R8x;
|
||||
signal input R8y;
|
||||
signal input S;
|
||||
signal output result;
|
||||
|
||||
component ownership_proof = OwnershipProof();
|
||||
ownership_proof.note <== note;
|
||||
ownership_proof.pub_key[0] <== Ax;
|
||||
ownership_proof.pub_key[1] <== Ay;
|
||||
ownership_proof.sig[0] <== R8x;
|
||||
ownership_proof.sig[1] <== R8y;
|
||||
ownership_proof.sig[2] <== S;
|
||||
result <== 1;
|
||||
}
|
||||
|
||||
component main = OwnershipProofTest();
|
||||
|
||||
Some files were not shown because too many files have changed in this diff.