Mirror of https://github.com/scroll-tech/scroll.git (synced 2026-01-11 23:18:07 -05:00)

Compare commits: feat-prove ... v4.4.88 (4 commits)
| Author | SHA1 | Date |
|---|---|---|
| | 5b6b145753 | |
| | c07975acdf | |
| | dfdb2ecf07 | |
| | a6f2457040 | |
.github/workflows/prover.yml (vendored)
@@ -43,7 +43,7 @@ jobs:
github.event.pull_request.draft == false &&
(github.event.action == 'ready_for_review' || needs.skip_check.outputs.should_skip != 'true')
runs-on: ubuntu-latest
timeout-minutes: 5
timeout-minutes: 15
steps:
- uses: actions/checkout@v4
- uses: dtolnay/rust-toolchain@master
go.mod

@@ -8,7 +8,7 @@ require (
github.com/go-redis/redis/v8 v8.11.5
github.com/pressly/goose/v3 v3.16.0
github.com/prometheus/client_golang v1.19.0
github.com/scroll-tech/go-ethereum v1.10.14-0.20241011150208-4742882675d8
github.com/scroll-tech/go-ethereum v1.10.14-0.20250225152658-bcfdb48dd939
github.com/stretchr/testify v1.9.0
github.com/urfave/cli/v2 v2.25.7
golang.org/x/sync v0.7.0

@@ -17,39 +17,30 @@ require (

require (
dario.cat/mergo v1.0.0 // indirect
github.com/DataDog/zstd v1.4.5 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
github.com/VictoriaMetrics/fastcache v1.12.1 // indirect
github.com/VictoriaMetrics/fastcache v1.12.2 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bits-and-blooms/bitset v1.13.0 // indirect
github.com/btcsuite/btcd v0.20.1-beta // indirect
github.com/btcsuite/btcd/btcec/v2 v2.2.0 // indirect
github.com/bytedance/sonic v1.10.1 // indirect
github.com/cespare/xxhash/v2 v2.2.0 // indirect
github.com/chenzhuoyu/base64x v0.0.0-20230717121745-296ad89f973d // indirect
github.com/chenzhuoyu/iasm v0.9.0 // indirect
github.com/cockroachdb/errors v1.11.1 // indirect
github.com/cockroachdb/logtags v0.0.0-20230118201751-21c54148d20b // indirect
github.com/cockroachdb/pebble v1.1.0 // indirect
github.com/cockroachdb/redact v1.1.5 // indirect
github.com/cockroachdb/tokenbucket v0.0.0-20230807174530-cc333fc44b06 // indirect
github.com/consensys/bavard v0.1.13 // indirect
github.com/consensys/gnark-crypto v0.12.1 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.3 // indirect
github.com/crate-crypto/go-kzg-4844 v1.0.0 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/deckarep/golang-set/v2 v2.1.0 // indirect
github.com/decred/dcrd/dcrec/secp256k1/v4 v4.0.1 // indirect
github.com/deckarep/golang-set v0.0.0-20180603214616-504e848d77ea // indirect
github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect
github.com/docker/cli v25.0.4-0.20240305161310-2bf4225ad269+incompatible // indirect
github.com/docker/docker v26.1.0+incompatible // indirect
github.com/docker/go-connections v0.5.0 // indirect
github.com/edsrzf/mmap-go v1.0.0 // indirect
github.com/ethereum/c-kzg-4844 v1.0.3 // indirect
github.com/fjl/memsize v0.0.2 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.2 // indirect
github.com/gballet/go-libpcsclite v0.0.0-20190607065134-2772fd86a8ff // indirect
github.com/getsentry/sentry-go v0.18.0 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect

@@ -57,14 +48,11 @@ require (
github.com/go-playground/validator/v10 v10.15.5 // indirect
github.com/go-stack/stack v1.8.1 // indirect
github.com/goccy/go-json v0.10.2 // indirect
github.com/gofrs/flock v0.8.1 // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang-jwt/jwt/v4 v4.5.0 // indirect
github.com/golang/snappy v0.0.5-0.20220116011046-fa5810519dcb // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/gorilla/websocket v1.5.0 // indirect
github.com/hashicorp/go-bexpr v0.1.10 // indirect
github.com/holiman/billy v0.0.0-20230718173358-1c7e68d277a7 // indirect
github.com/hashicorp/golang-lru v0.5.5-0.20210104140557-80c98217689d // indirect
github.com/holiman/bloomfilter/v2 v2.0.3 // indirect
github.com/holiman/uint256 v1.2.4 // indirect
github.com/huin/goupnp v1.3.0 // indirect

@@ -76,8 +64,6 @@ require (
github.com/json-iterator/go v1.1.12 // indirect
github.com/klauspost/compress v1.17.9 // indirect
github.com/klauspost/cpuid/v2 v2.2.5 // indirect
github.com/kr/pretty v0.3.1 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/leodido/go-urn v1.2.4 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect

@@ -97,11 +83,12 @@ require (
github.com/prometheus/client_model v0.5.0 // indirect
github.com/prometheus/common v0.48.0 // indirect
github.com/prometheus/procfs v0.12.0 // indirect
github.com/prometheus/tsdb v0.7.1 // indirect
github.com/rivo/uniseg v0.4.4 // indirect
github.com/rogpeppe/go-internal v1.10.0 // indirect
github.com/rjeczalik/notify v0.9.1 // indirect
github.com/rs/cors v1.7.0 // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/scroll-tech/da-codec v0.1.2 // indirect
github.com/scroll-tech/da-codec v0.1.3-0.20250210041951-d028c537b995 // indirect
github.com/scroll-tech/zktrie v0.8.4 // indirect
github.com/sethvargo/go-retry v0.2.4 // indirect
github.com/shirou/gopsutil v3.21.11+incompatible // indirect

@@ -120,16 +107,14 @@ require (
go.uber.org/multierr v1.11.0 // indirect
golang.org/x/arch v0.5.0 // indirect
golang.org/x/crypto v0.24.0 // indirect
golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect
golang.org/x/mod v0.17.0 // indirect
golang.org/x/net v0.25.0 // indirect
golang.org/x/sys v0.21.0 // indirect
golang.org/x/text v0.16.0 // indirect
golang.org/x/time v0.3.0 // indirect
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect
google.golang.org/genproto/googleapis/rpc v0.0.0-20231127180814-3a041ad873d4 // indirect
google.golang.org/protobuf v1.33.0 // indirect
gopkg.in/natefinch/lumberjack.v2 v2.0.0 // indirect
gopkg.in/natefinch/npipe.v2 v2.0.0-20160621034901-c1b8fa8bdcce // indirect
gopkg.in/urfave/cli.v1 v1.20.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
rsc.io/tmplfunc v0.0.3 // indirect
)
go.sum

Checksum entries updated to match the go.mod changes above (scroll-tech/go-ethereum, scroll-tech/da-codec, VictoriaMetrics/fastcache, and the added/removed indirect modules).
@@ -5,7 +5,7 @@ import (
"runtime/debug"
)

var tag = "v4.4.85"
var tag = "v4.4.88"

var commit = func() string {
if info, ok := debug.ReadBuildInfo(); ok {
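The hunk above bumps the release tag from v4.4.85 to v4.4.88; the commit identifier next to it is resolved at runtime via debug.ReadBuildInfo(). A minimal sketch of that pattern (an assumed, conventional completion of the cut-off function, not necessarily the repository's exact code):

```go
package version

import "runtime/debug"

var tag = "v4.4.88"

// commit resolves the VCS revision that the Go toolchain embeds at build time.
// Sketch only: the hunk above starts this function; the body here is an
// assumed completion of the common pattern.
var commit = func() string {
	if info, ok := debug.ReadBuildInfo(); ok {
		for _, setting := range info.Settings {
			if setting.Key == "vcs.revision" && len(setting.Value) >= 8 {
				return setting.Value[:8]
			}
		}
	}
	return "unknown"
}()
```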
@@ -70,10 +70,11 @@ func (a *AuthController) PayloadFunc(data interface{}) jwt.MapClaims {
}

return jwt.MapClaims{
types.HardForkName: v.HardForkName,
types.PublicKey: v.PublicKey,
types.ProverName: v.Message.ProverName,
types.ProverVersion: v.Message.ProverVersion,
types.HardForkName: v.HardForkName,
types.PublicKey: v.PublicKey,
types.ProverName: v.Message.ProverName,
types.ProverVersion: v.Message.ProverVersion,
types.ProverProviderTypeKey: v.Message.ProverProviderType,
}
}

@@ -96,5 +97,9 @@ func (a *AuthController) IdentityHandler(c *gin.Context) interface{} {
c.Set(types.HardForkName, hardForkName)
}

if providerType, ok := claims[types.ProverProviderTypeKey]; ok {
c.Set(types.ProverProviderTypeKey, providerType)
}

return nil
}
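The two hunks above add a prover-provider-type claim when the JWT is issued (PayloadFunc) and copy it back into the gin context when the token is verified (IdentityHandler). A minimal, self-contained sketch of that round trip, assuming the appleboy/gin-jwt/v2 middleware and placeholder key and payload names rather than the coordinator's real types:

```go
package authsketch

import (
	jwt "github.com/appleboy/gin-jwt/v2"
	"github.com/gin-gonic/gin"
)

// Placeholder claim key and login payload; the real names live in the
// coordinator's types package.
const proverProviderTypeKey = "prover_provider_type"

type loginParameter struct {
	ProverProviderType uint8
}

// payloadFunc embeds the provider type into the token's claims at login time.
func payloadFunc(data interface{}) jwt.MapClaims {
	if v, ok := data.(*loginParameter); ok {
		return jwt.MapClaims{proverProviderTypeKey: v.ProverProviderType}
	}
	return jwt.MapClaims{}
}

// identityHandler runs on every authenticated request and republishes the
// claim on the gin context so later handlers can read it with c.Get.
func identityHandler(c *gin.Context) interface{} {
	claims := jwt.ExtractClaims(c)
	if providerType, ok := claims[proverProviderTypeKey]; ok {
		c.Set(proverProviderTypeKey, providerType)
	}
	return nil
}
```

Note that MapClaims round-trip through JSON, so a numeric claim comes back as float64 and typically needs an explicit conversion before comparing it against a uint8 enum.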
@@ -106,6 +106,17 @@ func (l *LoginLogic) Check(login *types.LoginParameter) error {
}
}
}

if login.Message.ProverProviderType != types.ProverProviderTypeInternal && login.Message.ProverProviderType != types.ProverProviderTypeExternal {
// for backward compatibility, set ProverProviderType as internal
if login.Message.ProverProviderType == types.ProverProviderTypeUndefined {
login.Message.ProverProviderType = types.ProverProviderTypeInternal
} else {
log.Error("invalid prover_provider_type", "value", login.Message.ProverProviderType, "prover name", login.Message.ProverName, "prover version", login.Message.ProverVersion)
return errors.New("invalid prover provider type.")
}
}

return nil
}
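The added check above only accepts internal or external provider types, and quietly upgrades the undefined (zero) value so that older provers that never send the field keep working. A compact sketch of the same rule, with assumed constant values (the real constants live in the coordinator's types package):

```go
package authsketch

import "errors"

type ProverProviderType uint8

// Assumed ordering for the sketch: the zero value stands for "field not sent".
const (
	ProverProviderTypeUndefined ProverProviderType = iota
	ProverProviderTypeInternal
	ProverProviderTypeExternal
)

// normalizeProviderType defaults missing values to "internal" for backward
// compatibility and rejects anything that is neither internal nor external.
func normalizeProviderType(pt ProverProviderType) (ProverProviderType, error) {
	switch pt {
	case ProverProviderTypeInternal, ProverProviderTypeExternal:
		return pt, nil
	case ProverProviderTypeUndefined:
		// Older provers never set the field; treat them as internal.
		return ProverProviderTypeInternal, nil
	default:
		return pt, errors.New("invalid prover provider type")
	}
}
```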
@@ -4,7 +4,6 @@ import (
"context"
"encoding/json"
"fmt"
"strings"
"time"

"github.com/gin-gonic/gin"

@@ -23,6 +22,7 @@ import (
"scroll-tech/coordinator/internal/config"
"scroll-tech/coordinator/internal/orm"
coordinatorType "scroll-tech/coordinator/internal/types"
cutils "scroll-tech/coordinator/internal/utils"
)

// BatchProverTask is prover task implement for batch proof

@@ -64,10 +64,10 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato

maxActiveAttempts := bp.cfg.ProverManager.ProversPerSession
maxTotalAttempts := bp.cfg.ProverManager.SessionAttempts
if strings.HasPrefix(taskCtx.ProverName, ExternalProverNamePrefix) {
if taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) {
unassignedBatchCount, getCountError := bp.batchOrm.GetUnassignedBatchCount(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getCountError != nil {
log.Error("failed to get unassigned batch proving tasks count", "height", getTaskParameter.ProverHeight, "err", err)
log.Error("failed to get unassigned batch proving tasks count", "height", getTaskParameter.ProverHeight, "err", getCountError)
return nil, ErrCoordinatorInternalFailure
}
// Assign external prover if unassigned task number exceeds threshold

@@ -80,42 +80,38 @@ func (bp *BatchProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
for i := 0; i < 5; i++ {
var getTaskError error
var tmpBatchTask *orm.Batch
var assignedOffset, unassignedOffset = 0, 0
tmpAssignedBatchTasks, getTaskError := bp.batchOrm.GetAssignedBatches(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, 50)
tmpBatchTask, getTaskError = bp.batchOrm.GetAssignedBatch(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get assigned batch proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}

// Why here need get again? In order to support a task can assign to multiple prover, need also assign `ProvingTaskAssigned`
// chunk to prover. But use `proving_status in (1, 2)` will not use the postgres index. So need split the sql.
tmpUnassignedBatchTask, getTaskError := bp.batchOrm.GetUnassignedBatches(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, 50)
if getTaskError != nil {
log.Error("failed to get unassigned batch proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
for {
tmpBatchTask = nil
if assignedOffset < len(tmpAssignedBatchTasks) {
tmpBatchTask = tmpAssignedBatchTasks[assignedOffset]
assignedOffset++
} else if unassignedOffset < len(tmpUnassignedBatchTask) {
tmpBatchTask = tmpUnassignedBatchTask[unassignedOffset]
unassignedOffset++
}

if tmpBatchTask == nil {
log.Debug("get empty batch", "height", getTaskParameter.ProverHeight)
return nil, nil
}

// Don't dispatch the same failing job to the same prover
proverTask, getTaskError := bp.proverTaskOrm.GetTaskOfProver(ctx.Copy(), message.ProofTypeBatch, tmpBatchTask.Hash, taskCtx.PublicKey, taskCtx.ProverVersion)
// batch to prover. But use `proving_status in (1, 2)` will not use the postgres index. So need split the sql.
if tmpBatchTask == nil {
tmpBatchTask, getTaskError = bp.batchOrm.GetUnassignedBatch(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get prover task of prover", "proof_type", message.ProofTypeBatch.String(), "taskID", tmpBatchTask.Hash, "key", taskCtx.PublicKey, "Prover_version", taskCtx.ProverVersion, "error", getTaskError)
log.Error("failed to get unassigned batch proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
if proverTask == nil || types.ProverProveStatus(proverTask.ProvingStatus) != types.ProverProofInvalid {
break
}

if tmpBatchTask == nil {
log.Debug("get empty batch", "height", getTaskParameter.ProverHeight)
return nil, nil
}

// Don't dispatch the same failing job to the same prover
proverTasks, getTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBatch, tmpBatchTask.Hash, 2)
if getTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeBatch.String(), "task ID", tmpBatchTask.Hash, "error", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty batch, the prover already failed this task", "height", getTaskParameter.ProverHeight)
return nil, nil
}
}
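One side of the hunk above fetches a page of already-assigned batches and a page of unassigned ones (50 each) and walks them with two offsets inside the bounded retry loop, so a candidate that must be skipped does not force another database round trip. A generic sketch of that selection pattern, with placeholder types standing in for the coordinator's ORM models:

```go
package tasksketch

// Task, the page slices, and shouldSkip stand in for the coordinator's ORM
// models and its "prover already failed this task" check.
type Task struct {
	Hash string
}

// pickTask prefers tasks that are already assigned to someone (so one task can
// be proven by multiple provers) and falls back to unassigned ones.
func pickTask(assigned, unassigned []*Task, shouldSkip func(*Task) bool) *Task {
	assignedOffset, unassignedOffset := 0, 0
	for {
		var candidate *Task
		if assignedOffset < len(assigned) {
			candidate = assigned[assignedOffset]
			assignedOffset++
		} else if unassignedOffset < len(unassigned) {
			candidate = unassigned[unassignedOffset]
			unassignedOffset++
		}
		if candidate == nil {
			return nil // both pages exhausted, nothing to hand out
		}
		if !shouldSkip(candidate) {
			return candidate
		}
	}
}
```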
@@ -4,7 +4,6 @@ import (
"context"
"encoding/json"
"fmt"
"strings"
"time"

"github.com/gin-gonic/gin"

@@ -22,6 +21,7 @@ import (
"scroll-tech/coordinator/internal/config"
"scroll-tech/coordinator/internal/orm"
coordinatorType "scroll-tech/coordinator/internal/types"
cutils "scroll-tech/coordinator/internal/utils"
)

// BundleProverTask is prover task implement for bundle proof

@@ -64,10 +64,10 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat

maxActiveAttempts := bp.cfg.ProverManager.ProversPerSession
maxTotalAttempts := bp.cfg.ProverManager.SessionAttempts
if strings.HasPrefix(taskCtx.ProverName, ExternalProverNamePrefix) {
if taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) {
unassignedBundleCount, getCountError := bp.bundleOrm.GetUnassignedBundleCount(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getCountError != nil {
log.Error("failed to get unassigned batch proving tasks count", "height", getTaskParameter.ProverHeight, "err", err)
log.Error("failed to get unassigned bundle proving tasks count", "height", getTaskParameter.ProverHeight, "err", getCountError)
return nil, ErrCoordinatorInternalFailure
}
// Assign external prover if unassigned task number exceeds threshold

@@ -80,42 +80,38 @@ func (bp *BundleProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinat
for i := 0; i < 5; i++ {
var getTaskError error
var tmpBundleTask *orm.Bundle
var assignedOffset, unassignedOffset = 0, 0
tmpAssignedBundleTasks, getTaskError := bp.bundleOrm.GetAssignedBundles(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, 50)
tmpBundleTask, getTaskError = bp.bundleOrm.GetAssignedBundle(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get assigned batch proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
log.Error("failed to get assigned bundle proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}

// Why here need get again? In order to support a task can assign to multiple prover, need also assign `ProvingTaskAssigned`
// chunk to prover. But use `proving_status in (1, 2)` will not use the postgres index. So need split the sql.
tmpUnassignedBundleTask, getTaskError := bp.bundleOrm.GetUnassignedBundles(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, 50)
if getTaskError != nil {
log.Error("failed to get unassigned batch proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
for {
tmpBundleTask = nil
if assignedOffset < len(tmpAssignedBundleTasks) {
tmpBundleTask = tmpAssignedBundleTasks[assignedOffset]
assignedOffset++
} else if unassignedOffset < len(tmpUnassignedBundleTask) {
tmpBundleTask = tmpUnassignedBundleTask[unassignedOffset]
unassignedOffset++
}

if tmpBundleTask == nil {
log.Debug("get empty bundle", "height", getTaskParameter.ProverHeight)
return nil, nil
}

// Don't dispatch the same failing job to the same prover
proverTask, getTaskError := bp.proverTaskOrm.GetTaskOfProver(ctx.Copy(), message.ProofTypeBatch, tmpBundleTask.Hash, taskCtx.PublicKey, taskCtx.ProverVersion)
// bundle to prover. But use `proving_status in (1, 2)` will not use the postgres index. So need split the sql.
if tmpBundleTask == nil {
tmpBundleTask, getTaskError = bp.bundleOrm.GetUnassignedBundle(ctx.Copy(), maxActiveAttempts, maxTotalAttempts)
if getTaskError != nil {
log.Error("failed to get prover task of prover", "proof_type", message.ProofTypeBatch.String(), "taskID", tmpBundleTask.Hash, "key", taskCtx.PublicKey, "Prover_version", taskCtx.ProverVersion, "error", getTaskError)
log.Error("failed to get unassigned bundle proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
if proverTask == nil || types.ProverProveStatus(proverTask.ProvingStatus) != types.ProverProofInvalid {
break
}

if tmpBundleTask == nil {
log.Debug("get empty bundle", "height", getTaskParameter.ProverHeight)
return nil, nil
}

// Don't dispatch the same failing job to the same prover
proverTasks, getTaskError := bp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeBundle, tmpBundleTask.Hash, 2)
if getTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeBundle.String(), "task ID", tmpBundleTask.Hash, "error", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty bundle, the prover already failed this task", "height", getTaskParameter.ProverHeight)
return nil, nil
}
}
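The bundle variant repeats the same flow; the part worth singling out is the "don't dispatch the same failing job to the same prover" guard, which matches the requesting prover against earlier failed attempts either by public key or, for external provers, by prover name via IsExternalProverNameMatch. A simplified sketch with placeholder fields and an injected name-matching rule (not the coordinator's exact helper):

```go
package tasksketch

// failedProverTask and sameExternalProver stand in for the coordinator's
// prover-task rows and its IsExternalProverNameMatch helper.
type failedProverTask struct {
	ProverPublicKey string
	ProverName      string
}

// alreadyFailedByThisProver reports whether the candidate prover has previously
// failed this task, either under the same public key or, when the prover is
// external, under a matching prover name.
func alreadyFailedByThisProver(
	failed []failedProverTask,
	publicKey, proverName string,
	isExternal bool,
	sameExternalProver func(recorded, current string) bool,
) bool {
	for _, t := range failed {
		if t.ProverPublicKey == publicKey {
			return true
		}
		if isExternal && sameExternalProver(t.ProverName, proverName) {
			return true
		}
	}
	return false
}
```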
@@ -4,7 +4,6 @@ import (
"context"
"encoding/json"
"fmt"
"strings"
"time"

"github.com/gin-gonic/gin"
@@ -22,6 +21,7 @@ import (
"scroll-tech/coordinator/internal/config"
"scroll-tech/coordinator/internal/orm"
coordinatorType "scroll-tech/coordinator/internal/types"
cutils "scroll-tech/coordinator/internal/utils"
)

// ChunkProverTask the chunk prover task
@@ -62,10 +62,10 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato

maxActiveAttempts := cp.cfg.ProverManager.ProversPerSession
maxTotalAttempts := cp.cfg.ProverManager.SessionAttempts
if strings.HasPrefix(taskCtx.ProverName, ExternalProverNamePrefix) {
if taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) {
unassignedChunkCount, getCountError := cp.chunkOrm.GetUnassignedChunkCount(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
if getCountError != nil {
log.Error("failed to get unassigned chunk proving tasks count", "height", getTaskParameter.ProverHeight, "err", err)
log.Error("failed to get unassigned chunk proving tasks count", "height", getTaskParameter.ProverHeight, "err", getCountError)
return nil, ErrCoordinatorInternalFailure
}
// Assign external prover if unassigned task number exceeds threshold
@@ -78,42 +78,38 @@ func (cp *ChunkProverTask) Assign(ctx *gin.Context, getTaskParameter *coordinato
for i := 0; i < 5; i++ {
var getTaskError error
var tmpChunkTask *orm.Chunk
var assignedOffset, unassignedOffset = 0, 0
tmpAssignedChunkTasks, getTaskError := cp.chunkOrm.GetAssignedChunks(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight, 50)
tmpChunkTask, getTaskError = cp.chunkOrm.GetAssignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
if getTaskError != nil {
log.Error("failed to get assigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}

// Why here need get again? In order to support a task can assign to multiple prover, need also assign `ProvingTaskAssigned`
// chunk to prover. But use `proving_status in (1, 2)` will not use the postgres index. So need split the sql.
tmpUnassignedChunkTask, getTaskError := cp.chunkOrm.GetUnassignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight, 50)
if getTaskError != nil {
log.Error("failed to get unassigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
for {
tmpChunkTask = nil
if assignedOffset < len(tmpAssignedChunkTasks) {
tmpChunkTask = tmpAssignedChunkTasks[assignedOffset]
assignedOffset++
} else if unassignedOffset < len(tmpUnassignedChunkTask) {
tmpChunkTask = tmpUnassignedChunkTask[unassignedOffset]
unassignedOffset++
}

if tmpChunkTask == nil {
log.Debug("get empty chunk", "height", getTaskParameter.ProverHeight)
return nil, nil
}

// Don't dispatch the same failing job to the same prover
proverTask, getTaskError := cp.proverTaskOrm.GetTaskOfProver(ctx.Copy(), message.ProofTypeChunk, tmpChunkTask.Hash, taskCtx.PublicKey, taskCtx.ProverVersion)
if tmpChunkTask == nil {
tmpChunkTask, getTaskError = cp.chunkOrm.GetUnassignedChunk(ctx.Copy(), maxActiveAttempts, maxTotalAttempts, getTaskParameter.ProverHeight)
if getTaskError != nil {
log.Error("failed to get prover task of prover", "proof_type", message.ProofTypeChunk.String(), "taskID", tmpChunkTask.Hash, "key", taskCtx.PublicKey, "Prover_version", taskCtx.ProverVersion, "error", getTaskError)
log.Error("failed to get unassigned chunk proving tasks", "height", getTaskParameter.ProverHeight, "err", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
if proverTask == nil || types.ProverProveStatus(proverTask.ProvingStatus) != types.ProverProofInvalid {
break
}

if tmpChunkTask == nil {
log.Debug("get empty chunk", "height", getTaskParameter.ProverHeight)
return nil, nil
}

// Don't dispatch the same failing job to the same prover
proverTasks, getTaskError := cp.proverTaskOrm.GetFailedProverTasksByHash(ctx.Copy(), message.ProofTypeChunk, tmpChunkTask.Hash, 2)
if getTaskError != nil {
log.Error("failed to get prover tasks", "proof type", message.ProofTypeChunk.String(), "task ID", tmpChunkTask.Hash, "error", getTaskError)
return nil, ErrCoordinatorInternalFailure
}
for i := 0; i < len(proverTasks); i++ {
if proverTasks[i].ProverPublicKey == taskCtx.PublicKey ||
taskCtx.ProverProviderType == uint8(coordinatorType.ProverProviderTypeExternal) && cutils.IsExternalProverNameMatch(proverTasks[i].ProverName, taskCtx.ProverName) {
log.Debug("get empty chunk, the prover already failed this task", "height", getTaskParameter.ProverHeight)
return nil, nil
}
}

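The comment in the hunk above ("Why here need get again? ... But use `proving_status in (1, 2)` will not use the postgres index. So need split the sql.") is the rationale for fetching assigned and unassigned chunks with two separate statements. A minimal illustrative sketch of that split is below; the helper name and the concrete status constants are assumptions for illustration, not code from this diff.

// Sketch only: the split-query shape described by the comment above.
// The status values mirror the `proving_status in (1, 2)` remark and are assumed.
package sketch

import "fmt"

func buildChunkQueries(maxTotal, maxActive uint8, height uint64) (assignedSQL, unassignedSQL string) {
	// One query per proving_status keeps each statement on the index for that
	// status, instead of a single `proving_status IN (1, 2)` predicate.
	const tmpl = "SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 50;"
	const (
		provingTaskUnassigned = 1 // assumed enum value, for illustration
		provingTaskAssigned   = 2 // assumed enum value, for illustration
	)
	assignedSQL = fmt.Sprintf(tmpl, provingTaskAssigned, maxTotal, maxActive, height)
	unassignedSQL = fmt.Sprintf(tmpl, provingTaskUnassigned, maxTotal, maxActive, height)
	return assignedSQL, unassignedSQL
}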
@@ -27,11 +27,6 @@ var (
getTaskCounterVec *prometheus.CounterVec = nil
)

var (
// ExternalProverNamePrefix prefix of prover name
ExternalProverNamePrefix = "external"
)

// ProverTask the interface of a collector who send data to prover
type ProverTask interface {
Assign(ctx *gin.Context, getTaskParameter *coordinatorType.GetTaskParameter) (*coordinatorType.GetTaskSchema, error)
@@ -52,10 +47,11 @@ type BaseProverTask struct {
}

type proverTaskContext struct {
PublicKey string
ProverName string
ProverVersion string
HardForkNames map[string]struct{}
PublicKey string
ProverName string
ProverVersion string
ProverProviderType uint8
HardForkNames map[string]struct{}
}

// checkParameter check the prover task parameter illegal
@@ -81,6 +77,12 @@ func (b *BaseProverTask) checkParameter(ctx *gin.Context) (*proverTaskContext, e
}
ptc.ProverVersion = proverVersion.(string)

ProverProviderType, ProverProviderTypeExist := ctx.Get(coordinatorType.ProverProviderTypeKey)
if !ProverProviderTypeExist {
return nil, errors.New("get prover provider type from context failed")
}
ptc.ProverProviderType = uint8(ProverProviderType.(float64))

hardForkNamesStr, hardForkNameExist := ctx.Get(coordinatorType.HardForkName)
if !hardForkNameExist {
return nil, errors.New("get hard fork name from context failed")

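In `checkParameter`, the provider type pulled from the gin context is asserted as `float64` before being narrowed to `uint8`. That is most likely because the value passed through JSON on its way into the context (for example via decoded login/JWT claims): `encoding/json` decodes any JSON number into `float64` when the destination is `interface{}`. A small sketch of that behaviour; the claim-decoding helper below is illustrative and not part of this PR.

// Sketch: why the context value arrives as float64.
package sketch

import "encoding/json"

func providerTypeFromClaims(raw []byte) (uint8, error) {
	var claims map[string]interface{}
	if err := json.Unmarshal(raw, &claims); err != nil {
		return 0, err
	}
	v, _ := claims["prover_provider_type"].(float64) // JSON numbers decode to float64
	return uint8(v), nil
}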
@@ -78,21 +78,24 @@ func (*Batch) TableName() string {
return "batch"
}

// GetUnassignedBatches retrieves unassigned batches based on the specified limit.
// GetUnassignedBatch retrieves unassigned batch based on the specified limit.
// The returned batches are sorted in ascending order by their index.
func (o *Batch) GetUnassignedBatches(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, limit uint64) ([]*Batch, error) {
var batch []*Batch
func (o *Batch) GetUnassignedBatch(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Batch, error) {
var batch Batch
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM batch WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk_proofs_status = %d AND batch.deleted_at IS NULL ORDER BY batch.index LIMIT %d;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, int(types.ChunkProofsStatusReady), limit)
sql := fmt.Sprintf("SELECT * FROM batch WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk_proofs_status = %d AND batch.deleted_at IS NULL ORDER BY batch.index LIMIT 1;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, int(types.ChunkProofsStatusReady))
err := db.Raw(sql).Scan(&batch).Error
if err != nil {
return nil, fmt.Errorf("Batch.GetUnassignedBatches error: %w", err)
return nil, fmt.Errorf("Batch.GetUnassignedBatch error: %w", err)
}
return batch, nil
if batch.Hash == "" {
return nil, nil
}
return &batch, nil
}

// GetUnassignedBatchCount retrieves unassigned batch count based on the specified limit.
// GetUnassignedBatchCount retrieves unassigned batch count.
func (o *Batch) GetUnassignedBatchCount(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (int64, error) {
var count int64
db := o.db.WithContext(ctx)
@@ -108,18 +111,21 @@ func (o *Batch) GetUnassignedBatchCount(ctx context.Context, maxActiveAttempts,
return count, nil
}

// GetAssignedBatches retrieves assigned batches based on the specified limit.
// GetAssignedBatch retrieves assigned batch based on the specified limit.
// The returned batches are sorted in ascending order by their index.
func (o *Batch) GetAssignedBatches(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, limit uint64) ([]*Batch, error) {
var batch []*Batch
func (o *Batch) GetAssignedBatch(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Batch, error) {
var batch Batch
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM batch WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk_proofs_status = %d AND batch.deleted_at IS NULL ORDER BY batch.index LIMIT %d;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, int(types.ChunkProofsStatusReady), limit)
sql := fmt.Sprintf("SELECT * FROM batch WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND chunk_proofs_status = %d AND batch.deleted_at IS NULL ORDER BY batch.index LIMIT 1;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, int(types.ChunkProofsStatusReady))
err := db.Raw(sql).Scan(&batch).Error
if err != nil {
return nil, fmt.Errorf("Batch.GetAssignedBatches error: %w", err)
return nil, fmt.Errorf("Batch.GetAssignedBatch error: %w", err)
}
return batch, nil
if batch.Hash == "" {
return nil, nil
}
return &batch, nil
}

// GetUnassignedAndChunksUnreadyBatches get the batches which is unassigned and chunks is not ready
@@ -142,6 +148,19 @@ func (o *Batch) GetUnassignedAndChunksUnreadyBatches(ctx context.Context, offset
return batches, nil
}

// GetAssignedBatches retrieves all batches whose proving_status is either types.ProvingTaskAssigned.
func (o *Batch) GetAssignedBatches(ctx context.Context) ([]*Batch, error) {
db := o.db.WithContext(ctx)
db = db.Model(&Batch{})
db = db.Where("proving_status = ?", int(types.ProvingTaskAssigned))

var assignedBatches []*Batch
if err := db.Find(&assignedBatches).Error; err != nil {
return nil, fmt.Errorf("Batch.GetAssignedBatches error: %w", err)
}
return assignedBatches, nil
}

// GetProvingStatusByHash retrieves the proving status of a batch given its hash.
func (o *Batch) GetProvingStatusByHash(ctx context.Context, hash string) (types.ProvingStatus, error) {
db := o.db.WithContext(ctx)

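The new single-row getters above scan into a value struct and then treat an empty `Hash` as "no row". The reason is that GORM's `Raw(...).Scan(&row)` does not return `gorm.ErrRecordNotFound` for an empty result set; the struct is simply left zero-valued. A trimmed sketch of that pattern, with a reduced row type for illustration only:

// Sketch of the "empty sentinel" pattern used by GetUnassignedBatch/GetAssignedBatch.
package sketch

import "gorm.io/gorm"

type batchRow struct {
	Hash  string
	Index uint64
}

func getOneBatch(db *gorm.DB, sql string) (*batchRow, error) {
	var row batchRow
	// Scan leaves row zero-valued (and returns nil error) when nothing matches.
	if err := db.Raw(sql).Scan(&row).Error; err != nil {
		return nil, err
	}
	if row.Hash == "" { // no matching batch
		return nil, nil
	}
	return &row, nil
}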
@@ -54,21 +54,24 @@ func (*Bundle) TableName() string {
return "bundle"
}

// GetUnassignedBundles retrieves unassigned bundle based on the specified limit.
// GetUnassignedBundle retrieves unassigned bundle based on the specified limit.
// The returned batch sorts in ascending order by their index.
func (o *Bundle) GetUnassignedBundles(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, limit uint64) ([]*Bundle, error) {
var bundle []*Bundle
func (o *Bundle) GetUnassignedBundle(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Bundle, error) {
var bundle Bundle
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM bundle WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND batch_proofs_status = %d AND bundle.deleted_at IS NULL ORDER BY bundle.index LIMIT %d;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, int(types.BatchProofsStatusReady), limit)
sql := fmt.Sprintf("SELECT * FROM bundle WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND batch_proofs_status = %d AND bundle.deleted_at IS NULL ORDER BY bundle.index LIMIT 1;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, int(types.BatchProofsStatusReady))
err := db.Raw(sql).Scan(&bundle).Error
if err != nil {
return nil, fmt.Errorf("Batch.GetUnassignedBundles error: %w", err)
return nil, fmt.Errorf("Batch.GetUnassignedBundle error: %w", err)
}
return bundle, nil
if bundle.StartBatchHash == "" || bundle.EndBatchHash == "" {
return nil, nil
}
return &bundle, nil
}

// GetUnassignedBundleCount retrieves unassigned bundle count based on the specified limit.
// GetUnassignedBundleCount retrieves unassigned bundle count.
func (o *Bundle) GetUnassignedBundleCount(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (int64, error) {
var count int64
db := o.db.WithContext(ctx)
@@ -76,6 +79,7 @@ func (o *Bundle) GetUnassignedBundleCount(ctx context.Context, maxActiveAttempts
db = db.Where("proving_status = ?", int(types.ProvingTaskUnassigned))
db = db.Where("total_attempts < ?", maxTotalAttempts)
db = db.Where("active_attempts < ?", maxActiveAttempts)
db = db.Where("batch_proofs_status = ?", int(types.BatchProofsStatusReady))
db = db.Where("bundle.deleted_at IS NULL")
if err := db.Count(&count).Error; err != nil {
return 0, fmt.Errorf("Bundle.GetUnassignedBundleCount error: %w", err)
@@ -83,18 +87,21 @@ func (o *Bundle) GetUnassignedBundleCount(ctx context.Context, maxActiveAttempts
return count, nil
}

// GetAssignedBundles retrieves assigned bundles based on the specified limit.
// GetAssignedBundle retrieves assigned bundle based on the specified limit.
// The returned bundle sorts in ascending order by their index.
func (o *Bundle) GetAssignedBundles(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, limit uint64) ([]*Bundle, error) {
var bundle []*Bundle
func (o *Bundle) GetAssignedBundle(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8) (*Bundle, error) {
var bundle Bundle
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM bundle WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND batch_proofs_status = %d AND bundle.deleted_at IS NULL ORDER BY bundle.index LIMIT %d;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, int(types.BatchProofsStatusReady), limit)
sql := fmt.Sprintf("SELECT * FROM bundle WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND batch_proofs_status = %d AND bundle.deleted_at IS NULL ORDER BY bundle.index LIMIT 1;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, int(types.BatchProofsStatusReady))
err := db.Raw(sql).Scan(&bundle).Error
if err != nil {
return nil, fmt.Errorf("Bundle.GetAssignedBundles error: %w", err)
return nil, fmt.Errorf("Bundle.GetAssignedBatch error: %w", err)
}
return bundle, nil
if bundle.StartBatchHash == "" || bundle.EndBatchHash == "" {
return nil, nil
}
return &bundle, nil
}

// GetProvingStatusByHash retrieves the proving status of a bundle given its hash.

@@ -73,19 +73,22 @@ func (*Chunk) TableName() string {

// GetUnassignedChunk retrieves unassigned chunk based on the specified limit.
// The returned chunks are sorted in ascending order by their index.
func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height, limit uint64) ([]*Chunk, error) {
var chunks []*Chunk
func (o *Chunk) GetUnassignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (*Chunk, error) {
var chunk Chunk
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT %d;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, height, limit)
err := db.Raw(sql).Scan(&chunks).Error
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskUnassigned), maxTotalAttempts, maxActiveAttempts, height)
err := db.Raw(sql).Scan(&chunk).Error
if err != nil {
return nil, fmt.Errorf("Chunk.GetUnassignedChunk error: %w", err)
}
return chunks, nil
if chunk.Hash == "" {
return nil, nil
}
return &chunk, nil
}

// GetUnassignedChunkCount retrieves unassigned chunk count based on the specified limit.
// GetUnassignedChunkCount retrieves unassigned chunk count.
func (o *Chunk) GetUnassignedChunkCount(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (int64, error) {
var count int64
db := o.db.WithContext(ctx)
@@ -101,18 +104,21 @@ func (o *Chunk) GetUnassignedChunkCount(ctx context.Context, maxActiveAttempts,
return count, nil
}

// GetAssignedChunks retrieves assigned chunks based on the specified limit.
// GetAssignedChunk retrieves assigned chunk based on the specified limit.
// The returned chunks are sorted in ascending order by their index.
func (o *Chunk) GetAssignedChunks(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64, limit uint64) ([]*Chunk, error) {
var chunks []*Chunk
func (o *Chunk) GetAssignedChunk(ctx context.Context, maxActiveAttempts, maxTotalAttempts uint8, height uint64) (*Chunk, error) {
var chunk Chunk
db := o.db.WithContext(ctx)
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT %d;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, height, limit)
err := db.Raw(sql).Scan(&chunks).Error
sql := fmt.Sprintf("SELECT * FROM chunk WHERE proving_status = %d AND total_attempts < %d AND active_attempts < %d AND end_block_number <= %d AND chunk.deleted_at IS NULL ORDER BY chunk.index LIMIT 1;",
int(types.ProvingTaskAssigned), maxTotalAttempts, maxActiveAttempts, height)
err := db.Raw(sql).Scan(&chunk).Error
if err != nil {
return nil, fmt.Errorf("Chunk.GetAssignedChunks error: %w", err)
return nil, fmt.Errorf("Chunk.GetAssignedChunk error: %w", err)
}
return chunks, nil
if chunk.Hash == "" {
return nil, nil
}
return &chunk, nil
}

// GetChunksByBatchHash retrieves the chunks associated with a specific batch hash.

@@ -117,6 +117,27 @@ func (o *ProverTask) GetProverTasksByHashes(ctx context.Context, taskType messag
return proverTasks, nil
}

// GetFailedProverTasksByHash retrieves the failed ProverTask records associated with the specified hash.
// The returned prover task objects are sorted in descending order by their ids.
func (o *ProverTask) GetFailedProverTasksByHash(ctx context.Context, taskType message.ProofType, hash string, limit int) ([]*ProverTask, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type", int(taskType))
db = db.Where("task_id", hash)
db = db.Where("proving_status = ?", int(types.ProverProofInvalid))
db = db.Order("id desc")

if limit != 0 {
db = db.Limit(limit)
}

var proverTasks []*ProverTask
if err := db.Find(&proverTasks).Error; err != nil {
return nil, fmt.Errorf("ProverTask.GetFailedProverTasksByHash error: %w, hash: %v", err, hash)
}
return proverTasks, nil
}

// GetProverTaskByUUIDAndPublicKey get prover task taskID by uuid and public key
func (o *ProverTask) GetProverTaskByUUIDAndPublicKey(ctx context.Context, uuid, publicKey string) (*ProverTask, error) {
db := o.db.WithContext(ctx)
@@ -148,24 +169,6 @@ func (o *ProverTask) GetAssignedTaskOfOtherProvers(ctx context.Context, taskType
return proverTasks, nil
}

// GetTaskOfOtherProvers get the chunk/batch task of prover
func (o *ProverTask) GetTaskOfProver(ctx context.Context, taskType message.ProofType, taskID, proverPublicKey, proverVersion string) (*ProverTask, error) {
db := o.db.WithContext(ctx)
db = db.Model(&ProverTask{})
db = db.Where("task_type", int(taskType))
db = db.Where("task_id", taskID)
db = db.Where("prover_public_key", proverPublicKey)
db = db.Where("prover_version", proverVersion)
db = db.Limit(1)

var proverTask ProverTask
err := db.Find(&proverTask).Error
if err != nil {
return nil, fmt.Errorf("ProverTask.GetTaskOfProver error: %w, taskID: %v, publicKey:%s", err, taskID, proverPublicKey)
}
return &proverTask, nil
}

// GetProvingStatusByTaskID retrieves the proving status of a prover task
func (o *ProverTask) GetProvingStatusByTaskID(ctx context.Context, taskType message.ProofType, taskID string) (types.ProverProveStatus, error) {
db := o.db.WithContext(ctx)

@@ -18,6 +18,8 @@ const (
ProverName = "prover_name"
// ProverVersion the prover version for context
ProverVersion = "prover_version"
// ProverProviderTypeKey the prover provider type for context
ProverProviderTypeKey = "prover_provider_type"
// HardForkName the hard fork name for context
HardForkName = "hard_fork_name"
)
@@ -28,13 +30,22 @@ type LoginSchema struct {
Token string `json:"token"`
}

type MessageWithoutProverProviderType struct {
Challenge string `json:"challenge"`
ProverVersion string `json:"prover_version"`
ProverName string `json:"prover_name"`
ProverTypes []ProverType `json:"prover_types"`
VKs []string `json:"vks"`
}

// Message the login message struct
type Message struct {
Challenge string `form:"challenge" json:"challenge" binding:"required"`
ProverVersion string `form:"prover_version" json:"prover_version" binding:"required"`
ProverName string `form:"prover_name" json:"prover_name" binding:"required"`
ProverTypes []ProverType `form:"prover_types" json:"prover_types"`
VKs []string `form:"vks" json:"vks"`
Challenge string `form:"challenge" json:"challenge" binding:"required"`
ProverVersion string `form:"prover_version" json:"prover_version" binding:"required"`
ProverName string `form:"prover_name" json:"prover_name" binding:"required"`
ProverProviderType ProverProviderType `form:"prover_provider_type" json:"prover_provider_type,omitempty"`
ProverTypes []ProverType `form:"prover_types" json:"prover_types"`
VKs []string `form:"vks" json:"vks"`
}

// LoginParameterWithHardForkName constructs new payload for login
@@ -53,7 +64,7 @@ type LoginParameter struct {
// SignWithKey auth message with private key and set public key in auth message's Identity
func (a *LoginParameter) SignWithKey(priv *ecdsa.PrivateKey) error {
// Hash identity content
hash, err := a.Message.Hash()
hash, err := Hash(a.Message)
if err != nil {
return err
}
@@ -70,7 +81,14 @@ func (a *LoginParameter) SignWithKey(priv *ecdsa.PrivateKey) error {

// Verify verifies the message of auth.
func (a *LoginParameter) Verify() (bool, error) {
hash, err := a.Message.Hash()
var hash []byte
var err error
if a.Message.ProverProviderType == ProverProviderTypeUndefined {
// for backward compatibility, calculate hash without ProverProviderType
hash, err = Hash(a.Message.ToMessageWithoutProverProviderType())
} else {
hash, err = Hash(a.Message)
}
if err != nil {
return false, err
}
@@ -85,15 +103,14 @@ func (a *LoginParameter) Verify() (bool, error) {
return isValid, nil
}

// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func (i *Message) Hash() ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
func (m *Message) ToMessageWithoutProverProviderType() MessageWithoutProverProviderType {
return MessageWithoutProverProviderType{
Challenge: m.Challenge,
ProverVersion: m.ProverVersion,
ProverName: m.ProverName,
ProverTypes: m.ProverTypes,
VKs: m.VKs,
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}

// DecodeAndUnmarshalPubkey decodes a hex-encoded public key and unmarshal it into an ecdsa.PublicKey
@@ -111,3 +128,14 @@ func (i *Message) DecodeAndUnmarshalPubkey(pubKeyHex string) (*ecdsa.PublicKey,
}
return pubKey, nil
}

// Hash returns the hash of the auth message, which should be the message used
// to construct the Signature.
func Hash(i interface{}) ([]byte, error) {
byt, err := rlp.EncodeToBytes(i)
if err != nil {
return nil, err
}
hash := crypto.Keccak256Hash(byt)
return hash[:], nil
}

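The backward-compatibility branch in `Verify` above exists because the login hash is the Keccak of an RLP encoding of the whole `Message` struct: adding the `ProverProviderType` field changes the encoded bytes, so signatures produced by older provers (which never send the field) only verify against the hash of the field-free shape. A tiny sketch of why the encodings differ; the struct shapes are trimmed and the import path assumes the go-ethereum fork used elsewhere in this repo.

// Sketch: a new struct field changes the RLP encoding, hence the hash.
package main

import (
	"bytes"
	"fmt"

	"github.com/scroll-tech/go-ethereum/rlp"
)

type msgV1 struct {
	Challenge, ProverVersion, ProverName string
}

type msgV2 struct {
	Challenge, ProverVersion, ProverName string
	ProverProviderType                   uint8
}

func main() {
	a, _ := rlp.EncodeToBytes(msgV1{"abc", "v0.0.1", "test1"})
	b, _ := rlp.EncodeToBytes(msgV2{"abc", "v0.0.1", "test1", 0})
	fmt.Println(bytes.Equal(a, b)) // false: the extra field changes the byte stream
}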
@@ -18,11 +18,12 @@ func TestAuthMessageSignAndVerify(t *testing.T) {
t.Run("sign", func(t *testing.T) {
authMsg = LoginParameter{
Message: Message{
ProverName: "test1",
ProverVersion: "v0.0.1",
Challenge: "abcdef",
ProverTypes: []ProverType{ProverTypeBatch},
VKs: []string{"vk1", "vk2"},
ProverName: "test1",
ProverVersion: "v0.0.1",
Challenge: "abcdef",
ProverProviderType: ProverProviderTypeInternal,
ProverTypes: []ProverType{ProverTypeBatch},
VKs: []string{"vk1", "vk2"},
},
PublicKey: publicKeyHex,
}
@@ -59,11 +60,12 @@ func TestGenerateSignature(t *testing.T) {

authMsg := LoginParameter{
Message: Message{
ProverName: "test",
ProverVersion: "v4.4.45-37af5ef5-38a68e2-1c5093c",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjQ4Mzg0ODUsIm9yaWdfaWF0IjoxNzI0ODM0ODg1LCJyYW5kb20iOiJ6QmdNZGstNGc4UzNUNTFrVEFsYk1RTXg2TGJ4SUs4czY3ejM2SlNuSFlJPSJ9.x9PvihhNx2w4_OX5uCrv8QJCNYVQkIi-K2k8XFXYmik",
ProverTypes: []ProverType{ProverTypeChunk},
VKs: []string{"mock_vk"},
ProverName: "test",
ProverVersion: "v4.4.45-37af5ef5-38a68e2-1c5093c",
Challenge: "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJleHAiOjE3MjQ4Mzg0ODUsIm9yaWdfaWF0IjoxNzI0ODM0ODg1LCJyYW5kb20iOiJ6QmdNZGstNGc4UzNUNTFrVEFsYk1RTXg2TGJ4SUs4czY3ejM2SlNuSFlJPSJ9.x9PvihhNx2w4_OX5uCrv8QJCNYVQkIi-K2k8XFXYmik",
ProverProviderType: ProverProviderTypeInternal,
ProverTypes: []ProverType{ProverTypeChunk},
VKs: []string{"mock_vk"},
},
PublicKey: publicKeyHex,
}

@@ -40,3 +40,26 @@ func MakeProverType(proofType message.ProofType) ProverType {
return ProverTypeUndefined
}
}

// ProverProviderType represents the type of prover provider.
type ProverProviderType uint8

func (r ProverProviderType) String() string {
switch r {
case ProverProviderTypeInternal:
return "prover provider type internal"
case ProverProviderTypeExternal:
return "prover provider type external"
default:
return fmt.Sprintf("prover provider type: %d", r)
}
}

const (
// ProverProviderTypeUndefined is an unknown prover provider type
ProverProviderTypeUndefined ProverProviderType = iota
// ProverProviderTypeInternal is an internal prover provider type
ProverProviderTypeInternal
// ProverProviderTypeExternal is an external prover provider type
ProverProviderTypeExternal
)

17
coordinator/internal/utils/prover_name.go
Normal file
@@ -0,0 +1,17 @@
package utils

import "strings"

// IsExternalProverNameMatch checks if the local and remote external prover names belong to the same provider.
// It returns true if they do, otherwise false.
func IsExternalProverNameMatch(localName, remoteName string) bool {
local := strings.Split(localName, "_")
remote := strings.Split(remoteName, "_")

if len(local) < 3 || len(remote) < 3 {
return false
}

// note the name of cloud prover is in the format of "cloud_prover_{provider-name}_index"
return local[0] == remote[0] && local[1] == remote[1] && local[2] == remote[2]
}

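A short usage sketch for the new `IsExternalProverNameMatch` helper. The example names below are invented; they only follow the "cloud_prover_{provider-name}_index" convention noted in the function's comment.

// Example usage of IsExternalProverNameMatch (illustrative names only).
package utils_test

import (
	"fmt"

	cutils "scroll-tech/coordinator/internal/utils"
)

func ExampleIsExternalProverNameMatch() {
	fmt.Println(cutils.IsExternalProverNameMatch("cloud_prover_acme_0", "cloud_prover_acme_7"))  // same provider segment
	fmt.Println(cutils.IsExternalProverNameMatch("cloud_prover_acme_0", "cloud_prover_other_1")) // different provider
	fmt.Println(cutils.IsExternalProverNameMatch("prover-1", "prover-2"))                        // fewer than 3 segments
	// Output:
	// true
	// false
	// false
}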
@@ -79,11 +79,12 @@ func (r *mockProver) challenge(t *testing.T) string {
func (r *mockProver) login(t *testing.T, challengeString string, proverTypes []types.ProverType) (string, int, string) {
authMsg := types.LoginParameter{
Message: types.Message{
Challenge: challengeString,
ProverName: r.proverName,
ProverVersion: r.proverVersion,
ProverTypes: proverTypes,
VKs: []string{"mock_vk"},
Challenge: challengeString,
ProverName: r.proverName,
ProverVersion: r.proverVersion,
ProverProviderType: types.ProverProviderTypeInternal,
ProverTypes: proverTypes,
VKs: []string{"mock_vk"},
},
PublicKey: r.publicKey(),
}

18
go.work.sum
18
go.work.sum
@@ -448,8 +448,6 @@ github.com/PuerkitoBio/urlesc v0.0.0-20170810143723-de5bf2ad4578/go.mod h1:uGdko
|
||||
github.com/Shopify/goreferrer v0.0.0-20220729165902-8cddb4f5de06/go.mod h1:7erjKLwalezA0k99cWs5L11HWOAPNjdUZ6RxH1BXbbM=
|
||||
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6 h1:fLjPD/aNc3UIOA6tDi6QXUemppXK3P9BI7mr2hd6gx8=
|
||||
github.com/StackExchange/wmi v0.0.0-20180116203802-5d049714c4a6/go.mod h1:3eOhrUMpNV+6aFIbp5/iudMxNCF27Vw2OZgy4xEx0Fg=
|
||||
github.com/VictoriaMetrics/fastcache v1.12.2 h1:N0y9ASrJ0F6h0QaC3o6uJb3NIZ9VKLjCM7NQbSmF7WI=
|
||||
github.com/VictoriaMetrics/fastcache v1.12.2/go.mod h1:AmC+Nzz1+3G2eCPapF6UcsnkThDcMsQicp4xDukwJYI=
|
||||
github.com/aclements/go-moremath v0.0.0-20210112150236-f10218a38794/go.mod h1:7e+I0LQFUI9AXWxOfsQROs9xPhoJtbsyWcjJqDd4KPY=
|
||||
github.com/aead/siphash v1.0.1 h1:FwHfE/T45KPKYuuSAKyyvE+oPWcaQ+CUmFW0bPlM+kg=
|
||||
github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo=
|
||||
@@ -631,7 +629,7 @@ github.com/dave/jennifer v1.2.0 h1:S15ZkFMRoJ36mGAQgWL1tnr0NQJh9rZ8qatseX/VbBc=
|
||||
github.com/dave/jennifer v1.2.0/go.mod h1:fIb+770HOpJ2fmN9EPPKOqm1vMGhB+TwXKMZhrIygKg=
|
||||
github.com/dchest/blake512 v1.0.0 h1:oDFEQFIqFSeuA34xLtXZ/rWxCXdSjirjzPhey5EUvmA=
|
||||
github.com/dchest/blake512 v1.0.0/go.mod h1:FV1x7xPPLWukZlpDpWQ88rF/SFwZ5qbskrzhLMB92JI=
|
||||
github.com/deckarep/golang-set v0.0.0-20180603214616-504e848d77ea/go.mod h1:93vsz/8Wt4joVM7c2AVqh+YRMiUSc14yDtF28KmMOgQ=
|
||||
github.com/deckarep/golang-set v1.8.0/go.mod h1:5nI87KwE7wgsBU1F4GKAw2Qod7p5kyS383rP6+o6qqo=
|
||||
github.com/deepmap/oapi-codegen v1.6.0 h1:w/d1ntwh91XI0b/8ja7+u5SvA4IFfM0UNNLmiDR1gg0=
|
||||
github.com/deepmap/oapi-codegen v1.6.0/go.mod h1:ryDa9AgbELGeB+YEXE1dR53yAjHwFvE9iAUlWl9Al3M=
|
||||
github.com/deepmap/oapi-codegen v1.8.2 h1:SegyeYGcdi0jLLrpbCMoJxnUUn8GBXHsvr4rbzjuhfU=
|
||||
@@ -720,8 +718,6 @@ github.com/go-kit/log v0.1.0/go.mod h1:zbhenjAZHb184qTLMA9ZjW7ThYL0H2mk7Q6pNt4vb
|
||||
github.com/go-kit/log v0.2.1 h1:MRVx0/zhvdseW+Gza6N9rVzU/IVzaeE1SFI4raAhmBU=
|
||||
github.com/go-kit/log v0.2.1/go.mod h1:NwTd00d/i8cPZ3xOwwiv2PO5MOcx78fFErGNcVmBjv0=
|
||||
github.com/go-logfmt/logfmt v0.5.0/go.mod h1:wCYkCAKZfumFQihp8CzCvQ3paCTfi41vtzG1KdI/P7A=
|
||||
github.com/go-logfmt/logfmt v0.5.1 h1:otpy5pqBCBZ1ng9RQ0dPu4PN7ba75Y/aA+UpowDyNVA=
|
||||
github.com/go-logfmt/logfmt v0.5.1/go.mod h1:WYhtIu8zTZfxdn5+rREduYbwxfcBr/Vr6KEVveWlfTs=
|
||||
github.com/go-martini/martini v0.0.0-20170121215854-22fa46961aab/go.mod h1:/P9AEU963A2AYjv4d1V5eVL1CQbEJq6aCNHDDjibzu8=
|
||||
github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
|
||||
github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
|
||||
@@ -841,6 +837,7 @@ github.com/hashicorp/go-cleanhttp v0.5.0/go.mod h1:JpRdi6/HCYpAwUzNwuwqhbovhLtng
|
||||
github.com/hashicorp/go-cty-funcs v0.0.0-20230405223818-a090f58aa992 h1:fYOrSfO5C9PmFGtmRWSYGqq52SOoE2dXMtAn2Xzh1LQ=
|
||||
github.com/hashicorp/go-cty-funcs v0.0.0-20230405223818-a090f58aa992/go.mod h1:Abjk0jbRkDaNCzsRhOv2iDCofYpX1eVsjozoiK63qLA=
|
||||
github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ=
|
||||
github.com/hashicorp/go-hclog v1.2.0/go.mod h1:whpDNt7SSdeAju8AWKIWsul05p54N/39EeqMAyrmvFQ=
|
||||
github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M=
|
||||
github.com/hashicorp/go-immutable-radix v1.0.0/go.mod h1:0y9vanUI8NX6FsYoO3zeMjhV/C5i9g4Q3DwcSNZ4P60=
|
||||
github.com/hashicorp/go-immutable-radix v1.3.1 h1:DKHmCUm2hRBK510BaiZlwvpD40f8bJFeZnpfm2KLowc=
|
||||
@@ -862,6 +859,7 @@ github.com/hashicorp/go-uuid v1.0.1/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/b
|
||||
github.com/hashicorp/go-uuid v1.0.3/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro=
|
||||
github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8=
|
||||
github.com/hashicorp/golang-lru v0.5.4 h1:YDjusn29QI/Das2iO9M0BHnIbxPeyuCHsjMW+lJfyTc=
|
||||
github.com/hashicorp/golang-lru v0.5.4/go.mod h1:iADmTwqILo4mZ8BN3D2Q6+9jd8WM5uGBxy+E8yxSoD4=
|
||||
github.com/hashicorp/hcl/v2 v2.19.1 h1://i05Jqznmb2EXqa39Nsvyan2o5XyMowW5fnCKW5RPI=
|
||||
github.com/hashicorp/hcl/v2 v2.19.1/go.mod h1:ThLC89FV4p9MPW804KVbe/cEXoQ8NZEh+JtMeeGErHE=
|
||||
@@ -1009,6 +1007,7 @@ github.com/mattn/go-ieproxy v0.0.0-20190702010315-6dee0af9227d/go.mod h1:31jz6HN
|
||||
github.com/mattn/go-isatty v0.0.3/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/mattn/go-isatty v0.0.4/go.mod h1:M+lRXTBqGeGNdLjl/ufCoiOlB5xdOkqRJdNxMWT7Zi4=
|
||||
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
|
||||
github.com/mattn/go-isatty v0.0.10/go.mod h1:qgIWMr58cqv1PHHyhnkY9lrL7etaEgOFcMEpPG5Rm84=
|
||||
github.com/mattn/go-isatty v0.0.11/go.mod h1:PhnuNfih5lzO57/f3n+odYbM4JtupLOxQOAqxQCu2WE=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-isatty v0.0.17/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM=
|
||||
@@ -1111,7 +1110,6 @@ github.com/prometheus/common v0.26.0/go.mod h1:M7rCNAaPfAosfx8veZJCuw84e35h3Cfd9
|
||||
github.com/prometheus/procfs v0.0.8/go.mod h1:7Qr8sr6344vo1JqZ6HhLceV9o3AJ1Ff+GxbHq6oeK9A=
|
||||
github.com/prometheus/procfs v0.1.3/go.mod h1:lV6e/gmhEcM9IjHGsFOCxxuZ+z1YqCvr4OA4YeYWdaU=
|
||||
github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA=
|
||||
github.com/prometheus/tsdb v0.7.1 h1:YZcsG11NqnK4czYLrWd9mpEuAJIHVQLwdrleYfszMAA=
|
||||
github.com/protolambda/bls12-381-util v0.0.0-20220416220906-d8552aa452c7/go.mod h1:IToEjHuttnUzwZI5KBSM/LOOW3qLbbrHOEfp3SbECGY=
|
||||
github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0 h1:MkV+77GLUNo5oJ0jf870itWm3D0Sjh7+Za9gazKc5LQ=
|
||||
github.com/rcrowley/go-metrics v0.0.0-20200313005456-10cdbea86bc0/go.mod h1:bCqnVzQkZxMG4s8nGwiZ5l3QUCyqpo9Y+/ZMZ9VjZe4=
|
||||
@@ -1120,6 +1118,7 @@ github.com/retailnext/hllpp v1.0.1-0.20180308014038-101a6d2f8b52/go.mod h1:RDpi1
|
||||
github.com/rogpeppe/fastuuid v1.2.0 h1:Ppwyp6VYCF1nvBTXL3trRso7mXMlRrw9ooo375wvi2s=
|
||||
github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
|
||||
github.com/russross/blackfriday v1.6.0 h1:KqfZb0pUVN2lYqZUYRddxF4OR8ZMURnJIG5Y3VRLtww=
|
||||
github.com/russross/blackfriday v1.6.0/go.mod h1:ti0ldHuxg49ri4ksnFxlkCfN+hvslNlmVHqNRXXJNAY=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
@@ -1142,8 +1141,7 @@ github.com/scroll-tech/da-codec v0.1.1-0.20241014152913-2703f226fb0b/go.mod h1:4
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20240607130425-e2becce6a1a4/go.mod h1:byf/mZ8jLYUCnUePTicjJWn+RvKdxDn7buS6glTnMwQ=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20240821074444-b3fa00861e5e/go.mod h1:swB5NSp8pKNDuYsTxfR08bHS6L56i119PBx8fxvV8Cs=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20241010064814-3d88e870ae22/go.mod h1:r9FwtxCtybMkTbWYCyBuevT9TW3zHmOTHqD082Uh+Oo=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20241023093931-91c2f9c27f4d h1:vuv7fGKEDtoeetI6RkKt8RAByJsYZBWk9Vo6gShv65c=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20241023093931-91c2f9c27f4d/go.mod h1:PWEOTg6LeWlJAlFJauO0msSLXWnpHmE+mVh5txtfeRM=
|
||||
github.com/scroll-tech/go-ethereum v1.10.14-0.20250206083728-ea43834c198f/go.mod h1:Ik3OBLl7cJxPC+CFyCBYNXBPek4wpdzkWehn/y5qLM8=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||
github.com/segmentio/kafka-go v0.1.0/go.mod h1:X6itGqS9L4jDletMsxZ7Dz+JFWxM6JHfPOCvTvk+EJo=
|
||||
github.com/segmentio/kafka-go v0.2.0 h1:HtCSf6B4gN/87yc5qTl7WsxPKQIIGXLPPM1bMCPOsoY=
|
||||
@@ -1304,6 +1302,7 @@ golang.org/x/crypto v0.0.0-20210817164053-32db794688a5/go.mod h1:GvvjBRRGRdwPK5y
|
||||
golang.org/x/crypto v0.1.0/go.mod h1:RecgLatLF4+eUMCP1PoPZQb+cVrJcOPbHkTkbkB9sbw=
|
||||
golang.org/x/crypto v0.6.0/go.mod h1:OFC/31mSvZgRz0V1QTNCzfAI1aIRzbiufJtkMIlEp58=
|
||||
golang.org/x/crypto v0.7.0/go.mod h1:pYwdfH91IfpZVANVyUOhSIPZaFoJGxTFbZhFTx+dXZU=
|
||||
golang.org/x/crypto v0.12.0/go.mod h1:NF0Gs7EO5K4qLn+Ylc+fih8BSTeIjAP05siRnAh98yw=
|
||||
golang.org/x/crypto v0.17.0/go.mod h1:gCAAfMLgwOJRpTjQ2zCCt2OcSfYMTeZVSRtQlPC7Nq4=
|
||||
golang.org/x/crypto v0.18.0/go.mod h1:R0j02AL6hcrfOiy9T4ZYp/rcWeMxM3L6QYxlOuEG1mg=
|
||||
golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU=
|
||||
@@ -1403,6 +1402,7 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20190922100055-0a153f010e69/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20190924154521-2837fb4f24fe/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191001151750-bb3f8db39f24/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191008105621-543471e840be/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191204072324-ce4227a45e2e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20191228213918-04cbcbbfeed8/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200106162015-b016eb3dc98e/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
@@ -1439,7 +1439,6 @@ golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.4.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.13.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.14.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.16.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
|
||||
@@ -1449,6 +1448,7 @@ golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2 h1:IRJeR9r1pYWsHKTRe/I
|
||||
golang.org/x/telemetry v0.0.0-20240228155512-f48c80bd79b2/go.mod h1:TeRTkGYfJXctD9OcfyVLyj2J3IxLnKwHJR8f4D8a3YE=
|
||||
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
|
||||
golang.org/x/term v0.15.0/go.mod h1:BDl952bC7+uMoWR75FIrCDx79TPU9oHkTZ9yRbYOrX0=
|
||||
golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58=
|
||||
golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY=
|
||||
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||
|
||||
373
prover/Cargo.lock
generated
373
prover/Cargo.lock
generated
@@ -441,6 +441,55 @@ version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0c4b4d0bd25bd0b74681c0ad21497610ce1b7c91b1022cd21c80c6fbdd9476b0"
|
||||
|
||||
[[package]]
|
||||
name = "axum"
|
||||
version = "0.6.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "3b829e4e32b91e643de6eafe82b1d90675f5874230191a4ffbc1b336dec4d6bf"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"axum-core",
|
||||
"bitflags 1.3.2",
|
||||
"bytes",
|
||||
"futures-util",
|
||||
"http 0.2.12",
|
||||
"http-body 0.4.6",
|
||||
"hyper 0.14.28",
|
||||
"itoa",
|
||||
"matchit",
|
||||
"memchr",
|
||||
"mime",
|
||||
"percent-encoding",
|
||||
"pin-project-lite",
|
||||
"rustversion",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"serde_path_to_error",
|
||||
"serde_urlencoded",
|
||||
"sync_wrapper",
|
||||
"tokio",
|
||||
"tower",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "axum-core"
|
||||
version = "0.3.4"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "759fa577a247914fd3f7f76d62972792636412fbfd634cd452f6a385a74d2d2c"
|
||||
dependencies = [
|
||||
"async-trait",
|
||||
"bytes",
|
||||
"futures-util",
|
||||
"http 0.2.12",
|
||||
"http-body 0.4.6",
|
||||
"mime",
|
||||
"rustversion",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "backtrace"
|
||||
version = "0.3.71"
|
||||
@@ -501,6 +550,26 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bindgen"
|
||||
version = "0.69.5"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "271383c67ccabffb7381723dea0672a673f292304fcb45c01cc648c7a8d58088"
|
||||
dependencies = [
|
||||
"bitflags 2.5.0",
|
||||
"cexpr",
|
||||
"clang-sys",
|
||||
"itertools 0.10.5",
|
||||
"lazy_static",
|
||||
"lazycell",
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"regex",
|
||||
"rustc-hash",
|
||||
"shlex",
|
||||
"syn 2.0.66",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bit-set"
|
||||
version = "0.5.3"
|
||||
@@ -703,6 +772,17 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "bzip2-sys"
|
||||
version = "0.1.11+1.0.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "736a955f3fa7875102d57c82b8cac37ec45224a07fd32d58f9f7a186b6cd4cdc"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "c-kzg"
|
||||
version = "1.0.2"
|
||||
@@ -728,6 +808,15 @@ dependencies = [
|
||||
"once_cell",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cexpr"
|
||||
version = "0.6.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6fac387a98bb7c37292057cffc56d62ecb629900026402633ae9160df93a8766"
|
||||
dependencies = [
|
||||
"nom",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "cfg-if"
|
||||
version = "0.1.10"
|
||||
@@ -764,6 +853,17 @@ dependencies = [
|
||||
"inout",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clang-sys"
|
||||
version = "1.8.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0b023947811758c97c59bf9d1c188fd619ad4718dcaa767947df1cadb14f39f4"
|
||||
dependencies = [
|
||||
"glob",
|
||||
"libc",
|
||||
"libloading",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "clap"
|
||||
version = "4.5.4"
|
||||
@@ -1136,6 +1236,12 @@ dependencies = [
|
||||
"subtle",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "dotenv"
|
||||
version = "0.15.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77c90badedccf4105eca100756a0b1289e191f6fcbdadd3cee1d2f614f97da8f"
|
||||
|
||||
[[package]]
|
||||
name = "dotenvy"
|
||||
version = "0.15.7"
|
||||
@@ -2551,18 +2657,60 @@ dependencies = [
|
||||
"spin 0.5.2",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lazycell"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "830d08ce1d1d941e6b30645f1a0eb5643013d835ce3779a5fc208261dbe10f55"
|
||||
|
||||
[[package]]
|
||||
name = "libc"
|
||||
version = "0.2.155"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "97b3888a4aecf77e811145cadf6eef5901f4782c53886191b2f693f24761847c"
|
||||
|
||||
[[package]]
|
||||
name = "libloading"
|
||||
version = "0.8.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "fc2f4eb4bc735547cfed7c0a4922cbd04a4655978c09b54f1f7b228750664c34"
|
||||
dependencies = [
|
||||
"cfg-if 1.0.0",
|
||||
"windows-targets 0.48.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libm"
|
||||
version = "0.2.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "4ec2a862134d2a7d32d7983ddcdd1c4923530833c9f2ea1a44fc5fa473989058"
|
||||
|
||||
[[package]]
|
||||
name = "librocksdb-sys"
|
||||
version = "0.17.1+9.9.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "2b7869a512ae9982f4d46ba482c2a304f1efd80c6412a3d4bf57bb79a619679f"
|
||||
dependencies = [
|
||||
"bindgen",
|
||||
"bzip2-sys",
|
||||
"cc",
|
||||
"libc",
|
||||
"libz-sys",
|
||||
"lz4-sys",
|
||||
"zstd-sys 2.0.13+zstd.1.5.6",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "libz-sys"
|
||||
version = "1.1.20"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d2d16453e800a8cf6dd2fc3eb4bc99b786a9b90c663b8559a5b1a041bf89e472"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "linux-raw-sys"
|
||||
version = "0.4.14"
|
||||
@@ -2612,6 +2760,31 @@ dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "lz4-sys"
|
||||
version = "1.10.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "109de74d5d2353660401699a4174a4ff23fcc649caf553df71933c7fb45ad868"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matchers"
|
||||
version = "0.1.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8263075bb86c5a1b1427b5ae862e8889656f126e9f77c484496e8b47cf5c5558"
|
||||
dependencies = [
|
||||
"regex-automata 0.1.10",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "matchit"
|
||||
version = "0.7.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0e7465ac9959cc2b1404e8e2367b43684a6d13790fe23056cc8c6c5a6b7bcb94"
|
||||
|
||||
[[package]]
|
||||
name = "maybe-rayon"
|
||||
version = "0.1.1"
|
||||
@@ -2634,6 +2807,12 @@ version = "0.3.17"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6877bb514081ee2a7ff5ef9de3281f14a4dd4bceac4c09388074a6b5df8a139a"
|
||||
|
||||
[[package]]
|
||||
name = "minimal-lexical"
|
||||
version = "0.2.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "68354c5c6bd36d73ff3feceb05efa59b6acb7626617f4962be322a825e61f79a"
|
||||
|
||||
[[package]]
|
||||
name = "miniz_oxide"
|
||||
version = "0.7.3"
|
||||
@@ -2746,6 +2925,26 @@ dependencies = [
|
||||
"tempfile",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nom"
|
||||
version = "7.1.3"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "d273983c5a657a70a3e8f2a01329822f3b8c8172b73826411a55751e404a0a4a"
|
||||
dependencies = [
|
||||
"memchr",
|
||||
"minimal-lexical",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "nu-ansi-term"
|
||||
version = "0.46.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "77a8165726e8236064dbb45459242600304b42a5ea24ee2948e18e023bf7ba84"
|
||||
dependencies = [
|
||||
"overload",
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "num"
|
||||
version = "0.4.3"
|
||||
@@ -2974,6 +3173,12 @@ dependencies = [
|
||||
"vcpkg",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "overload"
|
||||
version = "0.1.1"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "b15813163c1d831bf4a13c3610c05c0d03b39feb07f7e09fa234dac9b15aaf39"
|
||||
|
||||
[[package]]
|
||||
name = "pairing"
|
||||
version = "0.23.0"
|
||||
@@ -3267,7 +3472,7 @@ dependencies = [
|
||||
"rand",
|
||||
"rand_chacha",
|
||||
"rand_xorshift",
|
||||
"regex-syntax",
|
||||
"regex-syntax 0.8.3",
|
||||
"rusty-fork",
|
||||
"tempfile",
|
||||
"unarray",
|
||||
@@ -3278,6 +3483,7 @@ name = "prover"
|
||||
version = "0.1.0"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"base64 0.13.1",
|
||||
"clap",
|
||||
"ctor 0.2.8",
|
||||
@@ -3298,6 +3504,7 @@ dependencies = [
|
||||
"reqwest-middleware",
|
||||
"reqwest-retry",
|
||||
"rlp",
|
||||
"scroll-proving-sdk",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"sled",
|
||||
@@ -3489,8 +3696,17 @@ checksum = "c117dbdfde9c8308975b6a18d71f3f385c89461f7b3fb054288ecf2a2058ba4c"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-automata",
|
||||
"regex-syntax",
|
||||
"regex-automata 0.4.6",
|
||||
"regex-syntax 0.8.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-automata"
|
||||
version = "0.1.10"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "6c230d73fb8d8c1b9c0b3135c5142a8acee3a0558fb8db5cf1cb65f8d7862132"
|
||||
dependencies = [
|
||||
"regex-syntax 0.6.29",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -3501,9 +3717,15 @@ checksum = "86b83b8b9847f9bf95ef68afb0b8e6cdb80f498442f5179a29fad448fcc1eaea"
|
||||
dependencies = [
|
||||
"aho-corasick",
|
||||
"memchr",
|
||||
"regex-syntax",
|
||||
"regex-syntax 0.8.3",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.6.29"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f162c6dd7b008981e4d40210aca20b4bd0f9b60ca9271061b07f78537722f2e1"
|
||||
|
||||
[[package]]
|
||||
name = "regex-syntax"
|
||||
version = "0.8.3"
|
||||
@@ -3787,6 +4009,16 @@ dependencies = [
|
||||
"syn 1.0.109",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "rocksdb"
|
||||
version = "0.23.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "26ec73b20525cb235bad420f911473b69f9fe27cc856c5461bccd7e4af037f43"
|
||||
dependencies = [
|
||||
"libc",
|
||||
"librocksdb-sys",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "ruint"
|
||||
version = "1.12.1"
|
||||
@@ -3995,6 +4227,36 @@ version = "1.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "94143f37725109f92c262ed2cf5e59bce7498c01bcc1502d7b9afe439a4e9f49"
|
||||
|
||||
[[package]]
|
||||
name = "scroll-proving-sdk"
|
||||
version = "0.1.0"
|
||||
source = "git+https://github.com/scroll-tech/scroll-proving-sdk.git?rev=160db6c#160db6ceec45235f13b0f2581802a614f7e90a4b"
|
||||
dependencies = [
|
||||
"anyhow",
|
||||
"async-trait",
|
||||
"axum",
|
||||
"clap",
|
||||
"dotenv",
|
||||
"ethers-core 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
|
||||
"ethers-providers 2.0.7 (git+https://github.com/scroll-tech/ethers-rs.git?branch=v2.0.7)",
|
||||
"hex",
|
||||
"http 1.1.0",
|
||||
"log",
|
||||
"prover 0.13.0",
|
||||
"rand",
|
||||
"reqwest 0.12.4",
|
||||
"reqwest-middleware",
|
||||
"reqwest-retry",
|
||||
"rlp",
|
||||
"rocksdb",
|
||||
"serde",
|
||||
"serde_json",
|
||||
"tiny-keccak",
|
||||
"tokio",
|
||||
"tracing",
|
||||
"tracing-subscriber",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "scrypt"
|
||||
version = "0.10.0"
|
||||
@@ -4167,6 +4429,16 @@ dependencies = [
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_path_to_error"
|
||||
version = "0.1.16"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "af99884400da37c88f5e9146b7f1fd0fbcae8f6eec4e9da38b67d05486f814a6"
|
||||
dependencies = [
|
||||
"itoa",
|
||||
"serde",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "serde_stacker"
|
||||
version = "0.1.11"
|
||||
@@ -4265,6 +4537,30 @@ dependencies = [
|
||||
"cfg-if 1.0.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "sharded-slab"
|
||||
version = "0.1.7"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "f40ca3c46823713e0d4209592e8d6e826aa57e928f09752619fc696c499637f6"
|
||||
dependencies = [
|
||||
"lazy_static",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "shlex"
|
||||
version = "1.3.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
|
||||
|
||||
[[package]]
|
||||
name = "signal-hook-registry"
|
||||
version = "1.4.2"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "a9e9e0b4211b72e7b8b6e85c807d36c212bdb33ea8587f7569562a84df5465b1"
|
||||
dependencies = [
|
||||
"libc",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "signature"
|
||||
version = "2.2.0"
|
||||
@@ -4580,6 +4876,16 @@ dependencies = [
|
||||
"winapi",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "thread_local"
|
||||
version = "1.1.8"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "8b9ef9bad013ada3808854ceac7b46812a6465ba368859a37e2100283d2d719c"
|
||||
dependencies = [
|
||||
"cfg-if 1.0.0",
|
||||
"once_cell",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "threadpool"
|
||||
version = "1.8.1"
|
||||
@@ -4623,11 +4929,26 @@ dependencies = [
|
||||
"bytes",
|
||||
"libc",
|
||||
"mio",
|
||||
"num_cpus",
|
||||
"parking_lot 0.12.3",
|
||||
"pin-project-lite",
|
||||
"signal-hook-registry",
|
||||
"socket2",
|
||||
"tokio-macros",
|
||||
"windows-sys 0.48.0",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-macros"
|
||||
version = "2.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "5b8a1e28f2deaa14e508979454cb3a223b10b938b45af148bc0986de36f1923b"
|
||||
dependencies = [
|
||||
"proc-macro2",
|
||||
"quote",
|
||||
"syn 2.0.66",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tokio-native-tls"
|
||||
version = "0.3.1"
|
||||
@@ -4717,6 +5038,7 @@ dependencies = [
|
||||
"tokio",
|
||||
"tower-layer",
|
||||
"tower-service",
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4737,6 +5059,7 @@ version = "0.1.40"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c3523ab5a71916ccf420eebdf5521fcef02141234bbc0b8a49f2fdc4544364ef"
|
||||
dependencies = [
|
||||
"log",
|
||||
"pin-project-lite",
|
||||
"tracing-attributes",
|
||||
"tracing-core",
|
||||
@@ -4760,6 +5083,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "c06d3da6113f116aaee68e4d601191614c9053067f9ab7f6edbcb161237daa54"
|
||||
dependencies = [
|
||||
"once_cell",
|
||||
"valuable",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -4772,6 +5096,35 @@ dependencies = [
|
||||
"tracing",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-log"
|
||||
version = "0.2.0"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ee855f1f400bd0e5c02d150ae5de3840039a3f54b025156404e34c23c03f47c3"
|
||||
dependencies = [
|
||||
"log",
|
||||
"once_cell",
|
||||
"tracing-core",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "tracing-subscriber"
|
||||
version = "0.3.18"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "ad0f048c97dbd9faa9b7df56362b8ebcaa52adb06b498c050d2f4e32f90a7a8b"
|
||||
dependencies = [
|
||||
"matchers",
|
||||
"nu-ansi-term",
|
||||
"once_cell",
|
||||
"regex",
|
||||
"sharded-slab",
|
||||
"smallvec",
|
||||
"thread_local",
|
||||
"tracing",
|
||||
"tracing-core",
|
||||
"tracing-log",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "try-lock"
|
||||
version = "0.2.5"
|
||||
@@ -5478,7 +5831,7 @@ name = "zstd-safe"
|
||||
version = "7.0.0"
|
||||
source = "git+https://github.com/scroll-tech/zstd-rs?branch=hack/mul-block#5c0892b6567dab31394d701477183ce9d6a32aca"
|
||||
dependencies = [
|
||||
"zstd-sys",
|
||||
"zstd-sys 2.0.9+zstd.1.5.5",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
@@ -5489,3 +5842,13 @@ dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
[[package]]
|
||||
name = "zstd-sys"
|
||||
version = "2.0.13+zstd.1.5.6"
|
||||
source = "registry+https://github.com/rust-lang/crates.io-index"
|
||||
checksum = "38ff0f21cfee8f97d94cef41359e0c89aa6113028ab0291aa8ca0038995a95aa"
|
||||
dependencies = [
|
||||
"cc",
|
||||
"pkg-config",
|
||||
]
|
||||
|
||||
@@ -31,6 +31,7 @@ halo2_proofs = { git = "https://github.com/scroll-tech/halo2.git", branch = "v1.
|
||||
snark-verifier-sdk = { git = "https://github.com/scroll-tech/snark-verifier", branch = "develop", default-features = false, features = ["loader_halo2", "loader_evm", "halo2-pse"] }
|
||||
prover_darwin = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.12.2", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
|
||||
prover_darwin_v2 = { git = "https://github.com/scroll-tech/zkevm-circuits.git", tag = "v0.13.1", package = "prover", default-features = false, features = ["parallel_syn", "scroll"] }
|
||||
scroll-proving-sdk = { git = "https://github.com/scroll-tech/scroll-proving-sdk.git", rev = "160db6c"}
|
||||
base64 = "0.13.1"
|
||||
reqwest = { version = "0.12.4", features = ["gzip"] }
|
||||
reqwest-middleware = "0.3"
|
||||
@@ -42,6 +43,7 @@ rand = "0.8.5"
|
||||
eth-keystore = "0.5.0"
|
||||
rlp = "0.5.2"
|
||||
tokio = "1.37.0"
|
||||
async-trait = "0.1"
|
||||
sled = "0.34.7"
|
||||
http = "1.1.0"
|
||||
clap = { version = "4.5", features = ["derive"] }
|
||||
|
||||
@@ -1,26 +1,30 @@
|
||||
{
|
||||
"prover_name": "prover-1",
|
||||
"keystore_path": "keystore.json",
|
||||
"keystore_password": "prover-pwd",
|
||||
"db_path": "unique-db-path-for-prover-1",
|
||||
"prover_types": [2],
|
||||
"low_version_circuit": {
|
||||
"hard_fork_name": "bernoulli",
|
||||
"params_path": "params",
|
||||
"assets_path": "assets"
|
||||
},
|
||||
"high_version_circuit": {
|
||||
"hard_fork_name": "curie",
|
||||
"params_path": "params",
|
||||
"assets_path": "assets"
|
||||
},
|
||||
"sdk_config": {
|
||||
"prover_name_prefix": "prover-1",
|
||||
"keys_dir": "keys",
|
||||
"coordinator": {
|
||||
"base_url": "http://localhost:8555",
|
||||
"retry_count": 10,
|
||||
"retry_wait_time_sec": 10,
|
||||
"connection_timeout_sec": 30
|
||||
"base_url": "http://localhost:8555",
|
||||
"retry_count": 10,
|
||||
"retry_wait_time_sec": 10,
|
||||
"connection_timeout_sec": 30
|
||||
},
|
||||
"l2geth": {
|
||||
"endpoint": "http://localhost:9999"
|
||||
}
|
||||
"endpoint": "http://localhost:9999"
|
||||
},
|
||||
"prover": {
|
||||
"circuit_types": [1,2,3],
|
||||
"circuit_version": "v0.13.1"
|
||||
},
|
||||
"db_path": "unique-db-path-for-prover-1"
|
||||
},
|
||||
"low_version_circuit": {
|
||||
"hard_fork_name": "Darvin",
|
||||
"params_path": "params",
|
||||
"assets_path": "assets"
|
||||
},
|
||||
"high_version_circuit": {
|
||||
"hard_fork_name": "DarvinV2",
|
||||
"params_path": "params",
|
||||
"assets_path": "assets"
|
||||
}
|
||||
}
|
||||
|
||||
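The updated config above nests the coordinator, l2geth, and prover settings under `sdk_config` and keeps the two circuit sections at the top level. Below is a minimal, self-contained sketch of deserializing that shape; it assumes `serde`/`serde_json`, and the struct names here are illustrative only (the crate's real `LocalProverConfig`/`CircuitConfig` types appear in the prover.rs diff further down).

```rust
use serde::Deserialize;

#[derive(Deserialize)]
struct CircuitSection {
    hard_fork_name: String,
    params_path: String,
    assets_path: String,
}

#[derive(Deserialize)]
struct ConfigSketch {
    // passed through to scroll-proving-sdk; kept opaque in this sketch
    sdk_config: serde_json::Value,
    low_version_circuit: CircuitSection,
    high_version_circuit: CircuitSection,
}

fn main() -> Result<(), Box<dyn std::error::Error>> {
    // read the JSON file shown above
    let raw = std::fs::read_to_string("config.json")?;
    let cfg: ConfigSketch = serde_json::from_str(&raw)?;
    println!("high-version fork: {}", cfg.high_version_circuit.hard_fork_name);
    Ok(())
}
```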
@@ -1,55 +1,4 @@
|
||||
use anyhow::{bail, Result};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::fs::File;
|
||||
|
||||
use crate::types::ProverType;
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct CircuitConfig {
|
||||
pub hard_fork_name: String,
|
||||
pub params_path: String,
|
||||
pub assets_path: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct CoordinatorConfig {
|
||||
pub base_url: String,
|
||||
pub retry_count: u32,
|
||||
pub retry_wait_time_sec: u64,
|
||||
pub connection_timeout_sec: u64,
|
||||
}
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize)]
|
||||
pub struct L2GethConfig {
|
||||
pub endpoint: String,
|
||||
}
|
||||
|
||||
#[derive(Debug, Deserialize)]
|
||||
pub struct Config {
|
||||
pub prover_name: String,
|
||||
pub keystore_path: String,
|
||||
pub keystore_password: String,
|
||||
pub db_path: String,
|
||||
pub prover_types: Vec<ProverType>,
|
||||
pub low_version_circuit: CircuitConfig,
|
||||
pub high_version_circuit: CircuitConfig,
|
||||
pub coordinator: CoordinatorConfig,
|
||||
pub l2geth: Option<L2GethConfig>,
|
||||
}
|
||||
|
||||
impl Config {
|
||||
pub fn from_reader<R>(reader: R) -> Result<Self>
|
||||
where
|
||||
R: std::io::Read,
|
||||
{
|
||||
serde_json::from_reader(reader).map_err(|e| anyhow::anyhow!(e))
|
||||
}
|
||||
|
||||
pub fn from_file(file_name: String) -> Result<Self> {
|
||||
let file = File::open(file_name)?;
|
||||
Config::from_reader(&file)
|
||||
}
|
||||
}
|
||||
|
||||
static SCROLL_PROVER_ASSETS_DIR_ENV_NAME: &str = "SCROLL_PROVER_ASSETS_DIR";
|
||||
static mut SCROLL_PROVER_ASSETS_DIRS: Vec<String> = vec![];
|
||||
|
||||
@@ -1,142 +0,0 @@
|
||||
mod api;
|
||||
mod errors;
|
||||
pub mod listener;
|
||||
pub mod types;
|
||||
|
||||
use anyhow::{bail, Context, Ok, Result};
|
||||
use std::rc::Rc;
|
||||
|
||||
use api::Api;
|
||||
use errors::*;
|
||||
use listener::Listener;
|
||||
use tokio::runtime::Runtime;
|
||||
use types::*;
|
||||
|
||||
use crate::{config::Config, key_signer::KeySigner};
|
||||
|
||||
pub use errors::ProofStatusNotOKError;
|
||||
|
||||
pub struct CoordinatorClient<'a> {
|
||||
api: Api,
|
||||
token: Option<String>,
|
||||
config: &'a Config,
|
||||
key_signer: Rc<KeySigner>,
|
||||
rt: Runtime,
|
||||
listener: Box<dyn Listener>,
|
||||
vks: Vec<String>,
|
||||
}
|
||||
|
||||
impl<'a> CoordinatorClient<'a> {
|
||||
pub fn new(
|
||||
config: &'a Config,
|
||||
key_signer: Rc<KeySigner>,
|
||||
listener: Box<dyn Listener>,
|
||||
vks: Vec<String>,
|
||||
) -> Result<Self> {
|
||||
let rt = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()?;
|
||||
|
||||
let api = Api::new(
|
||||
&config.coordinator.base_url,
|
||||
core::time::Duration::from_secs(config.coordinator.connection_timeout_sec),
|
||||
config.coordinator.retry_count,
|
||||
config.coordinator.retry_wait_time_sec,
|
||||
)?;
|
||||
let mut client = Self {
|
||||
api,
|
||||
token: None,
|
||||
config,
|
||||
key_signer,
|
||||
rt,
|
||||
listener,
|
||||
vks,
|
||||
};
|
||||
client.login()?;
|
||||
Ok(client)
|
||||
}
|
||||
|
||||
fn login(&mut self) -> Result<()> {
|
||||
let api = &self.api;
|
||||
let challenge_response = self.rt.block_on(api.challenge())?;
|
||||
if challenge_response.errcode != ErrorCode::Success {
|
||||
bail!("challenge failed: {}", challenge_response.errmsg)
|
||||
}
|
||||
let mut token: String;
|
||||
if let Some(r) = challenge_response.data {
|
||||
token = r.token;
|
||||
} else {
|
||||
bail!("challenge failed: got empty token")
|
||||
}
|
||||
|
||||
let login_message = LoginMessage {
|
||||
challenge: token.clone(),
|
||||
prover_name: self.config.prover_name.clone(),
|
||||
prover_version: crate::version::get_version(),
|
||||
prover_types: self.config.prover_types.clone(),
|
||||
vks: self.vks.clone(),
|
||||
};
|
||||
|
||||
let buffer = rlp::encode(&login_message);
|
||||
let signature = self.key_signer.sign_buffer(&buffer)?;
|
||||
let login_request = LoginRequest {
|
||||
message: login_message,
|
||||
public_key: self.key_signer.get_public_key(),
|
||||
signature,
|
||||
};
|
||||
let login_response = self.rt.block_on(api.login(&login_request, &token))?;
|
||||
if login_response.errcode != ErrorCode::Success {
|
||||
bail!("login failed: {}", login_response.errmsg)
|
||||
}
|
||||
if let Some(r) = login_response.data {
|
||||
token = r.token;
|
||||
} else {
|
||||
bail!("login failed: got empty token")
|
||||
}
|
||||
self.token = Some(token);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn action_with_re_login<T, F, R>(&mut self, req: &R, mut f: F) -> Result<Response<T>>
|
||||
where
|
||||
F: FnMut(&mut Self, &R) -> Result<Response<T>>,
|
||||
{
|
||||
let response = f(self, req)?;
|
||||
if response.errcode == ErrorCode::ErrJWTTokenExpired {
|
||||
log::info!("JWT expired, attempting to re-login");
|
||||
self.login().context("JWT expired, re-login failed")?;
|
||||
log::info!("re-login success");
|
||||
return self.action_with_re_login(req, f);
|
||||
} else if response.errcode != ErrorCode::Success {
|
||||
bail!("action failed: {}", response.errmsg)
|
||||
}
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
fn do_get_task(&mut self, req: &GetTaskRequest) -> Result<Response<GetTaskResponseData>> {
|
||||
self.rt
|
||||
.block_on(self.api.get_task(req, self.token.as_ref().unwrap()))
|
||||
}
|
||||
|
||||
pub fn get_task(&mut self, req: &GetTaskRequest) -> Result<Response<GetTaskResponseData>> {
|
||||
self.action_with_re_login(req, |s, req| s.do_get_task(req))
|
||||
}
|
||||
|
||||
fn do_submit_proof(
|
||||
&mut self,
|
||||
req: &SubmitProofRequest,
|
||||
) -> Result<Response<SubmitProofResponseData>> {
|
||||
let response = self
|
||||
.rt
|
||||
.block_on(self.api.submit_proof(req, self.token.as_ref().unwrap()))?;
|
||||
self.listener.on_proof_submitted(req);
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
pub fn submit_proof(
|
||||
&mut self,
|
||||
req: &SubmitProofRequest,
|
||||
) -> Result<Response<SubmitProofResponseData>> {
|
||||
self.action_with_re_login(req, |s, req| s.do_submit_proof(req))
|
||||
}
|
||||
}
|
||||
@@ -1,144 +0,0 @@
|
||||
use crate::{coordinator_client::ProofStatusNotOKError, types::ProofStatus};
|
||||
|
||||
use super::{errors::*, types::*};
|
||||
use anyhow::{bail, Result};
|
||||
use core::time::Duration;
|
||||
use reqwest::{header::CONTENT_TYPE, Url};
|
||||
use reqwest_middleware::{ClientBuilder, ClientWithMiddleware};
|
||||
use reqwest_retry::{policies::ExponentialBackoff, RetryTransientMiddleware};
|
||||
use serde::Serialize;
|
||||
|
||||
pub struct Api {
|
||||
url_base: Url,
|
||||
send_timeout: Duration,
|
||||
pub client: ClientWithMiddleware,
|
||||
}
|
||||
|
||||
impl Api {
|
||||
pub fn new(
|
||||
url_base: &str,
|
||||
send_timeout: Duration,
|
||||
retry_count: u32,
|
||||
retry_wait_time_sec: u64,
|
||||
) -> Result<Self> {
|
||||
let retry_wait_duration = core::time::Duration::from_secs(retry_wait_time_sec);
|
||||
let retry_policy = ExponentialBackoff::builder()
|
||||
.retry_bounds(retry_wait_duration / 2, retry_wait_duration)
|
||||
.build_with_max_retries(retry_count);
|
||||
|
||||
let client = ClientBuilder::new(reqwest::Client::new())
|
||||
.with(RetryTransientMiddleware::new_with_policy(retry_policy))
|
||||
.build();
|
||||
|
||||
Ok(Self {
|
||||
url_base: Url::parse(url_base)?,
|
||||
send_timeout,
|
||||
client,
|
||||
})
|
||||
}
|
||||
|
||||
pub async fn challenge(&self) -> Result<Response<ChallengeResponseData>> {
|
||||
let method = "/coordinator/v1/challenge";
|
||||
let url = self.build_url(method)?;
|
||||
|
||||
let response = self
|
||||
.client
|
||||
.get(url)
|
||||
.header(CONTENT_TYPE, "application/json")
|
||||
.timeout(self.send_timeout)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
let response_body = response.text().await?;
|
||||
|
||||
serde_json::from_str(&response_body).map_err(|e| anyhow::anyhow!(e))
|
||||
}
|
||||
|
||||
pub async fn login(
|
||||
&self,
|
||||
req: &LoginRequest,
|
||||
token: &String,
|
||||
) -> Result<Response<LoginResponseData>> {
|
||||
let method = "/coordinator/v1/login";
|
||||
self.post_with_token(method, req, token).await
|
||||
}
|
||||
|
||||
pub async fn get_task(
|
||||
&self,
|
||||
req: &GetTaskRequest,
|
||||
token: &String,
|
||||
) -> Result<Response<GetTaskResponseData>> {
|
||||
let method = "/coordinator/v1/get_task";
|
||||
self.post_with_token(method, req, token).await
|
||||
}
|
||||
|
||||
pub async fn submit_proof(
|
||||
&self,
|
||||
req: &SubmitProofRequest,
|
||||
token: &String,
|
||||
) -> Result<Response<SubmitProofResponseData>> {
|
||||
let method = "/coordinator/v1/submit_proof";
|
||||
let response = self
|
||||
.post_with_token::<SubmitProofRequest, Response<SubmitProofResponseData>>(
|
||||
method, req, token,
|
||||
)
|
||||
.await?;
|
||||
|
||||
// When the request's status is already not OK, mark the error returned from the coordinator
|
||||
// so it can be ignored later.
|
||||
if response.errcode == ErrorCode::ErrCoordinatorHandleZkProofFailure
|
||||
&& req.status != ProofStatus::Ok
|
||||
&& response
|
||||
.errmsg
|
||||
.contains("validator failure proof msg status not ok")
|
||||
{
|
||||
return Err(anyhow::anyhow!(ProofStatusNotOKError));
|
||||
}
|
||||
Ok(response)
|
||||
}
|
||||
|
||||
async fn post_with_token<Req, Resp>(
|
||||
&self,
|
||||
method: &str,
|
||||
req: &Req,
|
||||
token: &String,
|
||||
) -> Result<Resp>
|
||||
where
|
||||
Req: ?Sized + Serialize,
|
||||
Resp: serde::de::DeserializeOwned,
|
||||
{
|
||||
let url = self.build_url(method)?;
|
||||
let request_body = serde_json::to_string(req)?;
|
||||
|
||||
log::info!("[coordinator client], {method}, request: {request_body}");
|
||||
let response = self
|
||||
.client
|
||||
.post(url)
|
||||
.header(CONTENT_TYPE, "application/json")
|
||||
.bearer_auth(token)
|
||||
.body(request_body)
|
||||
.timeout(self.send_timeout)
|
||||
.send()
|
||||
.await?;
|
||||
|
||||
if response.status() != http::status::StatusCode::OK {
|
||||
log::error!(
|
||||
"[coordinator client], {method}, status not ok: {}",
|
||||
response.status()
|
||||
);
|
||||
bail!(
|
||||
"[coordinator client], {method}, status not ok: {}",
|
||||
response.status()
|
||||
)
|
||||
}
|
||||
|
||||
let response_body = response.text().await?;
|
||||
|
||||
log::info!("[coordinator client], {method}, response: {response_body}");
|
||||
serde_json::from_str(&response_body).map_err(|e| anyhow::anyhow!(e))
|
||||
}
|
||||
|
||||
fn build_url(&self, method: &str) -> Result<Url> {
|
||||
self.url_base.join(method).map_err(|e| anyhow::anyhow!(e))
|
||||
}
|
||||
}
|
||||
@@ -1,65 +0,0 @@
|
||||
use serde::{Deserialize, Deserializer};
|
||||
use std::fmt;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum ErrorCode {
|
||||
Success,
|
||||
InternalServerError,
|
||||
|
||||
ErrProverStatsAPIParameterInvalidNo,
|
||||
ErrProverStatsAPIProverTaskFailure,
|
||||
ErrProverStatsAPIProverTotalRewardFailure,
|
||||
|
||||
ErrCoordinatorParameterInvalidNo,
|
||||
ErrCoordinatorGetTaskFailure,
|
||||
ErrCoordinatorHandleZkProofFailure,
|
||||
ErrCoordinatorEmptyProofData,
|
||||
|
||||
ErrJWTCommonErr,
|
||||
ErrJWTTokenExpired,
|
||||
|
||||
Undefined(i32),
|
||||
}
|
||||
|
||||
impl ErrorCode {
|
||||
fn from_i32(v: i32) -> Self {
|
||||
match v {
|
||||
0 => ErrorCode::Success,
|
||||
500 => ErrorCode::InternalServerError,
|
||||
10001 => ErrorCode::ErrProverStatsAPIParameterInvalidNo,
|
||||
10002 => ErrorCode::ErrProverStatsAPIProverTaskFailure,
|
||||
10003 => ErrorCode::ErrProverStatsAPIProverTotalRewardFailure,
|
||||
20001 => ErrorCode::ErrCoordinatorParameterInvalidNo,
|
||||
20002 => ErrorCode::ErrCoordinatorGetTaskFailure,
|
||||
20003 => ErrorCode::ErrCoordinatorHandleZkProofFailure,
|
||||
20004 => ErrorCode::ErrCoordinatorEmptyProofData,
|
||||
50000 => ErrorCode::ErrJWTCommonErr,
|
||||
50001 => ErrorCode::ErrJWTTokenExpired,
|
||||
_ => {
|
||||
log::error!("get unexpected error code from coordinator: {v}");
|
||||
ErrorCode::Undefined(v)
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for ErrorCode {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let v: i32 = i32::deserialize(deserializer)?;
|
||||
Ok(ErrorCode::from_i32(v))
|
||||
}
|
||||
}
|
||||
|
||||
// ====================================================
|
||||
|
||||
#[derive(Debug, Clone)]
|
||||
pub struct ProofStatusNotOKError;
|
||||
|
||||
impl fmt::Display for ProofStatusNotOKError {
|
||||
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
|
||||
write!(f, "proof status not ok")
|
||||
}
|
||||
}
|
||||
@@ -1,5 +0,0 @@
|
||||
use super::SubmitProofRequest;
|
||||
|
||||
pub trait Listener {
|
||||
fn on_proof_submitted(&self, req: &SubmitProofRequest);
|
||||
}
|
||||
@@ -1,86 +0,0 @@
|
||||
use super::errors::ErrorCode;
|
||||
use crate::types::{ProofFailureType, ProofStatus, ProverType, TaskType};
|
||||
use rlp::{Encodable, RlpStream};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Deserialize)]
|
||||
pub struct Response<T> {
|
||||
pub errcode: ErrorCode,
|
||||
pub errmsg: String,
|
||||
pub data: Option<T>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct LoginMessage {
|
||||
pub challenge: String,
|
||||
pub prover_name: String,
|
||||
pub prover_version: String,
|
||||
pub prover_types: Vec<ProverType>,
|
||||
pub vks: Vec<String>,
|
||||
}
|
||||
|
||||
impl Encodable for LoginMessage {
|
||||
fn rlp_append(&self, s: &mut RlpStream) {
|
||||
let num_fields = 5;
|
||||
s.begin_list(num_fields);
|
||||
s.append(&self.challenge);
|
||||
s.append(&self.prover_version);
|
||||
s.append(&self.prover_name);
|
||||
// The ProverType on the Go side is a type alias of uint8.
|
||||
// A uint8 slice is treated as a string when doing the RLP encoding.
|
||||
let prover_types = self
|
||||
.prover_types
|
||||
.iter()
|
||||
.map(|prover_type: &ProverType| prover_type.to_u8())
|
||||
.collect::<Vec<u8>>();
|
||||
s.append(&prover_types);
|
||||
s.begin_list(self.vks.len());
|
||||
for vk in &self.vks {
|
||||
s.append(vk);
|
||||
}
|
||||
}
|
||||
}
|
||||
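The field order in `rlp_append` above has to match the coordinator's Go-side decoding: challenge, prover version, prover name, the prover types as a single byte string, then the vk list. A standalone sketch of the same encoding using the `rlp` crate directly is shown below; the function and parameter names are illustrative only.

```rust
use rlp::RlpStream;

fn encode_login_message(
    challenge: String,
    prover_version: String,
    prover_name: String,
    prover_types: Vec<u8>,
    vks: Vec<String>,
) -> Vec<u8> {
    let mut s = RlpStream::new_list(5);
    s.append(&challenge);
    s.append(&prover_version);
    s.append(&prover_name);
    // a uint8 slice is encoded as one RLP string, matching the Go type alias
    s.append(&prover_types);
    // the vk list is a nested RLP list
    s.begin_list(vks.len());
    for vk in &vks {
        s.append(vk);
    }
    s.out().to_vec()
}
```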
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct LoginRequest {
|
||||
pub message: LoginMessage,
|
||||
pub public_key: String,
|
||||
pub signature: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct LoginResponseData {
|
||||
pub time: String,
|
||||
pub token: String,
|
||||
}
|
||||
|
||||
pub type ChallengeResponseData = LoginResponseData;
|
||||
|
||||
#[derive(Default, Serialize, Deserialize)]
|
||||
pub struct GetTaskRequest {
|
||||
pub task_types: Vec<TaskType>,
|
||||
pub prover_height: Option<u64>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct GetTaskResponseData {
|
||||
pub uuid: String,
|
||||
pub task_id: String,
|
||||
pub task_type: TaskType,
|
||||
pub task_data: String,
|
||||
pub hard_fork_name: String,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
pub struct SubmitProofRequest {
|
||||
pub uuid: String,
|
||||
pub task_id: String,
|
||||
pub task_type: TaskType,
|
||||
pub status: ProofStatus,
|
||||
pub proof: String,
|
||||
pub failure_type: Option<ProofFailureType>,
|
||||
pub failure_msg: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize)]
|
||||
pub struct SubmitProofResponseData {}
|
||||
@@ -1,57 +0,0 @@
|
||||
use crate::types::CommonHash;
|
||||
use anyhow::Result;
|
||||
use ethers_core::types::BlockNumber;
|
||||
use tokio::runtime::Runtime;
|
||||
|
||||
use serde::{de::DeserializeOwned, Serialize};
|
||||
use std::fmt::Debug;
|
||||
|
||||
use ethers_providers::{Http, Provider};
|
||||
|
||||
pub struct GethClient {
|
||||
id: String,
|
||||
provider: Provider<Http>,
|
||||
rt: Runtime,
|
||||
}
|
||||
|
||||
impl GethClient {
|
||||
pub fn new(id: &str, api_url: &str) -> Result<Self> {
|
||||
let provider = Provider::<Http>::try_from(api_url)?;
|
||||
let rt = tokio::runtime::Builder::new_current_thread()
|
||||
.enable_all()
|
||||
.build()?;
|
||||
|
||||
Ok(Self {
|
||||
id: id.to_string(),
|
||||
provider,
|
||||
rt,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_block_trace_by_hash<T>(&mut self, hash: &CommonHash) -> Result<T>
|
||||
where
|
||||
T: Serialize + DeserializeOwned + Debug + Send,
|
||||
{
|
||||
log::info!(
|
||||
"{}: calling get_block_trace_by_hash, hash: {:#?}",
|
||||
self.id,
|
||||
hash
|
||||
);
|
||||
|
||||
let trace_future = self
|
||||
.provider
|
||||
.request("scroll_getBlockTraceByNumberOrHash", [format!("{hash:#x}")]);
|
||||
|
||||
let trace = self.rt.block_on(trace_future)?;
|
||||
Ok(trace)
|
||||
}
|
||||
|
||||
pub fn block_number(&mut self) -> Result<BlockNumber> {
|
||||
log::info!("{}: calling block_number", self.id);
|
||||
|
||||
let trace_future = self.provider.request("eth_blockNumber", ());
|
||||
|
||||
let trace = self.rt.block_on(trace_future)?;
|
||||
Ok(trace)
|
||||
}
|
||||
}
|
||||
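For reference, a standalone sketch (ethers-rs plus tokio and anyhow) of the two RPC calls that `GethClient` wraps above; the endpoint and block hash are placeholders.

```rust
use ethers_core::types::H256;
use ethers_providers::{Http, Middleware, Provider};

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let provider = Provider::<Http>::try_from("http://localhost:9999")?;

    // same RPC as GethClient::block_number (eth_blockNumber)
    let latest = provider.get_block_number().await?;
    println!("latest block: {latest}");

    // same RPC as get_block_trace_by_hash; the trace layout is node-specific,
    // so it is deserialized into a generic JSON value here
    let hash = H256::zero(); // placeholder block hash
    let trace: serde_json::Value = provider
        .request("scroll_getBlockTraceByNumberOrHash", [format!("{hash:#x}")])
        .await?;
    println!("trace: {trace}");
    Ok(())
}
```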
@@ -1,103 +0,0 @@
|
||||
use std::path::Path;
|
||||
|
||||
use anyhow::Result;
|
||||
use ethers_core::{
|
||||
k256::{
|
||||
ecdsa::{signature::hazmat::PrehashSigner, RecoveryId, Signature, SigningKey},
|
||||
elliptic_curve::{sec1::ToEncodedPoint, FieldBytes},
|
||||
PublicKey, Secp256k1, SecretKey,
|
||||
},
|
||||
types::Signature as EthSignature,
|
||||
};
|
||||
|
||||
use ethers_core::types::{H256, U256};
|
||||
use hex::ToHex;
|
||||
use tiny_keccak::{Hasher, Keccak};
|
||||
|
||||
pub struct KeySigner {
|
||||
public_key: PublicKey,
|
||||
signer: SigningKey,
|
||||
}
|
||||
|
||||
impl KeySigner {
|
||||
pub fn new(key_path: &str, passwd: &str) -> Result<Self> {
|
||||
let p = Path::new(key_path);
|
||||
|
||||
let secret = if !p.exists() {
|
||||
log::info!("[key_signer] key_path not exists, create one");
|
||||
let dir = p.parent().unwrap();
|
||||
let name = p.file_name().and_then(|s| s.to_str());
|
||||
let mut rng = rand::thread_rng();
|
||||
let (secret, _) = eth_keystore::new(dir, &mut rng, passwd, name)?;
|
||||
secret
|
||||
} else {
|
||||
log::info!("[key_signer] key_path already exists, load it");
|
||||
eth_keystore::decrypt_key(key_path, passwd).map_err(|e| anyhow::anyhow!(e))?
|
||||
};
|
||||
|
||||
let secret_key = SecretKey::from_bytes(secret.as_slice().into())?;
|
||||
|
||||
let signer = SigningKey::from(secret_key.clone());
|
||||
|
||||
Ok(Self {
|
||||
public_key: secret_key.public_key(),
|
||||
signer,
|
||||
})
|
||||
}
|
||||
|
||||
pub fn get_public_key(&self) -> String {
|
||||
let v: Vec<u8> = Vec::from(self.public_key.to_encoded_point(true).as_bytes());
|
||||
buffer_to_hex(&v, false)
|
||||
}
|
||||
|
||||
/// Signs the provided hash.
|
||||
pub fn sign_hash(&self, hash: H256) -> Result<EthSignature> {
|
||||
let signer = &self.signer as &dyn PrehashSigner<(Signature, RecoveryId)>;
|
||||
let (recoverable_sig, recovery_id) = signer.sign_prehash(hash.as_ref())?;
|
||||
|
||||
let v = u8::from(recovery_id) as u64;
|
||||
|
||||
let r_bytes: FieldBytes<Secp256k1> = recoverable_sig.r().into();
|
||||
let s_bytes: FieldBytes<Secp256k1> = recoverable_sig.s().into();
|
||||
let r = U256::from_big_endian(r_bytes.as_slice());
|
||||
let s = U256::from_big_endian(s_bytes.as_slice());
|
||||
|
||||
Ok(EthSignature { r, s, v })
|
||||
}
|
||||
|
||||
pub fn sign_buffer<T>(&self, buffer: &T) -> Result<String>
|
||||
where
|
||||
T: AsRef<[u8]>,
|
||||
{
|
||||
let pre_hash = keccak256(buffer);
|
||||
|
||||
let hash = H256::from(pre_hash);
|
||||
let sig = self.sign_hash(hash)?;
|
||||
|
||||
Ok(buffer_to_hex(&sig.to_vec(), true))
|
||||
}
|
||||
}
|
||||
|
||||
fn buffer_to_hex<T>(buffer: &T, has_prefix: bool) -> String
|
||||
where
|
||||
T: AsRef<[u8]>,
|
||||
{
|
||||
if has_prefix {
|
||||
format!("0x{}", buffer.encode_hex::<String>())
|
||||
} else {
|
||||
buffer.encode_hex::<String>()
|
||||
}
|
||||
}
|
||||
|
||||
/// Compute the Keccak-256 hash of input bytes.
|
||||
///
|
||||
/// Note that strings are interpreted as UTF-8 bytes.
|
||||
pub fn keccak256<T: AsRef<[u8]>>(bytes: T) -> [u8; 32] {
|
||||
let mut output = [0u8; 32];
|
||||
|
||||
let mut hasher = Keccak::v256();
|
||||
hasher.update(bytes.as_ref());
|
||||
hasher.finalize(&mut output);
|
||||
|
||||
output
|
||||
}
|
||||
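A standalone sketch of the hashing step used by `sign_buffer` above; tiny-keccak and hex are already dependencies of this crate per the Cargo.lock section earlier, and the payload is a placeholder.

```rust
use tiny_keccak::{Hasher, Keccak};

fn main() {
    // same construction as keccak256/sign_buffer above: hash the raw bytes,
    // then hex-encode with a 0x prefix before sending to the coordinator
    let mut output = [0u8; 32];
    let mut hasher = Keccak::v256();
    hasher.update(b"example login payload"); // placeholder buffer
    hasher.finalize(&mut output);
    println!("digest: 0x{}", hex::encode(output));
}
```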
@@ -2,26 +2,19 @@
|
||||
#![feature(core_intrinsics)]
|
||||
|
||||
mod config;
|
||||
mod coordinator_client;
|
||||
mod geth_client;
|
||||
mod key_signer;
|
||||
mod prover;
|
||||
mod task_cache;
|
||||
mod task_processor;
|
||||
mod types;
|
||||
mod utils;
|
||||
mod version;
|
||||
mod zk_circuits_handler;
|
||||
|
||||
use anyhow::Result;
|
||||
use clap::{ArgAction, Parser};
|
||||
use config::{AssetsDirEnvConfig, Config};
|
||||
use prover::Prover;
|
||||
use std::rc::Rc;
|
||||
use task_cache::{ClearCacheCoordinatorListener, TaskCache};
|
||||
use task_processor::TaskProcessor;
|
||||
use prover::{LocalProver, LocalProverConfig};
|
||||
use scroll_proving_sdk::{
|
||||
prover::ProverBuilder,
|
||||
utils::{get_version, init_tracing},
|
||||
};
|
||||
use utils::get_prover_type;
|
||||
|
||||
/// Command-line arguments for the prover.
|
||||
#[derive(Parser, Debug)]
|
||||
#[clap(disable_version_flag = true)]
|
||||
struct Args {
|
||||
@@ -38,49 +31,38 @@ struct Args {
|
||||
log_file: Option<String>,
|
||||
}
|
||||
|
||||
fn start() -> Result<()> {
|
||||
#[tokio::main]
|
||||
async fn main() -> anyhow::Result<()> {
|
||||
init_tracing();
|
||||
|
||||
let args = Args::parse();
|
||||
|
||||
if args.version {
|
||||
println!("version is {}", version::get_version());
|
||||
println!("version is {}", get_version());
|
||||
std::process::exit(0);
|
||||
}
|
||||
|
||||
utils::log_init(args.log_file);
|
||||
let cfg = LocalProverConfig::from_file(args.config_file)?;
|
||||
let sdk_config = cfg.sdk_config.clone();
|
||||
let mut prover_types = vec![];
|
||||
sdk_config
|
||||
.prover
|
||||
.circuit_types
|
||||
.iter()
|
||||
.for_each(|circuit_type| {
|
||||
if let Some(pt) = get_prover_type(*circuit_type) {
|
||||
if !prover_types.contains(&pt) {
|
||||
prover_types.push(pt);
|
||||
}
|
||||
}
|
||||
});
|
||||
let local_prover = LocalProver::new(cfg, prover_types);
|
||||
let prover = ProverBuilder::new(sdk_config)
|
||||
.with_proving_service(Box::new(local_prover))
|
||||
.build()
|
||||
.await?;
|
||||
|
||||
let config: Config = Config::from_file(args.config_file)?;
|
||||
|
||||
if let Err(e) = AssetsDirEnvConfig::init() {
|
||||
log::error!("AssetsDirEnvConfig init failed: {:#}", e);
|
||||
std::process::exit(-2);
|
||||
}
|
||||
|
||||
let task_cache = Rc::new(TaskCache::new(&config.db_path)?);
|
||||
|
||||
let coordinator_listener = Box::new(ClearCacheCoordinatorListener {
|
||||
task_cache: task_cache.clone(),
|
||||
});
|
||||
|
||||
let prover = Prover::new(&config, coordinator_listener)?;
|
||||
|
||||
log::info!(
|
||||
"prover start successfully. name: {}, type: {:?}, publickey: {}, version: {}",
|
||||
config.prover_name,
|
||||
config.prover_types,
|
||||
prover.get_public_key(),
|
||||
version::get_version(),
|
||||
);
|
||||
|
||||
let task_processor = TaskProcessor::new(&prover, task_cache);
|
||||
|
||||
task_processor.start();
|
||||
prover.run().await;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn main() {
|
||||
let result = start();
|
||||
if let Err(e) = result {
|
||||
log::error!("main exit with error {:#}", e)
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,194 +1,192 @@
|
||||
use anyhow::{bail, Context, Error, Ok, Result};
|
||||
use ethers_core::types::U64;
|
||||
|
||||
use std::{cell::RefCell, rc::Rc};
|
||||
|
||||
use crate::{
|
||||
config::Config,
|
||||
coordinator_client::{listener::Listener, types::*, CoordinatorClient},
|
||||
geth_client::GethClient,
|
||||
key_signer::KeySigner,
|
||||
types::{ProofFailureType, ProofStatus, ProverType, TaskType},
|
||||
utils::{get_prover_type, get_task_types},
|
||||
types::ProverType,
|
||||
utils::get_prover_type,
|
||||
zk_circuits_handler::{CircuitsHandler, CircuitsHandlerProvider},
|
||||
};
|
||||
use anyhow::{anyhow, Result};
|
||||
use async_trait::async_trait;
|
||||
use scroll_proving_sdk::{
|
||||
config::Config as SdkConfig,
|
||||
prover::{
|
||||
proving_service::{
|
||||
GetVkRequest, GetVkResponse, ProveRequest, ProveResponse, QueryTaskRequest,
|
||||
QueryTaskResponse, TaskStatus,
|
||||
},
|
||||
ProvingService,
|
||||
},
|
||||
};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{
|
||||
fs::File,
|
||||
sync::{Arc, Mutex},
|
||||
time::{SystemTime, UNIX_EPOCH},
|
||||
};
|
||||
use tokio::{runtime::Handle, sync::RwLock, task::JoinHandle};
|
||||
|
||||
use super::types::{ProofDetail, Task};
|
||||
|
||||
pub struct Prover<'a> {
|
||||
config: &'a Config,
|
||||
key_signer: Rc<KeySigner>,
|
||||
circuits_handler_provider: RefCell<CircuitsHandlerProvider<'a>>,
|
||||
coordinator_client: RefCell<CoordinatorClient<'a>>,
|
||||
geth_client: Option<Rc<RefCell<GethClient>>>,
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct LocalProverConfig {
|
||||
pub sdk_config: SdkConfig,
|
||||
pub high_version_circuit: CircuitConfig,
|
||||
pub low_version_circuit: CircuitConfig,
|
||||
}
|
||||
|
||||
impl<'a> Prover<'a> {
|
||||
pub fn new(config: &'a Config, coordinator_listener: Box<dyn Listener>) -> Result<Self> {
|
||||
let keystore_path = &config.keystore_path;
|
||||
let keystore_password = &config.keystore_password;
|
||||
|
||||
let geth_client = if config
|
||||
.prover_types
|
||||
.iter()
|
||||
.any(|element| *element == ProverType::Chunk)
|
||||
{
|
||||
Some(Rc::new(RefCell::new(
|
||||
GethClient::new(
|
||||
&config.prover_name,
|
||||
&config.l2geth.as_ref().unwrap().endpoint,
|
||||
)
|
||||
.context("failed to create l2 geth_client")?,
|
||||
)))
|
||||
} else {
|
||||
None
|
||||
};
|
||||
|
||||
let provider = CircuitsHandlerProvider::new(config, geth_client.clone())
|
||||
.context("failed to create circuits handler provider")?;
|
||||
|
||||
let vks = provider.init_vks(config.prover_types.clone(), config, geth_client.clone());
|
||||
|
||||
let key_signer = Rc::new(KeySigner::new(keystore_path, keystore_password)?);
|
||||
let coordinator_client =
|
||||
CoordinatorClient::new(config, Rc::clone(&key_signer), coordinator_listener, vks)
|
||||
.context("failed to create coordinator_client")?;
|
||||
|
||||
let prover = Prover {
|
||||
config,
|
||||
key_signer: Rc::clone(&key_signer),
|
||||
circuits_handler_provider: RefCell::new(provider),
|
||||
coordinator_client: RefCell::new(coordinator_client),
|
||||
geth_client,
|
||||
};
|
||||
|
||||
Ok(prover)
|
||||
impl LocalProverConfig {
|
||||
pub fn from_reader<R>(reader: R) -> Result<Self>
|
||||
where
|
||||
R: std::io::Read,
|
||||
{
|
||||
serde_json::from_reader(reader).map_err(|e| anyhow!(e))
|
||||
}
|
||||
|
||||
pub fn get_public_key(&self) -> String {
|
||||
self.key_signer.get_public_key()
|
||||
pub fn from_file(file_name: String) -> Result<Self> {
|
||||
let file = File::open(file_name)?;
|
||||
Self::from_reader(&file)
|
||||
}
|
||||
}
|
||||
|
||||
pub fn fetch_task(&self) -> Result<Task> {
|
||||
log::info!("[prover] start to fetch_task");
|
||||
#[derive(Clone, Serialize, Deserialize)]
|
||||
pub struct CircuitConfig {
|
||||
pub hard_fork_name: String,
|
||||
pub params_path: String,
|
||||
pub assets_path: String,
|
||||
}
|
||||
|
||||
let task_types: Vec<TaskType> =
|
||||
self.config
|
||||
.prover_types
|
||||
.iter()
|
||||
.fold(Vec::new(), |mut acc, prover_type| {
|
||||
acc.extend(get_task_types(*prover_type));
|
||||
acc
|
||||
});
|
||||
pub struct LocalProver {
|
||||
config: LocalProverConfig,
|
||||
prover_types: Vec<ProverType>,
|
||||
circuits_handler_provider: RwLock<CircuitsHandlerProvider>,
|
||||
next_task_id: Arc<Mutex<u64>>,
|
||||
current_task: Arc<Mutex<Option<JoinHandle<Result<String>>>>>,
|
||||
}
|
||||
|
||||
let mut req = GetTaskRequest {
|
||||
task_types,
|
||||
prover_height: None,
|
||||
};
|
||||
|
||||
if self
|
||||
.config
|
||||
.prover_types
|
||||
.iter()
|
||||
.any(|element| *element == ProverType::Chunk)
|
||||
{
|
||||
let latest_block_number = self.get_latest_block_number_value()?;
|
||||
if let Some(v) = latest_block_number {
|
||||
if v.as_u64() == 0 {
|
||||
bail!("omit to prove task of the genesis block")
|
||||
#[async_trait]
|
||||
impl ProvingService for LocalProver {
|
||||
fn is_local(&self) -> bool {
|
||||
true
|
||||
}
|
||||
async fn get_vks(&self, req: GetVkRequest) -> GetVkResponse {
|
||||
let mut prover_types = vec![];
|
||||
req.circuit_types.iter().for_each(|circuit_type| {
|
||||
if let Some(pt) = get_prover_type(*circuit_type) {
|
||||
if !prover_types.contains(&pt) {
|
||||
prover_types.push(pt);
|
||||
}
|
||||
req.prover_height = Some(v.as_u64());
|
||||
} else {
|
||||
log::error!("[prover] failed to fetch latest confirmed block number, got None");
|
||||
bail!("failed to fetch latest confirmed block number, got None")
|
||||
}
|
||||
}
|
||||
let resp = self.coordinator_client.borrow_mut().get_task(&req)?;
|
||||
});
|
||||
|
||||
match resp.data {
|
||||
Some(d) => Ok(Task::from(d)),
|
||||
None => {
|
||||
bail!("data of get_task empty, while error_code is success. there may be something wrong in response data or inner logic.")
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn prove_task(&self, task: &Task) -> Result<ProofDetail> {
|
||||
let prover_type = match get_prover_type(task.task_type) {
|
||||
Some(pt) => Ok(pt),
|
||||
None => {
|
||||
bail!("unsupported prover_type.")
|
||||
}
|
||||
}?;
|
||||
log::info!("[prover] start to prove_task, task id: {}", task.id);
|
||||
let handler: Rc<Box<dyn CircuitsHandler>> = self
|
||||
let vks = self
|
||||
.circuits_handler_provider
|
||||
.borrow_mut()
|
||||
.get_circuits_handler(&task.hard_fork_name, prover_type)
|
||||
.context("failed to get circuit handler")?;
|
||||
self.do_prove(task, handler)
|
||||
.read()
|
||||
.await
|
||||
.init_vks(&self.config, prover_types)
|
||||
.await;
|
||||
GetVkResponse { vks, error: None }
|
||||
}
|
||||
async fn prove(&self, req: ProveRequest) -> ProveResponse {
|
||||
let handler = self
|
||||
.circuits_handler_provider
|
||||
.write()
|
||||
.await
|
||||
.get_circuits_handler(&req.hard_fork_name, self.prover_types.clone())
|
||||
.expect("failed to get circuit handler");
|
||||
|
||||
match self.do_prove(req, handler).await {
|
||||
Ok(resp) => resp,
|
||||
Err(e) => ProveResponse {
|
||||
status: TaskStatus::Failed,
|
||||
error: Some(format!("failed to request proof: {}", e)),
|
||||
..Default::default()
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
fn do_prove(&self, task: &Task, handler: Rc<Box<dyn CircuitsHandler>>) -> Result<ProofDetail> {
|
||||
let mut proof_detail = ProofDetail {
|
||||
id: task.id.clone(),
|
||||
proof_type: task.task_type,
|
||||
async fn query_task(&self, req: QueryTaskRequest) -> QueryTaskResponse {
|
||||
let handle = self.current_task.lock().unwrap().take();
|
||||
if let Some(handle) = handle {
|
||||
if handle.is_finished() {
|
||||
return match handle.await {
|
||||
Ok(Ok(proof)) => QueryTaskResponse {
|
||||
task_id: req.task_id,
|
||||
status: TaskStatus::Success,
|
||||
proof: Some(proof),
|
||||
..Default::default()
|
||||
},
|
||||
Ok(Err(e)) => QueryTaskResponse {
|
||||
task_id: req.task_id,
|
||||
status: TaskStatus::Failed,
|
||||
error: Some(format!("proving task failed: {}", e)),
|
||||
..Default::default()
|
||||
},
|
||||
Err(e) => QueryTaskResponse {
|
||||
task_id: req.task_id,
|
||||
status: TaskStatus::Failed,
|
||||
error: Some(format!("proving task panicked: {}", e)),
|
||||
..Default::default()
|
||||
},
|
||||
};
|
||||
} else {
|
||||
*self.current_task.lock().unwrap() = Some(handle);
|
||||
return QueryTaskResponse {
|
||||
task_id: req.task_id,
|
||||
status: TaskStatus::Proving,
|
||||
..Default::default()
|
||||
};
|
||||
}
|
||||
}
|
||||
// If no handle is found
|
||||
QueryTaskResponse {
|
||||
task_id: req.task_id,
|
||||
status: TaskStatus::Failed,
|
||||
error: Some("no proving task is running".to_string()),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
proof_detail.proof_data = handler.get_proof_data(task.task_type, task)?;
|
||||
Ok(proof_detail)
|
||||
}
|
||||
|
||||
pub fn submit_proof(&self, proof_detail: ProofDetail, task: &Task) -> Result<()> {
|
||||
log::info!(
|
||||
"[prover] start to submit_proof, task id: {}",
|
||||
proof_detail.id
|
||||
);
|
||||
|
||||
let request = SubmitProofRequest {
|
||||
uuid: task.uuid.clone(),
|
||||
task_id: proof_detail.id,
|
||||
task_type: proof_detail.proof_type,
|
||||
status: ProofStatus::Ok,
|
||||
proof: proof_detail.proof_data,
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
self.do_submit(&request)
|
||||
}
|
||||
|
||||
pub fn submit_error(
|
||||
&self,
|
||||
task: &Task,
|
||||
failure_type: ProofFailureType,
|
||||
error: Error,
|
||||
) -> Result<()> {
|
||||
log::info!("[prover] start to submit_error, task id: {}", task.id);
|
||||
let request = SubmitProofRequest {
|
||||
uuid: task.uuid.clone(),
|
||||
task_id: task.id.clone(),
|
||||
task_type: task.task_type,
|
||||
status: ProofStatus::Error,
|
||||
failure_type: Some(failure_type),
|
||||
failure_msg: Some(format!("{:#}", error)),
|
||||
..Default::default()
|
||||
};
|
||||
self.do_submit(&request)
|
||||
}
|
||||
|
||||
fn do_submit(&self, request: &SubmitProofRequest) -> Result<()> {
|
||||
self.coordinator_client.borrow_mut().submit_proof(request)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn get_latest_block_number_value(&self) -> Result<Option<U64>> {
|
||||
let number = self
|
||||
.geth_client
|
||||
.as_ref()
|
||||
.unwrap()
|
||||
.borrow_mut()
|
||||
.block_number()?;
|
||||
Ok(number.as_number())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl LocalProver {
|
||||
pub fn new(config: LocalProverConfig, prover_types: Vec<ProverType>) -> Self {
|
||||
let circuits_handler_provider = CircuitsHandlerProvider::new(config.clone())
|
||||
.expect("failed to create circuits handler provider");
|
||||
|
||||
Self {
|
||||
config,
|
||||
prover_types,
|
||||
circuits_handler_provider: RwLock::new(circuits_handler_provider),
|
||||
next_task_id: Arc::new(Mutex::new(0)),
|
||||
current_task: Arc::new(Mutex::new(None)),
|
||||
}
|
||||
}
|
||||
|
||||
async fn do_prove(
|
||||
&self,
|
||||
req: ProveRequest,
|
||||
handler: Arc<Box<dyn CircuitsHandler>>,
|
||||
) -> Result<ProveResponse> {
|
||||
let task_id = {
|
||||
let mut next_task_id = self.next_task_id.lock().unwrap();
|
||||
*next_task_id += 1;
|
||||
*next_task_id
|
||||
};
|
||||
|
||||
let duration = SystemTime::now().duration_since(UNIX_EPOCH).unwrap();
|
||||
let created_at = duration.as_secs() as f64 + duration.subsec_nanos() as f64 * 1e-9;
|
||||
|
||||
let req_clone = req.clone();
|
||||
let handle = Handle::current();
|
||||
let task_handle =
|
||||
tokio::task::spawn_blocking(move || handle.block_on(handler.get_proof_data(req_clone)));
|
||||
|
||||
*self.current_task.lock().unwrap() = Some(task_handle);
|
||||
|
||||
Ok(ProveResponse {
|
||||
task_id: task_id.to_string(),
|
||||
circuit_type: req.circuit_type,
|
||||
circuit_version: req.circuit_version,
|
||||
hard_fork_name: req.hard_fork_name,
|
||||
status: TaskStatus::Proving,
|
||||
created_at,
|
||||
input: Some(req.input),
|
||||
..Default::default()
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
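The `do_prove`/`query_task` pair above relies on a common tokio pattern: hand the long-running proving call to `spawn_blocking`, keep the `JoinHandle`, and poll `is_finished()` later. A minimal standalone sketch of that pattern follows; the job body and timings are illustrative, and tokio's rt, macros, and time features are assumed.

```rust
use tokio::{runtime::Handle, task::JoinHandle};

async fn heavy_job() -> anyhow::Result<String> {
    // stand-in for CircuitsHandler::get_proof_data
    Ok("proof-bytes".to_string())
}

fn start_job() -> JoinHandle<anyhow::Result<String>> {
    let handle = Handle::current();
    // block_on inside spawn_blocking keeps the heavy work off the async
    // worker threads while still driving an async handler API
    tokio::task::spawn_blocking(move || handle.block_on(heavy_job()))
}

#[tokio::main]
async fn main() -> anyhow::Result<()> {
    let job = start_job();
    // poll the handle the same way query_task does
    while !job.is_finished() {
        tokio::time::sleep(std::time::Duration::from_millis(10)).await;
    }
    println!("{}", job.await??);
    Ok(())
}
```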
@@ -1,66 +0,0 @@
|
||||
use anyhow::{Ok, Result};
|
||||
|
||||
use super::coordinator_client::{listener::Listener, types::SubmitProofRequest};
|
||||
use crate::types::TaskWrapper;
|
||||
use sled::{Config, Db};
|
||||
use std::rc::Rc;
|
||||
|
||||
pub struct TaskCache {
|
||||
db: Db,
|
||||
}
|
||||
|
||||
impl TaskCache {
|
||||
pub fn new(db_path: &String) -> Result<Self> {
|
||||
let config = Config::new().path(db_path);
|
||||
let db = config.open()?;
|
||||
log::info!("[task_cache] initiate successfully to {db_path}");
|
||||
Ok(Self { db })
|
||||
}
|
||||
|
||||
pub fn put_task(&self, task_wrapper: &TaskWrapper) -> Result<()> {
|
||||
let k = task_wrapper.task.id.clone().into_bytes();
|
||||
let v = serde_json::to_vec(task_wrapper)?;
|
||||
self.db.insert(k, v)?;
|
||||
log::info!(
|
||||
"[task_cache] put_task with task_id: {}",
|
||||
task_wrapper.task.id
|
||||
);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub fn get_last_task(&self) -> Result<Option<TaskWrapper>> {
|
||||
let last = self.db.last()?;
|
||||
if let Some((k, v)) = last {
|
||||
let kk = std::str::from_utf8(k.as_ref())?;
|
||||
let task_wrapper: TaskWrapper = serde_json::from_slice(v.as_ref())?;
|
||||
log::info!(
|
||||
"[task_cache] get_last_task with task_id: {kk}, count: {}",
|
||||
task_wrapper.get_count()
|
||||
);
|
||||
return Ok(Some(task_wrapper));
|
||||
}
|
||||
Ok(None)
|
||||
}
|
||||
|
||||
pub fn delete_task(&self, task_id: String) -> Result<()> {
|
||||
let k = task_id.clone().into_bytes();
|
||||
self.db.remove(k)?;
|
||||
log::info!("[task cache] delete_task with task_id: {task_id}");
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
// ========================= listener ===========================
|
||||
|
||||
pub struct ClearCacheCoordinatorListener {
|
||||
pub task_cache: Rc<TaskCache>,
|
||||
}
|
||||
|
||||
impl Listener for ClearCacheCoordinatorListener {
|
||||
fn on_proof_submitted(&self, req: &SubmitProofRequest) {
|
||||
let result = self.task_cache.delete_task(req.task_id.clone());
|
||||
if let Err(e) = result {
|
||||
log::error!("delete task from embed db failed, {:#}", e);
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -1,89 +0,0 @@
|
||||
use super::{coordinator_client::ProofStatusNotOKError, prover::Prover, task_cache::TaskCache};
|
||||
use anyhow::{Context, Result};
|
||||
use std::rc::Rc;
|
||||
|
||||
pub struct TaskProcessor<'a> {
|
||||
prover: &'a Prover<'a>,
|
||||
task_cache: Rc<TaskCache>,
|
||||
}
|
||||
|
||||
impl<'a> TaskProcessor<'a> {
|
||||
pub fn new(prover: &'a Prover<'a>, task_cache: Rc<TaskCache>) -> Self {
|
||||
TaskProcessor { prover, task_cache }
|
||||
}
|
||||
|
||||
pub fn start(&self) {
|
||||
loop {
|
||||
log::info!("start a new round.");
|
||||
if let Err(err) = self.prove_and_submit() {
|
||||
if err.is::<ProofStatusNotOKError>() {
|
||||
log::info!("proof status not ok, downgrade level to info.");
|
||||
} else {
|
||||
log::error!("encounter error: {:#}", err);
|
||||
}
|
||||
} else {
|
||||
log::info!("prove & submit succeed.");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn prove_and_submit(&self) -> Result<()> {
|
||||
let task_from_cache = self
|
||||
.task_cache
|
||||
.get_last_task()
|
||||
.context("failed to peek from stack")?;
|
||||
|
||||
let mut task_wrapper = match task_from_cache {
|
||||
Some(t) => t,
|
||||
None => {
|
||||
let fetch_result = self.prover.fetch_task();
|
||||
if let Err(err) = fetch_result {
|
||||
std::thread::sleep(core::time::Duration::from_secs(10));
|
||||
return Err(err).context("failed to fetch task from coordinator");
|
||||
}
|
||||
fetch_result.unwrap().into()
|
||||
}
|
||||
};
|
||||
|
||||
if task_wrapper.get_count() <= 2 {
|
||||
task_wrapper.increment_count();
|
||||
self.task_cache
|
||||
.put_task(&task_wrapper)
|
||||
.context("failed to push task into stack, updating count")?;
|
||||
|
||||
log::info!(
|
||||
"start to prove task, task_type: {:?}, task_id: {}",
|
||||
task_wrapper.task.task_type,
|
||||
task_wrapper.task.id
|
||||
);
|
||||
let result = match self.prover.prove_task(&task_wrapper.task) {
|
||||
Ok(proof_detail) => self.prover.submit_proof(proof_detail, &task_wrapper.task),
|
||||
Err(error) => {
|
||||
log::error!(
|
||||
"failed to prove task, id: {}, error: {:#}",
|
||||
&task_wrapper.task.id,
|
||||
error
|
||||
);
|
||||
self.prover.submit_error(
|
||||
&task_wrapper.task,
|
||||
super::types::ProofFailureType::NoPanic,
|
||||
error,
|
||||
)
|
||||
}
|
||||
};
|
||||
return result;
|
||||
}
|
||||
|
||||
// if the task has been tried 3 or more times, the failure is probably due to a circuit proving panic
|
||||
log::error!(
|
||||
"zk proving panic for task, task_type: {:?}, task_id: {}",
|
||||
task_wrapper.task.task_type,
|
||||
task_wrapper.task.id
|
||||
);
|
||||
self.prover.submit_error(
|
||||
&task_wrapper.task,
|
||||
super::types::ProofFailureType::Panic,
|
||||
anyhow::anyhow!("zk proving panic for task"),
|
||||
)
|
||||
}
|
||||
}
|
||||
@@ -1,59 +1,10 @@
|
||||
use ethers_core::types::H256;
|
||||
use serde::{Deserialize, Deserializer, Serialize, Serializer};
|
||||
|
||||
use crate::coordinator_client::types::GetTaskResponseData;
|
||||
use scroll_proving_sdk::prover::types::CircuitType;
|
||||
|
||||
pub type CommonHash = H256;
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq)]
|
||||
pub enum TaskType {
|
||||
Undefined,
|
||||
Chunk,
|
||||
Batch,
|
||||
Bundle,
|
||||
}
|
||||
|
||||
impl TaskType {
|
||||
fn from_u8(v: u8) -> Self {
|
||||
match v {
|
||||
1 => TaskType::Chunk,
|
||||
2 => TaskType::Batch,
|
||||
3 => TaskType::Bundle,
|
||||
_ => TaskType::Undefined,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for TaskType {
|
||||
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
|
||||
where
|
||||
S: Serializer,
|
||||
{
|
||||
match *self {
|
||||
TaskType::Undefined => serializer.serialize_u8(0),
|
||||
TaskType::Chunk => serializer.serialize_u8(1),
|
||||
TaskType::Batch => serializer.serialize_u8(2),
|
||||
TaskType::Bundle => serializer.serialize_u8(3),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl<'de> Deserialize<'de> for TaskType {
|
||||
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
|
||||
where
|
||||
D: Deserializer<'de>,
|
||||
{
|
||||
let v: u8 = u8::deserialize(deserializer)?;
|
||||
Ok(TaskType::from_u8(v))
|
||||
}
|
||||
}
|
||||
|
||||
impl Default for TaskType {
|
||||
fn default() -> Self {
|
||||
Self::Undefined
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
|
||||
pub enum ProverType {
|
||||
Chunk,
|
||||
@@ -70,13 +21,6 @@ impl ProverType {
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_u8(self) -> u8 {
|
||||
match self {
|
||||
ProverType::Chunk => 1,
|
||||
ProverType::Batch => 2,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Serialize for ProverType {
|
||||
@@ -103,54 +47,18 @@ impl<'de> Deserialize<'de> for ProverType {
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
pub struct Task {
|
||||
pub uuid: String,
|
||||
pub id: String,
|
||||
#[serde(rename = "type", default)]
|
||||
pub task_type: TaskType,
|
||||
pub task_type: CircuitType,
|
||||
pub task_data: String,
|
||||
#[serde(default)]
|
||||
pub hard_fork_name: String,
|
||||
}
|
||||
|
||||
impl From<GetTaskResponseData> for Task {
|
||||
fn from(value: GetTaskResponseData) -> Self {
|
||||
Self {
|
||||
uuid: value.uuid,
|
||||
id: value.task_id,
|
||||
task_type: value.task_type,
|
||||
task_data: value.task_data,
|
||||
hard_fork_name: value.hard_fork_name,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
pub struct TaskWrapper {
|
||||
pub task: Task,
|
||||
count: usize,
|
||||
}
|
||||
|
||||
impl TaskWrapper {
|
||||
pub fn increment_count(&mut self) {
|
||||
self.count += 1;
|
||||
}
|
||||
|
||||
pub fn get_count(&self) -> usize {
|
||||
self.count
|
||||
}
|
||||
}
|
||||
|
||||
impl From<Task> for TaskWrapper {
|
||||
fn from(task: Task) -> Self {
|
||||
TaskWrapper { task, count: 0 }
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Serialize, Deserialize, Default)]
|
||||
pub struct ProofDetail {
|
||||
pub id: String,
|
||||
#[serde(rename = "type", default)]
|
||||
pub proof_type: TaskType,
|
||||
pub proof_type: CircuitType,
|
||||
pub proof_data: String,
|
||||
pub error: String,
|
||||
}
|
||||
|
||||
@@ -1,54 +1,18 @@
|
||||
use env_logger::Env;
|
||||
use std::{fs::OpenOptions, sync::Once};
|
||||
use crate::types::ProverType;
|
||||
use scroll_proving_sdk::prover::types::CircuitType;
|
||||
|
||||
use crate::types::{ProverType, TaskType};
|
||||
|
||||
static LOG_INIT: Once = Once::new();
|
||||
|
||||
/// Initialize log
|
||||
pub fn log_init(log_file: Option<String>) {
|
||||
LOG_INIT.call_once(|| {
|
||||
let mut builder = env_logger::Builder::from_env(Env::default().default_filter_or("info"));
|
||||
if let Some(file_path) = log_file {
|
||||
let target = Box::new(
|
||||
OpenOptions::new()
|
||||
.write(true)
|
||||
.create(true)
|
||||
.truncate(false)
|
||||
.open(file_path)
|
||||
.expect("Can't create log file"),
|
||||
);
|
||||
builder.target(env_logger::Target::Pipe(target));
|
||||
}
|
||||
builder.init();
|
||||
});
|
||||
}
|
||||
|
||||
// pub fn get_task_types(prover_types: Vec<ProverType>) -> Vec<TaskType> {
|
||||
// prover_types.into_iter().fold(Vec::new(), |mut acc, prover_type| {
|
||||
// match prover_type {
|
||||
// ProverType::Chunk => acc.push(TaskType::Chunk),
|
||||
// ProverType::Batch => {
|
||||
// acc.push(TaskType::Batch);
|
||||
// acc.push(TaskType::Bundle);
|
||||
// }
|
||||
// }
|
||||
// acc
|
||||
// })
|
||||
// }
|
||||
|
||||
pub fn get_task_types(prover_type: ProverType) -> Vec<TaskType> {
|
||||
pub fn get_circuit_types(prover_type: ProverType) -> Vec<CircuitType> {
|
||||
match prover_type {
|
||||
ProverType::Chunk => vec![TaskType::Chunk],
|
||||
ProverType::Batch => vec![TaskType::Batch, TaskType::Bundle],
|
||||
ProverType::Chunk => vec![CircuitType::Chunk],
|
||||
ProverType::Batch => vec![CircuitType::Batch, CircuitType::Bundle],
|
||||
}
|
||||
}
|
||||
|
||||
pub fn get_prover_type(task_type: TaskType) -> Option<ProverType> {
|
||||
pub fn get_prover_type(task_type: CircuitType) -> Option<ProverType> {
|
||||
match task_type {
|
||||
TaskType::Undefined => None,
|
||||
TaskType::Chunk => Some(ProverType::Chunk),
|
||||
TaskType::Batch => Some(ProverType::Batch),
|
||||
TaskType::Bundle => Some(ProverType::Batch),
|
||||
CircuitType::Undefined => None,
|
||||
CircuitType::Chunk => Some(ProverType::Chunk),
|
||||
CircuitType::Batch => Some(ProverType::Batch),
|
||||
CircuitType::Bundle => Some(ProverType::Batch),
|
||||
}
|
||||
}
|
||||
|
||||
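For reference, a self-contained sketch of how `get_prover_type` above is used (in main.rs and `get_vks`) to collapse the configured circuit types into a deduplicated prover-type list; the numeric codes follow the TaskType/CircuitType mapping shown earlier (1 = chunk, 2 = batch, 3 = bundle), and the enum here is a local stand-in.

```rust
#[derive(Clone, Copy, PartialEq, Debug)]
enum ProverType {
    Chunk,
    Batch,
}

fn prover_types_for(circuit_types: &[u8]) -> Vec<ProverType> {
    let mut out = Vec::new();
    for &ct in circuit_types {
        // mirrors get_prover_type: chunk -> Chunk, batch and bundle -> Batch
        let pt = match ct {
            1 => Some(ProverType::Chunk),
            2 | 3 => Some(ProverType::Batch),
            _ => None, // undefined circuit type
        };
        if let Some(pt) = pt {
            if !out.contains(&pt) {
                out.push(pt);
            }
        }
    }
    out
}

fn main() {
    // "circuit_types": [1, 2, 3] from the config above -> [Chunk, Batch]
    println!("{:?}", prover_types_for(&[1, 2, 3]));
}
```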
@@ -1,18 +0,0 @@
|
||||
use std::cell::OnceCell;
|
||||
|
||||
static DEFAULT_COMMIT: &str = "unknown";
|
||||
static mut VERSION: OnceCell<String> = OnceCell::new();
|
||||
|
||||
pub const TAG: &str = "v0.0.0";
|
||||
pub const DEFAULT_ZK_VERSION: &str = "000000-000000";
|
||||
|
||||
fn init_version() -> String {
|
||||
let commit = option_env!("GIT_REV").unwrap_or(DEFAULT_COMMIT);
|
||||
let tag = option_env!("GO_TAG").unwrap_or(TAG);
|
||||
let zk_version = option_env!("ZK_VERSION").unwrap_or(DEFAULT_ZK_VERSION);
|
||||
format!("{tag}-{commit}-{zk_version}")
|
||||
}
|
||||
|
||||
pub fn get_version() -> String {
|
||||
unsafe { VERSION.get_or_init(init_version).clone() }
|
||||
}
|
||||
@@ -2,16 +2,16 @@ mod common;
|
||||
mod darwin;
|
||||
mod darwin_v2;
|
||||
|
||||
use super::geth_client::GethClient;
|
||||
use crate::{
|
||||
config::{AssetsDirEnvConfig, Config},
|
||||
types::{ProverType, Task, TaskType},
|
||||
utils::get_task_types,
|
||||
config::AssetsDirEnvConfig, prover::LocalProverConfig, types::ProverType,
|
||||
utils::get_circuit_types,
|
||||
};
|
||||
use anyhow::{bail, Result};
|
||||
use async_trait::async_trait;
|
||||
use darwin::DarwinHandler;
|
||||
use darwin_v2::DarwinV2Handler;
|
||||
use std::{cell::RefCell, collections::HashMap, rc::Rc};
|
||||
use scroll_proving_sdk::prover::{proving_service::ProveRequest, CircuitType};
|
||||
use std::{collections::HashMap, sync::Arc};
|
||||
|
||||
type HardForkName = String;
|
||||
|
||||
@@ -21,36 +21,36 @@ pub mod utils {
|
||||
}
|
||||
}
|
||||
|
||||
pub trait CircuitsHandler {
|
||||
fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>>;
|
||||
#[async_trait]
|
||||
pub trait CircuitsHandler: Send + Sync {
|
||||
async fn get_vk(&self, task_type: CircuitType) -> Option<Vec<u8>>;
|
||||
|
||||
fn get_proof_data(&self, task_type: TaskType, task: &Task) -> Result<String>;
|
||||
async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String>;
|
||||
}
|
||||
|
||||
type CircuitsHandlerBuilder = fn(
|
||||
prover_type: ProverType,
|
||||
config: &Config,
|
||||
geth_client: Option<Rc<RefCell<GethClient>>>,
|
||||
prover_types: Vec<ProverType>,
|
||||
config: &LocalProverConfig,
|
||||
) -> Result<Box<dyn CircuitsHandler>>;
|
||||
|
||||
pub struct CircuitsHandlerProvider<'a> {
|
||||
config: &'a Config,
|
||||
geth_client: Option<Rc<RefCell<GethClient>>>,
|
||||
pub struct CircuitsHandlerProvider {
|
||||
config: LocalProverConfig,
|
||||
circuits_handler_builder_map: HashMap<HardForkName, CircuitsHandlerBuilder>,
|
||||
|
||||
current_fork_name: Option<HardForkName>,
|
||||
current_prover_type: Option<ProverType>,
|
||||
current_circuit: Option<Rc<Box<dyn CircuitsHandler>>>,
|
||||
current_circuit: Option<Arc<Box<dyn CircuitsHandler>>>,
|
||||
}
|
||||
|
||||
impl<'a> CircuitsHandlerProvider<'a> {
|
||||
pub fn new(config: &'a Config, geth_client: Option<Rc<RefCell<GethClient>>>) -> Result<Self> {
|
||||
impl CircuitsHandlerProvider {
|
||||
pub fn new(config: LocalProverConfig) -> Result<Self> {
|
||||
let mut m: HashMap<HardForkName, CircuitsHandlerBuilder> = HashMap::new();
|
||||
|
||||
if let Err(e) = AssetsDirEnvConfig::init() {
|
||||
panic!("AssetsDirEnvConfig init failed: {:#}", e);
|
||||
}
|
||||
|
||||
fn handler_builder(
|
||||
prover_type: ProverType,
|
||||
config: &Config,
|
||||
geth_client: Option<Rc<RefCell<GethClient>>>,
|
||||
prover_types: Vec<ProverType>,
|
||||
config: &LocalProverConfig,
|
||||
) -> Result<Box<dyn CircuitsHandler>> {
|
||||
log::info!(
|
||||
"now init zk circuits handler, hard_fork_name: {}",
|
||||
@@ -58,10 +58,9 @@ impl<'a> CircuitsHandlerProvider<'a> {
|
||||
);
|
||||
AssetsDirEnvConfig::enable_first();
|
||||
DarwinHandler::new(
|
||||
prover_type,
|
||||
prover_types,
|
||||
&config.low_version_circuit.params_path,
|
||||
&config.low_version_circuit.assets_path,
|
||||
geth_client,
|
||||
)
|
||||
.map(|handler| Box::new(handler) as Box<dyn CircuitsHandler>)
|
||||
}
|
||||
@@ -71,9 +70,8 @@ impl<'a> CircuitsHandlerProvider<'a> {
|
||||
);
|
||||
|
||||
fn next_handler_builder(
|
||||
prover_type: ProverType,
|
||||
config: &Config,
|
||||
geth_client: Option<Rc<RefCell<GethClient>>>,
|
||||
prover_types: Vec<ProverType>,
|
||||
config: &LocalProverConfig,
|
||||
) -> Result<Box<dyn CircuitsHandler>> {
|
||||
log::info!(
|
||||
"now init zk circuits handler, hard_fork_name: {}",
|
||||
@@ -81,10 +79,9 @@ impl<'a> CircuitsHandlerProvider<'a> {
|
||||
);
|
||||
AssetsDirEnvConfig::enable_second();
|
||||
DarwinV2Handler::new(
|
||||
prover_type,
|
||||
prover_types,
|
||||
&config.high_version_circuit.params_path,
|
||||
&config.high_version_circuit.assets_path,
|
||||
geth_client,
|
||||
)
|
||||
.map(|handler| Box::new(handler) as Box<dyn CircuitsHandler>)
|
||||
}
|
||||
@@ -96,10 +93,8 @@ impl<'a> CircuitsHandlerProvider<'a> {
|
||||
|
||||
let provider = CircuitsHandlerProvider {
|
||||
config,
|
||||
geth_client,
|
||||
circuits_handler_builder_map: m,
|
||||
current_fork_name: None,
|
||||
current_prover_type: None,
|
||||
current_circuit: None,
|
||||
};
|
||||
|
||||
@@ -109,8 +104,8 @@ impl<'a> CircuitsHandlerProvider<'a> {
    pub fn get_circuits_handler(
        &mut self,
        hard_fork_name: &String,
        prover_type: ProverType,
    ) -> Result<Rc<Box<dyn CircuitsHandler>>> {
        prover_types: Vec<ProverType>,
    ) -> Result<Arc<Box<dyn CircuitsHandler>>> {
        match &self.current_fork_name {
            Some(fork_name) if fork_name == hard_fork_name => {
                log::info!("get circuits handler from cache");
@@ -126,13 +121,12 @@ impl<'a> CircuitsHandlerProvider<'a> {
                );
                if let Some(builder) = self.circuits_handler_builder_map.get(hard_fork_name) {
                    log::info!("building circuits handler for {hard_fork_name}");
                    let handler = builder(prover_type, self.config, self.geth_client.clone())
                    let handler = builder(prover_types, &self.config)
                        .expect("failed to build circuits handler");
                    self.current_fork_name = Some(hard_fork_name.clone());
                    self.current_prover_type = Some(prover_type);
                    let rc_handler = Rc::new(handler);
                    self.current_circuit = Some(rc_handler.clone());
                    Ok(rc_handler)
                    let arc_handler = Arc::new(handler);
                    self.current_circuit = Some(arc_handler.clone());
                    Ok(arc_handler)
                } else {
                    bail!("missing builder, there must be something wrong.")
                }
@@ -140,39 +134,32 @@ impl<'a> CircuitsHandlerProvider<'a> {
        }
    }

    pub fn init_vks(
    pub async fn init_vks(
        &self,
        config: &LocalProverConfig,
        prover_types: Vec<ProverType>,
        config: &'a Config,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Vec<String> {
        self.circuits_handler_builder_map
            .iter()
            .flat_map(|(hard_fork_name, build)| {
                let geth_client_clone = geth_client.clone();
                prover_types
                    .iter()
                    .flat_map(move |prover_type| {
                        let handler = build(*prover_type, config, geth_client_clone.clone())
                            .expect("failed to build circuits handler");
        let mut vks = Vec::new();
        for (hard_fork_name, build) in self.circuits_handler_builder_map.iter() {
            let handler =
                build(prover_types.clone(), config).expect("failed to build circuits handler");

                        get_task_types(*prover_type)
                            .into_iter()
                            .map(move |task_type| {
                                let vk = handler
                                    .get_vk(task_type)
                                    .map_or("".to_string(), utils::encode_vk);
                                log::info!(
                                    "vk for {hard_fork_name}, is {vk}, task_type: {:?}",
                                    task_type
                                );
                                vk
                            })
                            .filter(|vk| !vk.is_empty())
                            .collect::<Vec<String>>()
                    })
                    .collect::<Vec<String>>()
            })
            .collect()
            for prover_type in prover_types.iter() {
                for task_type in get_circuit_types(*prover_type).into_iter() {
                    let vk = handler
                        .get_vk(task_type)
                        .await
                        .map_or("".to_string(), utils::encode_vk);
                    log::info!(
                        "vk for {hard_fork_name}, is {vk}, task_type: {:?}",
                        task_type
                    );
                    if !vk.is_empty() {
                        vks.push(vk)
                    }
                }
            }
        }
        vks
    }
}

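The hunks above rework CircuitsHandlerProvider: builders now take a Vec<ProverType> and a LocalProverConfig instead of a single ProverType, a &Config, and a GethClient, and the cached handler moves from Rc to Arc so it can be shared with async callers. The snippet below is an editorial sketch only, not repository code; Handler, Builder, and Provider are simplified stand-ins invented to show the rebuild-or-reuse caching shape of get_circuits_handler.

// Editorial sketch: cache the handler built for the current hard fork and
// only rebuild when the requested fork changes.
use std::collections::HashMap;
use std::sync::Arc;

trait Handler: Send + Sync {
    fn describe(&self) -> String;
}

type Builder = fn() -> Box<dyn Handler>;

struct Provider {
    builders: HashMap<String, Builder>,
    current_fork_name: Option<String>,
    current_handler: Option<Arc<Box<dyn Handler>>>,
}

impl Provider {
    fn get_handler(&mut self, hard_fork_name: &str) -> Arc<Box<dyn Handler>> {
        // Same fork as last time: hand out the cached Arc.
        if self.current_fork_name.as_deref() == Some(hard_fork_name) {
            if let Some(handler) = &self.current_handler {
                return handler.clone();
            }
        }
        // Fork changed (or first call): rebuild from the registered builder
        // and cache the result behind an Arc so clones are cheap to share.
        let build = *self
            .builders
            .get(hard_fork_name)
            .expect("missing builder, there must be something wrong.");
        let handler = Arc::new(build());
        self.current_fork_name = Some(hard_fork_name.to_string());
        self.current_handler = Some(handler.clone());
        handler
    }
}

Compared with the previous Rc-based cache, an Arc-held handler can be shared across threads and async tasks, which matches the async CircuitsHandler trait introduced in the handler diffs below.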
@@ -1,14 +1,14 @@
use super::{common::*, CircuitsHandler};
use crate::{
    geth_client::GethClient,
    types::{ProverType, TaskType},
};
use crate::types::ProverType;
use anyhow::{bail, Context, Ok, Result};
use async_trait::async_trait;
use once_cell::sync::Lazy;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, CircuitType};
use serde::Deserialize;
use tokio::sync::RwLock;

use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use crate::types::CommonHash;
use std::env;

use prover_darwin::{
    aggregator::Prover as BatchProver,
@@ -37,16 +37,10 @@ pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}

fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
    block_trace.header.number.map(|n| n.as_u64())
}

#[derive(Default)]
pub struct DarwinHandler {
    chunk_prover: Option<RefCell<ChunkProver<'static>>>,
    batch_prover: Option<RefCell<BatchProver<'static>>>,

    geth_client: Option<Rc<RefCell<GethClient>>>,
    chunk_prover: Option<RwLock<ChunkProver<'static>>>,
    batch_prover: Option<RwLock<BatchProver<'static>>>,
}

impl DarwinHandler {
@@ -54,7 +48,6 @@ impl DarwinHandler {
        prover_types: Vec<ProverType>,
        params_dir: &str,
        assets_dir: &str,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Result<Self> {
        let class_name = std::intrinsics::type_name::<Self>();
        let prover_types_set = prover_types
@@ -63,7 +56,6 @@ impl DarwinHandler {
        let mut handler = Self {
            batch_prover: None,
            chunk_prover: None,
            geth_client,
        };
        let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
            ProverType::Chunk => ZKEVM_DEGREES.clone(),
@@ -81,12 +73,12 @@ impl DarwinHandler {
        for prover_type in prover_types_set {
            match prover_type {
                ProverType::Chunk => {
                    handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
                    handler.chunk_prover = Some(RwLock::new(ChunkProver::from_params_and_assets(
                        params_map, assets_dir,
                    )));
                }
                ProverType::Batch => {
                    handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
                    handler.batch_prover = Some(RwLock::new(BatchProver::from_params_and_assets(
                        params_map, assets_dir,
                    )))
                }
@@ -95,22 +87,18 @@ impl DarwinHandler {
        Ok(handler)
    }

    pub fn new(
        prover_type: ProverType,
        params_dir: &str,
        assets_dir: &str,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Result<Self> {
        Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
    pub fn new(prover_types: Vec<ProverType>, params_dir: &str, assets_dir: &str) -> Result<Self> {
        Self::new_multi(prover_types, params_dir, assets_dir)
    }

    fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
    async fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
        if let Some(prover) = self.chunk_prover.as_ref() {
            let chunk = ChunkProvingTask::from(chunk_trace);

            let chunk_proof =
                prover
                    .borrow_mut()
                    .write()
                    .await
                    .gen_chunk_proof(chunk, None, None, self.get_output_dir())?;

            return Ok(chunk_proof);
@@ -118,13 +106,13 @@ impl DarwinHandler {
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_trace = self.gen_chunk_traces(task)?;
        let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
    async fn gen_chunk_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let chunk_traces: Vec<BlockTrace> = serde_json::from_str(&prove_request.input)?;
        let chunk_proof = self.gen_chunk_proof_raw(chunk_traces).await?;
        Ok(serde_json::to_string(&chunk_proof)?)
    }

    fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
    async fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
                .chunk_infos
@@ -136,13 +124,13 @@ impl DarwinHandler {
            let chunk_proofs: Vec<ChunkProof> =
                chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

            let is_valid = prover.borrow_mut().check_protocol_of_chunks(&chunk_proofs);
            let is_valid = prover.read().await.check_protocol_of_chunks(&chunk_proofs);

            if !is_valid {
                bail!("non-match chunk protocol")
            }
            check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
            let batch_proof = prover.borrow_mut().gen_batch_proof(
            let batch_proof = prover.write().await.gen_batch_proof(
                batch_task_detail.batch_proving_task,
                None,
                self.get_output_dir(),
@@ -153,17 +141,18 @@ impl DarwinHandler {
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_batch_proof for task {}", task.id);

        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
        let batch_proof = self.gen_batch_proof_raw(batch_task_detail)?;
    async fn gen_batch_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&prove_request.input)?;
        let batch_proof = self.gen_batch_proof_raw(batch_task_detail).await?;
        Ok(serde_json::to_string(&batch_proof)?)
    }

    fn gen_bundle_proof_raw(&self, bundle_task_detail: BundleTaskDetail) -> Result<BundleProof> {
    async fn gen_bundle_proof_raw(
        &self,
        bundle_task_detail: BundleTaskDetail,
    ) -> Result<BundleProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let bundle_proof = prover.borrow_mut().gen_bundle_proof(
            let bundle_proof = prover.write().await.gen_bundle_proof(
                bundle_task_detail,
                None,
                self.get_output_dir(),
@@ -174,100 +163,45 @@ impl DarwinHandler {
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_bundle_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_bundle_proof for task {}", task.id);
        let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&task.task_data)?;
        let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail)?;
    async fn gen_bundle_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&prove_request.input)?;
        let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail).await?;
        Ok(serde_json::to_string(&bundle_proof)?)
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }

    fn gen_chunk_traces(&self, task: &Task) -> Result<Vec<BlockTrace>> {
        let chunk_task_detail: ChunkTaskDetail = serde_json::from_str(&task.task_data)?;
        self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
    }

    fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> {
        if block_hashes.is_empty() {
            log::error!("[prover] failed to get sorted traces: block_hashes are empty");
            bail!("block_hashes are empty")
        }

        let mut block_traces = Vec::new();
        for hash in block_hashes.iter() {
            let trace = self
                .geth_client
                .as_ref()
                .unwrap()
                .borrow_mut()
                .get_block_trace_by_hash(hash)?;
            block_traces.push(trace);
        }

        block_traces.sort_by(|a, b| {
            if get_block_number(a).is_none() {
                Ordering::Less
            } else if get_block_number(b).is_none() {
                Ordering::Greater
            } else {
                get_block_number(a)
                    .unwrap()
                    .cmp(&get_block_number(b).unwrap())
            }
        });

        let block_numbers: Vec<u64> = block_traces
            .iter()
            .map(|trace| get_block_number(trace).unwrap_or(0))
            .collect();
        let mut i = 0;
        while i < block_numbers.len() - 1 {
            if block_numbers[i] + 1 != block_numbers[i + 1] {
                log::error!(
                    "[prover] block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                );
                bail!(
                    "block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                )
            }
            i += 1;
        }

        Ok(block_traces)
    }
}

#[async_trait]
impl CircuitsHandler for DarwinHandler {
    fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
    async fn get_vk(&self, task_type: CircuitType) -> Option<Vec<u8>> {
        match task_type {
            TaskType::Chunk => self
                .chunk_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_vk()),
            TaskType::Batch => self
            CircuitType::Chunk => self.chunk_prover.as_ref().unwrap().read().await.get_vk(),
            CircuitType::Batch => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_batch_vk()),
            TaskType::Bundle => self
                .unwrap()
                .read()
                .await
                .get_batch_vk(),
            CircuitType::Bundle => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_bundle_vk()),
                .unwrap()
                .read()
                .await
                .get_bundle_vk(),
            _ => unreachable!(),
        }
    }

    fn get_proof_data(&self, task_type: TaskType, task: &crate::types::Task) -> Result<String> {
        match task_type {
            TaskType::Chunk => self.gen_chunk_proof(task),
            TaskType::Batch => self.gen_batch_proof(task),
            TaskType::Bundle => self.gen_bundle_proof(task),
    async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
        match prove_request.circuit_type {
            CircuitType::Chunk => self.gen_chunk_proof(prove_request).await,
            CircuitType::Batch => self.gen_batch_proof(prove_request).await,
            CircuitType::Bundle => self.gen_bundle_proof(prove_request).await,
            _ => unreachable!(),
        }
    }
@@ -280,11 +214,12 @@ mod tests {
    use super::*;
    use crate::zk_circuits_handler::utils::encode_vk;
    use prover_darwin::utils::chunk_trace_to_witness_block;
    use scroll_proving_sdk::utils::init_tracing;
    use std::{path::PathBuf, sync::LazyLock};

    #[ctor::ctor]
    fn init() {
        crate::utils::log_init(None);
        init_tracing();
        log::info!("logger initialized");
    }

@@ -312,19 +247,18 @@ mod tests {
        assert!(result);
    }

    #[test]
    fn test_circuits() -> Result<()> {
    #[tokio::test]
    async fn test_circuits() -> Result<()> {
        let bi_handler = DarwinHandler::new_multi(
            vec![ProverType::Chunk, ProverType::Batch],
            &PARAMS_PATH,
            &ASSETS_PATH,
            None,
        )?;

        let chunk_handler = bi_handler;
        let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
        let chunk_vk = chunk_handler.get_vk(CircuitType::Chunk).await.unwrap();

        check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
        check_vk(CircuitType::Chunk, chunk_vk, "chunk vk must be available");
        let chunk_dir_paths = get_chunk_dir_paths()?;
        log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
        let mut chunk_infos = vec![];
@@ -338,18 +272,18 @@ mod tests {
            chunk_infos.push(chunk_info);

            log::info!("start to prove {chunk_id}");
            let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
            let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace).await?;
            let proof_data = serde_json::to_string(&chunk_proof)?;
            dump_proof(chunk_id, proof_data)?;
            chunk_proofs.push(chunk_proof);
        }

        let batch_handler = chunk_handler;
        let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
        check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
        let batch_vk = batch_handler.get_vk(CircuitType::Batch).await.unwrap();
        check_vk(CircuitType::Batch, batch_vk, "batch vk must be available");
        let batch_task_detail = make_batch_task_detail(chunk_infos, chunk_proofs);
        log::info!("start to prove batch");
        let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail)?;
        let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail).await?;
        let proof_data = serde_json::to_string(&batch_proof)?;
        dump_proof("batch_proof".to_string(), proof_data)?;

@@ -369,19 +303,19 @@ mod tests {
        // }
    }

    fn check_vk(proof_type: TaskType, vk: Vec<u8>, info: &str) {
    fn check_vk(proof_type: CircuitType, vk: Vec<u8>, info: &str) {
        log::info!("check_vk, {:?}", proof_type);
        let vk_from_file = read_vk(proof_type).unwrap();
        assert_eq!(vk_from_file, encode_vk(vk), "{info}")
    }

    fn read_vk(proof_type: TaskType) -> Result<String> {
    fn read_vk(proof_type: CircuitType) -> Result<String> {
        log::info!("read_vk, {:?}", proof_type);
        let vk_file = match proof_type {
            TaskType::Chunk => CHUNK_VK_PATH.clone(),
            TaskType::Batch => BATCH_VK_PATH.clone(),
            TaskType::Bundle => todo!(),
            TaskType::Undefined => unreachable!(),
            CircuitType::Chunk => CHUNK_VK_PATH.clone(),
            CircuitType::Batch => BATCH_VK_PATH.clone(),
            CircuitType::Bundle => todo!(),
            CircuitType::Undefined => unreachable!(),
        };

        let data = std::fs::read(vk_file)?;

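In the DarwinHandler diff above, the prover fields move from Option<RefCell<...>> to Option<RwLock<...>> (tokio) and the proof methods become async, so state is locked with .read().await / .write().await instead of borrow() / borrow_mut(). The sketch below is illustrative only and uses an invented Prover stand-in rather than the real ChunkProver; it assumes a Cargo dependency on tokio with the `full` feature. A RefCell is not Sync and its borrows cannot be held across an .await in a Send future, while an async RwLock allows shared reads (verifying-key lookups) and exclusive writes (proof generation).

// Editorial sketch: async read/write locking around mutable prover state.
use tokio::sync::RwLock;

// Hypothetical stand-in for the real prover type.
struct Prover {
    counter: u64,
}

impl Prover {
    fn get_vk(&self) -> Vec<u8> {
        vec![0u8; 4]
    }
    fn prove(&mut self) -> u64 {
        self.counter += 1;
        self.counter
    }
}

struct Handler {
    chunk_prover: Option<RwLock<Prover>>,
}

impl Handler {
    async fn get_vk(&self) -> Option<Vec<u8>> {
        // Shared, read-only access is enough to read the verifying key.
        match self.chunk_prover.as_ref() {
            Some(p) => Some(p.read().await.get_vk()),
            None => None,
        }
    }

    async fn prove(&self) -> Option<u64> {
        // Proof generation mutates prover state, so take the write lock.
        match self.chunk_prover.as_ref() {
            Some(p) => Some(p.write().await.prove()),
            None => None,
        }
    }
}

#[tokio::main]
async fn main() {
    let handler = Handler {
        chunk_prover: Some(RwLock::new(Prover { counter: 0 })),
    };
    assert!(handler.get_vk().await.is_some());
    assert_eq!(handler.prove().await, Some(1));
}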
@@ -1,14 +1,14 @@
use super::{common::*, CircuitsHandler};
use crate::{
    geth_client::GethClient,
    types::{ProverType, TaskType},
};
use crate::types::ProverType;
use anyhow::{bail, Context, Ok, Result};
use async_trait::async_trait;
use once_cell::sync::Lazy;
use scroll_proving_sdk::prover::{proving_service::ProveRequest, CircuitType};
use serde::Deserialize;
use tokio::sync::RwLock;

use crate::types::{CommonHash, Task};
use std::{cell::RefCell, cmp::Ordering, env, rc::Rc};
use crate::types::CommonHash;
use std::env;

use prover_darwin_v2::{
    aggregator::Prover as BatchProver,
@@ -37,16 +37,10 @@ pub struct ChunkTaskDetail {
    pub block_hashes: Vec<CommonHash>,
}

fn get_block_number(block_trace: &BlockTrace) -> Option<u64> {
    block_trace.header.number.map(|n| n.as_u64())
}

#[derive(Default)]
pub struct DarwinV2Handler {
    chunk_prover: Option<RefCell<ChunkProver<'static>>>,
    batch_prover: Option<RefCell<BatchProver<'static>>>,

    geth_client: Option<Rc<RefCell<GethClient>>>,
    chunk_prover: Option<RwLock<ChunkProver<'static>>>,
    batch_prover: Option<RwLock<BatchProver<'static>>>,
}

impl DarwinV2Handler {
@@ -54,7 +48,6 @@ impl DarwinV2Handler {
        prover_types: Vec<ProverType>,
        params_dir: &str,
        assets_dir: &str,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Result<Self> {
        let class_name = std::intrinsics::type_name::<Self>();
        let prover_types_set = prover_types
@@ -63,7 +56,6 @@ impl DarwinV2Handler {
        let mut handler = Self {
            batch_prover: None,
            chunk_prover: None,
            geth_client,
        };
        let degrees: Vec<u32> = get_degrees(&prover_types_set, |prover_type| match prover_type {
            ProverType::Chunk => ZKEVM_DEGREES.clone(),
@@ -81,12 +73,12 @@ impl DarwinV2Handler {
        for prover_type in prover_types_set {
            match prover_type {
                ProverType::Chunk => {
                    handler.chunk_prover = Some(RefCell::new(ChunkProver::from_params_and_assets(
                    handler.chunk_prover = Some(RwLock::new(ChunkProver::from_params_and_assets(
                        params_map, assets_dir,
                    )));
                }
                ProverType::Batch => {
                    handler.batch_prover = Some(RefCell::new(BatchProver::from_params_and_assets(
                    handler.batch_prover = Some(RwLock::new(BatchProver::from_params_and_assets(
                        params_map, assets_dir,
                    )))
                }
@@ -95,22 +87,18 @@ impl DarwinV2Handler {
        Ok(handler)
    }

    pub fn new(
        prover_type: ProverType,
        params_dir: &str,
        assets_dir: &str,
        geth_client: Option<Rc<RefCell<GethClient>>>,
    ) -> Result<Self> {
        Self::new_multi(vec![prover_type], params_dir, assets_dir, geth_client)
    pub fn new(prover_types: Vec<ProverType>, params_dir: &str, assets_dir: &str) -> Result<Self> {
        Self::new_multi(prover_types, params_dir, assets_dir)
    }

    fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
    async fn gen_chunk_proof_raw(&self, chunk_trace: Vec<BlockTrace>) -> Result<ChunkProof> {
        if let Some(prover) = self.chunk_prover.as_ref() {
            let chunk = ChunkProvingTask::from(chunk_trace);

            let chunk_proof =
                prover
                    .borrow_mut()
                    .write()
                    .await
                    .gen_chunk_proof(chunk, None, None, self.get_output_dir())?;

            return Ok(chunk_proof);
@@ -118,13 +106,13 @@ impl DarwinV2Handler {
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_chunk_proof(&self, task: &crate::types::Task) -> Result<String> {
        let chunk_trace = self.gen_chunk_traces(task)?;
        let chunk_proof = self.gen_chunk_proof_raw(chunk_trace)?;
    async fn gen_chunk_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let chunk_traces: Vec<BlockTrace> = serde_json::from_str(&prove_request.input)?;
        let chunk_proof = self.gen_chunk_proof_raw(chunk_traces).await?;
        Ok(serde_json::to_string(&chunk_proof)?)
    }

    fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
    async fn gen_batch_proof_raw(&self, batch_task_detail: BatchTaskDetail) -> Result<BatchProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let chunk_hashes_proofs: Vec<(ChunkInfo, ChunkProof)> = batch_task_detail
                .chunk_infos
@@ -136,13 +124,13 @@ impl DarwinV2Handler {
            let chunk_proofs: Vec<ChunkProof> =
                chunk_hashes_proofs.iter().map(|t| t.1.clone()).collect();

            let is_valid = prover.borrow_mut().check_protocol_of_chunks(&chunk_proofs);
            let is_valid = prover.read().await.check_protocol_of_chunks(&chunk_proofs);

            if !is_valid {
                bail!("non-match chunk protocol")
            }
            check_chunk_hashes("", &chunk_hashes_proofs).context("failed to check chunk info")?;
            let batch_proof = prover.borrow_mut().gen_batch_proof(
            let batch_proof = prover.write().await.gen_batch_proof(
                batch_task_detail.batch_proving_task,
                None,
                self.get_output_dir(),
@@ -153,17 +141,18 @@ impl DarwinV2Handler {
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_batch_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_batch_proof for task {}", task.id);

        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&task.task_data)?;
        let batch_proof = self.gen_batch_proof_raw(batch_task_detail)?;
    async fn gen_batch_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let batch_task_detail: BatchTaskDetail = serde_json::from_str(&prove_request.input)?;
        let batch_proof = self.gen_batch_proof_raw(batch_task_detail).await?;
        Ok(serde_json::to_string(&batch_proof)?)
    }

    fn gen_bundle_proof_raw(&self, bundle_task_detail: BundleTaskDetail) -> Result<BundleProof> {
    async fn gen_bundle_proof_raw(
        &self,
        bundle_task_detail: BundleTaskDetail,
    ) -> Result<BundleProof> {
        if let Some(prover) = self.batch_prover.as_ref() {
            let bundle_proof = prover.borrow_mut().gen_bundle_proof(
            let bundle_proof = prover.write().await.gen_bundle_proof(
                bundle_task_detail,
                None,
                self.get_output_dir(),
@@ -174,100 +163,45 @@ impl DarwinV2Handler {
        unreachable!("please check errors in proof_type logic")
    }

    fn gen_bundle_proof(&self, task: &crate::types::Task) -> Result<String> {
        log::info!("[circuit] gen_bundle_proof for task {}", task.id);
        let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&task.task_data)?;
        let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail)?;
    async fn gen_bundle_proof(&self, prove_request: ProveRequest) -> Result<String> {
        let bundle_task_detail: BundleTaskDetail = serde_json::from_str(&prove_request.input)?;
        let bundle_proof = self.gen_bundle_proof_raw(bundle_task_detail).await?;
        Ok(serde_json::to_string(&bundle_proof)?)
    }

    fn get_output_dir(&self) -> Option<&str> {
        OUTPUT_DIR.as_deref()
    }

    fn gen_chunk_traces(&self, task: &Task) -> Result<Vec<BlockTrace>> {
        let chunk_task_detail: ChunkTaskDetail = serde_json::from_str(&task.task_data)?;
        self.get_sorted_traces_by_hashes(&chunk_task_detail.block_hashes)
    }

    fn get_sorted_traces_by_hashes(&self, block_hashes: &[CommonHash]) -> Result<Vec<BlockTrace>> {
        if block_hashes.is_empty() {
            log::error!("[prover] failed to get sorted traces: block_hashes are empty");
            bail!("block_hashes are empty")
        }

        let mut block_traces = Vec::new();
        for hash in block_hashes.iter() {
            let trace = self
                .geth_client
                .as_ref()
                .unwrap()
                .borrow_mut()
                .get_block_trace_by_hash(hash)?;
            block_traces.push(trace);
        }

        block_traces.sort_by(|a, b| {
            if get_block_number(a).is_none() {
                Ordering::Less
            } else if get_block_number(b).is_none() {
                Ordering::Greater
            } else {
                get_block_number(a)
                    .unwrap()
                    .cmp(&get_block_number(b).unwrap())
            }
        });

        let block_numbers: Vec<u64> = block_traces
            .iter()
            .map(|trace| get_block_number(trace).unwrap_or(0))
            .collect();
        let mut i = 0;
        while i < block_numbers.len() - 1 {
            if block_numbers[i] + 1 != block_numbers[i + 1] {
                log::error!(
                    "[prover] block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                );
                bail!(
                    "block numbers are not continuous, got {} and {}",
                    block_numbers[i],
                    block_numbers[i + 1]
                )
            }
            i += 1;
        }

        Ok(block_traces)
    }
}

#[async_trait]
impl CircuitsHandler for DarwinV2Handler {
    fn get_vk(&self, task_type: TaskType) -> Option<Vec<u8>> {
    async fn get_vk(&self, task_type: CircuitType) -> Option<Vec<u8>> {
        match task_type {
            TaskType::Chunk => self
                .chunk_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_vk()),
            TaskType::Batch => self
            CircuitType::Chunk => self.chunk_prover.as_ref().unwrap().read().await.get_vk(),
            CircuitType::Batch => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_batch_vk()),
            TaskType::Bundle => self
                .unwrap()
                .read()
                .await
                .get_batch_vk(),
            CircuitType::Bundle => self
                .batch_prover
                .as_ref()
                .and_then(|prover| prover.borrow().get_bundle_vk()),
                .unwrap()
                .read()
                .await
                .get_bundle_vk(),
            _ => unreachable!(),
        }
    }

    fn get_proof_data(&self, task_type: TaskType, task: &crate::types::Task) -> Result<String> {
        match task_type {
            TaskType::Chunk => self.gen_chunk_proof(task),
            TaskType::Batch => self.gen_batch_proof(task),
            TaskType::Bundle => self.gen_bundle_proof(task),
    async fn get_proof_data(&self, prove_request: ProveRequest) -> Result<String> {
        match prove_request.circuit_type {
            CircuitType::Chunk => self.gen_chunk_proof(prove_request).await,
            CircuitType::Batch => self.gen_batch_proof(prove_request).await,
            CircuitType::Bundle => self.gen_bundle_proof(prove_request).await,
            _ => unreachable!(),
        }
    }
@@ -284,11 +218,12 @@ mod tests {
        aggregator::eip4844, utils::chunk_trace_to_witness_block, BatchData, BatchHeader,
        MAX_AGG_SNARKS,
    };
    use scroll_proving_sdk::utils::init_tracing;
    use std::{path::PathBuf, sync::LazyLock};

    #[ctor::ctor]
    fn init() {
        crate::utils::log_init(None);
        init_tracing();
        log::info!("logger initialized");
    }

@@ -316,19 +251,18 @@ mod tests {
        assert!(result);
    }

    #[test]
    fn test_circuits() -> Result<()> {
    #[tokio::test]
    async fn test_circuits() -> Result<()> {
        let bi_handler = DarwinV2Handler::new_multi(
            vec![ProverType::Chunk, ProverType::Batch],
            &PARAMS_PATH,
            &ASSETS_PATH,
            None,
        )?;

        let chunk_handler = bi_handler;
        let chunk_vk = chunk_handler.get_vk(TaskType::Chunk).unwrap();
        let chunk_vk = chunk_handler.get_vk(CircuitType::Chunk).await.unwrap();

        check_vk(TaskType::Chunk, chunk_vk, "chunk vk must be available");
        check_vk(CircuitType::Chunk, chunk_vk, "chunk vk must be available");
        let chunk_dir_paths = get_chunk_dir_paths()?;
        log::info!("chunk_dir_paths, {:?}", chunk_dir_paths);
        let mut chunk_traces = vec![];
@@ -343,18 +277,18 @@ mod tests {
            chunk_infos.push(chunk_info);

            log::info!("start to prove {chunk_id}");
            let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace)?;
            let chunk_proof = chunk_handler.gen_chunk_proof_raw(chunk_trace).await?;
            let proof_data = serde_json::to_string(&chunk_proof)?;
            dump_proof(chunk_id, proof_data)?;
            chunk_proofs.push(chunk_proof);
        }

        let batch_handler = chunk_handler;
        let batch_vk = batch_handler.get_vk(TaskType::Batch).unwrap();
        check_vk(TaskType::Batch, batch_vk, "batch vk must be available");
        let batch_vk = batch_handler.get_vk(CircuitType::Batch).await.unwrap();
        check_vk(CircuitType::Batch, batch_vk, "batch vk must be available");
        let batch_task_detail = make_batch_task_detail(chunk_traces, chunk_proofs, None);
        log::info!("start to prove batch");
        let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail)?;
        let batch_proof = batch_handler.gen_batch_proof_raw(batch_task_detail).await?;
        let proof_data = serde_json::to_string(&batch_proof)?;
        dump_proof("batch_proof".to_string(), proof_data)?;

@@ -427,19 +361,19 @@ mod tests {
        }
    }

    fn check_vk(proof_type: TaskType, vk: Vec<u8>, info: &str) {
    fn check_vk(proof_type: CircuitType, vk: Vec<u8>, info: &str) {
        log::info!("check_vk, {:?}", proof_type);
        let vk_from_file = read_vk(proof_type).unwrap();
        assert_eq!(vk_from_file, encode_vk(vk), "{info}")
    }

    fn read_vk(proof_type: TaskType) -> Result<String> {
    fn read_vk(proof_type: CircuitType) -> Result<String> {
        log::info!("read_vk, {:?}", proof_type);
        let vk_file = match proof_type {
            TaskType::Chunk => CHUNK_VK_PATH.clone(),
            TaskType::Batch => BATCH_VK_PATH.clone(),
            TaskType::Bundle => todo!(),
            TaskType::Undefined => unreachable!(),
            CircuitType::Chunk => CHUNK_VK_PATH.clone(),
            CircuitType::Batch => BATCH_VK_PATH.clone(),
            CircuitType::Bundle => todo!(),
            CircuitType::Undefined => unreachable!(),
        };

        let data = std::fs::read(vk_file)?;

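Both DarwinHandler and DarwinV2Handler now expose the same #[async_trait] entry point: get_proof_data receives a ProveRequest, inspects its circuit_type, deserializes input, and awaits the matching proof routine. The sketch below mirrors only that dispatch shape with locally defined stand-in types; CircuitKind, Request, and the gen_* functions are invented for illustration and are not the scroll_proving_sdk API. It assumes the anyhow and tokio crates as dependencies.

// Editorial sketch: dispatch a proving request by its circuit kind.
use anyhow::{bail, Result};

#[derive(Debug, Clone, Copy)]
enum CircuitKind {
    Chunk,
    Batch,
    Bundle,
}

struct Request {
    circuit_kind: CircuitKind,
    // JSON payload; the real handlers deserialize prove_request.input into
    // block traces or a batch/bundle task detail before proving.
    input: String,
}

async fn gen_chunk_proof(input: &str) -> Result<String> {
    // Placeholder body standing in for the real chunk-proving call.
    Ok(format!("chunk proof over {} input bytes", input.len()))
}

async fn gen_batch_proof(input: &str) -> Result<String> {
    Ok(format!("batch proof over {} input bytes", input.len()))
}

async fn get_proof_data(req: Request) -> Result<String> {
    // Inspect the circuit kind carried by the request and await the
    // matching async routine, as the real get_proof_data does.
    match req.circuit_kind {
        CircuitKind::Chunk => gen_chunk_proof(&req.input).await,
        CircuitKind::Batch => gen_batch_proof(&req.input).await,
        CircuitKind::Bundle => bail!("bundle proving is out of scope for this sketch"),
    }
}

#[tokio::main]
async fn main() -> Result<()> {
    let req = Request {
        circuit_kind: CircuitKind::Chunk,
        input: "[]".to_string(),
    };
    println!("{}", get_proof_data(req).await?);
    Ok(())
}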