Move go-ssz to external repo under MIT license (#2722)

This commit is contained in:
Preston Van Loon
2019-05-29 21:04:25 -04:00
committed by terence tsao
parent 932e68571b
commit 55bedd0745
34 changed files with 33 additions and 2565 deletions

View File

@@ -129,6 +129,17 @@ http_archive(
url = "https://github.com/kubernetes/repo-infra/archive/df02ded38f9506e5bbcbf21702034b4fef815f2f.tar.gz",
)
http_archive(
name = "com_github_prysmaticlabs_go_ssz",
strip_prefix = "go-ssz-85eecc65d2c7a3b20501fe662210e5045f7bcbe1",
url = "https://github.com/prysmaticlabs/go-ssz/archive/85eecc65d2c7a3b20501fe662210e5045f7bcbe1.tar.gz",
sha256 = "9e753a6e5c4f6f7f3b4af584f326b1c650aee6af85fc98416fbe7d1579d6e4d7",
)
load("@com_github_prysmaticlabs_go_ssz//:deps.bzl", "go_ssz_dependencies")
go_ssz_dependencies()
go_repository(
name = "com_github_golang_mock",
commit = "51421b967af1f557f93a59e0057aaf15ca02e29c", # v1.2.0

View File

@@ -45,12 +45,12 @@ go_test(
"//shared/forkutil:go_default_library",
"//shared/hashutil:go_default_library",
"//shared/params:go_default_library",
"//shared/ssz:go_default_library",
"//shared/testutil:go_default_library",
"//shared/trieutil:go_default_library",
"@com_github_ethereum_go_ethereum//common:go_default_library",
"@com_github_ethereum_go_ethereum//core/types:go_default_library",
"@com_github_gogo_protobuf//proto:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
"@com_github_sirupsen_logrus//:go_default_library",
"@com_github_sirupsen_logrus//hooks/test:go_default_library",
],

View File

@@ -10,6 +10,7 @@ import (
"testing"
"time"
"github.com/prysmaticlabs/go-ssz"
"github.com/prysmaticlabs/prysm/beacon-chain/core/blocks"
"github.com/prysmaticlabs/prysm/beacon-chain/core/helpers"
"github.com/prysmaticlabs/prysm/beacon-chain/core/state"
@@ -18,7 +19,6 @@ import (
"github.com/prysmaticlabs/prysm/shared/featureconfig"
"github.com/prysmaticlabs/prysm/shared/forkutil"
"github.com/prysmaticlabs/prysm/shared/params"
"github.com/prysmaticlabs/prysm/shared/ssz"
"github.com/prysmaticlabs/prysm/shared/testutil"
logTest "github.com/sirupsen/logrus/hooks/test"

View File

@@ -21,7 +21,7 @@ go_library(
"//shared/hashutil:go_default_library",
"//shared/mathutil:go_default_library",
"//shared/params:go_default_library",
"//shared/ssz:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
"@org_golang_google_grpc//codes:go_default_library",
"@org_golang_google_grpc//status:go_default_library",
],

View File

@@ -5,8 +5,8 @@ import (
"encoding/binary"
"fmt"
"github.com/prysmaticlabs/go-ssz"
pb "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
"github.com/prysmaticlabs/prysm/shared/ssz"
)
// EncodeDepositData converts a deposit input proto into an a byte slice

View File

@@ -47,7 +47,6 @@ go_test(
"//proto/beacon/p2p/v1:go_default_library",
"//shared/event:go_default_library",
"//shared/featureconfig:go_default_library",
"//shared/ssz:go_default_library",
"//shared/testutil:go_default_library",
"@com_github_ethereum_go_ethereum//:go_default_library",
"@com_github_ethereum_go_ethereum//accounts/abi/bind:go_default_library",
@@ -57,6 +56,7 @@ go_test(
"@com_github_ethereum_go_ethereum//core/types:go_default_library",
"@com_github_ethereum_go_ethereum//crypto:go_default_library",
"@com_github_gogo_protobuf//proto:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
"@com_github_sirupsen_logrus//:go_default_library",
"@com_github_sirupsen_logrus//hooks/test:go_default_library",
],

View File

@@ -10,12 +10,12 @@ import (
"github.com/ethereum/go-ethereum"
"github.com/ethereum/go-ethereum/common"
"github.com/gogo/protobuf/proto"
"github.com/prysmaticlabs/go-ssz"
"github.com/prysmaticlabs/prysm/beacon-chain/core/helpers"
"github.com/prysmaticlabs/prysm/beacon-chain/db"
contracts "github.com/prysmaticlabs/prysm/contracts/deposit-contract"
pb "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
"github.com/prysmaticlabs/prysm/shared/featureconfig"
"github.com/prysmaticlabs/prysm/shared/ssz"
"github.com/prysmaticlabs/prysm/shared/testutil"
"github.com/sirupsen/logrus"
logTest "github.com/sirupsen/logrus/hooks/test"

View File

@@ -9,7 +9,6 @@ go_library(
"//contracts/deposit-contract:go_default_library",
"//shared/keystore:go_default_library",
"//shared/params:go_default_library",
"//shared/ssz:go_default_library",
"//shared/version:go_default_library",
"@com_github_ethereum_go_ethereum//accounts/abi/bind:go_default_library",
"@com_github_ethereum_go_ethereum//accounts/keystore:go_default_library",
@@ -17,6 +16,7 @@ go_library(
"@com_github_ethereum_go_ethereum//crypto:go_default_library",
"@com_github_ethereum_go_ethereum//ethclient:go_default_library",
"@com_github_ethereum_go_ethereum//rpc:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
"@com_github_sirupsen_logrus//:go_default_library",
"@com_github_urfave_cli//:go_default_library",
"@com_github_x_cray_logrus_prefixed_formatter//:go_default_library",

View File

@@ -18,10 +18,10 @@ import (
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/rpc"
"github.com/prysmaticlabs/go-ssz"
contracts "github.com/prysmaticlabs/prysm/contracts/deposit-contract"
prysmKeyStore "github.com/prysmaticlabs/prysm/shared/keystore"
"github.com/prysmaticlabs/prysm/shared/params"
"github.com/prysmaticlabs/prysm/shared/ssz"
"github.com/prysmaticlabs/prysm/shared/version"
"github.com/sirupsen/logrus"
"github.com/urfave/cli"

View File

@@ -15,8 +15,8 @@ go_library(
"//proto/beacon/p2p/v1:go_default_library",
"//shared/bls:go_default_library",
"//shared/params:go_default_library",
"//shared/ssz:go_default_library",
"@com_github_pborman_uuid//:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
"@org_golang_x_crypto//pbkdf2:go_default_library",
"@org_golang_x_crypto//scrypt:go_default_library",
"@org_golang_x_crypto//sha3:go_default_library",
@@ -37,9 +37,9 @@ go_test(
"//shared/bytesutil:go_default_library",
"//shared/featureconfig:go_default_library",
"//shared/params:go_default_library",
"//shared/ssz:go_default_library",
"//shared/testutil:go_default_library",
"@com_github_gogo_protobuf//proto:go_default_library",
"@com_github_pborman_uuid//:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
],
)

View File

@@ -3,9 +3,9 @@ package keystore
import (
"bytes"
"github.com/prysmaticlabs/go-ssz"
pb "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
"github.com/prysmaticlabs/prysm/shared/params"
"github.com/prysmaticlabs/prysm/shared/ssz"
)
// DepositInput for a given key. This input data can be used to when making a

View File

@@ -6,12 +6,12 @@ import (
"testing"
"github.com/gogo/protobuf/proto"
"github.com/prysmaticlabs/go-ssz"
pb "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
"github.com/prysmaticlabs/prysm/shared/bls"
"github.com/prysmaticlabs/prysm/shared/featureconfig"
"github.com/prysmaticlabs/prysm/shared/keystore"
"github.com/prysmaticlabs/prysm/shared/params"
"github.com/prysmaticlabs/prysm/shared/ssz"
)
func init() {

View File

@@ -20,6 +20,6 @@ go_test(
embed = [":go_default_library"],
deps = [
"//shared/featureconfig:go_default_library",
"//shared/ssz:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
],
)

View File

@@ -5,8 +5,8 @@ import (
"reflect"
"testing"
"github.com/prysmaticlabs/go-ssz"
"github.com/prysmaticlabs/prysm/shared/featureconfig"
"github.com/prysmaticlabs/prysm/shared/ssz"
)
func init() {

View File

@@ -1,42 +0,0 @@
load("@io_bazel_rules_go//go:def.bzl", "go_library", "go_test")
go_library(
name = "go_default_library",
srcs = [
"decode.go",
"doc.go",
"encode.go",
"hash.go",
"hash_cache.go",
"ssz_utils_cache.go",
],
importpath = "github.com/prysmaticlabs/prysm/shared/ssz",
visibility = ["//visibility:public"],
deps = [
"//shared/bytesutil:go_default_library",
"//shared/featureconfig:go_default_library",
"//shared/hashutil:go_default_library",
"//shared/params:go_default_library",
"@com_github_ethereum_go_ethereum//common:go_default_library",
"@com_github_karlseguin_ccache//:go_default_library",
"@com_github_prometheus_client_golang//prometheus:go_default_library",
"@com_github_prometheus_client_golang//prometheus/promauto:go_default_library",
],
)
go_test(
name = "go_default_test",
srcs = [
"decode_test.go",
"encode_test.go",
"example_and_test.go",
"example_encode_test.go",
"hash_cache_test.go",
"hash_test.go",
],
embed = [":go_default_library"],
deps = [
"//shared/bytesutil:go_default_library",
"//shared/featureconfig:go_default_library",
],
)

View File

@@ -1,144 +0,0 @@
# Simple Serialize (SSZ)
This package implements the Simple Serialize (SSZ) algorithm specified in the official Ethereum 2.0 [spec](https://github.com/ethereum/eth2.0-specs/blob/master/specs/simple-serialize.md).
## Interface
### Encodable
A type is Encodable if it implements `EncodeSSZ` and `EncodeSSZSize` function.
```go
type Encodable interface {
EncodeSSZ(io.Writer) error
// Estimate the encoding size of the object without doing the actual encoding
EncodeSSZSize() (uint32, error)
}
```
### Decodable
A type is Decodable if it implements `DecodeSSZ()`.
```go
type Decodable interface {
DecodeSSZ(io.Reader) error
}
```
### Hashable
A type is Hashable if it implements `TreeHashSSZ()`.
```go
type Hashable interface {
TreeHashSSZ() ([32]byte, error)
}
```
## API
### Encoding function
```go
// Encode val and output the result into w.
func Encode(w io.Writer, val interface{}) error
```
```go
// EncodeSize returns the target encoding size without doing the actual encoding.
// This is an optional pass. You don't need to call this before the encoding unless you
// want to know the output size first.
func EncodeSize(val interface{}) (uint32, error)
```
### Decoding function
```go
// Decode data read from r and output it into the object pointed by pointer val.
func Decode(r io.Reader, val interface{}) error
```
### Hashing function
```go
// Tree-hash data into [32]byte
func TreeHash(val interface{}) ([32]byte, error)
```
## Usage
Say you have a struct like this
```go
type exampleStruct1 struct {
Field1 uint8
Field2 []byte
}
```
You implement the `Encoding` interface for it:
```go
func (e *exampleStruct1) EncodeSSZ(w io.Writer) error {
return Encode(w, *e)
}
func (e *exampleStruct1) EncodeSSZSize() (uint32, error) {
return EncodeSize(*e)
}
```
Now you can encode this object like this
```go
e1 := &exampleStruct1{
Field1: 10,
Field2: []byte{1, 2, 3, 4},
}
wBuf := new(bytes.Buffer)
if err = e1.EncodeSSZ(wBuf); err != nil {
return fmt.Errorf("failed to encode: %v", err)
}
encoding := wBuf.Bytes() // encoding becomes [0 0 0 9 10 0 0 0 4 1 2 3 4]
```
You can also get the estimated encoding size
```go
var encodeSize uint32
if encodeSize, err = e1.EncodeSSZSize(); err != nil {
return fmt.Errorf("failed to get encode size: %v", err)
}
// encodeSize becomes 13
```
To calculate tree-hash of the object
```go
var hash [32]byte
if hash, err = e1.TreeHashSSZ(); err != nil {
return fmt.Errorf("failed to hash: %v", err)
}
// hash stores the hashing result
```
Similarly, you can implement the `Decodable` interface for this struct
```go
func (e *exampleStruct1) DecodeSSZ(r io.Reader) error {
return Decode(r, e)
}
```
Now you can decode to create new struct
```go
e2 := new(exampleStruct1)
rBuf := bytes.NewReader(encoding)
if err = e2.DecodeSSZ(rBuf); err != nil {
return fmt.Errorf("failed to decode: %v", err)
}
// e2 now has the same content as e1
```
## Notes
### Supported data types
- uint8
- uint16
- uint32
- uint64
- slice
- array
- struct
- pointer (nil pointer is not supported)

View File

@@ -1,333 +0,0 @@
package ssz
import (
"encoding/binary"
"errors"
"fmt"
"io"
"reflect"
)
// Decodable defines the interface for support ssz decoding.
type Decodable interface {
	// DecodeSSZ reads the SSZ encoding from r into the receiver.
	DecodeSSZ(io.Reader) error
}

// Decode decodes data read from r and output it into the object pointed by pointer val.
// val must be a non-nil pointer; see decode for the error cases.
func Decode(r io.Reader, val interface{}) error {
	return decode(r, val)
}
// decode performs the work for Decode: it validates that val is a
// non-nil pointer, looks up the ssz utilities for the pointed-to type,
// and decodes from r into the pointed-to value. All failures are
// wrapped in a *decodeError carrying the target type.
func decode(r io.Reader, val interface{}) error {
	if val == nil {
		return newDecodeError("cannot decode into nil", nil)
	}
	rval := reflect.ValueOf(val)
	rtyp := rval.Type()
	// val must be a pointer, otherwise we refuse to decode
	if rtyp.Kind() != reflect.Ptr {
		return newDecodeError("can only decode into pointer target", rtyp)
	}
	if rval.IsNil() {
		return newDecodeError("cannot output to pointer of nil", rtyp)
	}
	// cachedSSZUtils is defined in ssz_utils_cache.go; presumably it
	// builds and caches the encoder/decoder pair per type — confirm there.
	sszUtils, err := cachedSSZUtils(rval.Elem().Type())
	if err != nil {
		return newDecodeError(fmt.Sprint(err), rval.Elem().Type())
	}
	// The consumed-byte count is discarded here; only success matters.
	if _, err = sszUtils.decoder(r, rval.Elem()); err != nil {
		return newDecodeError(fmt.Sprint(err), rval.Elem().Type())
	}
	return nil
}
// makeDecoder returns the decoder function for the given type, or an
// error if the type is not deserializable. Composite types are handled
// by dedicated constructor helpers.
func makeDecoder(typ reflect.Type) (dec decoder, err error) {
	kind := typ.Kind()
	switch {
	case kind == reflect.Bool:
		return decodeBool, nil
	case kind == reflect.Uint8:
		return decodeUint8, nil
	case kind == reflect.Uint16:
		return decodeUint16, nil
	case kind == reflect.Uint32:
		return decodeUint32, nil
	case kind == reflect.Int32:
		// Bug fix: this case previously returned decodeUint32, but
		// reflect.Value.SetUint panics on an int32 target. Use a signed
		// decoder that mirrors encodeInt32 on the encoding side.
		return decodeInt32, nil
	case kind == reflect.Uint64:
		return decodeUint64, nil
	case kind == reflect.Slice && typ.Elem().Kind() == reflect.Uint8:
		return decodeBytes, nil
	case kind == reflect.Slice:
		return makeSliceDecoder(typ)
	case kind == reflect.Array && typ.Elem().Kind() == reflect.Uint8:
		return decodeByteArray, nil
	case kind == reflect.Array:
		return makeArrayDecoder(typ)
	case kind == reflect.Struct:
		return makeStructDecoder(typ)
	case kind == reflect.Ptr:
		return makePtrDecoder(typ)
	default:
		return nil, fmt.Errorf("type %v is not deserializable", typ)
	}
}

// decodeInt32 reads a 4-byte little-endian value into a signed 32-bit
// target using SetInt (SetUint would panic on an int32 reflect.Value).
func decodeInt32(r io.Reader, val reflect.Value) (uint32, error) {
	b := make([]byte, 4)
	if err := readBytes(r, 4, b); err != nil {
		return 0, err
	}
	val.SetInt(int64(int32(binary.LittleEndian.Uint32(b))))
	return 4, nil
}
// decodeBool reads one byte and stores it as a bool; only the encodings
// 0 and 1 are accepted.
func decodeBool(r io.Reader, val reflect.Value) (uint32, error) {
	buf := make([]byte, 1)
	if err := readBytes(r, 1, buf); err != nil {
		return 0, err
	}
	switch buf[0] {
	case 0:
		val.SetBool(false)
	case 1:
		val.SetBool(true)
	default:
		return 0, fmt.Errorf("expect 0 or 1 for decoding bool but got %d", buf[0])
	}
	return 1, nil
}
// decodeUint8 reads a single byte into an unsigned 8-bit target.
func decodeUint8(r io.Reader, val reflect.Value) (uint32, error) {
	buf := make([]byte, 1)
	if err := readBytes(r, 1, buf); err != nil {
		return 0, err
	}
	val.SetUint(uint64(buf[0]))
	return 1, nil
}
// decodeUint16 reads two little-endian bytes into an unsigned 16-bit target.
func decodeUint16(r io.Reader, val reflect.Value) (uint32, error) {
	const width = 2
	buf := make([]byte, width)
	if err := readBytes(r, width, buf); err != nil {
		return 0, err
	}
	val.SetUint(uint64(binary.LittleEndian.Uint16(buf)))
	return width, nil
}
// decodeUint32 reads four little-endian bytes into an unsigned 32-bit target.
func decodeUint32(r io.Reader, val reflect.Value) (uint32, error) {
	const width = 4
	buf := make([]byte, width)
	if err := readBytes(r, width, buf); err != nil {
		return 0, err
	}
	val.SetUint(uint64(binary.LittleEndian.Uint32(buf)))
	return width, nil
}
// decodeUint64 reads eight little-endian bytes into an unsigned 64-bit target.
func decodeUint64(r io.Reader, val reflect.Value) (uint32, error) {
	const width = 8
	buf := make([]byte, width)
	if err := readBytes(r, width, buf); err != nil {
		return 0, err
	}
	val.SetUint(binary.LittleEndian.Uint64(buf))
	return width, nil
}
// decodeBytes reads a uint32 length header followed by that many bytes
// into a []byte target. A zero length yields an empty (non-nil) slice.
func decodeBytes(r io.Reader, val reflect.Value) (uint32, error) {
	header := make([]byte, lengthBytes)
	if err := readBytes(r, lengthBytes, header); err != nil {
		return 0, err
	}
	n := binary.LittleEndian.Uint32(header)
	if n == 0 {
		val.SetBytes([]byte{})
		return lengthBytes, nil
	}
	payload := make([]byte, n)
	if err := readBytes(r, int(n), payload); err != nil {
		return 0, err
	}
	val.SetBytes(payload)
	return lengthBytes + n, nil
}
// decodeByteArray reads a uint32 length header followed by exactly
// val.Len() bytes into a fixed-size byte array target. The encoded size
// must equal the array length.
func decodeByteArray(r io.Reader, val reflect.Value) (uint32, error) {
	sizeEnc := make([]byte, lengthBytes)
	if err := readBytes(r, lengthBytes, sizeEnc); err != nil {
		return 0, err
	}
	size := binary.LittleEndian.Uint32(sizeEnc)
	if size != uint32(val.Len()) {
		// NOTE(review): "euqal" is a typo, but decode_test.go asserts this
		// exact message text — fix both sides together, not here alone.
		return 0, fmt.Errorf("input byte array size (%d) isn't euqal to output array size (%d)", size, val.Len())
	}
	// Slice the array in place so readBytes fills it directly; assumes val
	// is addressable (decode always hands us the pointee of a pointer).
	slice := val.Slice(0, val.Len()).Interface().([]byte)
	if err := readBytes(r, int(size), slice); err != nil {
		return 0, err
	}
	return lengthBytes + size, nil
}
// makeSliceDecoder builds a decoder for slice types. The wire format is
// a uint32 total-byte-length header followed by the element encodings;
// elements are decoded until the declared byte count is consumed, with
// the destination slice grown geometrically as needed.
func makeSliceDecoder(typ reflect.Type) (decoder, error) {
	elemType := typ.Elem()
	// No-lock variant: presumably called while the utils-cache lock is
	// already held — confirm against ssz_utils_cache.go.
	elemSSZUtils, err := cachedSSZUtilsNoAcquireLock(elemType)
	if err != nil {
		return nil, err
	}
	decoder := func(r io.Reader, val reflect.Value) (uint32, error) {
		sizeEnc := make([]byte, lengthBytes)
		if err := readBytes(r, lengthBytes, sizeEnc); err != nil {
			return 0, fmt.Errorf("failed to decode header of slice: %v", err)
		}
		size := binary.LittleEndian.Uint32(sizeEnc)
		if size == 0 {
			// We prefer decode into nil, not empty slice
			return lengthBytes, nil
		}
		for i, decodeSize := 0, uint32(0); decodeSize < size; i++ {
			// Grow slice's capacity if necessary
			if i >= val.Cap() {
				newCap := val.Cap() * 2
				// Skip initial small growth
				if newCap < 4 {
					newCap = 4
				}
				newVal := reflect.MakeSlice(val.Type(), val.Len(), newCap)
				reflect.Copy(newVal, val)
				val.Set(newVal)
			}
			// Add place holder for new element
			if i >= val.Len() {
				val.SetLen(i + 1)
			}
			// Decode and write into the new element
			elemDecodeSize, err := elemSSZUtils.decoder(r, val.Index(i))
			if err != nil {
				return 0, fmt.Errorf("failed to decode element of slice: %v", err)
			}
			// NOTE(review): if the final element overshoots the declared
			// size, the loop still exits and the mismatch is not reported.
			decodeSize += elemDecodeSize
		}
		return lengthBytes + size, nil
	}
	return decoder, nil
}
// makeArrayDecoder builds a decoder for fixed-size (non-byte) array
// types: a uint32 byte-length header followed by exactly val.Len()
// element encodings. Too few or too many encoded bytes is an error.
func makeArrayDecoder(typ reflect.Type) (decoder, error) {
	elemType := typ.Elem()
	elemSSZUtils, err := cachedSSZUtilsNoAcquireLock(elemType)
	if err != nil {
		return nil, err
	}
	decoder := func(r io.Reader, val reflect.Value) (uint32, error) {
		sizeEnc := make([]byte, lengthBytes)
		if err := readBytes(r, lengthBytes, sizeEnc); err != nil {
			// NOTE(review): messages below say "slice" although this path
			// decodes arrays; kept verbatim.
			return 0, fmt.Errorf("failed to decode header of slice: %v", err)
		}
		size := binary.LittleEndian.Uint32(sizeEnc)
		i, decodeSize := 0, uint32(0)
		for ; i < val.Len() && decodeSize < size; i++ {
			elemDecodeSize, err := elemSSZUtils.decoder(r, val.Index(i))
			if err != nil {
				return 0, fmt.Errorf("failed to decode element of slice: %v", err)
			}
			decodeSize += elemDecodeSize
		}
		// Fewer encoded bytes than array slots: input too short.
		if i < val.Len() {
			return 0, errors.New("input is too short")
		}
		// All slots filled but declared size not consumed: input too long.
		if decodeSize < size {
			return 0, errors.New("input is too long")
		}
		return lengthBytes + size, nil
	}
	return decoder, nil
}
// makeStructDecoder builds a decoder for struct types: a uint32
// byte-length header followed by the encodings of each field in the
// order given by structFields (defined elsewhere in this package).
func makeStructDecoder(typ reflect.Type) (decoder, error) {
	fields, err := structFields(typ)
	if err != nil {
		return nil, err
	}
	decoder := func(r io.Reader, val reflect.Value) (uint32, error) {
		sizeEnc := make([]byte, lengthBytes)
		if err := readBytes(r, lengthBytes, sizeEnc); err != nil {
			return 0, fmt.Errorf("failed to decode header of struct: %v", err)
		}
		size := binary.LittleEndian.Uint32(sizeEnc)
		// A zero size leaves every field at its zero value.
		if size == 0 {
			return lengthBytes, nil
		}
		i, decodeSize := 0, uint32(0)
		for ; i < len(fields) && decodeSize < size; i++ {
			f := fields[i]
			fieldDecodeSize, err := f.sszUtils.decoder(r, val.Field(f.index))
			if err != nil {
				// NOTE(review): says "slice" but this is the struct-field
				// path; decode_test.go asserts this exact text.
				return 0, fmt.Errorf("failed to decode field of slice: %v", err)
			}
			decodeSize += fieldDecodeSize
		}
		// Declared size exhausted before all fields were decoded.
		if i < len(fields) {
			return 0, errors.New("input is too short")
		}
		// All fields decoded but declared size not consumed.
		if decodeSize < size {
			return 0, errors.New("input is too long")
		}
		return lengthBytes + size, nil
	}
	return decoder, nil
}
// makePtrDecoder builds a decoder for pointer types. A nil pointer is
// encoded as a bare zero header (0x00000000), so the decoder only sets
// the pointer when the element consumed more than the header itself.
func makePtrDecoder(typ reflect.Type) (decoder, error) {
	elemType := typ.Elem()
	elemSSZUtils, err := cachedSSZUtilsNoAcquireLock(elemType)
	if err != nil {
		return nil, err
	}
	// After considered the use case in Prysm, we've decided that:
	// - We assume we will only encode/decode pointer of array, slice or struct.
	// - The encoding for nil pointer shall be 0x00000000.
	decoder := func(r io.Reader, val reflect.Value) (uint32, error) {
		newVal := reflect.New(elemType)
		elemDecodeSize, err := elemSSZUtils.decoder(r, newVal.Elem())
		if err != nil {
			return 0, fmt.Errorf("failed to decode to object pointed by pointer: %v", err)
		}
		// More than just the 4-byte header was consumed, so the value is
		// non-nil; otherwise val keeps its default (nil) value.
		if elemDecodeSize > lengthBytes {
			val.Set(newVal)
		} // Else we leave val to its default value which is nil.
		return elemDecodeSize, nil
	}
	return decoder, nil
}
func readBytes(r io.Reader, size int, b []byte) error {
if size != len(b) {
return fmt.Errorf("output buffer size is %d while expected read size is %d", len(b), size)
}
readLen, err := r.Read(b)
if readLen != size {
return fmt.Errorf("can only read %d bytes while expected to read %d bytes", readLen, size)
}
if err != nil {
return fmt.Errorf("failed to read from input: %v", err)
}
return nil
}
// decodeError is what gets reported to the decoder user in error case.
type decodeError struct {
msg string
typ reflect.Type
}
func newDecodeError(msg string, typ reflect.Type) *decodeError {
return &decodeError{msg, typ}
}
func (err *decodeError) Error() string {
return fmt.Sprintf("decode error: %s for output type %v", err.msg, err.typ)
}

View File

@@ -1,230 +0,0 @@
package ssz
import (
"bytes"
"encoding/hex"
"fmt"
"reflect"
"testing"
"github.com/prysmaticlabs/prysm/shared/featureconfig"
)
// decodeTest describes one table-driven decoding case: a hex-encoded
// input, a pointer target, and either the expected decoded value or the
// expected error text.
type decodeTest struct {
	input string      // hex SSZ input; spaces are stripped before decoding
	ptr   interface{} // pointer passed to Decode
	value interface{} // expected *ptr when error is empty
	error string      // expected full error string when non-empty
}
// decodeTests is the fixture table driven by runTests, grouped by target
// kind (scalars, bytes, slices, arrays, structs, pointers) followed by
// expected-error cases.
// Notice: spaces in the input string will be ignored.
var decodeTests = []decodeTest{
	// bool
	{input: "00", ptr: new(bool), value: false},
	{input: "01", ptr: new(bool), value: true},
	// uint8
	{input: "00", ptr: new(uint8), value: uint8(0)},
	{input: "01", ptr: new(uint8), value: uint8(1)},
	{input: "10", ptr: new(uint8), value: uint8(16)},
	{input: "80", ptr: new(uint8), value: uint8(128)},
	{input: "FF", ptr: new(uint8), value: uint8(255)},
	// uint16
	{input: "0000", ptr: new(uint16), value: uint16(0)},
	{input: "0100", ptr: new(uint16), value: uint16(1)},
	{input: "1000", ptr: new(uint16), value: uint16(16)},
	{input: "8000", ptr: new(uint16), value: uint16(128)},
	{input: "FF00", ptr: new(uint16), value: uint16(255)},
	{input: "FFFF", ptr: new(uint16), value: uint16(65535)},
	// uint32
	{input: "00000000", ptr: new(uint32), value: uint32(0)},
	{input: "01000000", ptr: new(uint32), value: uint32(1)},
	{input: "10000000", ptr: new(uint32), value: uint32(16)},
	{input: "80000000", ptr: new(uint32), value: uint32(128)},
	{input: "FF000000", ptr: new(uint32), value: uint32(255)},
	{input: "FFFF0000", ptr: new(uint32), value: uint32(65535)},
	{input: "FFFFFFFF", ptr: new(uint32), value: uint32(4294967295)},
	// uint64
	{input: "0000000000000000", ptr: new(uint64), value: uint64(0)},
	{input: "0100000000000000", ptr: new(uint64), value: uint64(1)},
	{input: "1000000000000000", ptr: new(uint64), value: uint64(16)},
	{input: "8000000000000000", ptr: new(uint64), value: uint64(128)},
	{input: "FF00000000000000", ptr: new(uint64), value: uint64(255)},
	{input: "FFFF000000000000", ptr: new(uint64), value: uint64(65535)},
	{input: "FFFFFFFF00000000", ptr: new(uint64), value: uint64(4294967295)},
	{input: "FFFFFFFFFFFFFFFF", ptr: new(uint64), value: uint64(18446744073709551615)},
	// bytes
	{input: "00000000", ptr: new([]byte), value: []byte{}},
	{input: "0100000001", ptr: new([]byte), value: []byte{1}},
	{input: "06000000 010203040506", ptr: new([]byte), value: []byte{1, 2, 3, 4, 5, 6}},
	// slice
	{input: "00000000", ptr: new([]uint16), value: []uint16(nil)},
	{input: "04000000 0100 0200", ptr: new([]uint16), value: []uint16{1, 2}},
	{input: "18000000 08000000 0100 0200 0300 0400 08000000 0500 0600 0700 0800", ptr: new([][]uint16),
		value: [][]uint16{
			{1, 2, 3, 4},
			{5, 6, 7, 8},
		},
	},
	// array
	{input: "01000000 01", ptr: new([1]byte), value: [1]byte{1}},
	{input: "06000000 010203040506", ptr: new([6]byte), value: [6]byte{1, 2, 3, 4, 5, 6}},
	{input: "02000000 0100", ptr: new([1]uint16), value: [1]uint16{1}},
	{input: "04000000 0100 0200", ptr: new([2]uint16), value: [2]uint16{1, 2}},
	{input: "18000000 08000000 0100 0200 0300 0400 08000000 0500 0600 0700 0800", ptr: new([2][4]uint16),
		value: [2][4]uint16{
			{1, 2, 3, 4},
			{5, 6, 7, 8},
		},
	},
	// struct
	{input: "03000000 00 0000", ptr: new(simpleStruct), value: simpleStruct{}},
	{input: "03000000 0200 01", ptr: new(simpleStruct), value: simpleStruct{B: 2, A: 1}},
	{input: "07000000 03 02000000 0600", ptr: new(outerStruct),
		value: outerStruct{
			V:    3,
			SubV: innerStruct{6},
		}},
	// slice + struct
	{input: "12000000 0E000000 03000000 020001 03000000 040003", ptr: new(arrayStruct),
		value: arrayStruct{
			V: []simpleStruct{
				{B: 2, A: 1},
				{B: 4, A: 3},
			},
		}},
	{input: "16000000 07000000 03 02000000 0600 07000000 05 02000000 0700", ptr: new([]outerStruct),
		value: []outerStruct{
			{V: 3, SubV: innerStruct{V: 6}},
			{V: 5, SubV: innerStruct{V: 7}},
		}},
	// pointer
	{input: "03000000 0200 01", ptr: new(*simpleStruct), value: &simpleStruct{B: 2, A: 1}},
	{input: "08000000 03000000 0200 01 03", ptr: new(pointerStruct),
		value: pointerStruct{P: &simpleStruct{B: 2, A: 1}, V: 3}},
	{input: "08000000 03000000 0200 01 03", ptr: new(*pointerStruct),
		value: &pointerStruct{P: &simpleStruct{B: 2, A: 1}, V: 3}},
	{input: "04000000 01020304", ptr: new(*[]uint8), value: &[]uint8{1, 2, 3, 4}},
	{input: "10000000 0100000000000000 0200000000000000", ptr: new(*[]uint64), value: &[]uint64{1, 2}},
	{input: "0E000000 03000000 0200 01 03000000 0400 03", ptr: new([]*simpleStruct),
		value: []*simpleStruct{
			{B: 2, A: 1},
			{B: 4, A: 3},
		},
	},
	{input: "0E000000 03000000 0200 01 03000000 0400 03", ptr: new([2]*simpleStruct),
		value: [2]*simpleStruct{
			{B: 2, A: 1},
			{B: 4, A: 3},
		},
	},
	{input: "18000000 08000000 03000000 0200 01 00 08000000 03000000 0400 03 01", ptr: new([]*pointerStruct),
		value: []*pointerStruct{
			{P: &simpleStruct{B: 2, A: 1}, V: 0},
			{P: &simpleStruct{B: 4, A: 3}, V: 1},
		},
	},
	// nil pointer
	{input: "00000000", ptr: new(*[]uint8), value: (*[]uint8)(nil)},
	{input: "05000000 00000000 00", ptr: new(pointerStruct), value: pointerStruct{}},
	{input: "05000000 00000000 00", ptr: new(*pointerStruct), value: &pointerStruct{}},
	{input: "08000000 00000000 00000000", ptr: new([]*pointerStruct), value: []*pointerStruct{nil, nil}},
	// error: nil target
	{input: "00", ptr: nil, value: nil, error: "decode error: cannot decode into nil for output type <nil>"},
	// error: non-pointer target
	{input: "00", ptr: uint8(0), error: "decode error: can only decode into pointer target for output type uint8"},
	// error: unsupported type
	{input: "00", ptr: new(string), error: "decode error: type string is not serializable for output type string"},
	// error: bool: wrong input value
	{input: "02", ptr: new(bool), error: "decode error: expect 0 or 1 for decoding bool but got 2 for output type bool"},
	// error: uint16: wrong header
	{input: "00", ptr: new(uint16), error: "decode error: can only read 1 bytes while expected to read 2 bytes for output type uint16"},
	// error: bytes: wrong input
	{input: "01000000", ptr: new([]byte), error: "decode error: can only read 0 bytes while expected to read 1 bytes for output type []uint8"},
	// error: slice: wrong header
	{input: "010000", ptr: new([]uint16), error: "decode error: failed to decode header of slice: can only read 3 bytes while expected to read 4 bytes for output type []uint16"},
	// error: slice: wrong input
	{input: "01000000", ptr: new([]uint16), error: "decode error: failed to decode element of slice: can only read 0 bytes while expected to read 2 bytes for output type []uint16"},
	// error: byte array: wrong input
	{input: "01000000 01", ptr: new([2]byte), error: "decode error: input byte array size (1) isn't euqal to output array size (2) for output type [2]uint8"},
	// error: array: input too short
	{input: "02000000 0100", ptr: new([2]uint16), error: "decode error: input is too short for output type [2]uint16"},
	// error: array: input too long
	{input: "04000000 0100 0200", ptr: new([1]uint16), error: "decode error: input is too long for output type [1]uint16"},
	// error: struct: wrong header
	{input: "010000", ptr: new(simpleStruct), error: "decode error: failed to decode header of struct: can only read 3 bytes while expected to read 4 bytes for output type ssz.simpleStruct"},
	// error: struct: wrong input
	{input: "03000000 01 02", ptr: new(simpleStruct), error: "decode error: failed to decode field of slice: can only read 0 bytes while expected to read 1 bytes for output type ssz.simpleStruct"},
	// error: struct: input too short
	{input: "02000000 0200", ptr: new(simpleStruct), error: "decode error: input is too short for output type ssz.simpleStruct"},
	// error: struct: input too long
	{input: "04000000 0200 01 01", ptr: new(simpleStruct), error: "decode error: input is too long for output type ssz.simpleStruct"},
}
// init disables the CacheTreeHash feature flag before the tests run.
func init() {
	featureconfig.InitFeatureConfig(&featureconfig.FeatureFlagConfig{
		CacheTreeHash: false,
	})
}
// runTests executes every case in decodeTests against the supplied
// decode function, checking both error expectations and decoded values.
// stripSpace is defined elsewhere in this package.
func runTests(t *testing.T, decode func([]byte, interface{}) error) {
	for i, test := range decodeTests {
		input, err := hex.DecodeString(stripSpace(test.input))
		if err != nil {
			t.Errorf("test %d: invalid hex input %q", i, test.input)
			continue
		}
		err = decode(input, test.ptr)
		// Check unexpected error
		if test.error == "" && err != nil {
			t.Errorf("test %d: unexpected decode error: %v\ndecoding into %T\ninput %q",
				i, err, test.ptr, test.input)
			continue
		}
		// Check expected error
		if test.error != "" && fmt.Sprint(err) != test.error {
			t.Errorf("test %d: decode error mismatch\ngot %v\nwant %v\ndecoding into %T\ninput %q",
				i, err, test.error, test.ptr, test.input)
			continue
		}
		// Check expected output
		if err == nil {
			output := reflect.ValueOf(test.ptr).Elem().Interface()
			if !reflect.DeepEqual(output, test.value) {
				t.Errorf("test %d: value mismatch\ngot %#v\nwant %#v\ndecoding into %T\ninput %q",
					i, output, test.value, test.ptr, test.input)
			}
		}
	}
}
// TestDecodeWithByteReader runs the shared decode table using the
// package-level Decode with a bytes.Reader input.
func TestDecodeWithByteReader(t *testing.T) {
	runTests(t, func(input []byte, into interface{}) error {
		return Decode(bytes.NewReader(input), into)
	})
}

View File

@@ -1,23 +0,0 @@
/*
Package ssz implements the Simple Serialize algorithm specified at
https://github.com/ethereum/eth2.0-specs/blob/master/specs/simple-serialize.md
Currently directly supported types:
bool
uint8
uint16
uint32
uint64
bytes
slice
struct
Types that can be implicitly supported:
address:
use byte slice of length 20 instead
hash:
use byte slice of length 32 instead if the hash is 32 bytes long, for example
*/
package ssz

View File

@@ -1,314 +0,0 @@
package ssz
import (
"encoding/binary"
"errors"
"fmt"
"io"
"reflect"
)
// lengthBytes is the width of the little-endian uint32 length header
// written before variable-size encodings.
const lengthBytes = 4

// Encodable defines the interface for support ssz encoding.
type Encodable interface {
	// EncodeSSZ writes the SSZ encoding of the receiver to the writer.
	EncodeSSZ(io.Writer) error
	// EncodeSSZSize returns the expected encoded size without encoding.
	EncodeSSZSize() (uint32, error)
}
// Encode encodes val and output the result into w.
func Encode(w io.Writer, val interface{}) error {
	buf := &encbuf{}
	if err := buf.encode(val); err != nil {
		return err
	}
	return buf.toWriter(w)
}
// EncodeSize returns the target encoding size without doing the actual encoding.
// This is an optional pass. You don't need to call this before the encoding unless you
// want to know the output size first. It returns an error for nil or
// unsupported values.
func EncodeSize(val interface{}) (uint32, error) {
	return encodeSize(val)
}
// encbuf accumulates encoded output before it is flushed to a writer.
type encbuf struct {
	str []byte // bytes appended so far
}
// encode appends the ssz encoding of val to the buffer. All failures
// are wrapped in an encodeError carrying the value's type.
func (w *encbuf) encode(val interface{}) error {
	if val == nil {
		return newEncodeError("untyped nil is not supported", nil)
	}
	rval := reflect.ValueOf(val)
	// cachedSSZUtils is defined in ssz_utils_cache.go.
	sszUtils, err := cachedSSZUtils(rval.Type())
	if err != nil {
		return newEncodeError(fmt.Sprint(err), rval.Type())
	}
	if err = sszUtils.encoder(rval, w); err != nil {
		return newEncodeError(fmt.Sprint(err), rval.Type())
	}
	return nil
}
// encodeSize computes the encoded size of val without encoding it,
// using the cached encodeSizer for the value's type.
func encodeSize(val interface{}) (uint32, error) {
	if val == nil {
		return 0, newEncodeError("untyped nil is not supported", nil)
	}
	rval := reflect.ValueOf(val)
	sszUtils, err := cachedSSZUtils(rval.Type())
	if err != nil {
		return 0, newEncodeError(fmt.Sprint(err), rval.Type())
	}
	var size uint32
	if size, err = sszUtils.encodeSizer(rval); err != nil {
		return 0, newEncodeError(fmt.Sprint(err), rval.Type())
	}
	return size, nil
}
// toWriter flushes the accumulated encoding in w.str to out.
func (w *encbuf) toWriter(out io.Writer) error {
	_, err := out.Write(w.str)
	return err
}
// makeEncoder returns the encoder and encode-sizer functions for typ,
// or an error if the type is not serializable. Scalars have constant
// sizes; composite types delegate to constructor helpers.
func makeEncoder(typ reflect.Type) (encoder, encodeSizer, error) {
	kind := typ.Kind()
	switch {
	case kind == reflect.Bool:
		return encodeBool, func(reflect.Value) (uint32, error) { return 1, nil }, nil
	case kind == reflect.Uint8:
		return encodeUint8, func(reflect.Value) (uint32, error) { return 1, nil }, nil
	case kind == reflect.Uint16:
		return encodeUint16, func(reflect.Value) (uint32, error) { return 2, nil }, nil
	case kind == reflect.Uint32:
		return encodeUint32, func(reflect.Value) (uint32, error) { return 4, nil }, nil
	case kind == reflect.Int32:
		return encodeInt32, func(reflect.Value) (uint32, error) { return 4, nil }, nil
	case kind == reflect.Uint64:
		return encodeUint64, func(reflect.Value) (uint32, error) { return 8, nil }, nil
	case kind == reflect.Slice && typ.Elem().Kind() == reflect.Uint8:
		return makeBytesEncoder()
	case kind == reflect.Slice:
		return makeSliceEncoder(typ)
	case kind == reflect.Array && typ.Elem().Kind() == reflect.Uint8:
		return makeByteArrayEncoder()
	case kind == reflect.Array:
		// Non-byte arrays reuse the slice encoder.
		return makeSliceEncoder(typ)
	case kind == reflect.Struct:
		return makeStructEncoder(typ)
	case kind == reflect.Ptr:
		return makePtrEncoder(typ)
	default:
		return nil, nil, fmt.Errorf("type %v is not serializable", typ)
	}
}
// encodeBool appends the SSZ encoding of a bool: a single 0x01/0x00 byte.
func encodeBool(val reflect.Value, w *encbuf) error {
	b := byte(0)
	if val.Bool() {
		b = 1
	}
	w.str = append(w.str, b)
	return nil
}
// encodeUint8 appends a uint8 as a single byte.
func encodeUint8(val reflect.Value, w *encbuf) error {
	w.str = append(w.str, uint8(val.Uint()))
	return nil
}
// encodeUint16 appends a uint16 in little-endian order (2 bytes).
func encodeUint16(val reflect.Value, w *encbuf) error {
	var b [2]byte
	binary.LittleEndian.PutUint16(b[:], uint16(val.Uint()))
	w.str = append(w.str, b[:]...)
	return nil
}
// encodeUint32 appends a uint32 in little-endian order (4 bytes).
func encodeUint32(val reflect.Value, w *encbuf) error {
	var b [4]byte
	binary.LittleEndian.PutUint32(b[:], uint32(val.Uint()))
	w.str = append(w.str, b[:]...)
	return nil
}
// encodeInt32 appends an int32 in little-endian order (4 bytes,
// two's-complement bit pattern).
func encodeInt32(val reflect.Value, w *encbuf) error {
	var b [4]byte
	binary.LittleEndian.PutUint32(b[:], uint32(val.Int()))
	w.str = append(w.str, b[:]...)
	return nil
}
// encodeUint64 appends a uint64 in little-endian order (8 bytes).
func encodeUint64(val reflect.Value, w *encbuf) error {
	var b [8]byte
	binary.LittleEndian.PutUint64(b[:], val.Uint())
	w.str = append(w.str, b[:]...)
	return nil
}
// makeBytesEncoder returns the encoder/sizer pair for []byte values.
// Encoding layout: 4-byte little-endian length prefix followed by the
// raw bytes.
func makeBytesEncoder() (encoder, encodeSizer, error) {
	encoder := func(val reflect.Value, w *encbuf) error {
		b := val.Bytes()
		// The length prefix is a uint32, so any length >= 2^32 cannot be
		// represented. (The previous `2<<32` bound was twice too large,
		// letting lengths in [2^32, 2^33) silently truncate, and the
		// untyped constant overflowed int on 32-bit platforms.)
		if uint64(len(b)) >= 1<<32 {
			return errors.New("bytes oversize")
		}
		sizeEnc := make([]byte, lengthBytes)
		binary.LittleEndian.PutUint32(sizeEnc, uint32(len(b)))
		w.str = append(w.str, sizeEnc...)
		w.str = append(w.str, b...)
		return nil
	}
	encodeSizer := func(val reflect.Value) (uint32, error) {
		if uint64(val.Len()) >= 1<<32 {
			return 0, errors.New("bytes oversize")
		}
		return lengthBytes + uint32(val.Len()), nil
	}
	return encoder, encodeSizer, nil
}
// makeByteArrayEncoder returns the encoder/sizer pair for [N]byte values.
// The array is encoded exactly like a byte slice: 4-byte little-endian
// length prefix followed by the raw bytes.
func makeByteArrayEncoder() (encoder, encodeSizer, error) {
	encoder := func(val reflect.Value, w *encbuf) error {
		if !val.CanAddr() {
			// reflect.Value.Slice requires the value to be addressable.
			// Make it addressable by copying.
			copyVal := reflect.New(val.Type()).Elem()
			copyVal.Set(val)
			val = copyVal
		}
		// The length prefix is a uint32; reject lengths that don't fit.
		// (The previous `2<<32` bound was twice too large and overflowed
		// int on 32-bit platforms.)
		if uint64(val.Len()) >= 1<<32 {
			return errors.New("bytes oversize")
		}
		sizeEnc := make([]byte, lengthBytes)
		binary.LittleEndian.PutUint32(sizeEnc, uint32(val.Len()))
		w.str = append(w.str, sizeEnc...)
		w.str = append(w.str, val.Slice(0, val.Len()).Bytes()...)
		return nil
	}
	encodeSizer := func(val reflect.Value) (uint32, error) {
		if uint64(val.Len()) >= 1<<32 {
			return 0, errors.New("bytes oversize")
		}
		return lengthBytes + uint32(val.Len()), nil
	}
	return encoder, encodeSizer, nil
}
// makeSliceEncoder returns the encoder/sizer pair for slices and arrays
// with non-byte element types. Layout: 4-byte little-endian total-size
// prefix followed by the concatenated element encodings.
func makeSliceEncoder(typ reflect.Type) (encoder, encodeSizer, error) {
	elemSSZUtils, err := cachedSSZUtilsNoAcquireLock(typ.Elem())
	if err != nil {
		return nil, nil, fmt.Errorf("failed to get ssz utils: %v", err)
	}
	encoder := func(val reflect.Value, w *encbuf) error {
		// Reserve room for the size prefix; it is patched in once the
		// total encoded size of the elements is known.
		origBufSize := len(w.str)
		totalSizeEnc := make([]byte, lengthBytes)
		w.str = append(w.str, totalSizeEnc...)
		for i := 0; i < val.Len(); i++ {
			if err := elemSSZUtils.encoder(val.Index(i), w); err != nil {
				return fmt.Errorf("failed to encode element of slice/array: %v", err)
			}
		}
		totalSize := len(w.str) - lengthBytes - origBufSize
		// The prefix is a uint32; reject sizes that don't fit. (The
		// previous `2<<32` bound was twice too large and overflowed int
		// on 32-bit platforms.)
		if uint64(totalSize) >= 1<<32 {
			return errors.New("slice oversize")
		}
		binary.LittleEndian.PutUint32(totalSizeEnc, uint32(totalSize))
		copy(w.str[origBufSize:origBufSize+lengthBytes], totalSizeEnc)
		return nil
	}
	encodeSizer := func(val reflect.Value) (uint32, error) {
		// Sum the size of every element rather than assuming all elements
		// are as large as the first one; elements of variable-size types
		// (nested slices, structs containing slices, pointers) can differ
		// in encoded length. For fixed-size elements this is equivalent
		// to lengthBytes + n*elemSize.
		totalSize := uint32(lengthBytes)
		for i := 0; i < val.Len(); i++ {
			elemSize, err := elemSSZUtils.encodeSizer(val.Index(i))
			if err != nil {
				return 0, errors.New("failed to get encode size of element of slice/array")
			}
			totalSize += elemSize
		}
		return totalSize, nil
	}
	return encoder, encodeSizer, nil
}
// makeStructEncoder returns the encoder/sizer pair for struct types.
// Layout: 4-byte little-endian total-size prefix followed by the
// encodings of the serializable fields in order.
func makeStructEncoder(typ reflect.Type) (encoder, encodeSizer, error) {
	fields, err := structFields(typ)
	if err != nil {
		return nil, nil, err
	}
	encoder := func(val reflect.Value, w *encbuf) error {
		// Reserve room for the size prefix; it is patched in once the
		// total encoded size of the fields is known.
		origBufSize := len(w.str)
		totalSizeEnc := make([]byte, lengthBytes)
		w.str = append(w.str, totalSizeEnc...)
		for _, f := range fields {
			if err := f.sszUtils.encoder(val.Field(f.index), w); err != nil {
				return fmt.Errorf("failed to encode field of struct: %v", err)
			}
		}
		totalSize := len(w.str) - lengthBytes - origBufSize
		// The prefix is a uint32; reject sizes that don't fit. (The
		// previous `2<<32` bound was twice too large and overflowed int
		// on 32-bit platforms.)
		if uint64(totalSize) >= 1<<32 {
			return errors.New("struct oversize")
		}
		binary.LittleEndian.PutUint32(totalSizeEnc, uint32(totalSize))
		copy(w.str[origBufSize:origBufSize+lengthBytes], totalSizeEnc)
		return nil
	}
	encodeSizer := func(val reflect.Value) (uint32, error) {
		totalSize := uint32(0)
		for _, f := range fields {
			fieldSize, err := f.sszUtils.encodeSizer(val.Field(f.index))
			if err != nil {
				return 0, fmt.Errorf("failed to get encode size for field of struct: %v", err)
			}
			totalSize += fieldSize
		}
		return lengthBytes + totalSize, nil
	}
	return encoder, encodeSizer, nil
}
// makePtrEncoder returns the encoder/sizer pair for pointer types.
//
// Only pointers to arrays, slices and structs are expected; a nil
// pointer is encoded as the four zero bytes 0x00000000 (an empty
// length prefix).
func makePtrEncoder(typ reflect.Type) (encoder, encodeSizer, error) {
	elemSSZUtils, err := cachedSSZUtilsNoAcquireLock(typ.Elem())
	if err != nil {
		return nil, nil, err
	}
	encoder := func(val reflect.Value, w *encbuf) error {
		if !val.IsNil() {
			return elemSSZUtils.encoder(val.Elem(), w)
		}
		w.str = append(w.str, make([]byte, lengthBytes)...)
		return nil
	}
	encodeSizer := func(val reflect.Value) (uint32, error) {
		if !val.IsNil() {
			return elemSSZUtils.encodeSizer(val.Elem())
		}
		return lengthBytes, nil
	}
	return encoder, encodeSizer, nil
}
// encodeError is what gets reported to the encoder user in error case.
type encodeError struct {
	msg string       // human-readable reason for the failure
	typ reflect.Type // type being encoded; nil for untyped nil input
}
// newEncodeError constructs an encodeError for the given message and
// input type (typ may be nil for untyped nil input).
func newEncodeError(msg string, typ reflect.Type) *encodeError {
	e := &encodeError{msg: msg, typ: typ}
	return e
}

// Error implements the error interface.
func (err *encodeError) Error() string {
	return "encode error: " + err.msg + " for input type " + fmt.Sprintf("%v", err.typ)
}

View File

@@ -1,325 +0,0 @@
package ssz
import (
"bytes"
"encoding/hex"
"fmt"
"strings"
"testing"
)
// encTest describes one encoding test case: the value to encode plus
// either the expected hex output or the expected error message.
type encTest struct {
	val interface{}
	output, error string // exactly one of output/error is meaningful per case
}

// encSizeTest describes one size-calculation test case: the value plus
// either the expected encoded size or the expected error message.
type encSizeTest struct {
	val interface{}
	size uint32
	error string
}
// encodeTests is the master table for the encoding tests.
// Notice: spaces in the output string will be ignored.
var encodeTests = []encTest{
	// boolean
	{val: false, output: "00"},
	{val: true, output: "01"},
	// uint8
	{val: uint8(0), output: "00"},
	{val: uint8(1), output: "01"},
	{val: uint8(16), output: "10"},
	{val: uint8(128), output: "80"},
	{val: uint8(255), output: "FF"},
	// uint16
	{val: uint16(0), output: "0000"},
	{val: uint16(1), output: "0100"},
	{val: uint16(16), output: "1000"},
	{val: uint16(128), output: "8000"},
	{val: uint16(255), output: "FF00"},
	{val: uint16(65535), output: "FFFF"},
	// uint32
	{val: uint32(0), output: "00000000"},
	{val: uint32(1), output: "01000000"},
	{val: uint32(16), output: "10000000"},
	{val: uint32(128), output: "80000000"},
	{val: uint32(255), output: "FF000000"},
	{val: uint32(65535), output: "FFFF0000"},
	{val: uint32(4294967295), output: "FFFFFFFF"},
	// uint64
	{val: uint64(0), output: "0000000000000000"},
	{val: uint64(1), output: "0100000000000000"},
	{val: uint64(16), output: "1000000000000000"},
	{val: uint64(128), output: "8000000000000000"},
	{val: uint64(255), output: "FF00000000000000"},
	{val: uint64(65535), output: "FFFF000000000000"},
	{val: uint64(4294967295), output: "FFFFFFFF00000000"},
	{val: uint64(18446744073709551615), output: "FFFFFFFFFFFFFFFF"},
	// bytes: 4-byte little-endian length prefix + raw bytes
	{val: []byte{}, output: "00000000"},
	{val: []byte{1}, output: "01000000 01"},
	{val: []byte{1, 2, 3, 4, 5, 6}, output: "06000000 010203040506"},
	// slice: 4-byte total-size prefix + concatenated element encodings
	{val: []uint16{}, output: "00000000"},
	{val: []uint16{1}, output: "02000000 0100"},
	{val: []uint16{1, 2}, output: "04000000 0100 0200"},
	{val: [][]uint16{
		{1, 2, 3, 4},
		{5, 6, 7, 8},
	}, output: "18000000 08000000 0100 0200 0300 0400 08000000 0500 0600 0700 0800"},
	// array: encoded identically to the equivalent slice
	{val: [1]byte{1}, output: "01000000 01"},
	{val: [6]byte{1, 2, 3, 4, 5, 6}, output: "06000000 010203040506"},
	{val: [1]uint16{1}, output: "02000000 0100"},
	{val: [2]uint16{1, 2}, output: "04000000 0100 0200"},
	{val: [2][4]uint16{
		{1, 2, 3, 4},
		{5, 6, 7, 8},
	}, output: "18000000 08000000 0100 0200 0300 0400 08000000 0500 0600 0700 0800"},
	// struct: 4-byte total-size prefix + fields in order
	{val: simpleStruct{}, output: "03000000 00 0000"},
	{val: simpleStruct{B: 2, A: 1}, output: "03000000 0200 01"},
	{val: outerStruct{
		V:    3,
		SubV: innerStruct{V: 6},
	}, output: "07000000 03 02000000 0600"},
	// slice + struct
	{val: arrayStruct{
		V: []simpleStruct{
			{B: 2, A: 1},
			{B: 4, A: 3},
		},
	}, output: "12000000 0E000000 03000000 020001 03000000 040003"},
	{val: []outerStruct{
		{V: 3, SubV: innerStruct{V: 6}},
		{V: 5, SubV: innerStruct{V: 7}},
	}, output: "16000000 07000000 03 02000000 0600 07000000 05 02000000 0700"},
	// pointer: encoded as its pointee
	{val: &simpleStruct{B: 2, A: 1}, output: "03000000 0200 01"},
	{val: pointerStruct{P: &simpleStruct{B: 2, A: 1}, V: 3}, output: "08000000 03000000 0200 01 03"},
	{val: &pointerStruct{P: &simpleStruct{B: 2, A: 1}, V: 3}, output: "08000000 03000000 0200 01 03"},
	{val: &[]uint8{1, 2, 3, 4}, output: "04000000 01020304"},
	{val: &[]uint64{1, 2}, output: "10000000 0100000000000000 0200000000000000"},
	{val: []*simpleStruct{
		{B: 2, A: 1},
		{B: 4, A: 3},
	}, output: "0E000000 03000000 0200 01 03000000 0400 03"},
	{val: [2]*simpleStruct{
		{B: 2, A: 1},
		{B: 4, A: 3},
	}, output: "0E000000 03000000 0200 01 03000000 0400 03"},
	{val: []*pointerStruct{
		{P: &simpleStruct{B: 2, A: 1}, V: 0},
		{P: &simpleStruct{B: 4, A: 3}, V: 1},
	}, output: "18000000 08000000 03000000 0200 01 00 08000000 03000000 0400 03 01"},
	// nil pointer encodes as 0x00000000 (not defined in spec)
	{val: (*[]uint8)(nil), output: "00000000"},
	{val: pointerStruct{}, output: "05000000 00000000 00"},
	{val: &pointerStruct{}, output: "05000000 00000000 00"},
	{val: []*pointerStruct{nil, nil}, output: "08000000 00000000 00000000"},
	// error: untyped nil pointer
	{val: nil, error: "encode error: untyped nil is not supported for input type <nil>"},
	// error: unsupported type
	{val: string("abc"), error: "encode error: type string is not serializable for input type string"},
}
// encodeSizeTests is the master table for the EncodeSize tests; each
// expected size equals the byte length of the corresponding encoding
// in encodeTests (fixed-size scalars carry no length prefix).
var encodeSizeTests = []encSizeTest{
	// boolean
	{val: false, size: 1},
	// uint8
	{val: uint8(0), size: 1},
	{val: uint8(255), size: 1},
	// uint16
	{val: uint16(0), size: 2},
	{val: uint16(65535), size: 2},
	// uint32
	{val: uint32(0), size: 4},
	{val: uint32(65535), size: 4},
	// uint64
	{val: uint64(0), size: 8},
	{val: uint64(65535), size: 8},
	// bytes: 4-byte prefix + payload
	{val: []byte{}, size: 4},
	{val: []byte{1}, size: 5},
	{val: []byte{1, 2, 3, 4, 5, 6}, size: 10},
	// slice
	{val: []uint16{}, size: 4},
	{val: []uint16{1}, size: 6},
	{val: []uint16{1, 2}, size: 8},
	{val: [][]uint16{
		{1, 2, 3, 4},
		{5, 6, 7, 8},
	}, size: 28},
	// array
	{val: [1]byte{1}, size: 5},
	{val: [6]byte{1, 2, 3, 4, 5, 6}, size: 10},
	{val: [1]uint16{1}, size: 6},
	{val: [2]uint16{1, 2}, size: 8},
	{val: [2][4]uint16{
		{1, 2, 3, 4},
		{5, 6, 7, 8},
	}, size: 28},
	// struct
	{val: simpleStruct{}, size: 7},
	{val: simpleStruct{B: 2, A: 1}, size: 7},
	{val: outerStruct{
		V:    3,
		SubV: innerStruct{V: 6},
	}, size: 11},
	// slice + struct
	{val: arrayStruct{
		V: []simpleStruct{
			{B: 2, A: 1},
			{B: 4, A: 3},
		},
	}, size: 22},
	{val: []outerStruct{
		{V: 3, SubV: innerStruct{V: 6}},
		{V: 5, SubV: innerStruct{V: 7}},
	}, size: 26},
	// pointer: sized as its pointee
	{val: &simpleStruct{B: 2, A: 1}, size: 7},
	{val: pointerStruct{P: &simpleStruct{B: 2, A: 1}, V: 3}, size: 12},
	{val: &pointerStruct{P: &simpleStruct{B: 2, A: 1}, V: 3}, size: 12},
	{val: &[]uint8{1, 2, 3, 4}, size: 8},
	{val: &[]uint64{1, 2}, size: 20},
	{val: []*simpleStruct{
		{B: 2, A: 1},
		{B: 4, A: 3},
	}, size: 18},
	{val: []*pointerStruct{
		{P: &simpleStruct{B: 2, A: 1}, V: 0},
		{P: &simpleStruct{B: 4, A: 3}, V: 1},
	}, size: 28},
	// nil pointer sizes as an empty 4-byte prefix (not defined in spec)
	{val: (*[]uint8)(nil), size: 4},
	{val: pointerStruct{}, size: 9},
	{val: &pointerStruct{}, size: 9},
	{val: []*pointerStruct{nil, nil}, size: 12},
	// error: untyped nil pointer
	{val: nil, error: "encode error: untyped nil is not supported for input type <nil>"},
	// error: unsupported type
	{val: string("abc"), error: "encode error: type string is not serializable for input type string"},
}
// runEncTests runs every case in encodeTests through the supplied encode
// function and reports output or error-text mismatches.
func runEncTests(t *testing.T, encode func(val interface{}) ([]byte, error)) {
	for i, test := range encodeTests {
		output, err := encode(test.val)
		switch {
		case test.error == "" && err != nil:
			// No error expected but one occurred.
			t.Errorf("test %d: unexpected error: %v\nvalue %#v\ntype %T",
				i, err, test.val, test.val)
		case test.error != "" && fmt.Sprint(err) != test.error:
			// An error was expected; its text must match exactly.
			t.Errorf("test %d: error mismatch\ngot %v\nwant %v\nvalue %#v\ntype %T",
				i, err, test.error, test.val, test.val)
		case err == nil && !bytes.Equal(output, unhex(test.output)):
			// Successful encodings must match the expected hex bytes.
			t.Errorf("test %d: output mismatch:\ngot %X\nwant %s\nvalue %#v\ntype %T",
				i, output, stripSpace(test.output), test.val, test.val)
		}
	}
}
// runEncSizeTests runs every case in encodeSizeTests through the supplied
// encodeSize function and reports size or error-text mismatches.
func runEncSizeTests(t *testing.T, encodeSize func(val interface{}) (uint32, error)) {
	for i, test := range encodeSizeTests {
		size, err := encodeSize(test.val)
		switch {
		case test.error == "" && err != nil:
			// No error expected but one occurred.
			t.Errorf("test %d: unexpected error: %v\nvalue %#v\ntype %T",
				i, err, test.val, test.val)
		case test.error != "" && fmt.Sprint(err) != test.error:
			// An error was expected; its text must match exactly.
			t.Errorf("test %d: error mismatch\ngot %v\nwant %v\nvalue %#v\ntype %T",
				i, err, test.error, test.val, test.val)
		case err == nil && size != test.size:
			// Successful size calculations must match exactly.
			t.Errorf("test %d: output mismatch:\ngot %d\nwant %d\nvalue %#v\ntype %T",
				i, size, test.size, test.val, test.val)
		}
	}
}
func TestEncode(t *testing.T) {
runEncTests(t, func(val interface{}) ([]byte, error) {
b := new(bytes.Buffer)
err := Encode(b, val)
return b.Bytes(), err
})
}
func TestEncodeSize(t *testing.T) {
runEncSizeTests(t, func(val interface{}) (uint32, error) {
size, err := EncodeSize(val)
return size, err
})
}
// unhex converts a hex string (spaces ignored) to a byte array,
// panicking on malformed input.
func unhex(str string) []byte {
	decoded, err := hex.DecodeString(stripSpace(str))
	if err == nil {
		return decoded
	}
	panic(fmt.Sprintf("invalid hex string: %q", str))
}

// stripSpace removes every space character from str.
func stripSpace(str string) string {
	return strings.Join(strings.Split(str, " "), "")
}
// simpleStruct is a minimal two-field fixture used across the encode
// and size test tables.
type simpleStruct struct {
	B uint16
	A uint8
}

// innerStruct is nested inside outerStruct to exercise struct-in-struct
// encoding.
type innerStruct struct {
	V uint16
}

// outerStruct nests innerStruct.
type outerStruct struct {
	V uint8
	SubV innerStruct
}

// arrayStruct wraps a slice of structs to exercise slice-of-struct
// encoding.
type arrayStruct struct {
	V []simpleStruct
}

// pointerStruct wraps a struct pointer to exercise pointer encoding,
// including the nil-pointer case.
type pointerStruct struct {
	P *simpleStruct
	V uint8
}

View File

@@ -1,147 +0,0 @@
package ssz
import (
"bytes"
"io"
"reflect"
"testing"
)
// exampleStruct1 demonstrates implementing the SSZ interface set
// (encode, size, decode, tree-hash) by forwarding every field.
type exampleStruct1 struct {
	Field1 uint8
	Field2 []byte
}

// EncodeSSZ writes the SSZ encoding of the struct to w.
func (e *exampleStruct1) EncodeSSZ(w io.Writer) error {
	// Need to pass value of struct for Encode function
	// Later we can enhance the ssz implementation to support passing pointer, if necessary
	return Encode(w, *e)
}

// EncodeSSZSize returns the number of bytes EncodeSSZ would write.
func (e *exampleStruct1) EncodeSSZSize() (uint32, error) {
	return EncodeSize(*e)
}

// DecodeSSZ populates the struct from the SSZ stream r.
func (e *exampleStruct1) DecodeSSZ(r io.Reader) error {
	// Need to pass pointer of struct for Decode function
	return Decode(r, e)
}

// TreeHashSSZ returns the SSZ tree-hash of the struct.
func (e *exampleStruct1) TreeHashSSZ() ([32]byte, error) {
	return TreeHash(e)
}
// exampleStruct2 demonstrates serializing only a subset of fields:
// Field1 is deliberately excluded from SSZ.
type exampleStruct2 struct {
	Field1 uint8 // a volatile, or host-specific field that doesn't need to be exported
	Field2 []byte
}

// You can use a helper struct to only encode/decode custom fields of your struct.
// exampleStruct2Export mirrors only the SSZ-visible fields of exampleStruct2.
type exampleStruct2Export struct {
	Field2 []byte
}

// EncodeSSZ writes the SSZ encoding of the exported fields to w.
func (e *exampleStruct2) EncodeSSZ(w io.Writer) error {
	return Encode(w, exampleStruct2Export{
		e.Field2,
	})
}

// EncodeSSZSize returns the number of bytes EncodeSSZ would write.
func (e *exampleStruct2) EncodeSSZSize() (uint32, error) {
	return EncodeSize(exampleStruct2Export{
		e.Field2,
	})
}

// DecodeSSZ populates the exported fields from the SSZ stream r;
// Field1 is left untouched.
func (e *exampleStruct2) DecodeSSZ(r io.Reader) error {
	ee := new(exampleStruct2Export)
	if err := Decode(r, ee); err != nil {
		return err
	}
	e.Field2 = ee.Field2
	return nil
}

// TreeHashSSZ returns the SSZ tree-hash of the exported fields only.
func (e *exampleStruct2) TreeHashSSZ() ([32]byte, error) {
	return TreeHash(exampleStruct2Export{
		e.Field2,
	})
}
// TestEncodeDecode_Struct1 round-trips exampleStruct1 through
// encode/decode and pins the expected encode size and tree-hash.
func TestEncodeDecode_Struct1(t *testing.T) {
	var err error
	e1 := &exampleStruct1{
		Field1: 10,
		Field2: []byte{1, 2, 3, 4},
	}
	wBuf := new(bytes.Buffer)
	if err = e1.EncodeSSZ(wBuf); err != nil {
		t.Fatalf("failed to encode: %v", err)
	}
	encoding := wBuf.Bytes()
	e2 := new(exampleStruct1)
	rBuf := bytes.NewReader(encoding)
	if err = e2.DecodeSSZ(rBuf); err != nil {
		t.Fatalf("failed to decode: %v", err)
	}
	// A full round trip must reproduce the original value exactly.
	if !reflect.DeepEqual(*e1, *e2) {
		t.Error("encode/decode algorithm don't match")
	}
	encodeSize := uint32(0)
	if encodeSize, err = e1.EncodeSSZSize(); err != nil {
		t.Errorf("failed to get encode size: %v", err)
	}
	// 4-byte struct prefix + 1 byte Field1 + 4-byte prefix + 4 bytes Field2.
	if encodeSize != 13 {
		t.Error("wrong encode size calculation result")
	}
	hash, err := e1.TreeHashSSZ()
	if err != nil {
		t.Fatalf("failed to hash: %v", err)
	}
	if !bytes.Equal(hash[:], unhex("898470f5d98653c8e4fb2c7ae771019402cca8ccaa71a9c2ea4ad129e3c431d0")) {
		t.Errorf("wrong hash result")
	}
}
// TestEncodeDecode_Struct2 round-trips exampleStruct2, whose Field1 is
// excluded from SSZ, and pins the expected encode size and tree-hash.
func TestEncodeDecode_Struct2(t *testing.T) {
	var err error
	e1 := &exampleStruct2{
		Field1: 10,
		Field2: []byte{1, 2, 3, 4},
	}
	wBuf := new(bytes.Buffer)
	if err = e1.EncodeSSZ(wBuf); err != nil {
		t.Fatalf("failed to encode: %v", err)
	}
	encoding := wBuf.Bytes()
	e2 := new(exampleStruct2)
	rBuf := bytes.NewReader(encoding)
	if err = e2.DecodeSSZ(rBuf); err != nil {
		t.Fatalf("failed to decode: %v", err)
	}
	// Only Field2 participates in SSZ, so only Field2 must round-trip.
	if !reflect.DeepEqual(e1.Field2, e2.Field2) {
		t.Error("encode/decode algorithm don't match")
	}
	encodeSize := uint32(0)
	if encodeSize, err = e1.EncodeSSZSize(); err != nil {
		t.Errorf("failed to get encode size: %v", err)
	}
	// 4-byte struct prefix + 4-byte prefix + 4 bytes Field2 (Field1 excluded).
	if encodeSize != 12 {
		t.Error("wrong encode size calculation result")
	}
	hash, err := e1.TreeHashSSZ()
	if err != nil {
		t.Fatalf("failed to hash: %v", err)
	}
	if !bytes.Equal(hash[:], unhex("b982eb8cf7e1d6f5ec77f0ae4a9ed44bde23da284488f498176a5123fe05e7dd")) {
		t.Errorf("wrong hash result")
	}
}

View File

@@ -1,37 +0,0 @@
package ssz_test
import (
"bytes"
"fmt"
"github.com/prysmaticlabs/prysm/shared/ssz"
)
// SSZ encoding takes a given data object to the given io.Writer. The most
// common use case is to use bytes.Buffer to collect the results to a buffer
// and consume the result.
func ExampleEncode() {
// Given a data structure like this.
type data struct {
Field1 uint8
Field2 []byte
}
// And some basic data.
d := data{
Field1: 10,
Field2: []byte{1, 2, 3, 4},
}
// We use a bytes.Buffer as our io.Writer.
buffer := new(bytes.Buffer)
// ssz.Encode writes the encoded data to the buffer.
if err := ssz.Encode(buffer, d); err != nil {
// There was some failure with encoding SSZ.
// You should probably handle this error in a non-fatal way.
panic(err)
}
// And we can return the bytes from the buffer.
encodedBytes := buffer.Bytes()
fmt.Printf("ssz.Encode(%v) = %#x", d, encodedBytes)
}

View File

@@ -1,234 +0,0 @@
package ssz
import (
"bytes"
"encoding/binary"
"fmt"
"reflect"
"github.com/prysmaticlabs/prysm/shared/bytesutil"
"github.com/prysmaticlabs/prysm/shared/featureconfig"
"github.com/prysmaticlabs/prysm/shared/hashutil"
)
// hashLengthBytes is the byte length of a hash output (also the width of
// the little-endian data-length encoding mixed into merkle roots).
const hashLengthBytes = 32

// sszChunkSize is the byte size of each leaf chunk used by merkleHash.
const sszChunkSize = 128

// useCache mirrors the CacheTreeHash feature flag; it is written by
// makeHasher on each call.
var useCache bool

// Hashable defines the interface for supporting tree-hash function.
type Hashable interface {
	TreeHashSSZ() ([32]byte, error)
}
// TreeHash calculates tree-hash result for input value.
// The hasher output is right-padded with zeros to 32 bytes if needed.
func TreeHash(val interface{}) ([32]byte, error) {
	if val == nil {
		return [32]byte{}, newHashError("untyped nil is not supported", nil)
	}
	rval := reflect.ValueOf(val)
	utils, err := cachedSSZUtils(rval.Type())
	if err != nil {
		return [32]byte{}, newHashError(fmt.Sprint(err), rval.Type())
	}
	output, err := utils.hasher(rval)
	if err != nil {
		return [32]byte{}, newHashError(fmt.Sprint(err), rval.Type())
	}
	// Right-pad with 0 to make 32 bytes long, if necessary.
	return bytesutil.ToBytes32(output), nil
}
// hashError is what gets reported to the tree-hash user in error case.
type hashError struct {
	msg string       // human-readable reason for the failure
	typ reflect.Type // type being hashed; nil for untyped nil input
}

// Error implements the error interface.
func (err *hashError) Error() string {
	return fmt.Sprintf("hash error: %s for input type %v", err.msg, err.typ)
}

// newHashError constructs a hashError for the given message and type.
func newHashError(msg string, typ reflect.Type) *hashError {
	return &hashError{msg, typ}
}
// makeHasher returns the tree-hash function for typ, or an error for
// unhashable types. Scalars hash to their raw encoding, byte slices and
// byte arrays to the hash of their encoding, and composite types are
// hashed recursively — optionally through a caching layer when the
// CacheTreeHash feature flag is on.
func makeHasher(typ reflect.Type) (hasher, error) {
	// NOTE(review): writing the package-level useCache on every call looks
	// racy if hashers are built concurrently — consider reading the flag
	// once at startup; confirm against callers before changing.
	useCache = featureconfig.FeatureConfig().CacheTreeHash
	kind := typ.Kind()
	switch {
	case kind == reflect.Bool ||
		kind == reflect.Uint8 ||
		kind == reflect.Uint16 ||
		kind == reflect.Uint32 ||
		kind == reflect.Uint64 ||
		kind == reflect.Int32:
		return getEncoding, nil
	case kind == reflect.Slice && typ.Elem().Kind() == reflect.Uint8 ||
		kind == reflect.Array && typ.Elem().Kind() == reflect.Uint8:
		return hashedEncoding, nil
	case kind == reflect.Slice || kind == reflect.Array:
		if useCache {
			return makeSliceHasherCache(typ)
		}
		return makeSliceHasher(typ)
	case kind == reflect.Struct:
		if useCache {
			return makeStructHasherCache(typ)
		}
		return makeStructHasher(typ)
	case kind == reflect.Ptr:
		return makePtrHasher(typ)
	default:
		return nil, fmt.Errorf("type %v is not hashable", typ)
	}
}
// getEncoding returns the plain SSZ encoding of val as a byte slice.
func getEncoding(val reflect.Value) ([]byte, error) {
	utils, err := cachedSSZUtilsNoAcquireLock(val.Type())
	if err != nil {
		return nil, err
	}
	buf := &encbuf{}
	if err := utils.encoder(val, buf); err != nil {
		return nil, err
	}
	out := new(bytes.Buffer)
	if err := buf.toWriter(out); err != nil {
		return nil, err
	}
	return out.Bytes(), nil
}
// hashedEncoding returns the 32-byte hash of val's SSZ encoding.
func hashedEncoding(val reflect.Value) ([]byte, error) {
	enc, err := getEncoding(val)
	if err != nil {
		return nil, err
	}
	sum := hashutil.Hash(enc)
	return sum[:], nil
}
// makeSliceHasher builds a hasher for slices/arrays of non-byte
// elements: every element is hashed, then the hashes are merkleized.
func makeSliceHasher(typ reflect.Type) (hasher, error) {
	elemSSZUtils, err := cachedSSZUtilsNoAcquireLock(typ.Elem())
	if err != nil {
		return nil, fmt.Errorf("failed to get ssz utils: %v", err)
	}
	hasher := func(val reflect.Value) ([]byte, error) {
		elemHashes := make([][]byte, 0, val.Len())
		for i := 0; i < val.Len(); i++ {
			h, err := elemSSZUtils.hasher(val.Index(i))
			if err != nil {
				return nil, fmt.Errorf("failed to hash element of slice/array: %v", err)
			}
			elemHashes = append(elemHashes, h)
		}
		root, err := merkleHash(elemHashes)
		if err != nil {
			return nil, fmt.Errorf("failed to calculate merkle hash of element hash list: %v", err)
		}
		return root, nil
	}
	return hasher, nil
}
// makeStructHasher builds a hasher for struct types: the hashes of all
// serializable fields are concatenated and hashed once.
func makeStructHasher(typ reflect.Type) (hasher, error) {
	fields, err := structFields(typ)
	if err != nil {
		return nil, err
	}
	hasher := func(val reflect.Value) ([]byte, error) {
		var concat []byte
		for _, f := range fields {
			h, err := f.sszUtils.hasher(val.Field(f.index))
			if err != nil {
				return nil, fmt.Errorf("failed to hash field of struct: %v", err)
			}
			concat = append(concat, h...)
		}
		sum := hashutil.Hash(concat)
		return sum[:], nil
	}
	return hasher, nil
}
// makePtrHasher builds a hasher for pointer types.
//
// TODO(1461): The tree-hash of nil pointer isn't defined in the spec.
// We assume only pointers to array, slice or struct are tree-hashed,
// and a nil pointer hashes to the hash of its (empty) encoding.
func makePtrHasher(typ reflect.Type) (hasher, error) {
	elemSSZUtils, err := cachedSSZUtilsNoAcquireLock(typ.Elem())
	if err != nil {
		return nil, err
	}
	hasher := func(val reflect.Value) ([]byte, error) {
		if !val.IsNil() {
			return elemSSZUtils.hasher(val.Elem())
		}
		return hashedEncoding(val)
	}
	return hasher, nil
}
// merkleHash implements a merkle-tree style hash algorithm.
//
// Please refer to the official spec for details:
// https://github.com/ethereum/eth2.0-specs/blob/master/specs/simple-serialize.md#tree-hash
//
// The overall idea is:
// 1. Create a bunch of bytes chunk (each has a size of sszChunkSize) from the input hash list.
// 2. Treat each bytes chunk as the leaf of a binary tree.
// 3. For every pair of leaves, we set their parent's value using the hash value of the concatenation of the two leaves.
//    The original two leaves are then removed.
// 4. Keep doing step 3 until there's only one node left in the tree (the root).
// 5. Return the hash of the concatenation of the root and the data length encoding.
//
// Time complexity is O(n) given input list of size n.
func merkleHash(list [][]byte) ([]byte, error) {
	// Assume len(list) < 2^64
	dataLenEnc := make([]byte, hashLengthBytes)
	binary.LittleEndian.PutUint64(dataLenEnc, uint64(len(list)))
	var chunkz [][]byte
	emptyChunk := make([]byte, sszChunkSize)
	if len(list) == 0 {
		// Empty input still produces one (all-zero) leaf chunk.
		chunkz = make([][]byte, 1)
		chunkz[0] = emptyChunk
	} else if len(list[0]) < sszChunkSize {
		// Small items: pack several consecutive items into each chunk.
		// NOTE(review): the packing factor is derived from list[0] only —
		// assumes all items share that length; confirm with callers.
		itemsPerChunk := sszChunkSize / len(list[0])
		chunkz = make([][]byte, 0)
		for i := 0; i < len(list); i += itemsPerChunk {
			chunk := make([]byte, 0)
			j := i + itemsPerChunk
			if j > len(list) {
				j = len(list)
			}
			// Every chunk should have sszChunkSize bytes except that the last one could have less bytes
			for _, elemHash := range list[i:j] {
				chunk = append(chunk, elemHash...)
			}
			chunkz = append(chunkz, chunk)
		}
	} else {
		// Items are already chunk-sized (or larger): one item per leaf.
		chunkz = list
	}
	// Pairwise-hash the leaves upward until only the root remains,
	// padding odd levels with an empty chunk.
	for len(chunkz) > 1 {
		if len(chunkz)%2 == 1 {
			chunkz = append(chunkz, emptyChunk)
		}
		hashedChunkz := make([][]byte, 0)
		for i := 0; i < len(chunkz); i += 2 {
			hashedChunk := hashutil.Hash(append(chunkz[i], chunkz[i+1]...))
			hashedChunkz = append(hashedChunkz, hashedChunk[:])
		}
		chunkz = hashedChunkz
	}
	// Mix the input length into the root.
	result := hashutil.Hash(append(chunkz[0], dataLenEnc...))
	return result[:], nil
}

View File

@@ -1,234 +0,0 @@
package ssz
import (
"errors"
"fmt"
"reflect"
"time"
"github.com/ethereum/go-ethereum/common"
"github.com/karlseguin/ccache"
"github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto"
"github.com/prysmaticlabs/prysm/shared/bytesutil"
"github.com/prysmaticlabs/prysm/shared/hashutil"
"github.com/prysmaticlabs/prysm/shared/params"
)
var (
	// ErrNotMerkleRoot will be returned when a cache object is not a merkle root.
	ErrNotMerkleRoot = errors.New("object is not a merkle root")
	// maxCacheSize is 2x of the follow distance for additional cache padding.
	// Requests should be only accessing blocks within recent blocks within the
	// Eth1FollowDistance.
	maxCacheSize = params.BeaconConfig().HashCacheSize
	// Metrics exported for cache hit/miss/size observability.
	hashCacheMiss = promauto.NewCounter(prometheus.CounterOpts{
		Name: "hash_cache_miss",
		Help: "The number of hash requests that aren't present in the cache.",
	})
	hashCacheHit = promauto.NewCounter(prometheus.CounterOpts{
		Name: "hash_cache_hit",
		Help: "The number of hash requests that are present in the cache.",
	})
	hashCacheSize = promauto.NewGauge(prometheus.GaugeOpts{
		Name: "hash_cache_size",
		Help: "The number of hashes in the block cache",
	})
)

// hashCacheS struct with one queue for looking up by hash. It wraps an
// LRU ccache instance keyed by the hex string of the encoded-object hash.
type hashCacheS struct {
	hashCache *ccache.Cache
}

// root specifies the hash of data in a struct
type root struct {
	Hash       common.Hash // hash of the object's SSZ encoding (the cache key)
	MerkleRoot []byte      // cached tree-hash / merkle root for that object
}
// newHashCache creates a new hash cache for storing/accessing root
// hashes from memory, bounded by maxCacheSize entries.
func newHashCache() *hashCacheS {
	cache := ccache.New(ccache.Configure().MaxSize(maxCacheSize))
	return &hashCacheS{hashCache: cache}
}
// RootByEncodedHash fetches Root by the encoded hash of the object.
// Returns true with a reference to the root if it exists; otherwise
// returns false, nil. Hit/miss metrics are updated on every lookup.
func (b *hashCacheS) RootByEncodedHash(h common.Hash) (bool, *root, error) {
	item := b.hashCache.Get(h.Hex())
	if item == nil {
		hashCacheMiss.Inc()
		return false, nil, nil
	}
	hashCacheHit.Inc()
	if hInfo, ok := item.Value().(*root); ok {
		return true, hInfo, nil
	}
	return false, nil, ErrNotMerkleRoot
}
// TrieRootCached computes a trie root and adds it to the cache.
// If the encoded hash of the object is already cached, the stored root
// is returned instead of being recomputed. The underlying ccache evicts
// entries once the max cache size limit is reached.
func (b *hashCacheS) TrieRootCached(val interface{}) ([32]byte, error) {
	if val == nil {
		return [32]byte{}, newHashError("untyped nil is not supported", nil)
	}
	rval := reflect.ValueOf(val)
	// The cache key is the hash of the object's SSZ encoding.
	hs, err := hashedEncoding(rval)
	if err != nil {
		return [32]byte{}, newHashError(fmt.Sprint(err), rval.Type())
	}
	exists, fetchedInfo, err := b.RootByEncodedHash(bytesutil.ToBytes32(hs))
	if err != nil {
		return [32]byte{}, newHashError(fmt.Sprint(err), rval.Type())
	}
	var paddedOutput [32]byte
	if exists {
		paddedOutput = bytesutil.ToBytes32(fetchedInfo.MerkleRoot)
	} else {
		// Miss: compute the tree-hash and store it under the key.
		sszUtils, err := cachedSSZUtils(rval.Type())
		if err != nil {
			return [32]byte{}, newHashError(fmt.Sprint(err), rval.Type())
		}
		output, err := sszUtils.hasher(rval)
		if err != nil {
			return [32]byte{}, newHashError(fmt.Sprint(err), rval.Type())
		}
		// Right-pad with 0 to make 32 bytes long, if necessary.
		paddedOutput = bytesutil.ToBytes32(output)
		err = b.AddRoot(bytesutil.ToBytes32(hs), paddedOutput[:])
		if err != nil {
			return [32]byte{}, newHashError(fmt.Sprint(err), rval.Type())
		}
	}
	return paddedOutput, nil
}
// MerkleHashCached returns the merkle hash of byteSlice, consulting the
// cache first. On a miss the hash is computed via merkleHash and stored,
// keyed by the hash of the slice's encoding. The underlying ccache
// evicts entries once the max cache size limit is reached.
func (b *hashCacheS) MerkleHashCached(byteSlice [][]byte) ([]byte, error) {
	mh := []byte{}
	// Cache key: hash of the SSZ encoding of the input list.
	hs, err := hashedEncoding(reflect.ValueOf(byteSlice))
	if err != nil {
		return mh, newHashError(fmt.Sprint(err), reflect.TypeOf(byteSlice))
	}
	exists, fetchedInfo, err := b.RootByEncodedHash(bytesutil.ToBytes32(hs))
	if err != nil {
		return mh, newHashError(fmt.Sprint(err), reflect.TypeOf(byteSlice))
	}
	if exists {
		mh = fetchedInfo.MerkleRoot
	} else {
		mh, err = merkleHash(byteSlice)
		if err != nil {
			return nil, err
		}
		mr := &root{
			Hash:       bytesutil.ToBytes32(hs),
			MerkleRoot: mh,
		}
		b.hashCache.Set(mr.Hash.Hex(), mr, time.Hour)
		hashCacheSize.Set(float64(b.hashCache.ItemCount()))
	}
	return mh, nil
}
// AddRoot stores rootB in the cache keyed by the encoded hash h.
// Entries expire after an hour; the underlying ccache evicts entries
// once the max cache size limit is reached.
func (b *hashCacheS) AddRoot(h common.Hash, rootB []byte) error {
	entry := &root{Hash: h, MerkleRoot: rootB}
	b.hashCache.Set(entry.Hash.Hex(), entry, time.Hour)
	return nil
}
// makeSliceHasherCache adds a caching mechanism to the slice/array
// hasher: results are keyed by the hash of the value's SSZ encoding.
func makeSliceHasherCache(typ reflect.Type) (hasher, error) {
	elemSSZUtils, err := cachedSSZUtilsNoAcquireLock(typ.Elem())
	if err != nil {
		return nil, fmt.Errorf("failed to get ssz utils: %v", err)
	}
	hasher := func(val reflect.Value) ([]byte, error) {
		// Cache key: hash of the value's SSZ encoding.
		hs, err := hashedEncoding(val)
		if err != nil {
			return nil, fmt.Errorf("failed to encode element of slice/array: %v", err)
		}
		exists, fetchedInfo, err := hashCache.RootByEncodedHash(bytesutil.ToBytes32(hs))
		if err != nil {
			return nil, fmt.Errorf("failed to encode element of slice/array: %v", err)
		}
		var output []byte
		if exists {
			output = fetchedInfo.MerkleRoot
		} else {
			// Miss: hash every element, merkleize, then cache the root.
			var elemHashList [][]byte
			for i := 0; i < val.Len(); i++ {
				elemHash, err := elemSSZUtils.hasher(val.Index(i))
				if err != nil {
					return nil, fmt.Errorf("failed to hash element of slice/array: %v", err)
				}
				elemHashList = append(elemHashList, elemHash)
			}
			output, err = hashCache.MerkleHashCached(elemHashList)
			if err != nil {
				return nil, fmt.Errorf("failed to calculate merkle hash of element hash list: %v", err)
			}
			err := hashCache.AddRoot(bytesutil.ToBytes32(hs), output)
			if err != nil {
				return nil, fmt.Errorf("failed to add root to cache: %v", err)
			}
			hashCacheSize.Set(float64(hashCache.hashCache.ItemCount()))
		}
		return output, nil
	}
	return hasher, nil
}
// makeStructHasherCache adds a caching layer to the struct hasher: the
// tree-hash result is keyed by the hash of the struct's SSZ encoding.
func makeStructHasherCache(typ reflect.Type) (hasher, error) {
	fields, err := structFields(typ)
	if err != nil {
		return nil, err
	}
	hasher := func(val reflect.Value) ([]byte, error) {
		// Cache key: hash of the value's SSZ encoding.
		hs, err := hashedEncoding(val)
		if err != nil {
			return nil, fmt.Errorf("failed to encode struct: %v", err)
		}
		exists, fetchedInfo, err := hashCache.RootByEncodedHash(bytesutil.ToBytes32(hs))
		if err != nil {
			return nil, fmt.Errorf("failed to look up struct root in cache: %v", err)
		}
		var result [32]byte
		if exists {
			result = bytesutil.ToBytes32(fetchedInfo.MerkleRoot)
			return result[:], nil
		}
		// Miss: hash each field, concatenate, and hash once.
		concatElemHash := make([]byte, 0)
		for _, f := range fields {
			elemHash, err := f.sszUtils.hasher(val.Field(f.index))
			if err != nil {
				return nil, fmt.Errorf("failed to hash field of struct: %v", err)
			}
			concatElemHash = append(concatElemHash, elemHash...)
		}
		result = hashutil.Hash(concatElemHash)
		// Store the computed root so subsequent hashes of an identical
		// struct hit the cache; previously the result was never stored,
		// so this code path could not produce a cache hit (compare
		// makeSliceHasherCache, which calls AddRoot on a miss).
		if err := hashCache.AddRoot(bytesutil.ToBytes32(hs), result[:]); err != nil {
			return nil, fmt.Errorf("failed to add root to cache: %v", err)
		}
		hashCacheSize.Set(float64(hashCache.hashCache.ItemCount()))
		return result[:], nil
	}
	return hasher, nil
}

View File

@@ -1,148 +0,0 @@
package ssz
import (
"bytes"
"log"
"reflect"
"testing"
"time"
"github.com/prysmaticlabs/prysm/shared/bytesutil"
"github.com/prysmaticlabs/prysm/shared/featureconfig"
)
// junkObject is a filler payload (nested slices + a timestamp) used to
// exercise the tree-hash cache in tests and benchmarks.
type junkObject struct {
	D2Int64Slice [][]uint64
	Uint uint64
	Int64Slice []uint64
}

// tree groups two junk-object lists so benchmarks can hash a larger
// composite structure.
type tree struct {
	First []*junkObject
	Second []*junkObject
}
// generateJunkObject builds size pseudo-random junkObject fixtures for the
// hashing benchmarks. For element i, only row i of the 2-D slice and index i
// of the 1-D slice are populated; Uint is seeded from the wall clock per
// element, so the output differs between runs.
func generateJunkObject(size uint64) []*junkObject {
	objects := make([]*junkObject, size)
	for i := range objects {
		idx := uint64(i)
		matrix := make([][]uint64, size)
		matrix[idx] = make([]uint64, size)
		for j := range matrix[idx] {
			matrix[idx][j] = idx + uint64(j)
		}
		flat := make([]uint64, size)
		flat[idx] = idx
		objects[i] = &junkObject{
			D2Int64Slice: matrix,
			Uint:         uint64(time.Now().UnixNano()),
			Int64Slice:   flat,
		}
	}
	return objects
}
// TestObjCache_byHash exercises the encoded-hash -> merkle-root lookup path of
// the hash cache: the root must be absent before MerkleHashCached runs and
// present (with matching root and key) afterwards.
func TestObjCache_byHash(t *testing.T) {
	cache := newHashCache()
	byteSl := [][]byte{{0, 0}, {1, 1}}
	// Compute the expected merkle root directly, without the cache.
	mr, err := merkleHash(byteSl)
	if err != nil {
		t.Fatal(err)
	}
	// hs is the cache key: the hash of the value's serialized encoding.
	hs, err := hashedEncoding(reflect.ValueOf(byteSl))
	if err != nil {
		t.Fatal(err)
	}
	exists, _, err := cache.RootByEncodedHash(bytesutil.ToBytes32(hs))
	if err != nil {
		t.Fatal(err)
	}
	if exists {
		t.Error("Expected block info not to exist in empty cache")
	}
	// Hashing through the cache should populate the entry for hs.
	if _, err := cache.MerkleHashCached(byteSl); err != nil {
		t.Fatal(err)
	}
	exists, fetchedInfo, err := cache.RootByEncodedHash(bytesutil.ToBytes32(hs))
	if err != nil {
		t.Fatal(err)
	}
	if !exists {
		t.Error("Expected blockInfo to exist")
	}
	// The cached root must equal the directly computed merkle root.
	if !bytes.Equal(mr, fetchedInfo.MerkleRoot) {
		t.Errorf(
			"Expected fetched info number to be %v, got %v",
			mr,
			fetchedInfo.MerkleRoot,
		)
	}
	// The stored key must match the encoded hash we looked up with.
	if fetchedInfo.Hash != bytesutil.ToBytes32(hs) {
		t.Errorf(
			"Expected fetched info hash to be %v, got %v",
			hs,
			fetchedInfo.Hash,
		)
	}
}
// TestMerkleHashWithCache runs the shared merkle-hash test table repeatedly,
// first against the uncached implementation and then against the caching one,
// verifying both produce the expected results across many iterations.
func TestMerkleHashWithCache(t *testing.T) {
	cache := newHashCache()
	uncached := func(val [][]byte) ([]byte, error) {
		return merkleHash(val)
	}
	cached := func(val [][]byte) ([]byte, error) {
		return cache.MerkleHashCached(val)
	}
	for i := 0; i < 200; i++ {
		runMerkleHashTests(t, uncached)
	}
	for i := 0; i < 200; i++ {
		runMerkleHashTests(t, cached)
	}
}
// BenchmarkHashWithoutCache measures TreeHash throughput with the tree-hash
// cache disabled. Note it mutates the shared feature config for the process.
func BenchmarkHashWithoutCache(b *testing.B) {
	featureconfig.FeatureConfig().CacheTreeHash = false
	First := generateJunkObject(100)
	// Warm up the per-type SSZ utility caches before timing.
	if _, err := TreeHash(&tree{First: First, Second: First}); err != nil {
		b.Fatal(err)
	}
	// Exclude fixture generation and warm-up from the measured time.
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		if _, err := TreeHash(&tree{First: First, Second: First}); err != nil {
			b.Fatal(err)
		}
	}
}
// BenchmarkHashWithCache measures TreeHash throughput with the tree-hash
// cache enabled. Note it mutates the shared feature config for the process.
// The redundant local redeclaration of the tree type (identical to the
// package-level one) has been removed.
func BenchmarkHashWithCache(b *testing.B) {
	featureconfig.FeatureConfig().CacheTreeHash = true
	First := generateJunkObject(100)
	// Warm up the per-type SSZ utilities and populate the hash cache.
	if _, err := TreeHash(&tree{First: First, Second: First}); err != nil {
		b.Fatal(err)
	}
	// Exclude fixture generation and warm-up from the measured time.
	b.ResetTimer()
	for n := 0; n < b.N; n++ {
		if _, err := TreeHash(&tree{First: First, Second: First}); err != nil {
			b.Fatal(err)
		}
	}
}
// TestBlockCache_maxSize verifies that the hash cache evicts entries so that
// its item count does not exceed maxCacheSize after overfilling it.
func TestBlockCache_maxSize(t *testing.T) {
	// maxCacheSize is package-level state; restore it so other tests are not
	// affected by the smaller limit used here.
	originalMaxCacheSize := maxCacheSize
	defer func() { maxCacheSize = originalMaxCacheSize }()
	maxCacheSize = 10000
	cache := newHashCache()
	// Insert 1025 more entries than the cache is allowed to hold.
	for i := uint64(0); i < uint64(maxCacheSize+1025); i++ {
		if err := cache.AddRoot(bytesutil.ToBytes32(bytesutil.Bytes4(i)), []byte{1}); err != nil {
			t.Fatal(err)
		}
	}
	log.Printf(
		"hash cache key size is %d, itemcount is %d",
		maxCacheSize,
		cache.hashCache.ItemCount(),
	)
	// Eviction is asynchronous; give the cache a moment to settle.
	time.Sleep(1 * time.Second)
	if int64(cache.hashCache.ItemCount()) > maxCacheSize {
		t.Errorf(
			"Expected hash cache key size to be %d, got %d",
			maxCacheSize,
			cache.hashCache.ItemCount(),
		)
	}
}

View File

@@ -1,220 +0,0 @@
package ssz
import (
"bytes"
"fmt"
"testing"
)
// hashTest describes a single TreeHash test case: an input value paired with
// either the expected hex-encoded 32-byte root or the expected error string.
type hashTest struct {
	val interface{}
	output, error string
}

// merkleHashTest describes a single merkleHash test case over a list of byte
// slices, paired with either the expected hex root or the expected error.
type merkleHashTest struct {
	val [][]byte
	output, error string
}
// Notice: spaces in the output string will be ignored.
// hashTests is the table driven by runHashTests: each entry pairs an input
// value with either its expected hex-encoded tree-hash root or an expected
// error string (for unsupported inputs).
var hashTests = []hashTest{
	// boolean
	{val: false, output: "0000000000000000000000000000000000000000000000000000000000000000"},
	{val: true, output: "0100000000000000000000000000000000000000000000000000000000000000"},
	// uint8
	{val: uint8(0), output: "0000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint8(1), output: "0100000000000000000000000000000000000000000000000000000000000000"},
	{val: uint8(16), output: "1000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint8(128), output: "8000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint8(255), output: "FF00000000000000000000000000000000000000000000000000000000000000"},
	// uint16
	{val: uint16(0), output: "0000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint16(1), output: "0100000000000000000000000000000000000000000000000000000000000000"},
	{val: uint16(16), output: "1000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint16(128), output: "8000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint16(255), output: "FF00000000000000000000000000000000000000000000000000000000000000"},
	{val: uint16(65535), output: "FFFF000000000000000000000000000000000000000000000000000000000000"},
	// uint32
	{val: uint32(0), output: "0000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint32(1), output: "0100000000000000000000000000000000000000000000000000000000000000"},
	{val: uint32(16), output: "1000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint32(128), output: "8000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint32(255), output: "FF00000000000000000000000000000000000000000000000000000000000000"},
	{val: uint32(65535), output: "FFFF000000000000000000000000000000000000000000000000000000000000"},
	{val: uint32(4294967295), output: "FFFFFFFF00000000000000000000000000000000000000000000000000000000"},
	// uint64
	{val: uint64(0), output: "0000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint64(1), output: "0100000000000000000000000000000000000000000000000000000000000000"},
	{val: uint64(16), output: "1000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint64(128), output: "8000000000000000000000000000000000000000000000000000000000000000"},
	{val: uint64(255), output: "FF00000000000000000000000000000000000000000000000000000000000000"},
	{val: uint64(65535), output: "FFFF000000000000000000000000000000000000000000000000000000000000"},
	{val: uint64(4294967295), output: "FFFFFFFF00000000000000000000000000000000000000000000000000000000"},
	{val: uint64(18446744073709551615), output: "FFFFFFFFFFFFFFFF000000000000000000000000000000000000000000000000"},
	// bytes
	{val: []byte{}, output: "E8E77626586F73B955364C7B4BBF0BB7F7685EBD40E852B164633A4ACBD3244C"},
	{val: []byte{1}, output: "B2559FED89F0EC17542C216683DC6B75506F3754E0C045742936742CAE6343CA"},
	{val: []byte{1, 2, 3, 4, 5, 6}, output: "1310542D28BE8E0B3FF72E985BC06232B9A30D93AE1AD2E33C5383A54AB5C9A7"},
	// slice
	{val: []uint16{}, output: "DFDED4ED5AC76BA7379CFE7B3B0F53E768DCA8D45A34854E649CFC3C18CBD9CD"},
	{val: []uint16{1}, output: "E3F121F639DAE19B7E2FD6F5002F321B83F17288A7CA7560F81C2ACE832CC5D5"},
	{val: []uint16{1, 2}, output: "A9B7D66D80F70C6DA7060C3DEDB01E6ED6CEA251A3247093CBF27A439ECB0BEA"},
	{val: [][]uint16{
		{1, 2, 3, 4},
		{5, 6, 7, 8},
	}, output: "1A400EB17C755E4445C2C57DD2D3A0200A290C56CD68957906DD7BFE04493B10"},
	// array
	{val: [1]byte{1}, output: "B2559FED89F0EC17542C216683DC6B75506F3754E0C045742936742CAE6343CA"},
	{val: [6]byte{1, 2, 3, 4, 5, 6}, output: "1310542D28BE8E0B3FF72E985BC06232B9A30D93AE1AD2E33C5383A54AB5C9A7"},
	{val: [1]uint16{1}, output: "E3F121F639DAE19B7E2FD6F5002F321B83F17288A7CA7560F81C2ACE832CC5D5"},
	{val: [2]uint16{1, 2}, output: "A9B7D66D80F70C6DA7060C3DEDB01E6ED6CEA251A3247093CBF27A439ECB0BEA"},
	{val: [2][4]uint16{
		{1, 2, 3, 4},
		{5, 6, 7, 8},
	}, output: "1A400EB17C755E4445C2C57DD2D3A0200A290C56CD68957906DD7BFE04493B10"},
	// struct
	{val: simpleStruct{}, output: "99FF0D9125E1FC9531A11262E15AEB2C60509A078C4CC4C64CEFDFB06FF68647"},
	{val: simpleStruct{B: 2, A: 1}, output: "D2B49B00C76582823E30B56FE608FF030EF7B6BD7DCC16B8994C9D74860A7E1C"},
	{val: outerStruct{
		V:    3,
		SubV: innerStruct{V: 6},
	}, output: "BB2F30386C55445381EEE7A33C3794227B8C8E4BE4CAA54506901A4DDFE79EE2"},
	// slice + struct
	{val: arrayStruct{
		V: []simpleStruct{
			{B: 2, A: 1},
			{B: 4, A: 3},
		},
	}, output: "F3032DCE4B4218187E34AA8B6EF87A3FABE1F8D734CE92796642DC6B2911277C"},
	{val: []outerStruct{
		{V: 3, SubV: innerStruct{V: 6}},
		{V: 5, SubV: innerStruct{V: 7}},
	}, output: "DE43BC05AA6B011121F9590C10DE1734291A595798C84A0E3EDD1CC1E6710908"},
	// pointer
	{val: &simpleStruct{B: 2, A: 1}, output: "D2B49B00C76582823E30B56FE608FF030EF7B6BD7DCC16B8994C9D74860A7E1C"},
	{val: pointerStruct{P: &simpleStruct{B: 2, A: 1}, V: 3}, output: "D365B04884AA7B9160F5E405796F0EB7521FC69BD79D934DA72EDA1FC98B5971"},
	{val: &pointerStruct{P: &simpleStruct{B: 2, A: 1}, V: 3}, output: "D365B04884AA7B9160F5E405796F0EB7521FC69BD79D934DA72EDA1FC98B5971"},
	{val: &[]uint8{1, 2, 3, 4}, output: "5C8046AB6A4E32E5C0017620A1844E5851074E4EDA685A920E8C70007E675E5C"},
	{val: &[]uint64{1, 2}, output: "2F3E7F86CF5B91C6FC45FDF54254DE256F4FFFE775F0217C876961C4211E5DC2"},
	{val: []*simpleStruct{
		{B: 2, A: 1},
		{B: 4, A: 3},
	}, output: "1D5CDF2C53DD8AC743E17E1A7A8B1CB6E615FA63EC915347B3E9ACFB58F89158"},
	{val: [2]*simpleStruct{
		{B: 2, A: 1},
		{B: 4, A: 3},
	}, output: "1D5CDF2C53DD8AC743E17E1A7A8B1CB6E615FA63EC915347B3E9ACFB58F89158"},
	{val: []*pointerStruct{
		{P: &simpleStruct{B: 2, A: 1}, V: 0},
		{P: &simpleStruct{B: 4, A: 3}, V: 1},
	}, output: "4AC9B9E64A067F6C007C3FE8116519D86397BDA1D9FBEDEEDF39E50D132669C7"},
	// nil pointer (not defined in spec)
	{val: (*[]uint8)(nil), output: "E8E77626586F73B955364C7B4BBF0BB7F7685EBD40E852B164633A4ACBD3244C"},
	{val: pointerStruct{}, output: "721B2869FA1238991B24C369E9ADB23142AFCD7C0B8454EF79C0EA82B7DEE977"},
	{val: &pointerStruct{}, output: "721B2869FA1238991B24C369E9ADB23142AFCD7C0B8454EF79C0EA82B7DEE977"},
	{val: []*pointerStruct{nil, nil}, output: "83CB52B40904E607A8E0AEF8A018A5A7489229CBCD591E7C6FB7E597BD4F76C3"},
	// error: untyped nil pointer
	{val: nil, error: "hash error: untyped nil is not supported for input type <nil>"},
	// error: unsupported type
	{val: string("abc"), error: "hash error: type string is not serializable for input type string"},
}
// merkleHashTests is the table driven by runMerkleHashTests: lists of byte
// slices paired with the expected hex-encoded merkle root.
var merkleHashTests = []merkleHashTest{
	{val: [][]byte{}, output: "DFDED4ED5AC76BA7379CFE7B3B0F53E768DCA8D45A34854E649CFC3C18CBD9CD"},
	{val: [][]byte{{1, 2}, {3, 4}}, output: "64F741B8BAB62525A01F9084582C148FF56C82F96DC12E270D3E7B5103CF7B48"},
	// ten 16-byte chunks
	{val: [][]byte{
		{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
		{2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
		{3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3},
		{4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4},
		{5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5},
		{6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6},
		{7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7},
		{8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8},
		{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9},
		{10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10},
	}, output: "839D98509E2EFC53BD1DEA17403921A89856E275BBF4D56C600CC3F6730AAFFA"},
	// ten 32-byte chunks
	{val: [][]byte{
		{1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1},
		{2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2},
		{3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3},
		{4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4},
		{5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5},
		{6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6, 6},
		{7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7, 7},
		{8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8, 8},
		{9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9, 9},
		{10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10, 10},
	}, output: "55DC6699E7B5713DD9102224C302996F931836C6DAE9A4EC6AB49C966F394685"},
}
// runHashTests executes every case in hashTests against the supplied hash
// function, checking expected errors and expected 32-byte roots.
func runHashTests(t *testing.T, hash func(val interface{}) ([32]byte, error)) {
	for i, tc := range hashTests {
		got, err := hash(tc.val)
		switch {
		case tc.error == "" && err != nil:
			// Success expected, but the hash failed.
			t.Errorf("test %d: unexpected error: %v\nvalue %#v\ntype %T",
				i, err, tc.val, tc.val)
		case tc.error != "" && fmt.Sprint(err) != tc.error:
			// A specific error expected, but it did not match.
			t.Errorf("test %d: error mismatch\ngot %v\nwant %v\nvalue %#v\ntype %T",
				i, err, tc.error, tc.val, tc.val)
		case err == nil && !bytes.Equal(got[:], unhex(tc.output)):
			// Hash succeeded but produced the wrong root.
			t.Errorf("test %d: output mismatch:\ngot %X\nwant %s\nvalue %#v\ntype %T",
				i, got, stripSpace(tc.output), tc.val, tc.val)
		}
	}
}
// runMerkleHashTests executes every case in merkleHashTests against the
// supplied merkle hash function, checking expected errors and expected roots.
func runMerkleHashTests(t *testing.T, merkleHash func([][]byte) ([]byte, error)) {
	for i, tc := range merkleHashTests {
		got, err := merkleHash(tc.val)
		switch {
		case tc.error == "" && err != nil:
			// Success expected, but hashing failed.
			t.Errorf("test %d: unexpected error: %v\nvalue %#v\ntype %T",
				i, err, tc.val, tc.val)
		case tc.error != "" && fmt.Sprint(err) != tc.error:
			// A specific error expected, but it did not match.
			t.Errorf("test %d: error mismatch\ngot %v\nwant %v\nvalue %#v\ntype %T",
				i, err, tc.error, tc.val, tc.val)
		case err == nil && !bytes.Equal(got, unhex(tc.output)):
			// Hashing succeeded but produced the wrong root.
			t.Errorf("test %d: output mismatch:\ngot %X\nwant %s\nvalue %#v\ntype %T",
				i, got, stripSpace(tc.output), tc.val, tc.val)
		}
	}
}
func TestHash(t *testing.T) {
runHashTests(t, func(val interface{}) ([32]byte, error) {
return TreeHash(val)
})
}
func TestMerkleHash(t *testing.T) {
runMerkleHashTests(t, func(val [][]byte) ([]byte, error) {
return merkleHash(val)
})
}

View File

@@ -1,112 +0,0 @@
package ssz
import (
"fmt"
"io"
"reflect"
"strings"
"sync"
)
// encoder serializes the given value into the provided encode buffer.
type encoder func(reflect.Value, *encbuf) error

// Notice: we are not exactly following the spec which requires a decoder to return new index in the input buffer.
// Our io.Reader is already capable of tracking its latest read location, so we decide to return the decoded byte size
// instead. This makes our implementation look cleaner.
type decoder func(io.Reader, reflect.Value) (uint32, error)

// encodeSizer reports the serialized size in bytes of the given value.
type encodeSizer func(reflect.Value) (uint32, error)

// hasher returns the tree-hash root of the given value.
type hasher func(reflect.Value) ([]byte, error)

// sszUtils bundles the generated encoder, sizer, decoder and hasher for a
// single reflect.Type.
type sszUtils struct {
	encoder
	encodeSizer
	decoder
	hasher
}
var (
	// sszUtilsCacheMutex guards reads and writes of sszUtilsCache.
	sszUtilsCacheMutex sync.RWMutex
	// sszUtilsCache memoizes the generated SSZ utilities per reflect.Type.
	sszUtilsCache = make(map[reflect.Type]*sszUtils)
	// hashCache caches merkle roots keyed by the hash of a value's encoding.
	hashCache = newHashCache()
)
// cachedSSZUtils returns the memoized encoder, encodeSizer, decoder and hasher
// for typ, generating and caching them on first use. With the cache, creating
// the utilities has O(1) amortized time overhead per type.
func cachedSSZUtils(typ reflect.Type) (*sszUtils, error) {
	// Fast path: read-locked cache lookup.
	sszUtilsCacheMutex.RLock()
	cached := sszUtilsCache[typ]
	sszUtilsCacheMutex.RUnlock()
	if cached != nil {
		return cached, nil
	}
	// Slow path: take the write lock and generate. The no-lock variant
	// re-checks the cache in case another goroutine filled it meanwhile.
	sszUtilsCacheMutex.Lock()
	defer sszUtilsCacheMutex.Unlock()
	return cachedSSZUtilsNoAcquireLock(typ)
}
// This version is used when the caller is already holding the rw lock for sszUtilsCache.
// It doesn't acquire new rw lock so it's free to recursively call itself without getting into
// a deadlock situation.
//
// Make sure you are holding the write lock of sszUtilsCacheMutex before
// calling this function; it reads and mutates sszUtilsCache unguarded.
func cachedSSZUtilsNoAcquireLock(typ reflect.Type) (*sszUtils, error) {
	// Check again in case other goroutine has just acquired the lock
	// and already updated the cache
	utils := sszUtilsCache[typ]
	if utils != nil {
		return utils, nil
	}
	// Put a dummy value into the cache before generating.
	// If the generator tries to lookup the type of itself,
	// it will get the dummy value and won't call recursively forever.
	sszUtilsCache[typ] = new(sszUtils)
	utils, err := generateSSZUtilsForType(typ)
	if err != nil {
		// Don't forget to remove the dummy key when fail
		delete(sszUtilsCache, typ)
		return nil, err
	}
	// Overwrite the dummy value with real value
	*sszUtilsCache[typ] = *utils
	return sszUtilsCache[typ], nil
}
// generateSSZUtilsForType builds the encoder, sizer, decoder and hasher for
// typ, returning an error if any of them cannot be generated.
func generateSSZUtilsForType(typ reflect.Type) (*sszUtils, error) {
	u := new(sszUtils)
	var err error
	if u.encoder, u.encodeSizer, err = makeEncoder(typ); err != nil {
		return nil, err
	}
	if u.decoder, err = makeDecoder(typ); err != nil {
		return nil, err
	}
	if u.hasher, err = makeHasher(typ); err != nil {
		return nil, err
	}
	return u, nil
}
// field records a serializable struct field: its index within the struct,
// its name, and the SSZ utilities generated for its type.
type field struct {
	index int
	name string
	sszUtils *sszUtils
}
// structFields collects the SSZ utilities for every serializable field of the
// struct type typ, skipping protobuf-internal fields (names containing "XXX").
// Must be called while holding the sszUtilsCache lock, since it uses the
// no-lock utility lookup.
func structFields(typ reflect.Type) ([]field, error) {
	var fields []field
	for i := 0; i < typ.NumField(); i++ {
		sf := typ.Field(i)
		if strings.Contains(sf.Name, "XXX") {
			// Generated protobuf bookkeeping field; not part of the encoding.
			continue
		}
		utils, err := cachedSSZUtilsNoAcquireLock(sf.Type)
		if err != nil {
			return nil, fmt.Errorf("failed to get ssz utils: %v", err)
		}
		fields = append(fields, field{i, sf.Name, utils})
	}
	return fields, nil
}

View File

@@ -23,7 +23,6 @@ go_library(
"//shared/bytesutil:go_default_library",
"//shared/keystore:go_default_library",
"//shared/prometheus:go_default_library",
"//shared/ssz:go_default_library",
"@com_github_boltdb_bolt//:go_default_library",
"@com_github_ethereum_go_ethereum//accounts/abi/bind:go_default_library",
"@com_github_ethereum_go_ethereum//common:go_default_library",
@@ -33,6 +32,7 @@ go_library(
"@com_github_gogo_protobuf//proto:go_default_library",
"@com_github_prometheus_client_golang//prometheus:go_default_library",
"@com_github_prometheus_client_golang//prometheus/promauto:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
"@com_github_sirupsen_logrus//:go_default_library",
"@io_k8s_api//core/v1:go_default_library",
"@io_k8s_apimachinery//pkg/apis/meta/v1:go_default_library",
@@ -83,7 +83,7 @@ go_image(
"//shared/bytesutil:go_default_library",
"//shared/keystore:go_default_library",
"//shared/prometheus:go_default_library",
"//shared/ssz:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
"@com_github_boltdb_bolt//:go_default_library",
"@com_github_ethereum_go_ethereum//accounts/abi/bind:go_default_library",
"@com_github_ethereum_go_ethereum//common:go_default_library",

View File

@@ -14,10 +14,10 @@ import (
"github.com/ethereum/go-ethereum/crypto"
"github.com/ethereum/go-ethereum/ethclient"
"github.com/ethereum/go-ethereum/rpc"
"github.com/prysmaticlabs/go-ssz"
contracts "github.com/prysmaticlabs/prysm/contracts/deposit-contract"
pb "github.com/prysmaticlabs/prysm/proto/cluster"
"github.com/prysmaticlabs/prysm/shared/keystore"
"github.com/prysmaticlabs/prysm/shared/ssz"
)
var gasLimit = uint64(4000000)

View File

@@ -9,7 +9,7 @@ go_library(
visibility = ["//visibility:private"],
deps = [
"//proto/beacon/p2p/v1:go_default_library",
"//shared/ssz:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
],
)
@@ -30,7 +30,7 @@ go_image(
goos = "linux",
deps = [
"//proto/beacon/p2p/v1:go_default_library",
"//shared/ssz:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
],
race = "off",
pure = "on",

View File

@@ -13,8 +13,8 @@ import (
"log"
"net/http"
ssz "github.com/prysmaticlabs/go-ssz"
pb "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
"github.com/prysmaticlabs/prysm/shared/ssz"
)
func main() {

View File

@@ -8,7 +8,7 @@ go_library(
deps = [
"//shared/keystore:go_default_library",
"//shared/params:go_default_library",
"//shared/ssz:go_default_library",
"@com_github_prysmaticlabs_go_ssz//:go_default_library",
"@com_github_sirupsen_logrus//:go_default_library",
],
)

View File

@@ -9,9 +9,9 @@ import (
"io"
"os"
"github.com/prysmaticlabs/go-ssz"
"github.com/prysmaticlabs/prysm/shared/keystore"
"github.com/prysmaticlabs/prysm/shared/params"
"github.com/prysmaticlabs/prysm/shared/ssz"
"github.com/sirupsen/logrus"
)