mirror of
https://github.com/OffchainLabs/prysm.git
synced 2026-01-09 21:38:05 -05:00
Compare commits
1 Commits
fix-bid-ch
...
methodical
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
cec76aa833 |
31
cmd/ssz/BUILD.bazel
Normal file
31
cmd/ssz/BUILD.bazel
Normal file
@@ -0,0 +1,31 @@
|
||||
load("@io_bazel_rules_go//go:def.bzl", "go_binary")
|
||||
load("@prysm//tools/go:def.bzl", "go_library")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"benchmark.go",
|
||||
"generate.go",
|
||||
"ir.go",
|
||||
"main.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/cmd/ssz",
|
||||
visibility = ["//visibility:private"],
|
||||
deps = [
|
||||
"//proto/beacon/p2p/v1:go_default_library",
|
||||
"//proto/eth/v1:go_default_library",
|
||||
"//proto/eth/v1alpha1:go_default_library",
|
||||
"//sszgen:go_default_library",
|
||||
"//sszgen/backend:go_default_library",
|
||||
"//sszgen/testutil:go_default_library",
|
||||
"@com_github_ferranbt_fastssz//:go_default_library",
|
||||
"@com_github_golang_snappy//:go_default_library",
|
||||
"@com_github_urfave_cli_v2//:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
go_binary(
|
||||
name = "ssz",
|
||||
embed = [":go_default_library"],
|
||||
visibility = ["//visibility:public"],
|
||||
)
|
||||
315
cmd/ssz/benchmark.go
Normal file
315
cmd/ssz/benchmark.go
Normal file
@@ -0,0 +1,315 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
fssz "github.com/ferranbt/fastssz"
|
||||
"github.com/golang/snappy"
|
||||
"github.com/urfave/cli/v2"
|
||||
"io/fs"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"runtime/pprof"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
pbbeacon "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
|
||||
pbethv1 "github.com/prysmaticlabs/prysm/proto/eth/v1"
|
||||
pbethv1alpha1 "github.com/prysmaticlabs/prysm/proto/eth/v1alpha1"
|
||||
)
|
||||
|
||||
const methodsetMethodical = "methodical"
|
||||
const methodsetFast = "fastssz"
|
||||
|
||||
var methodset string
|
||||
var benchmarkRepeat int
|
||||
var skipList string
|
||||
var benchmark = &cli.Command{
|
||||
Name: "benchmark",
|
||||
ArgsUsage: "<path to spectest repository>",
|
||||
Aliases: []string{"bench"},
|
||||
Usage: "Benchmark for comparing fastssz with methodical to generate profiling data",
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "methodset",
|
||||
Value: "",
|
||||
Usage: "which methodset to evaluate, \"fastssz\" or \"methodical\"",
|
||||
Destination: &methodset,
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "skip-list",
|
||||
Value: "",
|
||||
Usage: "comma-separated list of types to skip (useful for excluding that big ole BeaconState).",
|
||||
Destination: &skipList,
|
||||
},
|
||||
&cli.IntFlag{
|
||||
Name: "repeat",
|
||||
Usage: "how many times to repeat each unmarshal/marshal operation (increase for more stability)",
|
||||
Destination: &benchmarkRepeat,
|
||||
},
|
||||
},
|
||||
Action: func(c *cli.Context) error {
|
||||
// validate args
|
||||
spectestPath := c.Args().Get(0)
|
||||
if spectestPath == "" {
|
||||
cli.ShowCommandHelp(c, "benchmark")
|
||||
return fmt.Errorf("error: missing required <path to spectest repository> argument")
|
||||
}
|
||||
if methodset != methodsetMethodical && methodset != methodsetFast {
|
||||
cli.ShowCommandHelp(c, "benchmark")
|
||||
return fmt.Errorf("error: --methodset must be equal to \"fastssz\" or \"methodical\"")
|
||||
}
|
||||
|
||||
// initialize profiling, profilePath will fail if spectest path is weird
|
||||
ppath, err := profilePath(spectestPath, methodset)
|
||||
f, err := os.Create(ppath)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
pprof.StartCPUProfile(f)
|
||||
defer pprof.StopCPUProfile()
|
||||
|
||||
skip := make(map[string]struct{})
|
||||
if skipList != "" {
|
||||
skipNames := strings.Split(skipList, ",")
|
||||
for _, s := range skipNames {
|
||||
skip[s] = struct{}{}
|
||||
}
|
||||
}
|
||||
// use regex to parse test cases out of a dirwalk
|
||||
tcs, err := findTestCases(spectestPath, skip)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
fmt.Printf("Found %d test cases", len(tcs))
|
||||
for _, tc := range tcs {
|
||||
err := executeTestCase(tc, methodset, benchmarkRepeat)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
},
|
||||
}
|
||||
|
||||
func profilePath(path string, methodset string) (string, error) {
|
||||
pre := regexp.MustCompile(`.*\/tests\/(mainnet|minimal)\/(altair|merge|phase0)\/ssz_static`)
|
||||
parts := pre.FindStringSubmatch(path)
|
||||
if len(parts) != 3 {
|
||||
return "", fmt.Errorf("unfamiliar spectest path, can't determine test configuration and phase")
|
||||
}
|
||||
return fmt.Sprintf("cpu-%s-%s-%s.%s.pprof", methodset, parts[1], parts[2], time.Now().Format("20060102-150405")), nil
|
||||
}
|
||||
|
||||
func executeTestCase(tc *TestCase, methodset string, repeat int) error {
|
||||
b, err := tc.MarshaledBytes()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
tys := make([]pbinit, 0)
|
||||
for _, c := range []map[string]pbinit{casesBeaconP2pV1,casesV1,casesV1Alpha1} {
|
||||
pi, ok := c[tc.typeName]
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
tys = append(tys, pi)
|
||||
}
|
||||
for i := 0; i <= repeat; i++ {
|
||||
for _, fn := range tys {
|
||||
essz := fn()
|
||||
if methodset == methodsetFast {
|
||||
err := essz.UnmarshalSSZ(b)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = essz.MarshalSSZ()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = essz.HashTreeRoot()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if methodset == methodsetMethodical {
|
||||
err := essz.XXUnmarshalSSZ(b)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = essz.XXMarshalSSZ()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = essz.XXHashTreeRoot()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func findTestCases(path string, skip map[string]struct{}) ([]*TestCase, error) {
|
||||
var re = regexp.MustCompile(`.*\/tests\/(mainnet|minimal)\/(altair|merge|phase0)\/ssz_static\/(.*)\/ssz_random\/(case_\d+)`)
|
||||
tcs := make([]*TestCase, 0)
|
||||
testCaseFromPath := func (path string, d fs.DirEntry, err error) error {
|
||||
if !d.IsDir() {
|
||||
return nil
|
||||
}
|
||||
parts := re.FindStringSubmatch(path)
|
||||
if len(parts) != 5 {
|
||||
return nil
|
||||
}
|
||||
tc := &TestCase{
|
||||
path: path,
|
||||
config: parts[1],
|
||||
phase: parts[2],
|
||||
typeName: parts[3],
|
||||
caseId: parts[4],
|
||||
}
|
||||
if tc.config == "" || tc.phase == "" || tc.typeName == "" || tc.caseId == "" {
|
||||
return nil
|
||||
}
|
||||
if _, ok := skip[tc.typeName]; ok {
|
||||
return nil
|
||||
}
|
||||
tcs = append(tcs, tc)
|
||||
return nil
|
||||
}
|
||||
err := filepath.WalkDir(path, testCaseFromPath)
|
||||
|
||||
return tcs, err
|
||||
}
|
||||
|
||||
type SSZRoots struct {
|
||||
Root string `json:"root"`
|
||||
SigningRoot string `json:"signing_root"`
|
||||
}
|
||||
|
||||
type SSZValue struct {
|
||||
Message json.RawMessage `json:"message"`
|
||||
Signature string `json:"signature"`// hex encoded '0x...'
|
||||
}
|
||||
|
||||
type TestCase struct {
|
||||
path string
|
||||
config string
|
||||
phase string
|
||||
typeName string
|
||||
caseId string
|
||||
}
|
||||
|
||||
func (tc *TestCase) MarshaledBytes() ([]byte, error) {
|
||||
fh, err := os.Open(path.Join(tc.path, "serialized.ssz_snappy"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer fh.Close()
|
||||
buf := bytes.NewBuffer(nil)
|
||||
_, err = buf.ReadFrom(fh)
|
||||
return snappy.Decode(nil, buf.Bytes())
|
||||
}
|
||||
|
||||
func (tc *TestCase) Value() (*SSZValue, error) {
|
||||
fh, err := os.Open(path.Join(tc.path, "value.yaml"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer fh.Close()
|
||||
d := json.NewDecoder(fh)
|
||||
v := &SSZValue{}
|
||||
err = d.Decode(v)
|
||||
return v, err
|
||||
}
|
||||
|
||||
func (tc *TestCase) Roots() (*SSZRoots, error) {
|
||||
fh, err := os.Open(path.Join(tc.path, "roots.yaml"))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
defer fh.Close()
|
||||
d := json.NewDecoder(fh)
|
||||
r := &SSZRoots{}
|
||||
err = d.Decode(r)
|
||||
return r, err
|
||||
}
|
||||
|
||||
//rootBytes, err := hex.DecodeString(rootsYaml.Root[2:])
|
||||
//require.NoError(t, err)
|
||||
//require.DeepEqual(t, rootBytes, root[:], "Did not receive expected hash tree root")
|
||||
|
||||
type ExperimentalSSZ interface {
|
||||
XXUnmarshalSSZ(buf []byte) error
|
||||
XXMarshalSSZ() ([]byte, error)
|
||||
XXHashTreeRoot() ([32]byte, error)
|
||||
fssz.Unmarshaler
|
||||
fssz.Marshaler
|
||||
fssz.HashRoot
|
||||
}
|
||||
|
||||
type pbinit func() ExperimentalSSZ
|
||||
|
||||
var casesBeaconP2pV1 = map[string]pbinit{
|
||||
"BeaconState": func() ExperimentalSSZ { return &pbbeacon.BeaconState{} },
|
||||
"DepositMessage": func() ExperimentalSSZ { return &pbbeacon.DepositMessage{} },
|
||||
"Fork": func() ExperimentalSSZ { return &pbbeacon.Fork{} },
|
||||
"ForkData": func() ExperimentalSSZ { return &pbbeacon.ForkData{} },
|
||||
"HistoricalBatch": func() ExperimentalSSZ { return &pbbeacon.HistoricalBatch{} },
|
||||
"PendingAttestation": func() ExperimentalSSZ { return &pbbeacon.PendingAttestation{} },
|
||||
"SigningData": func() ExperimentalSSZ { return &pbbeacon.SigningData{} },
|
||||
}
|
||||
|
||||
var casesV1 map[string]pbinit = map[string]pbinit{
|
||||
"AggregateAndProof": func() ExperimentalSSZ { return &pbethv1.AggregateAttestationAndProof{} },
|
||||
"Attestation": func() ExperimentalSSZ { return &pbethv1.Attestation{} },
|
||||
"AttestationData": func() ExperimentalSSZ { return &pbethv1.AttestationData{} },
|
||||
"AttesterSlashing": func() ExperimentalSSZ { return &pbethv1.AttesterSlashing{} },
|
||||
"BeaconBlock": func() ExperimentalSSZ { return &pbethv1.BeaconBlock{} },
|
||||
"BeaconBlockBody": func() ExperimentalSSZ { return &pbethv1.BeaconBlockBody{} },
|
||||
"BeaconBlockHeader": func() ExperimentalSSZ { return &pbethv1.BeaconBlockHeader{} },
|
||||
// exists in proto/eth/v1, but fastssz methods are not genrated for it
|
||||
//"BeaconState": func() ExperimentalSSZ { return &pbethv1.BeaconState{} },
|
||||
"Checkpoint": func() ExperimentalSSZ { return &pbethv1.Checkpoint{} },
|
||||
"Deposit": func() ExperimentalSSZ { return &pbethv1.Deposit{} },
|
||||
"DepositData": func() ExperimentalSSZ { return &pbethv1.Deposit_Data{} },
|
||||
"Eth1Data": func() ExperimentalSSZ { return &pbethv1.Eth1Data{} },
|
||||
// Fork is defined in proto/eth/v1 package, but fastssz methods are not generated
|
||||
//"Fork": func() ExperimentalSSZ { return &pbethv1.Fork{} },
|
||||
"IndexedAttestation": func() ExperimentalSSZ { return &pbethv1.IndexedAttestation{} },
|
||||
// PendingAttestation is defined in proto/eth/v1 package, but fastssz methods are not generated
|
||||
//"PendingAttestation": func() ExperimentalSSZ { return &pbethv1.PendingAttestation{} },
|
||||
"ProposerSlashing": func() ExperimentalSSZ { return &pbethv1.ProposerSlashing{} },
|
||||
"SignedAggregateAndProof": func() ExperimentalSSZ { return &pbethv1.SignedAggregateAttestationAndProof{} },
|
||||
"SignedBeaconBlock": func() ExperimentalSSZ { return &pbethv1.SignedBeaconBlock{} },
|
||||
"SignedBeaconBlockHeader": func() ExperimentalSSZ { return &pbethv1.SignedBeaconBlockHeader{} },
|
||||
"SignedVoluntaryExit": func() ExperimentalSSZ { return &pbethv1.SignedVoluntaryExit{} },
|
||||
"Validator": func() ExperimentalSSZ { return &pbethv1.Validator{} },
|
||||
"VoluntaryExit": func() ExperimentalSSZ { return &pbethv1.VoluntaryExit{} },
|
||||
}
|
||||
|
||||
var casesV1Alpha1 map[string]pbinit = map[string]pbinit{
|
||||
"AggregateAndProof": func() ExperimentalSSZ { return &pbethv1alpha1.AggregateAttestationAndProof{} },
|
||||
"Attestation": func() ExperimentalSSZ { return &pbethv1alpha1.Attestation{} },
|
||||
"AttestationData": func() ExperimentalSSZ { return &pbethv1alpha1.AttestationData{} },
|
||||
"AttesterSlashing": func() ExperimentalSSZ { return &pbethv1alpha1.AttesterSlashing{} },
|
||||
"BeaconBlock": func() ExperimentalSSZ { return &pbethv1alpha1.BeaconBlock{} },
|
||||
"BeaconBlockBody": func() ExperimentalSSZ { return &pbethv1alpha1.BeaconBlockBody{} },
|
||||
"BeaconBlockHeader": func() ExperimentalSSZ { return &pbethv1alpha1.BeaconBlockHeader{} },
|
||||
"Checkpoint": func() ExperimentalSSZ { return &pbethv1alpha1.Checkpoint{} },
|
||||
"Deposit": func() ExperimentalSSZ { return &pbethv1alpha1.Deposit{} },
|
||||
"DepositData": func() ExperimentalSSZ { return &pbethv1alpha1.Deposit_Data{} },
|
||||
"Eth1Data": func() ExperimentalSSZ { return &pbethv1alpha1.Eth1Data{} },
|
||||
"IndexedAttestation": func() ExperimentalSSZ { return &pbethv1alpha1.IndexedAttestation{} },
|
||||
"ProposerSlashing": func() ExperimentalSSZ { return &pbethv1alpha1.ProposerSlashing{} },
|
||||
"SignedAggregateAndProof": func() ExperimentalSSZ { return &pbethv1alpha1.SignedAggregateAttestationAndProof{} },
|
||||
"SignedBeaconBlock": func() ExperimentalSSZ { return &pbethv1alpha1.SignedBeaconBlock{} },
|
||||
"SignedBeaconBlockHeader": func() ExperimentalSSZ { return &pbethv1alpha1.SignedBeaconBlockHeader{} },
|
||||
"SignedVoluntaryExit": func() ExperimentalSSZ { return &pbethv1alpha1.SignedVoluntaryExit{} },
|
||||
"Validator": func() ExperimentalSSZ { return &pbethv1alpha1.Validator{} },
|
||||
"VoluntaryExit": func() ExperimentalSSZ { return &pbethv1alpha1.VoluntaryExit{} },
|
||||
}
|
||||
89
cmd/ssz/generate.go
Normal file
89
cmd/ssz/generate.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/sszgen"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/backend"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
var sourcePackage, output, typeNames string
|
||||
var generate = &cli.Command{
|
||||
Name: "generate",
|
||||
ArgsUsage: "<input package, eg github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1>",
|
||||
Aliases: []string{"gen"},
|
||||
Usage: "generate methodsets for a go struct type to support ssz ser/des",
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "output",
|
||||
Value: "",
|
||||
Usage: "directory to write generated code (same as input by default)",
|
||||
Destination: &output,
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "type-names",
|
||||
Value: "",
|
||||
Usage: "if specified, only generate methods for types specified in this comma-separated list",
|
||||
Destination: &typeNames,
|
||||
},
|
||||
},
|
||||
Action: func(c *cli.Context) error {
|
||||
sourcePackage = c.Args().Get(0)
|
||||
if sourcePackage == "" {
|
||||
cli.ShowCommandHelp(c, "generate")
|
||||
return fmt.Errorf("error: mising required <input package> argument")
|
||||
}
|
||||
var err error
|
||||
index := sszgen.NewPackageIndex()
|
||||
packageName, err := index.GetPackageName(sourcePackage)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rep := sszgen.NewRepresenter(index)
|
||||
|
||||
var specs []*sszgen.DeclarationRef
|
||||
if len(typeNames) > 0 {
|
||||
for _, n := range strings.Split(strings.TrimSpace(typeNames), ",") {
|
||||
specs = append(specs, &sszgen.DeclarationRef{Package: sourcePackage, Name: n})
|
||||
}
|
||||
} else {
|
||||
specs, err = index.DeclarationRefs(sourcePackage)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if len(specs) == 0 {
|
||||
return fmt.Errorf("Could not find any codegen targets in source package %s", sourcePackage)
|
||||
}
|
||||
|
||||
if output == "" {
|
||||
output = "methodical.ssz.go"
|
||||
}
|
||||
outFh, err := os.Create(output)
|
||||
defer outFh.Close()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
g := backend.NewGenerator(packageName, sourcePackage)
|
||||
for _, s := range specs {
|
||||
fmt.Printf("Generating methods for %s/%s\n", s.Package, s.Name)
|
||||
typeRep, err := rep.GetDeclaration(s.Package, s.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
g.Generate(typeRep)
|
||||
}
|
||||
rbytes, err := g.Render()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
_, err = io.Copy(outFh, bytes.NewReader(rbytes))
|
||||
return err
|
||||
},
|
||||
}
|
||||
82
cmd/ssz/ir.go
Normal file
82
cmd/ssz/ir.go
Normal file
@@ -0,0 +1,82 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io"
|
||||
"os"
|
||||
"strings"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/sszgen"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/testutil"
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
var ir = &cli.Command{
|
||||
Name: "ir",
|
||||
ArgsUsage: "<input package, eg github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1>",
|
||||
Aliases: []string{"gen"},
|
||||
Usage: "generate intermediate representation for a go struct type. This data structure is used by the backend code generator. Outputting it to a source file an be useful for generating test cases and debugging.",
|
||||
Flags: []cli.Flag{
|
||||
&cli.StringFlag{
|
||||
Name: "output",
|
||||
Value: "",
|
||||
Usage: "file path to write generated code",
|
||||
Destination: &output,
|
||||
Required: true,
|
||||
},
|
||||
&cli.StringFlag{
|
||||
Name: "type-names",
|
||||
Value: "",
|
||||
Usage: "if specified, only generate types specified in this comma-separated list",
|
||||
Destination: &typeNames,
|
||||
},
|
||||
},
|
||||
Action: func(c *cli.Context) error {
|
||||
if c.NArg() > 0 {
|
||||
sourcePackage = c.Args().Get(0)
|
||||
}
|
||||
index := sszgen.NewPackageIndex()
|
||||
rep := sszgen.NewRepresenter(index)
|
||||
|
||||
var err error
|
||||
var specs []*sszgen.DeclarationRef
|
||||
if len(typeNames) > 0 {
|
||||
for _, n := range strings.Split(strings.TrimSpace(typeNames), ",") {
|
||||
specs = append(specs, &sszgen.DeclarationRef{Package: sourcePackage, Name: n})
|
||||
}
|
||||
} else {
|
||||
specs, err = index.DeclarationRefs(sourcePackage)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
if len(specs) == 0 {
|
||||
return fmt.Errorf("Could not find any codegen targets in source package %s", sourcePackage)
|
||||
}
|
||||
|
||||
outFh, err := os.Create(output)
|
||||
defer outFh.Close()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
renderedTypes := make([]string, 0)
|
||||
for _, s := range specs {
|
||||
typeRep, err := rep.GetDeclaration(s.Package, s.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
rendered, err := testutil.RenderIntermediate(typeRep)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
renderedTypes = append(renderedTypes, rendered)
|
||||
}
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
_, err = io.Copy(outFh, strings.NewReader(strings.Join(renderedTypes, "\n")))
|
||||
return err
|
||||
},
|
||||
}
|
||||
20
cmd/ssz/main.go
Normal file
20
cmd/ssz/main.go
Normal file
@@ -0,0 +1,20 @@
|
||||
package main
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
|
||||
"github.com/urfave/cli/v2"
|
||||
)
|
||||
|
||||
func main() {
|
||||
app := &cli.App{
|
||||
Usage: "ssz support for prysm",
|
||||
Commands: []*cli.Command{benchmark, generate, ir},
|
||||
}
|
||||
|
||||
err := app.Run(os.Args)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
}
|
||||
1
go.mod
1
go.mod
@@ -9,6 +9,7 @@ require (
|
||||
github.com/aristanetworks/goarista v0.0.0-20200805130819-fd197cf57d96
|
||||
github.com/bazelbuild/rules_go v0.23.2
|
||||
github.com/d4l3k/messagediff v1.2.1
|
||||
github.com/dave/jennifer v1.2.0
|
||||
github.com/dgraph-io/ristretto v0.0.4-0.20210318174700-74754f61e018
|
||||
github.com/dustin/go-humanize v1.0.0
|
||||
github.com/emicklei/dot v0.11.0
|
||||
|
||||
1
go.sum
1
go.sum
@@ -235,6 +235,7 @@ github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ3
|
||||
github.com/cyberdelia/templates v0.0.0-20141128023046-ca7fffd4298c/go.mod h1:GyV+0YP4qX0UQ7r2MoYZ+AvYDp12OF5yg4q8rGnyNh4=
|
||||
github.com/d4l3k/messagediff v1.2.1 h1:ZcAIMYsUg0EAp9X+tt8/enBE/Q8Yd5kzPynLyKptt9U=
|
||||
github.com/d4l3k/messagediff v1.2.1/go.mod h1:Oozbb1TVXFac9FtSIxHBMnBCq2qeH/2KkEQxENCrlLo=
|
||||
github.com/dave/jennifer v1.2.0 h1:S15ZkFMRoJ36mGAQgWL1tnr0NQJh9rZ8qatseX/VbBc=
|
||||
github.com/dave/jennifer v1.2.0/go.mod h1:fIb+770HOpJ2fmN9EPPKOqm1vMGhB+TwXKMZhrIygKg=
|
||||
github.com/davecgh/go-spew v0.0.0-20171005155431-ecdeabc65495/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
|
||||
@@ -46,7 +46,6 @@ ssz_gen_marshal(
|
||||
"BeaconBlockHeader",
|
||||
"Checkpoint",
|
||||
"Deposit",
|
||||
"DepositData",
|
||||
"Eth1Data",
|
||||
"IndexedAttestation",
|
||||
"ProposerSlashing",
|
||||
@@ -92,7 +91,8 @@ go_proto_library(
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
":ssz_generated_files",
|
||||
":ssz_generated_files", # keep
|
||||
"methodical.ssz.go",
|
||||
],
|
||||
embed = [
|
||||
":go_grpc_gateway_library",
|
||||
|
||||
2940
proto/eth/v1/methodical.ssz.go
Normal file
2940
proto/eth/v1/methodical.ssz.go
Normal file
File diff suppressed because it is too large
Load Diff
@@ -165,6 +165,7 @@ go_library(
|
||||
"sync_committee_mainnet.go",
|
||||
"sync_committee_minimal.go", # keep
|
||||
":ssz_generated_files", # keep
|
||||
"methodical.ssz.go",
|
||||
],
|
||||
embed = [
|
||||
":go_grpc_gateway_library",
|
||||
|
||||
1
spectest/shared/altair/ssz_static/BUILD.bazel
Normal file
1
spectest/shared/altair/ssz_static/BUILD.bazel
Normal file
@@ -0,0 +1 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library")
|
||||
44
spectest/shared/phase0/ssz_static/sszstatic_test.go
Normal file
44
spectest/shared/phase0/ssz_static/sszstatic_test.go
Normal file
@@ -0,0 +1,44 @@
|
||||
package ssz_static
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/hex"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/golang/snappy"
|
||||
ethpb "github.com/prysmaticlabs/prysm/proto/eth/v1alpha1"
|
||||
|
||||
//"github.com/prysmaticlabs/prysm/shared/testutil"
|
||||
"github.com/prysmaticlabs/prysm/shared/testutil/require"
|
||||
"github.com/prysmaticlabs/prysm/spectest/utils"
|
||||
)
|
||||
|
||||
func TestFailingHTR(t *testing.T) {
|
||||
fh, err := os.Open("testdata/serialized.ssz_snappy")
|
||||
require.NoError(t, err)
|
||||
defer fh.Close()
|
||||
buf := bytes.NewBuffer(nil)
|
||||
_, err = buf.ReadFrom(fh)
|
||||
sszBytes, err := snappy.Decode(nil, buf.Bytes())
|
||||
require.NoError(t, err)
|
||||
o := ðpb.AggregateAttestationAndProof{}
|
||||
err = o.XXUnmarshalSSZ(sszBytes)
|
||||
err = o.UnmarshalSSZ(sszBytes)
|
||||
require.NoError(t, err, "Could not unmarshall serialized SSZ")
|
||||
|
||||
fh, err = os.Open("testdata/roots.yaml")
|
||||
require.NoError(t, err)
|
||||
defer fh.Close()
|
||||
buf = bytes.NewBuffer(nil)
|
||||
buf.ReadFrom(fh)
|
||||
rootsYaml := &SSZRoots{}
|
||||
require.NoError(t, utils.UnmarshalYaml(buf.Bytes(), rootsYaml))
|
||||
|
||||
root, err := o.HashTreeRoot()
|
||||
require.NoError(t, err)
|
||||
|
||||
rootBytes, err := hex.DecodeString(rootsYaml.Root[2:])
|
||||
require.NoError(t, err)
|
||||
require.DeepEqual(t, rootBytes, root[:], "Did not receive expected hash tree root")
|
||||
}
|
||||
32
sszgen/BUILD.bazel
Normal file
32
sszgen/BUILD.bazel
Normal file
@@ -0,0 +1,32 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"indexer.go",
|
||||
"parser.go",
|
||||
"representer.go",
|
||||
"tagparse.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/sszgen",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = [
|
||||
"//sszgen/types:go_default_library",
|
||||
"@org_golang_x_tools//go/packages:go_default_library",
|
||||
],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = [
|
||||
"indexer_test.go",
|
||||
"parser_test.go",
|
||||
"representer_test.go",
|
||||
"tagparse_test.go",
|
||||
],
|
||||
embed = [":go_default_library"],
|
||||
deps = [
|
||||
"//shared/testutil/require:go_default_library",
|
||||
"//sszgen/types:go_default_library",
|
||||
],
|
||||
)
|
||||
46
sszgen/backend/BUILD.bazel
Normal file
46
sszgen/backend/BUILD.bazel
Normal file
@@ -0,0 +1,46 @@
|
||||
load("@prysm//tools/go:def.bzl", "go_library", "go_test")
|
||||
|
||||
go_library(
|
||||
name = "go_default_library",
|
||||
srcs = [
|
||||
"bool.go",
|
||||
"byte.go",
|
||||
"caster.go",
|
||||
"container.go",
|
||||
"genhtr.go",
|
||||
"genmarshal.go",
|
||||
"gensize.go",
|
||||
"genunmarshal.go",
|
||||
"list.go",
|
||||
"overlay.go",
|
||||
"pointer.go",
|
||||
"render.go",
|
||||
"uint.go",
|
||||
"union.go",
|
||||
"vector.go",
|
||||
"visitor.go",
|
||||
],
|
||||
importpath = "github.com/prysmaticlabs/prysm/sszgen/backend",
|
||||
visibility = ["//visibility:public"],
|
||||
deps = ["//sszgen/types:go_default_library"],
|
||||
)
|
||||
|
||||
go_test(
|
||||
name = "go_default_test",
|
||||
srcs = [
|
||||
"container_test.go",
|
||||
"genhtr_test.go",
|
||||
"genmarshal_test.go",
|
||||
"gensize_test.go",
|
||||
"genunmarshal_test.go",
|
||||
"render_test.go",
|
||||
],
|
||||
data = glob(["testdata/**"]),
|
||||
embed = [":go_default_library"],
|
||||
# dummy importpath to unbreak debugging
|
||||
importpath = "github.com/prysmaticlabs/prysm/sszgen/backend/tests-dummy",
|
||||
deps = [
|
||||
"//shared/testutil/require:go_default_library",
|
||||
"//sszgen/types:go_default_library",
|
||||
],
|
||||
)
|
||||
37
sszgen/backend/bool.go
Normal file
37
sszgen/backend/bool.go
Normal file
@@ -0,0 +1,37 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
type generateBool struct {
|
||||
valRep *types.ValueBool
|
||||
targetPackage string
|
||||
casterConfig
|
||||
}
|
||||
|
||||
func (g *generateBool) generateHTRPutter(fieldName string) string {
|
||||
return fmt.Sprintf("hh.PutBool(%s)", fieldName)
|
||||
}
|
||||
|
||||
func (g *generateBool) coerce() func(string) string {
|
||||
return func(fieldName string) string {
|
||||
return fmt.Sprintf("%s(%s)", g.valRep.TypeName(), fieldName)
|
||||
}
|
||||
}
|
||||
|
||||
func (g *generateBool) generateFixedMarshalValue(fieldName string) string {
|
||||
return fmt.Sprintf("dst = ssz.MarshalBool(dst, %s)", fieldName)
|
||||
}
|
||||
|
||||
func (g *generateBool) generateUnmarshalValue(fieldName string, offset string) string {
|
||||
convert := fmt.Sprintf("ssz.UnmarshalBool(%s)", offset)
|
||||
return fmt.Sprintf("%s = %s", fieldName, g.casterConfig.toOverlay(convert))
|
||||
}
|
||||
|
||||
func (g *generateBool) variableSizeSSZ(fieldname string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
var _ valueGenerator = &generateBool{}
|
||||
36
sszgen/backend/byte.go
Normal file
36
sszgen/backend/byte.go
Normal file
@@ -0,0 +1,36 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
type generateByte struct {
|
||||
*types.ValueByte
|
||||
targetPackage string
|
||||
}
|
||||
|
||||
func (g *generateByte) generateHTRPutter(fieldName string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (g *generateByte) coerce() func(string) string {
|
||||
return func(fieldName string) string {
|
||||
return fmt.Sprintf("%s(%s)", g.TypeName(), fieldName)
|
||||
}
|
||||
}
|
||||
|
||||
func (g *generateByte) generateFixedMarshalValue(fieldName string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (g *generateByte) generateUnmarshalValue(fieldName string, s string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (g *generateByte) variableSizeSSZ(fieldname string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
var _ valueGenerator = &generateByte{}
|
||||
33
sszgen/backend/caster.go
Normal file
33
sszgen/backend/caster.go
Normal file
@@ -0,0 +1,33 @@
|
||||
package backend
|
||||
|
||||
type caster interface {
|
||||
setToOverlay(func(string) string)
|
||||
setFromOverlay(func(string) string)
|
||||
}
|
||||
|
||||
type casterConfig struct {
|
||||
toOverlayFunc func(string) string
|
||||
fromOverlayFunc func(string) string
|
||||
}
|
||||
|
||||
func (c *casterConfig) setToOverlay(castFunc func(string) string) {
|
||||
c.toOverlayFunc = castFunc
|
||||
}
|
||||
|
||||
func (c *casterConfig) toOverlay(value string) string {
|
||||
if c.toOverlayFunc == nil {
|
||||
return value
|
||||
}
|
||||
return c.toOverlayFunc(value)
|
||||
}
|
||||
|
||||
func (c *casterConfig) setFromOverlay(castFunc func(string) string) {
|
||||
c.fromOverlayFunc = castFunc
|
||||
}
|
||||
|
||||
func (c *casterConfig) fromOverlay(value string) string {
|
||||
if c.fromOverlayFunc == nil {
|
||||
return value
|
||||
}
|
||||
return c.fromOverlayFunc(value)
|
||||
}
|
||||
70
sszgen/backend/container.go
Normal file
70
sszgen/backend/container.go
Normal file
@@ -0,0 +1,70 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
const receiverName = "c"
|
||||
|
||||
type generateContainer struct {
|
||||
*types.ValueContainer
|
||||
targetPackage string
|
||||
}
|
||||
|
||||
func (g *generateContainer) generateHTRPutter(fieldName string) string {
|
||||
tmpl := `if err := %s.XXHashTreeRootWith(hh); err != nil {
|
||||
return err
|
||||
}`
|
||||
return fmt.Sprintf(tmpl, fieldName)
|
||||
}
|
||||
|
||||
func (g *generateContainer) variableSizeSSZ(fieldName string) string {
|
||||
return fmt.Sprintf("%s.SizeSSZ()", fieldName)
|
||||
}
|
||||
|
||||
func (g *generateContainer) generateUnmarshalValue(fieldName string, sliceName string) string {
|
||||
t := `if err = %s.XXUnmarshalSSZ(%s); err != nil {
|
||||
return err
|
||||
}`
|
||||
return fmt.Sprintf(t, fieldName, sliceName)
|
||||
}
|
||||
|
||||
func (g *generateContainer) generateFixedMarshalValue(fieldName string) string {
|
||||
if g.IsVariableSized() {
|
||||
return fmt.Sprintf(`dst = ssz.WriteOffset(dst, offset)
|
||||
offset += %s.SizeSSZ()`, fieldName)
|
||||
}
|
||||
return g.generateDelegateFieldMarshalSSZ(fieldName)
|
||||
}
|
||||
|
||||
// method that generates code which calls the MarshalSSZ method of the field
|
||||
func (g *generateContainer) generateDelegateFieldMarshalSSZ(fieldName string) string {
|
||||
return fmt.Sprintf(`if dst, err = %s.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}`, fieldName)
|
||||
}
|
||||
|
||||
func (g *generateContainer) generateVariableMarshalValue(fieldName string) string {
|
||||
return g.generateDelegateFieldMarshalSSZ(fieldName)
|
||||
}
|
||||
|
||||
func (g *generateContainer) fixedOffset() int {
|
||||
offset := 0
|
||||
for _, c := range g.Contents {
|
||||
offset += c.Value.FixedSize()
|
||||
}
|
||||
return offset
|
||||
}
|
||||
|
||||
func (g *generateContainer) initializeValue(fieldName string) string {
|
||||
fqType := g.TypeName()
|
||||
if g.targetPackage != g.PackagePath() {
|
||||
fqType = importAlias(g.PackagePath()) + "." + fqType
|
||||
}
|
||||
return fmt.Sprintf("new(%s)", fullyQualifiedTypeName(g.ValueContainer, g.targetPackage))
|
||||
}
|
||||
|
||||
var _ valueGenerator = &generateContainer{}
|
||||
var _ valueInitializer = &generateContainer{}
|
||||
var _ htrPutter = &generateContainer{}
|
||||
52
sszgen/backend/container_test.go
Normal file
52
sszgen/backend/container_test.go
Normal file
File diff suppressed because one or more lines are too long
68
sszgen/backend/genhtr.go
Normal file
68
sszgen/backend/genhtr.go
Normal file
@@ -0,0 +1,68 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
// ChunkSize is used to check if packed bytes align to the chunk sized used by the
|
||||
// merkleization algorithm. If not, the bytes should be zero-padded to the
|
||||
// nearest multiple of ChunkSize.
|
||||
const ChunkSize = 32
|
||||
|
||||
var htrTmpl = `func ({{.Receiver}} {{.Type}}) XXHashTreeRoot() ([32]byte, error) {
|
||||
hh := ssz.DefaultHasherPool.Get()
|
||||
if err := {{.Receiver}}.XXHashTreeRootWith(hh); err != nil {
|
||||
ssz.DefaultHasherPool.Put(hh)
|
||||
return [32]byte{}, err
|
||||
}
|
||||
root, err := hh.HashRoot()
|
||||
ssz.DefaultHasherPool.Put(hh)
|
||||
return root, err
|
||||
}
|
||||
|
||||
func ({{.Receiver}} {{.Type}}) XXHashTreeRootWith(hh *ssz.Hasher) (err error) {
|
||||
indx := hh.Index()
|
||||
{{.HTRSteps}}
|
||||
hh.Merkleize(indx)
|
||||
return nil
|
||||
}`
|
||||
|
||||
func GenerateHashTreeRoot(g *generateContainer) *generatedCode {
|
||||
htrTmpl, err := template.New("GenerateHashTreeRoot").Parse(htrTmpl)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
htrSteps := make([]string, 0)
|
||||
for i, c := range g.Contents {
|
||||
fieldName := fmt.Sprintf("%s.%s", receiverName, c.Key)
|
||||
htrSteps = append(htrSteps, fmt.Sprintf("\t// Field %d: %s", i, c.Key))
|
||||
vg := newValueGenerator(c.Value, g.targetPackage)
|
||||
htrp, ok := vg.(htrPutter)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
htrSteps = append(htrSteps, htrp.generateHTRPutter(fieldName))
|
||||
}
|
||||
err = htrTmpl.Execute(buf, struct{
|
||||
Receiver string
|
||||
Type string
|
||||
HTRSteps string
|
||||
}{
|
||||
Receiver: receiverName,
|
||||
Type: fmt.Sprintf("*%s", g.TypeName()),
|
||||
HTRSteps: strings.Join(htrSteps, "\n"),
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
// TODO: allow GenerateHashTreeRoot to return an error since template.Execute
|
||||
// can technically return an error (get rid of the panics)
|
||||
return &generatedCode{
|
||||
blocks: []string{string(buf.Bytes())},
|
||||
imports: extractImportsFromContainerFields(g.Contents, g.targetPackage),
|
||||
}
|
||||
}
|
||||
75
sszgen/backend/genhtr_test.go
Normal file
75
sszgen/backend/genhtr_test.go
Normal file
@@ -0,0 +1,75 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/shared/testutil/require"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
// cases left to satisfy:
|
||||
// list-vector-byte
|
||||
func TestGenerateHashTreeRoot(t *testing.T) {
|
||||
b, err := os.ReadFile("testdata/TestGenerateHashTreeRoot.expected")
|
||||
require.NoError(t, err)
|
||||
expected := string(b)
|
||||
|
||||
vc, ok := testFixBeaconState.(*types.ValueContainer)
|
||||
require.Equal(t, true, ok)
|
||||
gc := &generateContainer{vc, ""}
|
||||
code := GenerateHashTreeRoot(gc)
|
||||
require.Equal(t, 4, len(code.imports))
|
||||
actual, err := normalizeFixtureString(code.blocks[0])
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
func TestHTROverlayCoerce(t *testing.T) {
|
||||
pkg := "derp"
|
||||
expected := "hh.PutUint64(uint64(b.Slot))"
|
||||
val := &types.ValueOverlay{
|
||||
Name: "",
|
||||
Package: pkg,
|
||||
Underlying: &types.ValueUint{
|
||||
Name: "uint64",
|
||||
Size: 64,
|
||||
Package: pkg,
|
||||
},
|
||||
}
|
||||
gv := &generateOverlay{val, pkg}
|
||||
actual := gv.generateHTRPutter("b.Slot")
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
func TestHTRContainer(t *testing.T) {
|
||||
pkg := "derp"
|
||||
expected := `if err := b.Fork.HashTreeRootWith(hh); err != nil {
|
||||
return err
|
||||
}`
|
||||
val := &types.ValueContainer{}
|
||||
gv := &generateContainer{val, pkg}
|
||||
actual := gv.generateHTRPutter("b.Fork")
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
func TestHTRByteVector(t *testing.T) {
|
||||
pkg := "derp"
|
||||
fieldName := "c.GenesisValidatorsRoot"
|
||||
expected := `{
|
||||
if len(c.GenesisValidatorsRoot) != 32 {
|
||||
return ssz.ErrVectorLength
|
||||
}
|
||||
hh.PutBytes(c.GenesisValidatorsRoot)
|
||||
}`
|
||||
val := &types.ValueVector{
|
||||
ElementValue: &types.ValueByte{},
|
||||
Size: 32,
|
||||
}
|
||||
gv := &generateVector{
|
||||
valRep: val,
|
||||
targetPackage: pkg,
|
||||
}
|
||||
actual := gv.generateHTRPutter(fieldName)
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
92
sszgen/backend/genmarshal.go
Normal file
92
sszgen/backend/genmarshal.go
Normal file
@@ -0,0 +1,92 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
var marshalBodyTmpl = `func ({{.Receiver}} {{.Type}}) XXMarshalSSZ() ([]byte, error) {
|
||||
buf := make([]byte, {{.Receiver}}.XXSizeSSZ())
|
||||
return {{.Receiver}}.XXMarshalSSZTo(buf[:0])
|
||||
}
|
||||
|
||||
func ({{.Receiver}} {{.Type}}) XXMarshalSSZTo(dst []byte) ([]byte, error) {
|
||||
var err error
|
||||
{{- .OffsetDeclaration -}}
|
||||
{{- .ValueMarshaling }}
|
||||
{{- .VariableValueMarshaling }}
|
||||
return dst, err
|
||||
}`
|
||||
|
||||
func GenerateMarshalSSZ(g *generateContainer) *generatedCode {
|
||||
sizeTmpl, err := template.New("GenerateMarshalSSZ").Parse(marshalBodyTmpl)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
|
||||
marshalValueBlocks := make([]string, 0)
|
||||
marshalVariableValueBlocks := make([]string, 0)
|
||||
offset := 0
|
||||
for i, c := range g.Contents {
|
||||
// only lists need the offset variable
|
||||
mg := newValueGenerator(c.Value, g.targetPackage)
|
||||
fieldName := fmt.Sprintf("%s.%s", receiverName, c.Key)
|
||||
marshalValueBlocks = append(marshalValueBlocks, fmt.Sprintf("\n\t// Field %d: %s", i, c.Key))
|
||||
vi, ok := mg.(valueInitializer)
|
||||
if ok {
|
||||
ini := vi.initializeValue(fieldName)
|
||||
if ini != "" {
|
||||
marshalValueBlocks = append(marshalValueBlocks , fmt.Sprintf("if %s == nil {\n\t%s = %s\n}", fieldName, fieldName, ini))
|
||||
}
|
||||
}
|
||||
mv := mg.generateFixedMarshalValue(fieldName)
|
||||
marshalValueBlocks = append(marshalValueBlocks, "\t" + mv)
|
||||
offset += c.Value.FixedSize()
|
||||
if !c.Value.IsVariableSized() {
|
||||
continue
|
||||
}
|
||||
vm, ok := mg.(variableMarshaller)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
vmc := vm.generateVariableMarshalValue(fieldName)
|
||||
if vmc != "" {
|
||||
marshalVariableValueBlocks = append(marshalVariableValueBlocks, fmt.Sprintf("\n\t// Field %d: %s", i, c.Key))
|
||||
marshalVariableValueBlocks = append(marshalVariableValueBlocks, "\t" + vmc)
|
||||
}
|
||||
}
|
||||
// only set the offset declaration if we need it
|
||||
// otherwise we'll have an unused variable (syntax error)
|
||||
offsetDeclaration := ""
|
||||
if g.IsVariableSized() {
|
||||
// if there are any variable sized values in the container, we'll need to set this offset declaration
|
||||
// so it gets rendered to the top of the marshal method
|
||||
offsetDeclaration = fmt.Sprintf("\noffset := %d\n", offset)
|
||||
}
|
||||
|
||||
err = sizeTmpl.Execute(buf, struct{
|
||||
Receiver string
|
||||
Type string
|
||||
OffsetDeclaration string
|
||||
ValueMarshaling string
|
||||
VariableValueMarshaling string
|
||||
}{
|
||||
Receiver: receiverName,
|
||||
Type: fmt.Sprintf("*%s", g.TypeName()),
|
||||
OffsetDeclaration: offsetDeclaration,
|
||||
ValueMarshaling: "\n" + strings.Join(marshalValueBlocks, "\n"),
|
||||
VariableValueMarshaling: "\n" + strings.Join(marshalVariableValueBlocks, "\n"),
|
||||
})
|
||||
// TODO: allow GenerateMarshalSSZ to return an error since template.Execute
|
||||
// can technically return an error
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return &generatedCode{
|
||||
blocks: []string{string(buf.Bytes())},
|
||||
imports: extractImportsFromContainerFields(g.Contents, g.targetPackage),
|
||||
}
|
||||
}
|
||||
25
sszgen/backend/genmarshal_test.go
Normal file
25
sszgen/backend/genmarshal_test.go
Normal file
@@ -0,0 +1,25 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/shared/testutil/require"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
func TestGenerateMarshalSSZ(t *testing.T) {
|
||||
b, err := os.ReadFile("testdata/TestGenerateMarshalSSZ.expected")
|
||||
require.NoError(t, err)
|
||||
expected := string(b)
|
||||
|
||||
vc, ok := testFixBeaconState.(*types.ValueContainer)
|
||||
require.Equal(t, true, ok)
|
||||
gc := &generateContainer{vc, ""}
|
||||
code := GenerateMarshalSSZ(gc)
|
||||
require.Equal(t, 4, len(code.imports))
|
||||
actual, err := normalizeFixtureString(code.blocks[0])
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
65
sszgen/backend/gensize.go
Normal file
65
sszgen/backend/gensize.go
Normal file
@@ -0,0 +1,65 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
|
||||
var sizeBodyTmpl = `func ({{.Receiver}} {{.Type}}) XXSizeSSZ() (int) {
|
||||
size := {{.FixedSize}}
|
||||
{{- .VariableSize }}
|
||||
return size
|
||||
}`
|
||||
|
||||
func GenerateSizeSSZ(g *generateContainer) *generatedCode {
|
||||
sizeTmpl, err := template.New("GenerateSizeSSZ").Parse(sizeBodyTmpl)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
|
||||
fixedSize := 0
|
||||
variableComputations := make([]string, 0)
|
||||
for _, c := range g.Contents {
|
||||
vg := newValueGenerator(c.Value, g.targetPackage)
|
||||
fixedSize += c.Value.FixedSize()
|
||||
if !c.Value.IsVariableSized() {
|
||||
continue
|
||||
}
|
||||
fieldName := fmt.Sprintf("%s.%s", receiverName, c.Key)
|
||||
vi, ok := vg.(valueInitializer)
|
||||
if ok {
|
||||
ini := vi.initializeValue(fieldName)
|
||||
if ini != "" {
|
||||
variableComputations = append(variableComputations, fmt.Sprintf("if %s == nil {\n\t%s = %s\n}", fieldName, fieldName, ini))
|
||||
}
|
||||
}
|
||||
cv := vg.variableSizeSSZ(fieldName)
|
||||
if cv != "" {
|
||||
variableComputations = append(variableComputations, fmt.Sprintf("\tsize += %s", cv))
|
||||
}
|
||||
}
|
||||
|
||||
err = sizeTmpl.Execute(buf, struct{
|
||||
Receiver string
|
||||
Type string
|
||||
FixedSize int
|
||||
VariableSize string
|
||||
}{
|
||||
Receiver: receiverName,
|
||||
Type: fmt.Sprintf("*%s", g.TypeName()),
|
||||
FixedSize: fixedSize,
|
||||
VariableSize: "\n" + strings.Join(variableComputations, "\n"),
|
||||
})
|
||||
// TODO: allow GenerateSizeSSZ to return an error since template.Execute
|
||||
// can technically return an error
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return &generatedCode{
|
||||
blocks: []string{string(buf.Bytes())},
|
||||
imports: extractImportsFromContainerFields(g.Contents, g.targetPackage),
|
||||
}
|
||||
}
|
||||
24
sszgen/backend/gensize_test.go
Normal file
24
sszgen/backend/gensize_test.go
Normal file
@@ -0,0 +1,24 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/shared/testutil/require"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
func TestGenerateSizeSSZ(t *testing.T) {
|
||||
b, err := os.ReadFile("testdata/TestGenerateSizeSSZ.expected")
|
||||
require.NoError(t, err)
|
||||
expected := string(b)
|
||||
|
||||
ty, ok := testFixBeaconState.(*types.ValueContainer)
|
||||
require.Equal(t, true, ok)
|
||||
gc := GenerateSizeSSZ(&generateContainer{ty, ""})
|
||||
require.Equal(t, 4, len(gc.imports))
|
||||
actual, err := normalizeFixtureString(gc.blocks[0])
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
185
sszgen/backend/genunmarshal.go
Normal file
185
sszgen/backend/genunmarshal.go
Normal file
@@ -0,0 +1,185 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
"strings"
|
||||
"text/template"
|
||||
)
|
||||
var generateUnmarshalSSZTmpl = `func ({{.Receiver}} {{.Type}}) XXUnmarshalSSZ(buf []byte) error {
|
||||
var err error
|
||||
size := uint64(len(buf))
|
||||
if size {{ .SizeInequality }} {{ .FixedOffset }} {
|
||||
return ssz.ErrSize
|
||||
}
|
||||
|
||||
{{ .SliceDeclaration }}
|
||||
{{ .ValueUnmarshaling }}
|
||||
return err
|
||||
}`
|
||||
|
||||
|
||||
func GenerateUnmarshalSSZ(g *generateContainer) *generatedCode {
|
||||
sizeInequality := "!="
|
||||
if g.IsVariableSized() {
|
||||
sizeInequality = "<"
|
||||
}
|
||||
ums := g.unmarshalSteps()
|
||||
unmarshalBlocks := make([]string, 0)
|
||||
for i, c := range g.Contents {
|
||||
unmarshalBlocks = append(unmarshalBlocks, fmt.Sprintf("\n\t// Field %d: %s", i, c.Key))
|
||||
mg := newValueGenerator(c.Value, g.targetPackage)
|
||||
fieldName := fmt.Sprintf("%s.%s", receiverName, c.Key)
|
||||
|
||||
vi, ok := mg.(valueInitializer)
|
||||
if ok {
|
||||
ini := vi.initializeValue(fieldName)
|
||||
if ini != "" {
|
||||
unmarshalBlocks = append(unmarshalBlocks, fmt.Sprintf("%s = %s", fieldName, ini))
|
||||
}
|
||||
}
|
||||
|
||||
sliceName := fmt.Sprintf("s%d", i)
|
||||
mv := mg.generateUnmarshalValue(fieldName, sliceName)
|
||||
if mv != "" {
|
||||
//unmarshalBlocks = append(unmarshalBlocks, fmt.Sprintf("\t%s = %s", fieldName, mv))
|
||||
unmarshalBlocks = append(unmarshalBlocks, mv)
|
||||
}
|
||||
|
||||
/*
|
||||
if !c.Value.IsVariableSized() {
|
||||
continue
|
||||
}
|
||||
_, ok := mg.(variableUnmarshaller)
|
||||
if !ok {
|
||||
continue
|
||||
}
|
||||
vm := mg.(variableUnmarshaller)
|
||||
vmc := vm.generateVariableUnmarshalValue(fieldName)
|
||||
if vmc != "" {
|
||||
unmarshalVariableBlocks = append(unmarshalVariableBlocks, fmt.Sprintf("\n\t// Field %d: %s", i, c.Key))
|
||||
unmarshalVariableBlocks = append(unmarshalVariableBlocks, "\t" + vmc)
|
||||
}
|
||||
*/
|
||||
}
|
||||
|
||||
sliceDeclarations := strings.Join([]string{ums.fixedSlices(), "", ums.variableSlices(g.fixedOffset())}, "\n")
|
||||
unmTmpl, err := template.New("GenerateUnmarshalSSZTmpl").Parse(generateUnmarshalSSZTmpl)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
err = unmTmpl.Execute(buf, struct{
|
||||
Receiver string
|
||||
Type string
|
||||
SizeInequality string
|
||||
FixedOffset int
|
||||
SliceDeclaration string
|
||||
ValueUnmarshaling string
|
||||
}{
|
||||
Receiver: receiverName,
|
||||
Type: fmt.Sprintf("*%s", g.TypeName()),
|
||||
SizeInequality: sizeInequality,
|
||||
FixedOffset: g.fixedOffset(),
|
||||
SliceDeclaration: sliceDeclarations,
|
||||
ValueUnmarshaling: strings.Join(unmarshalBlocks, "\n"),
|
||||
})
|
||||
// TODO: allow GenerateUnmarshalSSZ to return an error since template.Execute
|
||||
// can technically return an error
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return &generatedCode{
|
||||
blocks: []string{string(buf.Bytes())},
|
||||
imports: extractImportsFromContainerFields(g.Contents, g.targetPackage),
|
||||
}
|
||||
}
|
||||
|
||||
type unmarshalStep struct {
|
||||
valRep types.ValRep
|
||||
fieldNumber int
|
||||
fieldName string
|
||||
beginByte int
|
||||
endByte int
|
||||
previousVariable *unmarshalStep
|
||||
nextVariable *unmarshalStep
|
||||
}
|
||||
|
||||
type unmarshalStepSlice []*unmarshalStep
|
||||
|
||||
func (us *unmarshalStep) fixedSize() int {
|
||||
return us.valRep.FixedSize()
|
||||
}
|
||||
|
||||
func (us *unmarshalStep) variableOffset(outerFixedSize int) string {
|
||||
o := fmt.Sprintf("v%d := ssz.ReadOffset(buf[%d:%d]) // %s", us.fieldNumber, us.beginByte, us.endByte, us.fieldName)
|
||||
if us.previousVariable == nil {
|
||||
o += fmt.Sprintf("\nif v%d < %d {\n\treturn ssz.ErrInvalidVariableOffset\n}", us.fieldNumber, outerFixedSize)
|
||||
o += fmt.Sprintf("\nif v%d > size {\n\treturn ssz.ErrOffset\n}", us.fieldNumber)
|
||||
} else {
|
||||
o += fmt.Sprintf("\nif v%d > size || v%d < v%d {\n\treturn ssz.ErrOffset\n}", us.fieldNumber, us.fieldNumber, us.previousVariable.fieldNumber)
|
||||
}
|
||||
return o
|
||||
}
|
||||
|
||||
func (us *unmarshalStep) slice() string {
|
||||
if us.valRep.IsVariableSized() {
|
||||
if us.nextVariable == nil {
|
||||
return fmt.Sprintf("s%d := buf[v%d:]\t\t// %s", us.fieldNumber, us.fieldNumber, us.fieldName)
|
||||
}
|
||||
return fmt.Sprintf("s%d := buf[v%d:v%d]\t\t// %s", us.fieldNumber, us.fieldNumber, us.nextVariable.fieldNumber, us.fieldName)
|
||||
}
|
||||
return fmt.Sprintf("s%d := buf[%d:%d]\t\t// %s", us.fieldNumber, us.beginByte, us.endByte, us.fieldName)
|
||||
}
|
||||
|
||||
func (steps unmarshalStepSlice) fixedSlices() string {
|
||||
slices := make([]string, 0)
|
||||
for _, s := range steps {
|
||||
if s.valRep.IsVariableSized() {
|
||||
continue
|
||||
}
|
||||
slices = append(slices, s.slice())
|
||||
}
|
||||
return strings.Join(slices, "\n")
|
||||
}
|
||||
|
||||
func (steps unmarshalStepSlice) variableSlices(outerSize int) string {
|
||||
validate := make([]string, 0)
|
||||
assign := make([]string, 0)
|
||||
for _, s := range steps {
|
||||
if !s.valRep.IsVariableSized() {
|
||||
continue
|
||||
}
|
||||
validate = append(validate, s.variableOffset(outerSize))
|
||||
assign = append(assign, s.slice())
|
||||
}
|
||||
return strings.Join(append(validate, assign...), "\n")
|
||||
}
|
||||
|
||||
func (g *generateContainer) unmarshalSteps() unmarshalStepSlice{
|
||||
ums := make([]*unmarshalStep, 0)
|
||||
var begin, end int
|
||||
var prevVariable *unmarshalStep
|
||||
for i, c := range g.Contents {
|
||||
begin = end
|
||||
end += c.Value.FixedSize()
|
||||
um := &unmarshalStep{
|
||||
valRep: c.Value,
|
||||
fieldNumber: i,
|
||||
fieldName: fmt.Sprintf("%s.%s", receiverName, c.Key),
|
||||
beginByte: begin,
|
||||
endByte: end,
|
||||
}
|
||||
if c.Value.IsVariableSized() {
|
||||
if prevVariable != nil {
|
||||
um.previousVariable = prevVariable
|
||||
prevVariable.nextVariable = um
|
||||
}
|
||||
prevVariable = um
|
||||
}
|
||||
|
||||
ums = append(ums, um)
|
||||
}
|
||||
return ums
|
||||
}
|
||||
46
sszgen/backend/genunmarshal_test.go
Normal file
46
sszgen/backend/genunmarshal_test.go
Normal file
@@ -0,0 +1,46 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"os"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/shared/testutil/require"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
func TestGenerateUnmarshalSSZ(t *testing.T) {
|
||||
b, err := os.ReadFile("testdata/TestGenerateUnmarshalSSZ.expected")
|
||||
require.NoError(t, err)
|
||||
expected := string(b)
|
||||
|
||||
vc, ok := testFixBeaconState.(*types.ValueContainer)
|
||||
require.Equal(t, true, ok)
|
||||
gc := &generateContainer{vc, ""}
|
||||
code := GenerateUnmarshalSSZ(gc)
|
||||
require.Equal(t, 4, len(code.imports))
|
||||
actual, err := normalizeFixtureString(code.blocks[0])
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
func TestUnmarshalSteps(t *testing.T) {
|
||||
fixturePath := "testdata/TestUnmarshalSteps.expected"
|
||||
b, err := os.ReadFile(fixturePath)
|
||||
require.NoError(t, err)
|
||||
expected, err := normalizeFixtureBytes(b)
|
||||
require.NoError(t, err)
|
||||
|
||||
vc, ok := testFixBeaconState.(*types.ValueContainer)
|
||||
require.Equal(t, true, ok)
|
||||
gc := &generateContainer{vc, "" }
|
||||
ums := gc.unmarshalSteps()
|
||||
require.Equal(t, 21, len(ums))
|
||||
require.Equal(t, ums[15].nextVariable.fieldNumber, ums[16].fieldNumber)
|
||||
|
||||
gotRaw := strings.Join([]string{ums.fixedSlices(), "", ums.variableSlices(gc.fixedOffset())}, "\n")
|
||||
actual, err := normalizeFixtureString(gotRaw)
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
403
sszgen/backend/list.go
Normal file
403
sszgen/backend/list.go
Normal file
@@ -0,0 +1,403 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"text/template"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
type generateList struct {
|
||||
valRep *types.ValueList
|
||||
targetPackage string
|
||||
casterConfig
|
||||
}
|
||||
|
||||
var generateListHTRPutterTmpl = `{
|
||||
if len({{.FieldName}}) > {{.MaxSize}} {
|
||||
return ssz.ErrListTooBig
|
||||
}
|
||||
subIndx := hh.Index()
|
||||
for _, {{.NestedFieldName}} := range {{.FieldName}} {
|
||||
{{.AppendCall}}
|
||||
}
|
||||
{{- .PadCall}}
|
||||
{{.Merkleize}}
|
||||
}`
|
||||
|
||||
type listPutterElements struct {
|
||||
FieldName string
|
||||
NestedFieldName string
|
||||
MaxSize int
|
||||
AppendCall string
|
||||
PadCall string
|
||||
Merkleize string
|
||||
}
|
||||
|
||||
func renderHtrListPutter(lpe listPutterElements) string {
|
||||
tmpl, err := template.New("renderHtrListPutter").Parse(generateListHTRPutterTmpl)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
err = tmpl.Execute(buf, lpe)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func (g *generateList) generateHTRPutter(fieldName string) string {
|
||||
nestedFieldName := "o"
|
||||
if fieldName[0:1] == "o" && monoCharacter(fieldName) {
|
||||
nestedFieldName = fieldName + "o"
|
||||
}
|
||||
|
||||
// resolve pointers and overlays to their underlying types
|
||||
vr := g.valRep.ElementValue
|
||||
if vrp, isPointer := vr.(*types.ValuePointer); isPointer {
|
||||
vr = vrp.Referent
|
||||
}
|
||||
if vro, isOverlay := vr.(*types.ValueOverlay); isOverlay {
|
||||
vr = vro.Underlying
|
||||
}
|
||||
|
||||
lpe := listPutterElements{
|
||||
FieldName: fieldName,
|
||||
NestedFieldName: nestedFieldName,
|
||||
MaxSize: g.valRep.MaxSize,
|
||||
}
|
||||
switch v := vr.(type) {
|
||||
case *types.ValueByte:
|
||||
t := `if len(%s) > %d {
|
||||
return ssz.ErrBytesLength
|
||||
}
|
||||
hh.PutBytes(%s)`
|
||||
return fmt.Sprintf(t, fieldName, g.valRep.MaxSize, fieldName)
|
||||
case *types.ValueVector:
|
||||
gv := &generateVector{valRep: v, targetPackage: g.targetPackage}
|
||||
if gv.isByteVector() {
|
||||
lpe.AppendCall = gv.renderByteSliceAppend(nestedFieldName)
|
||||
mtmpl := `numItems := uint64(len(%s))
|
||||
hh.MerkleizeWithMixin(subIndx, numItems, ssz.CalculateLimit(%d, numItems, %d))`
|
||||
lpe.Merkleize = fmt.Sprintf(mtmpl, fieldName, g.valRep.MaxSize, v.FixedSize())
|
||||
return renderHtrListPutter(lpe)
|
||||
}
|
||||
case *types.ValueUint:
|
||||
lpe.AppendCall = fmt.Sprintf("hh.AppendUint%d(%s)", v.Size, nestedFieldName)
|
||||
if v.FixedSize() % ChunkSize != 0 {
|
||||
lpe.PadCall = "\nhh.FillUpTo32()"
|
||||
}
|
||||
mtmpl := `numItems := uint64(len(%s))
|
||||
hh.MerkleizeWithMixin(subIndx, numItems, ssz.CalculateLimit(%d, numItems, %d))`
|
||||
lpe.Merkleize = fmt.Sprintf(mtmpl, fieldName, g.valRep.MaxSize, v.FixedSize())
|
||||
return renderHtrListPutter(lpe)
|
||||
case *types.ValueContainer:
|
||||
gc := newValueGenerator(v, g.targetPackage)
|
||||
lpe.AppendCall = gc.generateHTRPutter(nestedFieldName)
|
||||
lpe.Merkleize = fmt.Sprintf("hh.MerkleizeWithMixin(subIndx, uint64(len(%s)), %d)", fieldName, g.valRep.MaxSize)
|
||||
return renderHtrListPutter(lpe)
|
||||
default:
|
||||
panic(fmt.Sprintf("unsupported type combination - list of %v", v))
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
var generateListGenerateUnmarshalValueFixedTmpl = `{
|
||||
if len({{.SliceName}}) % {{.ElementSize}} != 0 {
|
||||
return fmt.Errorf("misaligned bytes: {{.FieldName}} length is %d, which is not a multiple of {{.ElementSize}}", len({{.SliceName}}))
|
||||
}
|
||||
numElem := len({{.SliceName}}) / {{.ElementSize}}
|
||||
if numElem > {{ .MaxSize }} {
|
||||
return fmt.Errorf("ssz-max exceeded: {{.FieldName}} has %d elements, ssz-max is {{.MaxSize}}", numElem)
|
||||
}
|
||||
{{.FieldName}} = make([]{{.TypeName}}, numElem)
|
||||
for {{.LoopVar}} := 0; {{.LoopVar}} < numElem; {{.LoopVar}}++ {
|
||||
var tmp {{.TypeName}}
|
||||
{{.Initializer}}
|
||||
tmpSlice := {{.SliceName}}[{{.LoopVar}}*{{.NestedFixedSize}}:(1+{{.LoopVar}})*{{.NestedFixedSize}}]
|
||||
{{.NestedUnmarshal}}
|
||||
{{.FieldName}}[{{.LoopVar}}] = tmp
|
||||
}
|
||||
}`
|
||||
|
||||
var generateListGenerateUnmarshalValueVariableTmpl = `{
|
||||
// empty lists are zero length, so make sure there is room for an offset
|
||||
// before attempting to unmarshal it
|
||||
if len({{.SliceName}}) > 3 {
|
||||
firstOffset := ssz.ReadOffset({{.SliceName}}[0:4])
|
||||
if firstOffset % 4 != 0 {
|
||||
return fmt.Errorf("misaligned list bytes: when decoding {{.FieldName}}, end-of-list offset is %d, which is not a multiple of 4 (offset size)", firstOffset)
|
||||
}
|
||||
listLen := firstOffset / 4
|
||||
if listLen > {{.MaxSize}} {
|
||||
return fmt.Errorf("ssz-max exceeded: {{.FieldName}} has %d elements, ssz-max is {{.MaxSize}}", listLen)
|
||||
}
|
||||
listOffsets := make([]uint64, listLen)
|
||||
for {{.LoopVar}} := 0; uint64({{.LoopVar}}) < listLen; {{.LoopVar}}++ {
|
||||
listOffsets[{{.LoopVar}}] = ssz.ReadOffset({{.SliceName}}[{{.LoopVar}}*4:({{.LoopVar}}+1)*4])
|
||||
}
|
||||
{{.FieldName}} = make([]{{.TypeName}}, len(listOffsets))
|
||||
for {{.LoopVar}} := 0; {{.LoopVar}} < len(listOffsets); {{.LoopVar}}++ {
|
||||
var tmp {{.TypeName}}
|
||||
{{.Initializer}}
|
||||
var tmpSlice []byte
|
||||
if {{.LoopVar}}+1 == len(listOffsets) {
|
||||
tmpSlice = {{.SliceName}}[listOffsets[{{.LoopVar}}]:]
|
||||
} else {
|
||||
tmpSlice = {{.SliceName}}[listOffsets[{{.LoopVar}}]:listOffsets[{{.LoopVar}}+1]]
|
||||
}
|
||||
{{.NestedUnmarshal}}
|
||||
{{.FieldName}}[{{.LoopVar}}] = tmp
|
||||
}
|
||||
}
|
||||
}`
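// The variable-element template decodes the standard SSZ list layout: a table
// of 4-byte little-endian offsets followed by the element payloads. Because
// the first offset points just past the table, dividing it by 4 recovers the
// element count. A small worked example (hypothetical bytes, shown only to
// illustrate the arithmetic):
//
//	buf := []byte{8, 0, 0, 0, 10, 0, 0, 0, 'a', 'a', 'b', 'b', 'b'}
//	firstOffset := ssz.ReadOffset(buf[0:4]) // 8
//	listLen := firstOffset / 4              // 2 elements
//	// element 0 spans buf[8:10] ("aa"), element 1 spans buf[10:] ("bbb")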
|
||||
|
||||
func (g *generateList) generateUnmarshalVariableValue(fieldName string, sliceName string) string {
|
||||
loopVar := "i"
|
||||
if fieldName[0:1] == "i" && monoCharacter(fieldName) {
|
||||
loopVar = fieldName + "i"
|
||||
}
|
||||
gg := newValueGenerator(g.valRep.ElementValue, g.targetPackage)
|
||||
vi, ok := gg.(valueInitializer)
|
||||
var initializer string
|
||||
if ok {
|
||||
initializer = vi.initializeValue("tmp")
|
||||
if initializer != "" {
|
||||
initializer = "tmp = " + initializer
|
||||
}
|
||||
}
|
||||
tmpl, err := template.New("generateListGenerateUnmarshalValueVariableTmpl").Parse(generateListGenerateUnmarshalValueVariableTmpl)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
err = tmpl.Execute(buf, struct{
|
||||
LoopVar string
|
||||
SliceName string
|
||||
ElementSize int
|
||||
TypeName string
|
||||
FieldName string
|
||||
MaxSize int
|
||||
Initializer string
|
||||
NestedFixedSize int
|
||||
NestedUnmarshal string
|
||||
}{
|
||||
LoopVar: loopVar,
|
||||
SliceName: sliceName,
|
||||
ElementSize: g.valRep.ElementValue.FixedSize(),
|
||||
TypeName: fullyQualifiedTypeName(g.valRep.ElementValue, g.targetPackage),
|
||||
FieldName: fieldName,
|
||||
MaxSize: g.valRep.MaxSize,
|
||||
Initializer: initializer,
|
||||
NestedFixedSize: g.valRep.ElementValue.FixedSize(),
|
||||
NestedUnmarshal: gg.generateUnmarshalValue("tmp", "tmpSlice"),
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func (g *generateList) generateUnmarshalFixedValue(fieldName string, sliceName string) string {
|
||||
loopVar := "i"
|
||||
if fieldName[0:1] == "i" && monoCharacter(fieldName) {
|
||||
loopVar = fieldName + "i"
|
||||
}
|
||||
gg := newValueGenerator(g.valRep.ElementValue, g.targetPackage)
|
||||
nestedUnmarshal := ""
|
||||
switch g.valRep.ElementValue.(type) {
|
||||
case *types.ValueByte:
|
||||
return fmt.Sprintf("%s = append([]byte{}, %s...)", fieldName, g.casterConfig.toOverlay(sliceName))
|
||||
default:
|
||||
nestedUnmarshal = gg.generateUnmarshalValue("tmp", "tmpSlice")
|
||||
}
|
||||
vi, ok := gg.(valueInitializer)
|
||||
var initializer string
|
||||
if ok {
|
||||
initializer = vi.initializeValue("tmp")
|
||||
if initializer != "" {
|
||||
initializer = "tmp = " + initializer
|
||||
}
|
||||
}
|
||||
tmpl, err := template.New("generateListGenerateUnmarshalValueFixedTmpl").Parse(generateListGenerateUnmarshalValueFixedTmpl)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
err = tmpl.Execute(buf, struct{
|
||||
LoopVar string
|
||||
SliceName string
|
||||
ElementSize int
|
||||
TypeName string
|
||||
FieldName string
|
||||
MaxSize int
|
||||
Initializer string
|
||||
NestedFixedSize int
|
||||
NestedUnmarshal string
|
||||
}{
|
||||
LoopVar: loopVar,
|
||||
SliceName: sliceName,
|
||||
ElementSize: g.valRep.ElementValue.FixedSize(),
|
||||
TypeName: fullyQualifiedTypeName(g.valRep.ElementValue, g.targetPackage),
|
||||
FieldName: fieldName,
|
||||
MaxSize: g.valRep.MaxSize,
|
||||
Initializer: initializer,
|
||||
NestedFixedSize: g.valRep.ElementValue.FixedSize(),
|
||||
NestedUnmarshal: nestedUnmarshal,
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return buf.String()
|
||||
}
|
||||
|
||||
func (g *generateList) generateUnmarshalValue(fieldName string, sliceName string) string {
|
||||
if g.valRep.ElementValue.IsVariableSized() {
|
||||
return g.generateUnmarshalVariableValue(fieldName, sliceName)
|
||||
} else {
|
||||
return g.generateUnmarshalFixedValue(fieldName, sliceName)
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
func (g *generateList) generateFixedMarshalValue(fieldName string) string {
|
||||
tmpl := `dst = ssz.WriteOffset(dst, offset)
|
||||
offset += %s
|
||||
`
|
||||
offset := g.variableSizeSSZ(fieldName)
|
||||
|
||||
return fmt.Sprintf(tmpl, offset)
|
||||
}
|
||||
|
||||
var variableSizedListTmpl = `func() int {
|
||||
s := 0
|
||||
for _, o := range {{ .FieldName }} {
|
||||
s += 4
|
||||
s += {{ .SizeComputation }}
|
||||
}
|
||||
return s
|
||||
}()`
|
||||
|
||||
func (g *generateList) variableSizeSSZ(fieldName string) string {
|
||||
if !g.valRep.ElementValue.IsVariableSized() {
|
||||
return fmt.Sprintf("len(%s) * %d", fieldName, g.valRep.ElementValue.FixedSize())
|
||||
}
|
||||
|
||||
gg := newValueGenerator(g.valRep.ElementValue, g.targetPackage)
|
||||
vslTmpl, err := template.New("variableSizedListTmpl").Parse(variableSizedListTmpl)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
err = vslTmpl.Execute(buf, struct{
|
||||
FieldName string
|
||||
SizeComputation string
|
||||
}{
|
||||
FieldName: fieldName,
|
||||
SizeComputation: gg.variableSizeSSZ("o"),
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return string(buf.Bytes())
|
||||
}
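// Illustration (field name assumed): for a list of variable-size elements such
// as []*PendingAttestation, the closure rendered above computes
//
//	s := 0
//	for _, o := range c.PreviousEpochAttestations {
//		s += 4           // one 4-byte offset per element
//		s += o.SizeSSZ() // plus the element's own variable size
//	}
//
// which matches the "s += 4; s += o.SizeSSZ()" blocks in the expected test
// output further down.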
|
||||
|
||||
var generateVariableMarshalValueTmpl = `if len({{ .FieldName }}) > {{ .MaxSize }} {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
|
||||
for _, o := range {{ .FieldName }} {
|
||||
if len(o) != {{ .ElementSize }} {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
dst = append(dst, o...)
|
||||
}`
|
||||
|
||||
var tmplVariableOffsetManagement = `{
|
||||
offset = 4 * len({{.FieldName}})
|
||||
for _, {{.NestedFieldName}} := range {{.FieldName}} {
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += {{.SizeComputation}}
|
||||
}
|
||||
}
|
||||
`
|
||||
|
||||
func variableOffsetManagement(vg valueGenerator, fieldName, nestedFieldName string) string {
|
||||
vomt, err := template.New("tmplVariableOffsetManagement").Parse(tmplVariableOffsetManagement)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
err = vomt.Execute(buf, struct{
|
||||
FieldName string
|
||||
NestedFieldName string
|
||||
SizeComputation string
|
||||
}{
|
||||
FieldName: fieldName,
|
||||
NestedFieldName: nestedFieldName,
|
||||
SizeComputation: vg.variableSizeSSZ(nestedFieldName),
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return string(buf.Bytes())
|
||||
}
|
||||
|
||||
var tmplGenerateMarshalValueList = `if len({{.FieldName}}) > {{.MaxSize}} {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
{{.OffsetManagement}}{{.MarshalValue}}`
|
||||
|
||||
func (g *generateList) generateVariableMarshalValue(fieldName string) string {
|
||||
mvTmpl, err := template.New("tmplGenerateMarshalValueList").Parse(tmplGenerateMarshalValueList)
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
var marshalValue string
|
||||
var offsetMgmt string
|
||||
switch g.valRep.ElementValue.(type) {
|
||||
case *types.ValueByte:
|
||||
marshalValue = fmt.Sprintf("dst = append(dst, %s...)", fieldName)
|
||||
default:
|
||||
nestedFieldName := "o"
|
||||
if fieldName[0:1] == "o" && monoCharacter(fieldName) {
|
||||
nestedFieldName = fieldName + "o"
|
||||
}
|
||||
t := `for _, %s := range %s {
|
||||
%s
|
||||
}`
|
||||
gg := newValueGenerator(g.valRep.ElementValue, g.targetPackage)
|
||||
var internal string
|
||||
if g.valRep.ElementValue.IsVariableSized() {
|
||||
vm, ok := gg.(variableMarshaller)
|
||||
if !ok {
|
||||
panic(fmt.Sprintf("variable size type does not implement variableMarshaller: %v", g.valRep.ElementValue))
|
||||
}
|
||||
internal = vm.generateVariableMarshalValue(nestedFieldName)
|
||||
offsetMgmt = variableOffsetManagement(gg, fieldName, nestedFieldName)
|
||||
} else {
|
||||
internal = gg.generateFixedMarshalValue(nestedFieldName)
|
||||
}
|
||||
marshalValue = fmt.Sprintf(t, nestedFieldName, fieldName, internal)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
err = mvTmpl.Execute(buf, struct{
|
||||
FieldName string
|
||||
MaxSize int
|
||||
MarshalValue string
|
||||
OffsetManagement string
|
||||
}{
|
||||
FieldName: fieldName,
|
||||
MaxSize: g.valRep.MaxSize,
|
||||
MarshalValue: marshalValue,
|
||||
OffsetManagement: offsetMgmt,
|
||||
})
|
||||
if err != nil {
|
||||
panic(err)
|
||||
}
|
||||
return string(buf.Bytes())
|
||||
}
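// Illustration: for variable-size elements the rendered marshal is two passes,
// offsets first and payloads second, as in the expected BeaconState output
// below (the field name is shown here only as an example):
//
//	{
//		offset = 4 * len(c.PreviousEpochAttestations)
//		for _, o := range c.PreviousEpochAttestations {
//			dst = ssz.WriteOffset(dst, offset)
//			offset += o.SizeSSZ()
//		}
//	}
//	for _, o := range c.PreviousEpochAttestations {
//		if dst, err = o.XXMarshalSSZTo(dst); err != nil {
//			return nil, err
//		}
//	}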
|
||||
|
||||
var _ valueGenerator = &generateList{}
|
||||
89
sszgen/backend/overlay.go
Normal file
@@ -0,0 +1,89 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
type generateOverlay struct {
|
||||
*types.ValueOverlay
|
||||
targetPackage string
|
||||
}
|
||||
|
||||
func (g *generateOverlay) toOverlay() func(string) string {
|
||||
wrapper := g.TypeName()
|
||||
if g.targetPackage != g.PackagePath() {
|
||||
wrapper = importAlias(g.PackagePath()) + "." + wrapper
|
||||
}
|
||||
return func(value string) string {
|
||||
return fmt.Sprintf("%s(%s)", wrapper, value)
|
||||
}
|
||||
}
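// Example (consistent with the generated BeaconState code below): for an
// overlay such as the eth2-types Slot alias of uint64, the wrapper turns
//
//	ssz.UnmarshallUint64(s2)
//
// into
//
//	prysmaticlabs_eth2_types.Slot(ssz.UnmarshallUint64(s2))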
|
||||
|
||||
func (g *generateOverlay) generateVariableMarshalValue(fieldName string) string {
|
||||
gg := newValueGenerator(g.Underlying, g.targetPackage)
|
||||
vm, ok := gg.(variableMarshaller)
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
return vm.generateVariableMarshalValue(fieldName)
|
||||
}
|
||||
|
||||
func (g *generateOverlay) generateUnmarshalValue(fieldName string, sliceName string) string {
|
||||
gg := newValueGenerator(g.Underlying, g.targetPackage)
|
||||
c, ok := gg.(caster)
|
||||
if ok {
|
||||
c.setToOverlay(g.toOverlay())
|
||||
}
|
||||
umv := gg.generateUnmarshalValue(fieldName, sliceName)
|
||||
if g.IsBitfield() {
|
||||
switch t := g.Underlying.(type) {
|
||||
case *types.ValueList:
|
||||
return fmt.Sprintf(`if err = ssz.ValidateBitlist(%s, %d); err != nil {
|
||||
return err
|
||||
}
|
||||
%s`, sliceName, t.MaxSize, umv)
|
||||
}
|
||||
}
|
||||
return umv
|
||||
}
|
||||
|
||||
func (g *generateOverlay) generateFixedMarshalValue(fieldName string) string {
|
||||
gg := newValueGenerator(g.Underlying, g.targetPackage)
|
||||
uc, ok := gg.(coercer)
|
||||
if ok {
|
||||
return gg.generateFixedMarshalValue(uc.coerce()(fieldName))
|
||||
}
|
||||
return gg.generateFixedMarshalValue(fieldName)
|
||||
}
|
||||
|
||||
func (g *generateOverlay) variableSizeSSZ(fieldname string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
func (g *generateOverlay) generateHTRPutter(fieldName string) string {
|
||||
if g.IsBitfield() && g.Name == "Bitlist" {
|
||||
ul, ok := g.Underlying.(*types.ValueList)
|
||||
if !ok {
|
||||
panic(fmt.Sprintf("unexpected underlying type for Bitlist, expected ValueList, got %v", g.Underlying))
|
||||
}
|
||||
t := `if len(%s) == 0 {
|
||||
return ssz.ErrEmptyBitlist
|
||||
}
|
||||
hh.PutBitlist(%s, %d)`
|
||||
return fmt.Sprintf(t, fieldName, fieldName, ul.MaxSize)
|
||||
}
|
||||
gg := newValueGenerator(g.Underlying, g.targetPackage)
|
||||
htrp, ok := gg.(htrPutter)
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
uc, ok := gg.(coercer)
|
||||
if ok {
|
||||
c := uc.coerce()
|
||||
return htrp.generateHTRPutter(c(fieldName))
|
||||
}
|
||||
return htrp.generateHTRPutter(fieldName)
|
||||
}
|
||||
|
||||
var _ valueGenerator = &generateOverlay{}
|
||||
57
sszgen/backend/pointer.go
Normal file
@@ -0,0 +1,57 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
type generatePointer struct {
|
||||
*types.ValuePointer
|
||||
targetPackage string
|
||||
}
|
||||
|
||||
func (g *generatePointer) generateHTRPutter(fieldName string) string {
|
||||
gg := newValueGenerator(g.Referent, g.targetPackage)
|
||||
hp, ok := gg.(htrPutter)
|
||||
if !ok {
|
||||
return ""
|
||||
}
|
||||
return hp.generateHTRPutter(fieldName)
|
||||
}
|
||||
|
||||
func (g *generatePointer) generateFixedMarshalValue(fieldName string) string {
|
||||
gg := newValueGenerator(g.Referent, g.targetPackage)
|
||||
return gg.generateFixedMarshalValue(fieldName)
|
||||
}
|
||||
|
||||
func (g *generatePointer) generateUnmarshalValue(fieldName string, sliceName string) string {
|
||||
gg := newValueGenerator(g.Referent, g.targetPackage)
|
||||
return gg.generateUnmarshalValue(fieldName, sliceName)
|
||||
}
|
||||
|
||||
func (g *generatePointer) initializeValue(fieldName string) string {
|
||||
gg := newValueGenerator(g.Referent, g.targetPackage)
|
||||
iv, ok := gg.(valueInitializer)
|
||||
if ok {
|
||||
return iv.initializeValue(fieldName)
|
||||
}
|
||||
return ""
|
||||
}
|
||||
|
||||
func (g *generatePointer) generateVariableMarshalValue(fieldName string) string {
|
||||
gg := newValueGenerator(g.Referent, g.targetPackage)
|
||||
vm, ok := gg.(variableMarshaller)
|
||||
if !ok {
|
||||
panic(fmt.Sprintf("variable size type does not implement variableMarshaller: %v", g.Referent))
|
||||
}
|
||||
return vm.generateVariableMarshalValue(fieldName)
|
||||
}
|
||||
|
||||
func (g *generatePointer) variableSizeSSZ(fieldName string) string {
|
||||
gg := newValueGenerator(g.Referent, g.targetPackage)
|
||||
return gg.variableSizeSSZ(fieldName)
|
||||
}
|
||||
|
||||
var _ valueGenerator = &generatePointer{}
|
||||
var _ htrPutter = &generatePointer{}
|
||||
227
sszgen/backend/render.go
Normal file
@@ -0,0 +1,227 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"fmt"
|
||||
"go/format"
|
||||
"strings"
|
||||
"text/template"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
type generatedCode struct {
|
||||
blocks []string
|
||||
// key=package path, value=alias
|
||||
imports map[string]string
|
||||
}
|
||||
|
||||
func (gc *generatedCode) renderImportPairs() string {
|
||||
pairs := make([]string, 0)
|
||||
for k, v := range gc.imports {
|
||||
pairs = append(pairs, fmt.Sprintf("%s \"%s\"", v, k))
|
||||
}
|
||||
return strings.Join(pairs, "\n")
|
||||
}
|
||||
|
||||
func (gc *generatedCode) renderBlocks() string {
|
||||
return strings.Join(gc.blocks, "\n")
|
||||
}
|
||||
|
||||
func (gc *generatedCode) merge(right *generatedCode) {
|
||||
gc.blocks = append(gc.blocks, right.blocks...)
|
||||
if right.imports == nil {
|
||||
return
|
||||
}
|
||||
for k, v := range right.imports {
|
||||
// deduplicate imports; collisions are not detected here,
|
||||
// so they should be prevented by normalizing import naming in a preprocessing pass
|
||||
if _, ok := gc.imports[k]; ok {
|
||||
continue
|
||||
}
|
||||
gc.imports[k] = v
|
||||
}
|
||||
}
|
||||
|
||||
// Generator needs to be initialized with the target package name and path,
|
||||
// so use the NewGenerator constructor for proper setup.
|
||||
type Generator struct {
|
||||
gc []*generatedCode
|
||||
packageName string
|
||||
packagePath string
|
||||
}
|
||||
|
||||
func NewGenerator(packageName, packagePath string) *Generator {
|
||||
return &Generator{
|
||||
packageName: packageName,
|
||||
packagePath: packagePath,
|
||||
}
|
||||
}
|
||||
|
||||
// TODO Generate should be able to return an error
|
||||
func (g *Generator) Generate(vr types.ValRep) {
|
||||
vc, ok := vr.(*types.ValueContainer)
|
||||
if !ok {
|
||||
panic("Can only generate method sets for container types at this time")
|
||||
}
|
||||
gc := &generateContainer{vc, g.packagePath}
|
||||
sizeSSZ := GenerateSizeSSZ(gc)
|
||||
if sizeSSZ != nil {
|
||||
g.gc = append(g.gc, sizeSSZ)
|
||||
}
|
||||
mSSZ := GenerateMarshalSSZ(gc)
|
||||
if mSSZ != nil {
|
||||
g.gc = append(g.gc, mSSZ)
|
||||
}
|
||||
uSSZ := GenerateUnmarshalSSZ(gc)
|
||||
if uSSZ != nil {
|
||||
g.gc = append(g.gc, uSSZ)
|
||||
}
|
||||
hSSZ := GenerateHashTreeRoot(gc)
|
||||
if hSSZ != nil {
|
||||
g.gc = append(g.gc, hSSZ)
|
||||
}
|
||||
}
|
||||
|
||||
var fileTemplate = `package {{.Package}}
|
||||
|
||||
{{ if .Imports -}}
|
||||
import (
|
||||
{{.Imports}}
|
||||
)
|
||||
{{- end }}
|
||||
|
||||
{{.Blocks}}`
|
||||
|
||||
func (g *Generator) Render() ([]byte, error) {
|
||||
if g.packagePath == "" {
|
||||
return nil, fmt.Errorf("missing packagePath: Generator requires a packagePath for code generation.")
|
||||
}
|
||||
if g.packageName == "" {
|
||||
return nil, fmt.Errorf("missing packageName: Generator requires a target package name for code generation.")
|
||||
}
|
||||
ft := template.New("generated.ssz.go")
|
||||
tmpl, err := ft.Parse(fileTemplate)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
final := &generatedCode{
|
||||
imports: map[string]string{
|
||||
"github.com/ferranbt/fastssz": "ssz",
|
||||
"fmt": "",
|
||||
},
|
||||
}
|
||||
for _, gc := range g.gc {
|
||||
final.merge(gc)
|
||||
}
|
||||
buf := bytes.NewBuffer(nil)
|
||||
err = tmpl.Execute(buf, struct {
|
||||
Package string
|
||||
Imports string
|
||||
Blocks string
|
||||
}{
|
||||
Package: g.packageName,
|
||||
Imports: final.renderImportPairs(),
|
||||
Blocks: final.renderBlocks(),
|
||||
})
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return format.Source(buf.Bytes())
|
||||
}
|
||||
|
||||
type valueGenerator interface {
|
||||
variableSizeSSZ(fieldname string) string
|
||||
generateFixedMarshalValue(string) string
|
||||
generateUnmarshalValue(string, string) string
|
||||
generateHTRPutter(string) string
|
||||
}
|
||||
|
||||
type valueInitializer interface {
|
||||
initializeValue(string) string
|
||||
}
|
||||
|
||||
type variableMarshaller interface {
|
||||
generateVariableMarshalValue(string) string
|
||||
}
|
||||
|
||||
type variableUnmarshaller interface {
|
||||
generateVariableUnmarshalValue(string) string
|
||||
}
|
||||
|
||||
type coercer interface {
|
||||
coerce() func(string) string
|
||||
}
|
||||
|
||||
type htrPutter interface {
|
||||
generateHTRPutter(string) string
|
||||
}
|
||||
|
||||
func newValueGenerator(vr types.ValRep, packagePath string) valueGenerator {
|
||||
switch ty := vr.(type) {
|
||||
case *types.ValueBool:
|
||||
return &generateBool{valRep: ty, targetPackage: packagePath}
|
||||
case *types.ValueByte:
|
||||
return &generateByte{ty, packagePath}
|
||||
case *types.ValueContainer:
|
||||
return &generateContainer{ty, packagePath}
|
||||
case *types.ValueList:
|
||||
return &generateList{valRep: ty, targetPackage: packagePath}
|
||||
case *types.ValueOverlay:
|
||||
return &generateOverlay{ty, packagePath}
|
||||
case *types.ValuePointer:
|
||||
return &generatePointer{ty, packagePath}
|
||||
case *types.ValueUint:
|
||||
return &generateUint{valRep: ty, targetPackage: packagePath}
|
||||
case *types.ValueUnion:
|
||||
return &generateUnion{ty, packagePath}
|
||||
case *types.ValueVector:
|
||||
return &generateVector{valRep: ty, targetPackage: packagePath}
|
||||
}
|
||||
panic(fmt.Sprintf("Cannot manage generation for unrecognized ValRep implementation %v", vr))
|
||||
}
|
||||
|
||||
func importAlias(packageName string) string {
|
||||
parts := strings.Split(packageName, "/")
|
||||
for i, p := range parts {
|
||||
if strings.Contains(p, ".") {
|
||||
continue
|
||||
}
|
||||
parts = parts[i:]
|
||||
break
|
||||
}
|
||||
return strings.ReplaceAll(strings.Join(parts, "_"), "-", "_")
|
||||
}
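// Example of the trimming behavior (values taken from TestImportAlias and the
// generated imports below):
//
//	importAlias("github.com/derp/derp")                 // "derp_derp"
//	importAlias("text/template")                        // "text_template"
//	importAlias("github.com/prysmaticlabs/go-bitfield") // "prysmaticlabs_go_bitfield"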
|
||||
|
||||
func fullyQualifiedTypeName(v types.ValRep, targetPackage string) string {
|
||||
tn := v.TypeName()
|
||||
if targetPackage == v.PackagePath() || v.PackagePath() == "" {
|
||||
return tn
|
||||
}
|
||||
parts := strings.Split(v.PackagePath(), "/")
|
||||
for i, p := range parts {
|
||||
if strings.Contains(p, ".") {
|
||||
continue
|
||||
}
|
||||
parts = parts[i:]
|
||||
break
|
||||
}
|
||||
pkg := strings.ReplaceAll(strings.Join(parts, "_"), "-", "_")
|
||||
if tn[0:1] == "*" {
|
||||
tn = tn[1:]
|
||||
pkg = "*" + pkg
|
||||
}
|
||||
return pkg + "." + tn
|
||||
}
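// Example (matching the generated output below): with targetPackage set to
// github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1, a "*Checkpoint" value
// from github.com/prysmaticlabs/prysm/proto/eth/v1alpha1 renders as
// "*prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint", while a type declared
// in the target package itself keeps its bare name.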
|
||||
|
||||
func extractImportsFromContainerFields(cfs []types.ContainerField, targetPackage string) map[string]string {
|
||||
imports := make(map[string]string)
|
||||
for _, cf := range cfs {
|
||||
pkg := cf.Value.PackagePath()
|
||||
if pkg == "" || pkg == targetPackage {
|
||||
continue
|
||||
}
|
||||
imports[pkg] = importAlias(pkg)
|
||||
}
|
||||
return imports
|
||||
}
|
||||
91
sszgen/backend/render_test.go
Normal file
@@ -0,0 +1,91 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"go/format"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/prysmaticlabs/prysm/shared/testutil/require"
|
||||
"github.com/prysmaticlabs/prysm/sszgen/types"
|
||||
)
|
||||
|
||||
var generator_generateFixture = `package derp
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
ssz "github.com/ferranbt/fastssz"
|
||||
derp "github.com/prysmaticlabs/derp/derp"
|
||||
)
|
||||
|
||||
func main() {
|
||||
fmt.printf("hello world")
|
||||
}
|
||||
`
|
||||
|
||||
func TestGenerator_Generate(t *testing.T) {
|
||||
gc := &generatedCode{
|
||||
blocks: []string{"func main() {\n\tfmt.printf(\"hello world\")\n}"},
|
||||
imports: map[string]string{
|
||||
"github.com/prysmaticlabs/derp/derp": "derp",
|
||||
"github.com/ferranbt/fastssz": "ssz",
|
||||
"fmt": "",
|
||||
},
|
||||
}
|
||||
g := &Generator{packagePath: "github.com/prysmaticlabs/derp", packageName: "derp"}
|
||||
g.gc = append(g.gc, gc)
|
||||
rendered, err := g.Render()
|
||||
require.NoError(t, err)
|
||||
require.Equal(t, generator_generateFixture, string(rendered))
|
||||
}
|
||||
|
||||
func TestGenerator_GenerateBeaconState(t *testing.T) {
|
||||
b, err := os.ReadFile("testdata/TestGenerator_GenerateBeaconState.expected")
|
||||
require.NoError(t, err)
|
||||
formatted, err := format.Source(b)
|
||||
require.NoError(t, err)
|
||||
expected := string(formatted)
|
||||
|
||||
g := &Generator{
|
||||
packagePath: "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1",
|
||||
packageName: "ethereum_beacon_p2p_v1",
|
||||
}
|
||||
g.Generate(testFixBeaconState)
|
||||
rendered, err := g.Render()
|
||||
require.NoError(t, err)
|
||||
actual := string(rendered)
|
||||
require.Equal(t, expected, actual)
|
||||
}
|
||||
|
||||
func TestImportAlias(t *testing.T) {
|
||||
cases := []struct{
|
||||
packageName string
|
||||
alias string
|
||||
}{
|
||||
{
|
||||
packageName: "github.com/derp/derp",
|
||||
alias: "derp_derp",
|
||||
},
|
||||
{
|
||||
packageName: "text/template",
|
||||
alias: "text_template",
|
||||
},
|
||||
{
|
||||
packageName: "fmt",
|
||||
alias: "fmt",
|
||||
},
|
||||
}
|
||||
for _, c := range cases {
|
||||
require.Equal(t, importAlias(c.packageName), c.alias)
|
||||
}
|
||||
}
|
||||
|
||||
func TestExtractImportsFromContainerFields(t *testing.T) {
|
||||
vc, ok := testFixBeaconState.(*types.ValueContainer)
|
||||
require.Equal(t, true, ok)
|
||||
targetPackage := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
|
||||
imports := extractImportsFromContainerFields(vc.Contents, targetPackage)
|
||||
require.Equal(t, 3, len(imports))
|
||||
require.Equal(t, "prysmaticlabs_eth2_types", imports["github.com/prysmaticlabs/eth2-types"])
|
||||
require.Equal(t, "prysmaticlabs_prysm_proto_eth_v1alpha1", imports["github.com/prysmaticlabs/prysm/proto/eth/v1alpha1"])
|
||||
require.Equal(t, "prysmaticlabs_go_bitfield", imports["github.com/prysmaticlabs/go-bitfield"])
|
||||
}
|
||||
14
sszgen/backend/testdata/TestGenerateHashTreeRoot.expected
vendored
Normal file
@@ -0,0 +1,14 @@
|
||||
// HashTreeRoot ssz hashes the BeaconState object
|
||||
func (c *BeaconState) XXHashTreeRoot() ([32]byte, error) {
|
||||
hh := ssz.DefaultHasherPool.Get()
|
||||
if err := b.HashTreeRootWith(hh); err != nil {
|
||||
ssz.DefaultHasherPool.Put(hh)
|
||||
return [32]byte{}, err
|
||||
}
|
||||
root, err := hh.HashRoot()
|
||||
ssz.DefaultHasherPool.Put(hh)
|
||||
return root, err
|
||||
}
|
||||
|
||||
func (c *BeaconState) XXHashTreeRootWith(hh *ssz.Hasher) (err error) {
|
||||
}
|
||||
231
sszgen/backend/testdata/TestGenerateMarshalSSZ.expected
vendored
Normal file
@@ -0,0 +1,231 @@
|
||||
func (c *BeaconState) XXMarshalSSZ() ([]byte, error) {
|
||||
buf := make([]byte, c.XXSizeSSZ())
|
||||
return c.XXMarshalSSZTo(buf[:0])
|
||||
}
|
||||
|
||||
func (c *BeaconState) XXMarshalSSZTo(dst []byte) ([]byte, error) {
|
||||
var err error
|
||||
offset := 2687377
|
||||
|
||||
// Field 0: GenesisTime
|
||||
dst = ssz.MarshalUint64(dst, c.GenesisTime)
|
||||
|
||||
// Field 1: GenesisValidatorsRoot
|
||||
if len(c.GenesisValidatorsRoot) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, c.GenesisValidatorsRoot...)
|
||||
|
||||
// Field 2: Slot
|
||||
dst = ssz.MarshalUint64(dst, uint64(c.Slot))
|
||||
|
||||
// Field 3: Fork
|
||||
if c.Fork == nil {
|
||||
c.Fork = new(prysmaticlabs_prysm_proto_beacon_p2p_v1.Fork)
|
||||
}
|
||||
if dst, err = c.Fork.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 4: LatestBlockHeader
|
||||
if c.LatestBlockHeader == nil {
|
||||
c.LatestBlockHeader = new(prysmaticlabs_prysm_proto_eth_v1alpha1.BeaconBlockHeader)
|
||||
}
|
||||
if dst, err = c.LatestBlockHeader.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 5: BlockRoots
|
||||
if len(c.BlockRoots) != 8192 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
for _, o := range c.BlockRoots {
|
||||
if len(o) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
|
||||
}
|
||||
|
||||
// Field 6: StateRoots
|
||||
if len(c.StateRoots) != 8192 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
for _, o := range c.StateRoots {
|
||||
if len(o) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
|
||||
}
|
||||
|
||||
// Field 7: HistoricalRoots
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += len(c.HistoricalRoots) * 32
|
||||
|
||||
// Field 8: Eth1Data
|
||||
if c.Eth1Data == nil {
|
||||
c.Eth1Data = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Eth1Data)
|
||||
}
|
||||
if dst, err = c.Eth1Data.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 9: Eth1DataVotes
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += len(c.Eth1DataVotes) * 72
|
||||
|
||||
// Field 10: Eth1DepositIndex
|
||||
dst = ssz.MarshalUint64(dst, c.Eth1DepositIndex)
|
||||
|
||||
// Field 11: Validators
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += len(c.Validators) * 121
|
||||
|
||||
// Field 12: Balances
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += len(c.Balances) * 8
|
||||
|
||||
// Field 13: RandaoMixes
|
||||
if len(c.RandaoMixes) != 65536 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
for _, o := range c.RandaoMixes {
|
||||
if len(o) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
|
||||
}
|
||||
|
||||
// Field 14: Slashings
|
||||
if len(c.Slashings) != 8192 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
for _, o := range c.Slashings {
|
||||
dst = ssz.MarshalUint64(dst, o)
|
||||
}
|
||||
|
||||
// Field 15: PreviousEpochAttestations
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += func() int {
|
||||
s := 0
|
||||
for _, o := range c.PreviousEpochAttestations {
|
||||
s += 4
|
||||
s += o.SizeSSZ()
|
||||
}
|
||||
return s
|
||||
}()
|
||||
|
||||
// Field 16: CurrentEpochAttestations
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += func() int {
|
||||
s := 0
|
||||
for _, o := range c.CurrentEpochAttestations {
|
||||
s += 4
|
||||
s += o.SizeSSZ()
|
||||
}
|
||||
return s
|
||||
}()
|
||||
|
||||
// Field 17: JustificationBits
|
||||
if len([]byte(c.JustificationBits)) != 1 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, []byte(c.JustificationBits)...)
|
||||
|
||||
// Field 18: PreviousJustifiedCheckpoint
|
||||
if c.PreviousJustifiedCheckpoint == nil {
|
||||
c.PreviousJustifiedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
}
|
||||
if dst, err = c.PreviousJustifiedCheckpoint.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 19: CurrentJustifiedCheckpoint
|
||||
if c.CurrentJustifiedCheckpoint == nil {
|
||||
c.CurrentJustifiedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
}
|
||||
if dst, err = c.CurrentJustifiedCheckpoint.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 20: FinalizedCheckpoint
|
||||
if c.FinalizedCheckpoint == nil {
|
||||
c.FinalizedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
}
|
||||
if dst, err = c.FinalizedCheckpoint.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 7: HistoricalRoots
|
||||
if len(c.HistoricalRoots) > 16777216 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
for _, o := range c.HistoricalRoots {
|
||||
if len(o) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
|
||||
}
|
||||
|
||||
// Field 9: Eth1DataVotes
|
||||
if len(c.Eth1DataVotes) > 2048 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
for _, o := range c.Eth1DataVotes {
|
||||
if dst, err = o.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Field 11: Validators
|
||||
if len(c.Validators) > 1099511627776 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
for _, o := range c.Validators {
|
||||
if dst, err = o.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Field 12: Balances
|
||||
if len(c.Balances) > 1099511627776 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
for _, o := range c.Balances {
|
||||
dst = ssz.MarshalUint64(dst, o)
|
||||
}
|
||||
|
||||
// Field 15: PreviousEpochAttestations
|
||||
if len(c.PreviousEpochAttestations) > 4096 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
{
|
||||
offset = 4 * len(c.PreviousEpochAttestations)
|
||||
for _, o := range c.PreviousEpochAttestations {
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += o.SizeSSZ()
|
||||
}
|
||||
}
|
||||
for _, o := range c.PreviousEpochAttestations {
|
||||
if dst, err = o.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Field 16: CurrentEpochAttestations
|
||||
if len(c.CurrentEpochAttestations) > 4096 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
{
|
||||
offset = 4 * len(c.CurrentEpochAttestations)
|
||||
for _, o := range c.CurrentEpochAttestations {
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += o.SizeSSZ()
|
||||
}
|
||||
}
|
||||
for _, o := range c.CurrentEpochAttestations {
|
||||
if dst, err = o.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return dst, err
|
||||
}
|
||||
24
sszgen/backend/testdata/TestGenerateSizeSSZ.expected
vendored
Normal file
@@ -0,0 +1,24 @@
|
||||
func (c *BeaconState) XXSizeSSZ() int {
|
||||
size := 2687377
|
||||
size += len(c.HistoricalRoots) * 32
|
||||
size += len(c.Eth1DataVotes) * 72
|
||||
size += len(c.Validators) * 121
|
||||
size += len(c.Balances) * 8
|
||||
size += func() int {
|
||||
s := 0
|
||||
for _, o := range c.PreviousEpochAttestations {
|
||||
s += 4
|
||||
s += o.SizeSSZ()
|
||||
}
|
||||
return s
|
||||
}()
|
||||
size += func() int {
|
||||
s := 0
|
||||
for _, o := range c.CurrentEpochAttestations {
|
||||
s += 4
|
||||
s += o.SizeSSZ()
|
||||
}
|
||||
return s
|
||||
}()
|
||||
return size
|
||||
}
|
||||
293
sszgen/backend/testdata/TestGenerateUnmarshalSSZ.expected
vendored
Normal file
@@ -0,0 +1,293 @@
|
||||
func (c *BeaconState) XXUnmarshalSSZ(buf []byte) error {
|
||||
var err error
|
||||
size := uint64(len(buf))
|
||||
if size < 2687377 {
|
||||
return ssz.ErrSize
|
||||
}
|
||||
|
||||
s0 := buf[0:8] // c.GenesisTime
|
||||
s1 := buf[8:40] // c.GenesisValidatorsRoot
|
||||
s2 := buf[40:48] // c.Slot
|
||||
s3 := buf[48:64] // c.Fork
|
||||
s4 := buf[64:176] // c.LatestBlockHeader
|
||||
s5 := buf[176:262320] // c.BlockRoots
|
||||
s6 := buf[262320:524464] // c.StateRoots
|
||||
s8 := buf[524468:524540] // c.Eth1Data
|
||||
s10 := buf[524544:524552] // c.Eth1DepositIndex
|
||||
s13 := buf[524560:2621712] // c.RandaoMixes
|
||||
s14 := buf[2621712:2687248] // c.Slashings
|
||||
s17 := buf[2687256:2687257] // c.JustificationBits
|
||||
s18 := buf[2687257:2687297] // c.PreviousJustifiedCheckpoint
|
||||
s19 := buf[2687297:2687337] // c.CurrentJustifiedCheckpoint
|
||||
s20 := buf[2687337:2687377] // c.FinalizedCheckpoint
|
||||
|
||||
v7 := ssz.ReadOffset(buf[524464:524468]) // c.HistoricalRoots
|
||||
if v7 < 2687377 {
|
||||
return ssz.ErrInvalidVariableOffset
|
||||
}
|
||||
if v7 > size {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v9 := ssz.ReadOffset(buf[524540:524544]) // c.Eth1DataVotes
|
||||
if v9 > size || v9 < v7 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v11 := ssz.ReadOffset(buf[524552:524556]) // c.Validators
|
||||
if v11 > size || v11 < v9 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v12 := ssz.ReadOffset(buf[524556:524560]) // c.Balances
|
||||
if v12 > size || v12 < v11 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v15 := ssz.ReadOffset(buf[2687248:2687252]) // c.PreviousEpochAttestations
|
||||
if v15 > size || v15 < v12 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v16 := ssz.ReadOffset(buf[2687252:2687256]) // c.CurrentEpochAttestations
|
||||
if v16 > size || v16 < v15 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
s7 := buf[v7:v9] // c.HistoricalRoots
|
||||
s9 := buf[v9:v11] // c.Eth1DataVotes
|
||||
s11 := buf[v11:v12] // c.Validators
|
||||
s12 := buf[v12:v15] // c.Balances
|
||||
s15 := buf[v15:v16] // c.PreviousEpochAttestations
|
||||
s16 := buf[v16:] // c.CurrentEpochAttestations
|
||||
|
||||
// Field 0: GenesisTime
|
||||
c.GenesisTime = ssz.UnmarshallUint64(s0)
|
||||
|
||||
// Field 1: GenesisValidatorsRoot
|
||||
c.GenesisValidatorsRoot = append([]byte{}, s1...)
|
||||
|
||||
// Field 2: Slot
|
||||
c.Slot = prysmaticlabs_eth2_types.Slot(ssz.UnmarshallUint64(s2))
|
||||
|
||||
// Field 3: Fork
|
||||
c.Fork = new(prysmaticlabs_prysm_proto_beacon_p2p_v1.Fork)
|
||||
if err = c.Fork.UnmarshalSSZ(s3); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Field 4: LatestBlockHeader
|
||||
c.LatestBlockHeader = new(prysmaticlabs_prysm_proto_eth_v1alpha1.BeaconBlockHeader)
|
||||
if err = c.LatestBlockHeader.UnmarshalSSZ(s4); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Field 5: BlockRoots
|
||||
{
|
||||
var tmp []byte
|
||||
for i := 0; i < 8192; i++ {
|
||||
tmpSlice := s5[i*32 : (1+i)*32]
|
||||
tmp = append([]byte{}, tmpSlice...)
|
||||
c.BlockRoots = append(c.BlockRoots, tmp)
|
||||
}
|
||||
}
|
||||
|
||||
// Field 6: StateRoots
|
||||
{
|
||||
var tmp []byte
|
||||
for i := 0; i < 8192; i++ {
|
||||
tmpSlice := s6[i*32 : (1+i)*32]
|
||||
tmp = append([]byte{}, tmpSlice...)
|
||||
c.StateRoots = append(c.StateRoots, tmp)
|
||||
}
|
||||
}
|
||||
|
||||
// Field 7: HistoricalRoots
|
||||
{
|
||||
if len(s7)%32 != 0 {
|
||||
return fmt.Errorf("misaligned bytes: c.HistoricalRoots length is %d, which is not a multiple of 32", len(s7))
|
||||
}
|
||||
numElem := len(s7) / 32
|
||||
if numElem > 16777216 {
|
||||
return fmt.Errorf("ssz-max exceeded: c.HistoricalRoots has %d elements, ssz-max is 16777216", numElem)
|
||||
}
|
||||
for i := 0; i < numElem; i++ {
|
||||
var tmp []byte
|
||||
|
||||
tmpSlice := s7[i*32 : (1+i)*32]
|
||||
tmp = append([]byte{}, tmpSlice...)
|
||||
c.HistoricalRoots = append(c.HistoricalRoots, tmp)
|
||||
}
|
||||
}
|
||||
|
||||
// Field 8: Eth1Data
|
||||
c.Eth1Data = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Eth1Data)
|
||||
if err = c.Eth1Data.UnmarshalSSZ(s8); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Field 9: Eth1DataVotes
|
||||
{
|
||||
if len(s9)%72 != 0 {
|
||||
return fmt.Errorf("misaligned bytes: c.Eth1DataVotes length is %d, which is not a multiple of 72", len(s9))
|
||||
}
|
||||
numElem := len(s9) / 72
|
||||
if numElem > 2048 {
|
||||
return fmt.Errorf("ssz-max exceeded: c.Eth1DataVotes has %d elements, ssz-max is 2048", numElem)
|
||||
}
|
||||
for i := 0; i < numElem; i++ {
|
||||
var tmp *prysmaticlabs_prysm_proto_eth_v1alpha1.Eth1Data
|
||||
tmp = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Eth1Data)
|
||||
tmpSlice := s9[i*72 : (1+i)*72]
|
||||
if err = tmp.UnmarshalSSZ(tmpSlice); err != nil {
|
||||
return err
|
||||
}
|
||||
c.Eth1DataVotes = append(c.Eth1DataVotes, tmp)
|
||||
}
|
||||
}
|
||||
|
||||
// Field 10: Eth1DepositIndex
|
||||
c.Eth1DepositIndex = ssz.UnmarshallUint64(s10)
|
||||
|
||||
// Field 11: Validators
|
||||
{
|
||||
if len(s11)%121 != 0 {
|
||||
return fmt.Errorf("misaligned bytes: c.Validators length is %d, which is not a multiple of 121", len(s11))
|
||||
}
|
||||
numElem := len(s11) / 121
|
||||
if numElem > 1099511627776 {
|
||||
return fmt.Errorf("ssz-max exceeded: c.Validators has %d elements, ssz-max is 1099511627776", numElem)
|
||||
}
|
||||
for i := 0; i < numElem; i++ {
|
||||
var tmp *prysmaticlabs_prysm_proto_eth_v1alpha1.Validator
|
||||
tmp = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Validator)
|
||||
tmpSlice := s11[i*121 : (1+i)*121]
|
||||
if err = tmp.UnmarshalSSZ(tmpSlice); err != nil {
|
||||
return err
|
||||
}
|
||||
c.Validators = append(c.Validators, tmp)
|
||||
}
|
||||
}
|
||||
|
||||
// Field 12: Balances
|
||||
{
|
||||
if len(s12)%8 != 0 {
|
||||
return fmt.Errorf("misaligned bytes: c.Balances length is %d, which is not a multiple of 8", len(s12))
|
||||
}
|
||||
numElem := len(s12) / 8
|
||||
if numElem > 1099511627776 {
|
||||
return fmt.Errorf("ssz-max exceeded: c.Balances has %d elements, ssz-max is 1099511627776", numElem)
|
||||
}
|
||||
for i := 0; i < numElem; i++ {
|
||||
var tmp uint64
|
||||
|
||||
tmpSlice := s12[i*8 : (1+i)*8]
|
||||
tmp = ssz.UnmarshallUint64(tmpSlice)
|
||||
c.Balances = append(c.Balances, tmp)
|
||||
}
|
||||
}
|
||||
|
||||
// Field 13: RandaoMixes
|
||||
{
|
||||
var tmp []byte
|
||||
for i := 0; i < 65536; i++ {
|
||||
tmpSlice := s13[i*32 : (1+i)*32]
|
||||
tmp = append([]byte{}, tmpSlice...)
|
||||
c.RandaoMixes = append(c.RandaoMixes, tmp)
|
||||
}
|
||||
}
|
||||
|
||||
// Field 14: Slashings
|
||||
{
|
||||
var tmp uint64
|
||||
for i := 0; i < 8192; i++ {
|
||||
tmpSlice := s14[i*8 : (1+i)*8]
|
||||
tmp = ssz.UnmarshallUint64(tmpSlice)
|
||||
c.Slashings = append(c.Slashings, tmp)
|
||||
}
|
||||
}
|
||||
|
||||
// Field 15: PreviousEpochAttestations
|
||||
{
|
||||
// empty lists are zero length, so make sure there is room for an offset
|
||||
// before attempting to unmarshal it
|
||||
if len(s15) > 3 {
|
||||
firstOffset := ssz.ReadOffset(s15[0:4])
|
||||
if firstOffset%4 != 0 {
|
||||
return fmt.Errorf("misaligned list bytes: when decoding c.PreviousEpochAttestations, end-of-list offset is %d, which is not a multiple of 4 (offset size)", firstOffset)
|
||||
}
|
||||
listLen := firstOffset / 4
|
||||
if listLen > 4096 {
|
||||
return fmt.Errorf("ssz-max exceeded: c.PreviousEpochAttestations has %d elements, ssz-max is 4096", listLen)
|
||||
}
|
||||
listOffsets := make([]uint64, listLen)
|
||||
for i := 0; uint64(i) < listLen; i++ {
|
||||
listOffsets[i] = ssz.ReadOffset(s15[i*4 : (i+1)*4])
|
||||
}
|
||||
for i := 0; i < len(listOffsets); i++ {
|
||||
var tmp *prysmaticlabs_prysm_proto_beacon_p2p_v1.PendingAttestation
|
||||
tmp = new(prysmaticlabs_prysm_proto_beacon_p2p_v1.PendingAttestation)
|
||||
var tmpSlice []byte
|
||||
if i+1 == len(listOffsets) {
|
||||
tmpSlice = s15[listOffsets[i]:]
|
||||
} else {
|
||||
tmpSlice = s15[listOffsets[i]:listOffsets[i+1]]
|
||||
}
|
||||
if err = tmp.UnmarshalSSZ(tmpSlice); err != nil {
|
||||
return err
|
||||
}
|
||||
c.PreviousEpochAttestations = append(c.PreviousEpochAttestations, tmp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Field 16: CurrentEpochAttestations
|
||||
{
|
||||
// empty lists are zero length, so make sure there is room for an offset
|
||||
// before attempting to unmarshal it
|
||||
if len(s16) > 3 {
|
||||
firstOffset := ssz.ReadOffset(s16[0:4])
|
||||
if firstOffset%4 != 0 {
|
||||
return fmt.Errorf("misaligned list bytes: when decoding c.CurrentEpochAttestations, end-of-list offset is %d, which is not a multiple of 4 (offset size)", firstOffset)
|
||||
}
|
||||
listLen := firstOffset / 4
|
||||
if listLen > 4096 {
|
||||
return fmt.Errorf("ssz-max exceeded: c.CurrentEpochAttestations has %d elements, ssz-max is 4096", listLen)
|
||||
}
|
||||
listOffsets := make([]uint64, listLen)
|
||||
for i := 0; uint64(i) < listLen; i++ {
|
||||
listOffsets[i] = ssz.ReadOffset(s16[i*4 : (i+1)*4])
|
||||
}
|
||||
for i := 0; i < len(listOffsets); i++ {
|
||||
var tmp *prysmaticlabs_prysm_proto_beacon_p2p_v1.PendingAttestation
|
||||
tmp = new(prysmaticlabs_prysm_proto_beacon_p2p_v1.PendingAttestation)
|
||||
var tmpSlice []byte
|
||||
if i+1 == len(listOffsets) {
|
||||
tmpSlice = s16[listOffsets[i]:]
|
||||
} else {
|
||||
tmpSlice = s16[listOffsets[i]:listOffsets[i+1]]
|
||||
}
|
||||
if err = tmp.UnmarshalSSZ(tmpSlice); err != nil {
|
||||
return err
|
||||
}
|
||||
c.CurrentEpochAttestations = append(c.CurrentEpochAttestations, tmp)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Field 17: JustificationBits
|
||||
c.JustificationBits = append([]byte{}, prysmaticlabs_go_bitfield.Bitvector4(s17)...)
|
||||
|
||||
// Field 18: PreviousJustifiedCheckpoint
|
||||
c.PreviousJustifiedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
if err = c.PreviousJustifiedCheckpoint.UnmarshalSSZ(s18); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Field 19: CurrentJustifiedCheckpoint
|
||||
c.CurrentJustifiedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
if err = c.CurrentJustifiedCheckpoint.UnmarshalSSZ(s19); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Field 20: FinalizedCheckpoint
|
||||
c.FinalizedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
if err = c.FinalizedCheckpoint.UnmarshalSSZ(s20); err != nil {
|
||||
return err
|
||||
}
|
||||
return err
|
||||
}
|
||||
559
sszgen/backend/testdata/TestGenerator_GenerateBeaconState.expected
vendored
Normal file
@@ -0,0 +1,559 @@
|
||||
package ethereum_beacon_p2p_v1
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
ssz "github.com/ferranbt/fastssz"
|
||||
prysmaticlabs_eth2_types "github.com/prysmaticlabs/eth2-types"
|
||||
prysmaticlabs_go_bitfield "github.com/prysmaticlabs/go-bitfield"
|
||||
prysmaticlabs_prysm_proto_eth_v1alpha1 "github.com/prysmaticlabs/prysm/proto/eth/v1alpha1"
|
||||
)
|
||||
|
||||
func (c *BeaconState) XXSizeSSZ() int {
|
||||
size := 2687377
|
||||
size += len(c.HistoricalRoots) * 32
|
||||
size += len(c.Eth1DataVotes) * 72
|
||||
size += len(c.Validators) * 121
|
||||
size += len(c.Balances) * 8
|
||||
size += func() int {
|
||||
s := 0
|
||||
for _, o := range c.PreviousEpochAttestations {
|
||||
s += 4
|
||||
s += o.SizeSSZ()
|
||||
}
|
||||
return s
|
||||
}()
|
||||
size += func() int {
|
||||
s := 0
|
||||
for _, o := range c.CurrentEpochAttestations {
|
||||
s += 4
|
||||
s += o.SizeSSZ()
|
||||
}
|
||||
return s
|
||||
}()
|
||||
return size
|
||||
}
|
||||
func (c *BeaconState) XXMarshalSSZ() ([]byte, error) {
|
||||
buf := make([]byte, c.XXSizeSSZ())
|
||||
return c.XXMarshalSSZTo(buf[:0])
|
||||
}
|
||||
|
||||
func (c *BeaconState) XXMarshalSSZTo(dst []byte) ([]byte, error) {
|
||||
var err error
|
||||
offset := 2687377
|
||||
|
||||
// Field 0: GenesisTime
|
||||
dst = ssz.MarshalUint64(dst, c.GenesisTime)
|
||||
|
||||
// Field 1: GenesisValidatorsRoot
|
||||
if len(c.GenesisValidatorsRoot) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, c.GenesisValidatorsRoot...)
|
||||
|
||||
// Field 2: Slot
|
||||
dst = ssz.MarshalUint64(dst, uint64(c.Slot))
|
||||
|
||||
// Field 3: Fork
|
||||
if c.Fork == nil {
|
||||
c.Fork = new(Fork)
|
||||
}
|
||||
if dst, err = c.Fork.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 4: LatestBlockHeader
|
||||
if c.LatestBlockHeader == nil {
|
||||
c.LatestBlockHeader = new(prysmaticlabs_prysm_proto_eth_v1alpha1.BeaconBlockHeader)
|
||||
}
|
||||
if dst, err = c.LatestBlockHeader.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 5: BlockRoots
|
||||
if len(c.BlockRoots) != 8192 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
for _, o := range c.BlockRoots {
|
||||
if len(o) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
|
||||
}
|
||||
|
||||
// Field 6: StateRoots
|
||||
if len(c.StateRoots) != 8192 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
for _, o := range c.StateRoots {
|
||||
if len(o) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
|
||||
}
|
||||
|
||||
// Field 7: HistoricalRoots
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += len(c.HistoricalRoots) * 32
|
||||
|
||||
// Field 8: Eth1Data
|
||||
if c.Eth1Data == nil {
|
||||
c.Eth1Data = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Eth1Data)
|
||||
}
|
||||
if dst, err = c.Eth1Data.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 9: Eth1DataVotes
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += len(c.Eth1DataVotes) * 72
|
||||
|
||||
// Field 10: Eth1DepositIndex
|
||||
dst = ssz.MarshalUint64(dst, c.Eth1DepositIndex)
|
||||
|
||||
// Field 11: Validators
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += len(c.Validators) * 121
|
||||
|
||||
// Field 12: Balances
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += len(c.Balances) * 8
|
||||
|
||||
// Field 13: RandaoMixes
|
||||
if len(c.RandaoMixes) != 65536 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
for _, o := range c.RandaoMixes {
|
||||
if len(o) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
|
||||
}
|
||||
|
||||
// Field 14: Slashings
|
||||
if len(c.Slashings) != 8192 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
for _, o := range c.Slashings {
|
||||
dst = ssz.MarshalUint64(dst, o)
|
||||
}
|
||||
|
||||
// Field 15: PreviousEpochAttestations
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += func() int {
|
||||
s := 0
|
||||
for _, o := range c.PreviousEpochAttestations {
|
||||
s += 4
|
||||
s += o.SizeSSZ()
|
||||
}
|
||||
return s
|
||||
}()
|
||||
|
||||
// Field 16: CurrentEpochAttestations
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += func() int {
|
||||
s := 0
|
||||
for _, o := range c.CurrentEpochAttestations {
|
||||
s += 4
|
||||
s += o.SizeSSZ()
|
||||
}
|
||||
return s
|
||||
}()
|
||||
|
||||
// Field 17: JustificationBits
|
||||
if len([]byte(c.JustificationBits)) != 1 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, []byte(c.JustificationBits)...)
|
||||
|
||||
// Field 18: PreviousJustifiedCheckpoint
|
||||
if c.PreviousJustifiedCheckpoint == nil {
|
||||
c.PreviousJustifiedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
}
|
||||
if dst, err = c.PreviousJustifiedCheckpoint.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 19: CurrentJustifiedCheckpoint
|
||||
if c.CurrentJustifiedCheckpoint == nil {
|
||||
c.CurrentJustifiedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
}
|
||||
if dst, err = c.CurrentJustifiedCheckpoint.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 20: FinalizedCheckpoint
|
||||
if c.FinalizedCheckpoint == nil {
|
||||
c.FinalizedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
|
||||
}
|
||||
if dst, err = c.FinalizedCheckpoint.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
// Field 7: HistoricalRoots
|
||||
if len(c.HistoricalRoots) > 16777216 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
for _, o := range c.HistoricalRoots {
|
||||
if len(o) != 32 {
|
||||
return nil, ssz.ErrBytesLength
|
||||
}
|
||||
dst = append(dst, o...)
|
||||
}
|
||||
|
||||
// Field 9: Eth1DataVotes
|
||||
if len(c.Eth1DataVotes) > 2048 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
for _, o := range c.Eth1DataVotes {
|
||||
if dst, err = o.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Field 11: Validators
|
||||
if len(c.Validators) > 1099511627776 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
for _, o := range c.Validators {
|
||||
if dst, err = o.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Field 12: Balances
|
||||
if len(c.Balances) > 1099511627776 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
for _, o := range c.Balances {
|
||||
dst = ssz.MarshalUint64(dst, o)
|
||||
}
|
||||
|
||||
// Field 15: PreviousEpochAttestations
|
||||
if len(c.PreviousEpochAttestations) > 4096 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
{
|
||||
offset = 4 * len(c.PreviousEpochAttestations)
|
||||
for _, o := range c.PreviousEpochAttestations {
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += o.SizeSSZ()
|
||||
}
|
||||
}
|
||||
for _, o := range c.PreviousEpochAttestations {
|
||||
if dst, err = o.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
// Field 16: CurrentEpochAttestations
|
||||
if len(c.CurrentEpochAttestations) > 4096 {
|
||||
return nil, ssz.ErrListTooBig
|
||||
}
|
||||
{
|
||||
offset = 4 * len(c.CurrentEpochAttestations)
|
||||
for _, o := range c.CurrentEpochAttestations {
|
||||
dst = ssz.WriteOffset(dst, offset)
|
||||
offset += o.SizeSSZ()
|
||||
}
|
||||
}
|
||||
for _, o := range c.CurrentEpochAttestations {
|
||||
if dst, err = o.XXMarshalSSZTo(dst); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return dst, err
|
||||
}
|
||||
func (c *BeaconState) XXUnmarshalSSZ(buf []byte) error {
|
||||
var err error
|
||||
size := uint64(len(buf))
|
||||
if size < 2687377 {
|
||||
return ssz.ErrSize
|
||||
}
|
||||
|
||||
s0 := buf[0:8] // c.GenesisTime
|
||||
s1 := buf[8:40] // c.GenesisValidatorsRoot
|
||||
s2 := buf[40:48] // c.Slot
|
||||
s3 := buf[48:64] // c.Fork
|
||||
s4 := buf[64:176] // c.LatestBlockHeader
|
||||
s5 := buf[176:262320] // c.BlockRoots
|
||||
s6 := buf[262320:524464] // c.StateRoots
|
||||
s8 := buf[524468:524540] // c.Eth1Data
|
||||
s10 := buf[524544:524552] // c.Eth1DepositIndex
|
||||
s13 := buf[524560:2621712] // c.RandaoMixes
|
||||
s14 := buf[2621712:2687248] // c.Slashings
|
||||
s17 := buf[2687256:2687257] // c.JustificationBits
|
||||
s18 := buf[2687257:2687297] // c.PreviousJustifiedCheckpoint
|
||||
s19 := buf[2687297:2687337] // c.CurrentJustifiedCheckpoint
|
||||
s20 := buf[2687337:2687377] // c.FinalizedCheckpoint
|
||||
|
||||
v7 := ssz.ReadOffset(buf[524464:524468]) // c.HistoricalRoots
|
||||
if v7 < 2687377 {
|
||||
return ssz.ErrInvalidVariableOffset
|
||||
}
|
||||
if v7 > size {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v9 := ssz.ReadOffset(buf[524540:524544]) // c.Eth1DataVotes
|
||||
if v9 > size || v9 < v7 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v11 := ssz.ReadOffset(buf[524552:524556]) // c.Validators
|
||||
if v11 > size || v11 < v9 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v12 := ssz.ReadOffset(buf[524556:524560]) // c.Balances
|
||||
if v12 > size || v12 < v11 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v15 := ssz.ReadOffset(buf[2687248:2687252]) // c.PreviousEpochAttestations
|
||||
if v15 > size || v15 < v12 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
v16 := ssz.ReadOffset(buf[2687252:2687256]) // c.CurrentEpochAttestations
|
||||
if v16 > size || v16 < v15 {
|
||||
return ssz.ErrOffset
|
||||
}
|
||||
s7 := buf[v7:v9] // c.HistoricalRoots
|
||||
s9 := buf[v9:v11] // c.Eth1DataVotes
|
||||
s11 := buf[v11:v12] // c.Validators
|
||||
s12 := buf[v12:v15] // c.Balances
|
||||
s15 := buf[v15:v16] // c.PreviousEpochAttestations
|
||||
s16 := buf[v16:] // c.CurrentEpochAttestations
|
||||
|
||||
// Field 0: GenesisTime
|
||||
c.GenesisTime = ssz.UnmarshallUint64(s0)
|
||||
|
||||
// Field 1: GenesisValidatorsRoot
|
||||
c.GenesisValidatorsRoot = append([]byte{}, s1...)
|
||||
|
||||
// Field 2: Slot
|
||||
c.Slot = prysmaticlabs_eth2_types.Slot(ssz.UnmarshallUint64(s2))

// Field 3: Fork
c.Fork = new(Fork)
if err = c.Fork.UnmarshalSSZ(s3); err != nil {
return err
}

// Field 4: LatestBlockHeader
c.LatestBlockHeader = new(prysmaticlabs_prysm_proto_eth_v1alpha1.BeaconBlockHeader)
if err = c.LatestBlockHeader.UnmarshalSSZ(s4); err != nil {
return err
}

// Field 5: BlockRoots
{
var tmp []byte
for i := 0; i < 8192; i++ {
tmpSlice := s5[i*32 : (1+i)*32]
tmp = append([]byte{}, tmpSlice...)
c.BlockRoots = append(c.BlockRoots, tmp)
}
}

// Field 6: StateRoots
{
var tmp []byte
for i := 0; i < 8192; i++ {
tmpSlice := s6[i*32 : (1+i)*32]
tmp = append([]byte{}, tmpSlice...)
c.StateRoots = append(c.StateRoots, tmp)
}
}

// Field 7: HistoricalRoots
{
if len(s7)%32 != 0 {
return fmt.Errorf("misaligned bytes: c.HistoricalRoots length is %d, which is not a multiple of 32", len(s7))
}
numElem := len(s7) / 32
if numElem > 16777216 {
return fmt.Errorf("ssz-max exceeded: c.HistoricalRoots has %d elements, ssz-max is 16777216", numElem)
}
for i := 0; i < numElem; i++ {
var tmp []byte

tmpSlice := s7[i*32 : (1+i)*32]
tmp = append([]byte{}, tmpSlice...)
c.HistoricalRoots = append(c.HistoricalRoots, tmp)
}
}

// Field 8: Eth1Data
c.Eth1Data = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Eth1Data)
if err = c.Eth1Data.UnmarshalSSZ(s8); err != nil {
return err
}

// Field 9: Eth1DataVotes
{
if len(s9)%72 != 0 {
return fmt.Errorf("misaligned bytes: c.Eth1DataVotes length is %d, which is not a multiple of 72", len(s9))
}
numElem := len(s9) / 72
if numElem > 2048 {
return fmt.Errorf("ssz-max exceeded: c.Eth1DataVotes has %d elements, ssz-max is 2048", numElem)
}
for i := 0; i < numElem; i++ {
var tmp *prysmaticlabs_prysm_proto_eth_v1alpha1.Eth1Data
tmp = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Eth1Data)
tmpSlice := s9[i*72 : (1+i)*72]
if err = tmp.UnmarshalSSZ(tmpSlice); err != nil {
return err
}
c.Eth1DataVotes = append(c.Eth1DataVotes, tmp)
}
}

// Field 10: Eth1DepositIndex
c.Eth1DepositIndex = ssz.UnmarshallUint64(s10)

// Field 11: Validators
{
if len(s11)%121 != 0 {
return fmt.Errorf("misaligned bytes: c.Validators length is %d, which is not a multiple of 121", len(s11))
}
numElem := len(s11) / 121
if numElem > 1099511627776 {
return fmt.Errorf("ssz-max exceeded: c.Validators has %d elements, ssz-max is 1099511627776", numElem)
}
for i := 0; i < numElem; i++ {
var tmp *prysmaticlabs_prysm_proto_eth_v1alpha1.Validator
tmp = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Validator)
tmpSlice := s11[i*121 : (1+i)*121]
if err = tmp.UnmarshalSSZ(tmpSlice); err != nil {
return err
}
c.Validators = append(c.Validators, tmp)
}
}

// Field 12: Balances
{
if len(s12)%8 != 0 {
return fmt.Errorf("misaligned bytes: c.Balances length is %d, which is not a multiple of 8", len(s12))
}
numElem := len(s12) / 8
if numElem > 1099511627776 {
return fmt.Errorf("ssz-max exceeded: c.Balances has %d elements, ssz-max is 1099511627776", numElem)
}
for i := 0; i < numElem; i++ {
var tmp uint64

tmpSlice := s12[i*8 : (1+i)*8]
tmp = ssz.UnmarshallUint64(tmpSlice)
c.Balances = append(c.Balances, tmp)
}
}

// Field 13: RandaoMixes
{
var tmp []byte
for i := 0; i < 65536; i++ {
tmpSlice := s13[i*32 : (1+i)*32]
tmp = append([]byte{}, tmpSlice...)
c.RandaoMixes = append(c.RandaoMixes, tmp)
}
}

// Field 14: Slashings
{
var tmp uint64
for i := 0; i < 8192; i++ {
tmpSlice := s14[i*8 : (1+i)*8]
tmp = ssz.UnmarshallUint64(tmpSlice)
c.Slashings = append(c.Slashings, tmp)
}
}

// Field 15: PreviousEpochAttestations
{
// empty lists are zero length, so make sure there is room for an offset
// before attempting to unmarshal it
if len(s15) > 3 {
firstOffset := ssz.ReadOffset(s15[0:4])
if firstOffset%4 != 0 {
return fmt.Errorf("misaligned list bytes: when decoding c.PreviousEpochAttestations, end-of-list offset is %d, which is not a multiple of 4 (offset size)", firstOffset)
}
listLen := firstOffset / 4
if listLen > 4096 {
return fmt.Errorf("ssz-max exceeded: c.PreviousEpochAttestations has %d elements, ssz-max is 4096", listLen)
}
listOffsets := make([]uint64, listLen)
for i := 0; uint64(i) < listLen; i++ {
listOffsets[i] = ssz.ReadOffset(s15[i*4 : (i+1)*4])
}
for i := 0; i < len(listOffsets); i++ {
var tmp *PendingAttestation
tmp = new(PendingAttestation)
var tmpSlice []byte
if i+1 == len(listOffsets) {
tmpSlice = s15[listOffsets[i]:]
} else {
tmpSlice = s15[listOffsets[i]:listOffsets[i+1]]
}
if err = tmp.UnmarshalSSZ(tmpSlice); err != nil {
return err
}
c.PreviousEpochAttestations = append(c.PreviousEpochAttestations, tmp)
}
}
}

// Field 16: CurrentEpochAttestations
{
// empty lists are zero length, so make sure there is room for an offset
// before attempting to unmarshal it
if len(s16) > 3 {
firstOffset := ssz.ReadOffset(s16[0:4])
if firstOffset%4 != 0 {
return fmt.Errorf("misaligned list bytes: when decoding c.CurrentEpochAttestations, end-of-list offset is %d, which is not a multiple of 4 (offset size)", firstOffset)
}
listLen := firstOffset / 4
if listLen > 4096 {
return fmt.Errorf("ssz-max exceeded: c.CurrentEpochAttestations has %d elements, ssz-max is 4096", listLen)
}
listOffsets := make([]uint64, listLen)
for i := 0; uint64(i) < listLen; i++ {
listOffsets[i] = ssz.ReadOffset(s16[i*4 : (i+1)*4])
}
for i := 0; i < len(listOffsets); i++ {
var tmp *PendingAttestation
tmp = new(PendingAttestation)
var tmpSlice []byte
if i+1 == len(listOffsets) {
tmpSlice = s16[listOffsets[i]:]
} else {
tmpSlice = s16[listOffsets[i]:listOffsets[i+1]]
}
if err = tmp.UnmarshalSSZ(tmpSlice); err != nil {
return err
}
c.CurrentEpochAttestations = append(c.CurrentEpochAttestations, tmp)
}
}
}

// Field 17: JustificationBits
c.JustificationBits = append([]byte{}, prysmaticlabs_go_bitfield.Bitvector4(s17)...)

// Field 18: PreviousJustifiedCheckpoint
c.PreviousJustifiedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
if err = c.PreviousJustifiedCheckpoint.UnmarshalSSZ(s18); err != nil {
return err
}

// Field 19: CurrentJustifiedCheckpoint
c.CurrentJustifiedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
if err = c.CurrentJustifiedCheckpoint.UnmarshalSSZ(s19); err != nil {
return err
}

// Field 20: FinalizedCheckpoint
c.FinalizedCheckpoint = new(prysmaticlabs_prysm_proto_eth_v1alpha1.Checkpoint)
if err = c.FinalizedCheckpoint.UnmarshalSSZ(s20); err != nil {
return err
}
return err
}

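The list fields above (Eth1DataVotes, Validators, PreviousEpochAttestations, and so on) are decoded using SSZ's offset scheme: for a list of variable-size elements, the first 4-byte offset marks where element data begins, so the element count is that offset divided by 4, and an empty list is simply zero bytes. The following is a minimal standalone sketch of that decoding step, not part of this diff; it uses encoding/binary directly instead of fastssz's ssz.ReadOffset, and the helper names are hypothetical.

package main

import (
	"encoding/binary"
	"fmt"
)

// readOffset mirrors what ssz.ReadOffset does: SSZ offsets are 4-byte little-endian.
func readOffset(b []byte) uint64 {
	return uint64(binary.LittleEndian.Uint32(b))
}

// elementSlices splits the serialized body of an SSZ list of variable-size
// elements into one sub-slice per element, using the offset table at the
// front of the list, the same way the generated code above does.
func elementSlices(body []byte) ([][]byte, error) {
	if len(body) < 4 {
		return nil, nil // an empty list has zero length, so there is no offset to read
	}
	first := readOffset(body[0:4])
	if first%4 != 0 {
		return nil, fmt.Errorf("misaligned end-of-list offset %d", first)
	}
	n := first / 4
	offsets := make([]uint64, n)
	for i := uint64(0); i < n; i++ {
		offsets[i] = readOffset(body[i*4 : (i+1)*4])
	}
	out := make([][]byte, 0, n)
	for i := uint64(0); i < n; i++ {
		end := uint64(len(body))
		if i+1 < n {
			end = offsets[i+1]
		}
		out = append(out, body[offsets[i]:end])
	}
	return out, nil
}

func main() {
	// two elements: offsets 8 and 11, data "abc" and "de"
	body := []byte{8, 0, 0, 0, 11, 0, 0, 0, 'a', 'b', 'c', 'd', 'e'}
	parts, _ := elementSlices(body)
	fmt.Printf("%q\n", parts) // ["abc" "de"]
}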
49
sszgen/backend/testdata/TestUnmarshalSteps.expected
vendored
Normal file
49
sszgen/backend/testdata/TestUnmarshalSteps.expected
vendored
Normal file
@@ -0,0 +1,49 @@
s0 := buf[0:8] // c.GenesisTime
s1 := buf[8:40] // c.GenesisValidatorsRoot
s2 := buf[40:48] // c.Slot
s3 := buf[48:64] // c.Fork
s4 := buf[64:176] // c.LatestBlockHeader
s5 := buf[176:262320] // c.BlockRoots
s6 := buf[262320:524464] // c.StateRoots
s8 := buf[524468:524540] // c.Eth1Data
s10 := buf[524544:524552] // c.Eth1DepositIndex
s13 := buf[524560:2621712] // c.RandaoMixes
s14 := buf[2621712:2687248] // c.Slashings
s17 := buf[2687256:2687257] // c.JustificationBits
s18 := buf[2687257:2687297] // c.PreviousJustifiedCheckpoint
s19 := buf[2687297:2687337] // c.CurrentJustifiedCheckpoint
s20 := buf[2687337:2687377] // c.FinalizedCheckpoint

v7 := ssz.ReadOffset(buf[524464:524468]) // c.HistoricalRoots
if v7 < 2687377 {
	return ssz.ErrInvalidVariableOffset
}
if v7 > size {
	return ssz.ErrOffset
}
v9 := ssz.ReadOffset(buf[524540:524544]) // c.Eth1DataVotes
if v9 > size || v9 < v7 {
	return ssz.ErrOffset
}
v11 := ssz.ReadOffset(buf[524552:524556]) // c.Validators
if v11 > size || v11 < v9 {
	return ssz.ErrOffset
}
v12 := ssz.ReadOffset(buf[524556:524560]) // c.Balances
if v12 > size || v12 < v11 {
	return ssz.ErrOffset
}
v15 := ssz.ReadOffset(buf[2687248:2687252]) // c.PreviousEpochAttestations
if v15 > size || v15 < v12 {
	return ssz.ErrOffset
}
v16 := ssz.ReadOffset(buf[2687252:2687256]) // c.CurrentEpochAttestations
if v16 > size || v16 < v15 {
	return ssz.ErrOffset
}
s7 := buf[v7:v9] // c.HistoricalRoots
s9 := buf[v9:v11] // c.Eth1DataVotes
s11 := buf[v11:v12] // c.Validators
s12 := buf[v12:v15] // c.Balances
s15 := buf[v15:v16] // c.PreviousEpochAttestations
s16 := buf[v16:] // c.CurrentEpochAttestations
39
sszgen/backend/uint.go
Normal file
39
sszgen/backend/uint.go
Normal file
@@ -0,0 +1,39 @@
package backend

import (
	"fmt"

	"github.com/prysmaticlabs/prysm/sszgen/types"
)

type generateUint struct {
	valRep        *types.ValueUint
	targetPackage string
	casterConfig
}

func (g *generateUint) coerce() func(string) string {
	return func(fieldName string) string {
		return fmt.Sprintf("%s(%s)", g.valRep.TypeName(), fieldName)
	}
}

func (g *generateUint) generateUnmarshalValue(fieldName string, offset string) string {
	// "Unmarshall" is misspelled here to match the misspelled method name exported by fastssz
	convert := fmt.Sprintf("ssz.UnmarshallUint%d(%s)", g.valRep.Size, offset)
	return fmt.Sprintf("%s = %s", fieldName, g.casterConfig.toOverlay(convert))
}

func (g *generateUint) generateFixedMarshalValue(fieldName string) string {
	return fmt.Sprintf("dst = ssz.MarshalUint%d(dst, %s)", g.valRep.Size, fieldName)
}

func (g *generateUint) generateHTRPutter(fieldName string) string {
	return fmt.Sprintf("hh.PutUint%d(%s)", g.valRep.Size, fieldName)
}

func (g *generateUint) variableSizeSSZ(fieldname string) string {
	return ""
}

var _ valueGenerator = &generateUint{}
28
sszgen/backend/union.go
Normal file
28
sszgen/backend/union.go
Normal file
@@ -0,0 +1,28 @@
package backend

import (
	"github.com/prysmaticlabs/prysm/sszgen/types"
)

type generateUnion struct {
	*types.ValueUnion
	targetPackage string
}

func (g *generateUnion) generateHTRPutter(fieldName string) string {
	return ""
}

func (g *generateUnion) generateUnmarshalValue(fieldName string, s string) string {
	return ""
}

func (g *generateUnion) generateFixedMarshalValue(fieldName string) string {
	return ""
}

func (g *generateUnion) variableSizeSSZ(fieldname string) string {
	return ""
}

var _ valueGenerator = &generateUnion{}
224
sszgen/backend/vector.go
Normal file
224
sszgen/backend/vector.go
Normal file
@@ -0,0 +1,224 @@
package backend

import (
	"bytes"
	"fmt"
	"text/template"

	"github.com/prysmaticlabs/prysm/sszgen/types"
)

type generateVector struct {
	valRep        *types.ValueVector
	targetPackage string
	casterConfig
}

func (g *generateVector) generateUnmarshalValue(fieldName string, sliceName string) string {
	gg := newValueGenerator(g.valRep.ElementValue, g.targetPackage)
	switch g.valRep.ElementValue.(type) {
	case *types.ValueByte:
		t := `%s = make([]byte, 0, %d)
%s = append(%s, %s...)`
		return fmt.Sprintf(t, fieldName, g.valRep.Size, fieldName, fieldName, g.casterConfig.toOverlay(sliceName))
	default:
		loopVar := "i"
		if fieldName[0:1] == "i" && monoCharacter(fieldName) {
			loopVar = fieldName + "i"
		}
		t := `{
var tmp {{ .TypeName }}
{{.FieldName}} = make([]{{.TypeName}}, {{.NumElements}})
for {{ .LoopVar }} := 0; {{ .LoopVar }} < {{ .NumElements }}; {{ .LoopVar }} ++ {
tmpSlice := {{ .SliceName }}[{{ .LoopVar }}*{{ .NestedFixedSize }}:(1+{{ .LoopVar }})*{{ .NestedFixedSize }}]
{{ .NestedUnmarshal }}
{{ .FieldName }}[{{.LoopVar}}] = tmp
}
}`
		tmpl, err := template.New("tmplgenerateUnmarshalValueDefault").Parse(t)
		if err != nil {
			panic(err)
		}
		buf := bytes.NewBuffer(nil)
		nvr := g.valRep.ElementValue
		err = tmpl.Execute(buf, struct {
			TypeName        string
			SliceName       string
			NumElements     int
			NestedFixedSize int
			LoopVar         string
			NestedUnmarshal string
			FieldName       string
		}{
			TypeName:        fullyQualifiedTypeName(nvr, g.targetPackage),
			SliceName:       sliceName,
			NumElements:     g.valRep.FixedSize() / g.valRep.ElementValue.FixedSize(),
			NestedFixedSize: g.valRep.ElementValue.FixedSize(),
			LoopVar:         loopVar,
			NestedUnmarshal: gg.generateUnmarshalValue("tmp", "tmpSlice"),
			FieldName:       fieldName,
		})
		if err != nil {
			panic(err)
		}
		return string(buf.Bytes())
	}
}

var tmplGenerateMarshalValueVector = `if len({{.FieldName}}) != {{.Size}} {
return nil, ssz.ErrBytesLength
}
{{.MarshalValue}}`

func (g *generateVector) generateFixedMarshalValue(fieldName string) string {
	mvTmpl, err := template.New("tmplGenerateMarshalValueVector").Parse(tmplGenerateMarshalValueVector)
	if err != nil {
		panic(err)
	}
	var marshalValue string
	switch g.valRep.ElementValue.(type) {
	case *types.ValueByte:
		marshalValue = fmt.Sprintf("dst = append(dst, %s...)", fieldName)
	default:
		nestedFieldName := "o"
		if fieldName[0:1] == "o" && monoCharacter(fieldName) {
			nestedFieldName = fieldName + "o"
		}
		t := `for _, %s := range %s {
%s
}`
		gg := newValueGenerator(g.valRep.ElementValue, g.targetPackage)
		internal := gg.generateFixedMarshalValue(nestedFieldName)
		marshalValue = fmt.Sprintf(t, nestedFieldName, fieldName, internal)
	}
	buf := bytes.NewBuffer(nil)
	err = mvTmpl.Execute(buf, struct {
		FieldName    string
		Size         int
		MarshalValue string
	}{
		FieldName:    fieldName,
		Size:         g.valRep.Size,
		MarshalValue: marshalValue,
	})
	if err != nil {
		panic(err)
	}
	return string(buf.Bytes())
}

var generateVectorHTRPutterTmpl = `{
if len({{.FieldName}}) != {{.Size}} {
return ssz.ErrVectorLength
}
subIndx := hh.Index()
for _, {{.NestedFieldName}} := range {{.FieldName}} {
{{.AppendCall}}
}
{{.Merkleize}}
}`

type vecPutterElements struct {
	FieldName       string
	NestedFieldName string
	Size            int
	AppendCall      string
	Merkleize       string
}

func renderHtrVecPutter(lpe vecPutterElements) string {
	tmpl, err := template.New("renderHtrVecPutter").Parse(generateVectorHTRPutterTmpl)
	if err != nil {
		panic(err)
	}
	buf := bytes.NewBuffer(nil)
	err = tmpl.Execute(buf, lpe)
	if err != nil {
		panic(err)
	}
	return buf.String()
}

func (g *generateVector) isByteVector() bool {
	_, isByte := g.valRep.ElementValue.(*types.ValueByte)
	return isByte
}

func (g *generateVector) renderByteSliceAppend(fieldName string) string {
	t := `if len(%s) != %d {
return ssz.ErrBytesLength
}
hh.Append(%s)`
	return fmt.Sprintf(t, fieldName, g.valRep.Size, fieldName)
}

func (g *generateVector) generateHTRPutter(fieldName string) string {
	nestedFieldName := "o"
	if fieldName[0:1] == "o" && monoCharacter(fieldName) {
		nestedFieldName = fieldName + "o"
	}

	// resolve pointers and overlays to their underlying types
	vr := g.valRep.ElementValue
	if vrp, isPointer := vr.(*types.ValuePointer); isPointer {
		vr = vrp.Referent
	}
	if vro, isOverlay := vr.(*types.ValueOverlay); isOverlay {
		vr = vro.Underlying
	}

	vpe := vecPutterElements{
		FieldName:       fieldName,
		NestedFieldName: nestedFieldName,
		Size:            g.valRep.Size,
	}

	switch v := vr.(type) {
	case *types.ValueByte:
		t := `if len(%s) != %d {
return ssz.ErrBytesLength
}
hh.PutBytes(%s)`
		return fmt.Sprintf(t, fieldName, g.valRep.Size, fieldName)
	case *types.ValueVector:
		gv := &generateVector{valRep: v, targetPackage: g.targetPackage}
		if gv.isByteVector() {
			vpe.AppendCall = gv.renderByteSliceAppend(nestedFieldName)
			vpe.Merkleize = "hh.Merkleize(subIndx)"
			return renderHtrVecPutter(vpe)
		}
	case *types.ValueUint:
		vpe.AppendCall = fmt.Sprintf("hh.AppendUint%d(%s)", v.Size, nestedFieldName)
		vpe.Merkleize = "hh.Merkleize(subIndx)"
		return renderHtrVecPutter(vpe)
	default:
		panic(fmt.Sprintf("unsupported type combination - vector of %v", v))
	}
	return ""
}

func monoCharacter(s string) bool {
	ch := s[0]
	for i := 1; i < len(s); i++ {
		if s[i] == ch {
			continue
		}
		return false
	}
	return true
}

func (g *generateVector) variableSizeSSZ(fieldName string) string {
	if !g.valRep.ElementValue.IsVariableSized() {
		return fmt.Sprintf("len(%s) * %d", fieldName, g.valRep.ElementValue.FixedSize())
	}
	return ""
}

func (g *generateVector) coerce() func(string) string {
	return func(fieldName string) string {
		return fmt.Sprintf("%s(%s)", g.valRep.TypeName(), fieldName)
	}
}

var _ valueGenerator = &generateVector{}
23
sszgen/backend/visitor.go
Normal file
23
sszgen/backend/visitor.go
Normal file
@@ -0,0 +1,23 @@
package backend

import "github.com/prysmaticlabs/prysm/sszgen/types"

type visitor func(vr types.ValRep)

func visit(vr types.ValRep, v visitor) {
	v(vr)
	switch t := vr.(type) {
	case *types.ValueContainer:
		for _, f := range t.Contents {
			visit(f.Value, v)
		}
	case *types.ValueVector:
		visit(t.ElementValue, v)
	case *types.ValueList:
		visit(t.ElementValue, v)
	case *types.ValuePointer:
		visit(t.Referent, v)
	case *types.ValueOverlay:
		visit(t.Underlying, v)
	}
}
68
sszgen/indexer.go
Normal file
68
sszgen/indexer.go
Normal file
@@ -0,0 +1,68 @@
package sszgen

type PackageIndex struct {
	sourcePackage string
	index         map[string]PackageParser
	structCache   map[[2]string]*ParseNode
}

func NewPackageIndex() *PackageIndex {
	pi := &PackageIndex{
		index:       make(map[string]PackageParser),
		structCache: make(map[[2]string]*ParseNode),
	}
	return pi
}

func (pi *PackageIndex) getParser(packagePath string) (PackageParser, error) {
	pkg, ok := pi.index[packagePath]
	if ok {
		return pkg, nil
	}
	pkg, err := NewPackageParser(packagePath)
	if err == nil {
		pi.index[packagePath] = pkg
	}
	return pkg, err
}

func (pi *PackageIndex) DeclarationRefs(packagePath string) ([]*DeclarationRef, error) {
	pkg, err := pi.getParser(packagePath)
	if err != nil {
		return nil, err
	}
	refs := make([]*DeclarationRef, 0)
	for _, p := range pkg.AllParseNodes() {
		refs = append(refs, p.DeclarationRef())
	}
	return refs, nil
}

// Go does not require the package path to match the package name.
// The package of the methods generated by this tool must match the
// package name used by other files in the package, so we need to
// expose that value through the parser/index.
func (pi *PackageIndex) GetPackageName(packagePath string) (string, error) {
	pkg, err := pi.getParser(packagePath)
	if err != nil {
		return "", err
	}
	return pkg.PackageName()
}

func (pi *PackageIndex) GetType(packagePath, typeName string) (*ParseNode, error) {
	cached, ok := pi.structCache[[2]string{packagePath, typeName}]
	if ok {
		return cached, nil
	}
	pkg, err := pi.getParser(packagePath)
	if err != nil {
		return nil, err
	}
	ts, err := pkg.GetType(typeName)
	if err != nil {
		return nil, err
	}
	pi.structCache[[2]string{packagePath, typeName}] = ts
	return ts, nil
}
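As the comment on GetPackageName notes, a Go package's import path and its package clause can differ: the testdata package added later in this diff lives under .../proto/beacon/p2p/v1 but declares package ethereum_beacon_p2p_v1, and generated files must use the latter. A hypothetical usage sketch, assuming this package is importable as github.com/prysmaticlabs/prysm/sszgen:

package main

import (
	"fmt"
	"log"

	"github.com/prysmaticlabs/prysm/sszgen"
)

func main() {
	pi := sszgen.NewPackageIndex()
	// The import path ends in "v1", but the files in that directory declare
	// "package ethereum_beacon_p2p_v1"; generated code must match the latter.
	name, err := pi.GetPackageName("github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1")
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("package " + name)
}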
29
sszgen/indexer_test.go
Normal file
29
sszgen/indexer_test.go
Normal file
@@ -0,0 +1,29 @@
package sszgen

import (
	"testing"

	"github.com/prysmaticlabs/prysm/shared/testutil/require"
)

func newTestIndexer() *PackageIndex {
	return &PackageIndex{
		index:       make(map[string]PackageParser),
		structCache: make(map[[2]string]*ParseNode),
	}
}

func TestAddGet(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	pi := newTestIndexer()
	sourceFiles := []string{"testdata/simple.go"}
	pp, err := newTestPackageParser(packageName, sourceFiles)
	require.NoError(t, err)
	pi.index[packageName] = pp
	parser, err := pi.getParser(packageName)
	require.NoError(t, err)
	_, err = parser.GetType("NoImports")
	require.NoError(t, err)
	_, err = pi.GetType(packageName, "NoImports")
	require.NoError(t, err)
}
169
sszgen/parser.go
Normal file
169
sszgen/parser.go
Normal file
@@ -0,0 +1,169 @@
package sszgen

import (
	"fmt"
	"go/ast"
	"go/parser"
	"go/token"
	"strconv"

	"github.com/prysmaticlabs/prysm/sszgen/types"
	"golang.org/x/tools/go/packages"
)

type ParseNode struct {
	PackagePath    string
	Name           string
	typeSpec       *ast.TypeSpec
	typeExpression ast.Expr
	FileParser     FileParser
	PackageParser  PackageParser
	ValRep         types.ValRep
	Tag            string
}

func (pn *ParseNode) DeclarationRef() *DeclarationRef {
	return &DeclarationRef{Name: pn.Name, Package: pn.PackagePath}
}

type DeclarationRef struct {
	Name    string
	Package string
}

func (ts *ParseNode) TypeExpression() ast.Expr {
	if ts.typeSpec != nil {
		return ts.typeSpec.Type
	}
	if ts.typeExpression != nil {
		return ts.typeExpression
	}
	return nil
}

type FileParser interface {
	ResolveAlias(string) (string, error)
}

type astFileParser struct {
	file     *ast.File
	filename string
}

var _ FileParser = &astFileParser{}

func (afp *astFileParser) ResolveAlias(alias string) (string, error) {
	for _, imp := range afp.file.Imports {
		if imp.Name.Name == alias {
			resolved, err := strconv.Unquote(imp.Path.Value)
			return resolved, err
		}
	}
	return "", fmt.Errorf("Could not resolve alias %s from filename '%s'", alias, afp.filename)
}

type PackageParser interface {
	Imports() ([]*ast.ImportSpec, error)
	AllParseNodes() []*ParseNode
	GetType(name string) (*ParseNode, error)
	Path() string                 // parser's package path
	PackageName() (string, error) // "real" name, i.e. the `package $NAME` declaration in source files in the package
}

type packageParser struct {
	packagePath string
	files       map[string]*ast.File
}

func (pp *packageParser) Imports() ([]*ast.ImportSpec, error) {
	imports := make([]*ast.ImportSpec, 0)
	for _, f := range pp.files {
		for _, imp := range f.Imports {
			imports = append(imports, imp)
		}
	}
	return imports, nil
}

func (pp *packageParser) AllParseNodes() []*ParseNode {
	structs := make([]*ParseNode, 0)
	for fname, f := range pp.files {
		for name, obj := range f.Scope.Objects {
			if obj.Kind != ast.Typ {
				continue
			}
			typeSpec, ok := obj.Decl.(*ast.TypeSpec)
			if !ok {
				continue
			}
			ts := &ParseNode{
				Name: name,
				//TypeExpression: typeSpec.Type,
				typeSpec:    typeSpec,
				FileParser:  &astFileParser{filename: fname, file: f},
				PackagePath: pp.packagePath,
			}
			structs = append(structs, ts)
		}
	}
	return structs
}

func (pp *packageParser) PackageName() (string, error) {
	for _, f := range pp.files {
		return f.Name.Name, nil
	}
	return "", fmt.Errorf("Could not determine package name for package path %s", pp.packagePath)
}

func (pp *packageParser) GetType(name string) (*ParseNode, error) {
	for fname, f := range pp.files {
		for objName, obj := range f.Scope.Objects {
			if obj.Kind != ast.Typ {
				continue
			}
			typeSpec, ok := obj.Decl.(*ast.TypeSpec)
			if !ok {
				continue
			}
			if name == objName {
				return &ParseNode{
					Name: objName,
					//TypeExpression: typeSpec.Type,
					typeSpec:      typeSpec,
					FileParser:    &astFileParser{file: f, filename: fname},
					PackageParser: pp,
				}, nil
			}
		}
	}
	return nil, fmt.Errorf("Could not find struct named '%s' in package %s", name, pp.packagePath)
}

func (pp *packageParser) Path() string {
	return pp.packagePath
}

func NewPackageParser(packageName string) (*packageParser, error) {
	cfg := &packages.Config{Mode: packages.NeedFiles | packages.NeedSyntax}
	pkgs, err := packages.Load(cfg, []string{packageName}...)
	if err != nil {
		return nil, err
	}
	for _, pkg := range pkgs {
		if pkg.ID != packageName {
			continue
		}
		pp := &packageParser{packagePath: pkg.ID, files: make(map[string]*ast.File)}
		for _, f := range pkg.GoFiles {
			syn, err := parser.ParseFile(token.NewFileSet(), f, nil, parser.AllErrors)
			if err != nil {
				return nil, err
			}
			pp.files[f] = syn
		}
		return pp, nil
	}
	return nil, fmt.Errorf("Package named '%s' could not be loaded from the go build system. Please make sure the current folder contains the go.mod for the target package, or that its go.mod is in a parent directory", packageName)
}
45
sszgen/parser_test.go
Normal file
45
sszgen/parser_test.go
Normal file
@@ -0,0 +1,45 @@
package sszgen

import (
	"go/ast"
	"go/parser"
	"go/token"
	"testing"

	"github.com/prysmaticlabs/prysm/shared/testutil/require"
)

func TestFindStruct(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	sourceFiles := []string{"testdata/types.pb.go"}
	pp, err := newTestPackageParser(packageName, sourceFiles)
	require.NoError(t, err)
	_, err = pp.GetType("BeaconState")
	require.NoError(t, err)
}

func newTestPackageParser(packageName string, files []string) (*packageParser, error) {
	pp := &packageParser{packagePath: packageName, files: make(map[string]*ast.File)}
	for _, f := range files {
		syn, err := parser.ParseFile(token.NewFileSet(), f, nil, parser.AllErrors)
		if err != nil {
			return nil, err
		}
		pp.files[f] = syn
	}
	return pp, nil
}

func TestResolveImport(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	sourceFiles := []string{"testdata/types.pb.go"}
	pp, err := newTestPackageParser(packageName, sourceFiles)
	require.NoError(t, err)
	ts, err := pp.GetType("BeaconState")
	require.NoError(t, err)
	alias := "github_com_prysmaticlabs_eth2_types"
	path, err := ts.FileParser.ResolveAlias(alias)
	require.NoError(t, err)
	expectedPath := "github.com/prysmaticlabs/eth2-types"
	require.Equal(t, expectedPath, path)
}
194
sszgen/representer.go
Normal file
194
sszgen/representer.go
Normal file
@@ -0,0 +1,194 @@
package sszgen

import (
	"fmt"
	"go/ast"

	"github.com/prysmaticlabs/prysm/sszgen/types"
)

type Representer struct {
	index *PackageIndex
}

func NewRepresenter(pi *PackageIndex) *Representer {
	return &Representer{index: pi}
}

type typeSpecMutator func(*ParseNode)

// typeSpecMutatorCopyTag copies a tag from a field down into a declaration
// representation. This is used to push tag data down into declaration parsing,
// so that ssz-size/ssz-max can be applied to list/vector value types.
func typeSpecMutatorCopyTag(source *ParseNode) typeSpecMutator {
	return func(target *ParseNode) {
		target.Tag = source.Tag
	}
}

func (r *Representer) GetDeclaration(packagePath, structName string, mutators ...typeSpecMutator) (types.ValRep, error) {
	ts, err := r.index.GetType(packagePath, structName)
	if err != nil {
		return nil, err
	}
	// apply mutators to replicate any important ParseNode properties
	// from the outer ParseNode
	for _, mut := range mutators {
		mut(ts)
	}
	switch ty := ts.typeSpec.Type.(type) {
	case *ast.StructType:
		vr := &types.ValueContainer{
			Name:    ts.Name,
			Package: packagePath,
		}
		for _, f := range ty.Fields.List {
			// this filters out internal protobuf fields; serializers like this one
			// can safely ignore unexported fields in general. We also ignore embedded
			// fields because I'm not sure if we should support them yet.
			if f.Names == nil || !ast.IsExported(f.Names[0].Name) {
				continue
			}
			fieldName := f.Names[0].Name
			s := &ParseNode{
				FileParser:     ts.FileParser,
				PackageParser:  ts.PackageParser,
				typeExpression: f.Type,
			}
			if f.Tag != nil {
				s.Tag = f.Tag.Value
			}
			rep, err := r.expandRepresentation(s)
			if err != nil {
				return nil, err
			}
			vr.Contents = append(vr.Contents, types.ContainerField{fieldName, rep})
		}
		return vr, nil
	case *ast.Ident:
		// in this case our type is like an "overlay" over a primitive, i.e.
		// type IntWithMethods int
		// the ValueOverlay value type exists to represent this situation.
		// These values require some special handling in codegen because
		// they must be cast to/from their underlying types when working
		// with their byte representation for un/marshaling, etc.
		underlying, err := r.expandIdent(ty, ts)
		if err != nil {
			return nil, err
		}
		// the underlying ValRep will be a primitive value and its .TypeName()
		// will reflect its storage type, not the overlay name
		return &types.ValueOverlay{Name: ts.Name, Package: packagePath, Underlying: underlying}, nil
	case *ast.ArrayType:
		// we can also have an "overlay" array, like the Bitlist types
		// from github.com/prysmaticlabs/go-bitfield
		//underlying, err := r.expandArray()
		underlying, err := r.expandArrayHead(ty, ts)
		if err != nil {
			return nil, err
		}
		return &types.ValueOverlay{Name: ts.Name, Package: packagePath, Underlying: underlying}, nil
	default:
		return nil, fmt.Errorf("Unsupported ast.Expr type for %v", ts.TypeExpression())
	}
}

func (r *Representer) expandRepresentation(ts *ParseNode) (types.ValRep, error) {
	switch ty := ts.typeExpression.(type) {
	case *ast.ArrayType:
		return r.expandArrayHead(ty, ts)
	case *ast.StarExpr:
		referentTS := &ParseNode{
			FileParser:     ts.FileParser,
			PackageParser:  ts.PackageParser,
			typeExpression: ty.X,
		}
		vr, err := r.expandRepresentation(referentTS)
		if err != nil {
			return nil, err
		}
		return &types.ValuePointer{Referent: vr}, nil
	case *ast.SelectorExpr:
		packageAliasIdent := ty.X.(*ast.Ident)
		pa := packageAliasIdent.Name
		path, err := ts.FileParser.ResolveAlias(pa)
		if err != nil {
			return nil, err
		}
		return r.GetDeclaration(path, ty.Sel.Name, typeSpecMutatorCopyTag(ts))
	case *ast.Ident:
		return r.expandIdent(ty, ts)
	default:
		return nil, fmt.Errorf("Unsupported ast.Expr type for %v", ts.TypeExpression())
	}
}

func (r *Representer) expandArrayHead(art *ast.ArrayType, ts *ParseNode) (types.ValRep, error) {
	dims, err := extractSSZDimensions(ts.Tag)
	if err != nil {
		return nil, err
	}
	return r.expandArray(dims, art, ts)
}

func (r *Representer) expandArray(dims []*SSZDimension, art *ast.ArrayType, ts *ParseNode) (types.ValRep, error) {
	if len(dims) == 0 {
		return nil, fmt.Errorf("do not have dimension information for type %v", ts)
	}
	d := dims[0]
	var elv types.ValRep
	var err error
	switch elt := art.Elt.(type) {
	case *ast.ArrayType:
		elv, err = r.expandArray(dims[1:], elt, ts)
		if err != nil {
			return nil, err
		}
	default:
		elv, err = r.expandRepresentation(&ParseNode{
			FileParser:     ts.FileParser,
			PackageParser:  ts.PackageParser,
			typeExpression: elt,
		})
		if err != nil {
			return nil, err
		}
	}

	if d.IsVector() {
		return &types.ValueVector{
			ElementValue: elv,
			Size:         d.VectorLen(),
		}, nil
	}
	if d.IsList() {
		return &types.ValueList{
			ElementValue: elv,
			MaxSize:      d.ListLen(),
		}, nil
	}
	return nil, nil
}

func (r *Representer) expandIdent(ident *ast.Ident, ts *ParseNode) (types.ValRep, error) {
	switch ident.Name {
	case "bool":
		return &types.ValueBool{Name: ident.Name}, nil
	case "byte":
		return &types.ValueByte{Name: ident.Name}, nil
	case "uint8":
		return &types.ValueUint{Size: 8, Name: ident.Name}, nil
	case "uint16":
		return &types.ValueUint{Size: 16, Name: ident.Name}, nil
	case "uint32":
		return &types.ValueUint{Size: 32, Name: ident.Name}, nil
	case "uint64":
		return &types.ValueUint{Size: 64, Name: ident.Name}, nil
	case "uint128":
		return &types.ValueUint{Size: 128, Name: ident.Name}, nil
	case "uint256":
		return &types.ValueUint{Size: 256, Name: ident.Name}, nil
	default:
		return r.GetDeclaration(ts.PackageParser.Path(), ident.Name, typeSpecMutatorCopyTag(ts))
	}
}
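For orientation, the *ast.Ident and *ast.ArrayType branches above exist for "overlay" declarations, i.e. named types whose underlying storage is a primitive or a byte slice. A small illustrative declaration of the kind those branches target (hypothetical, not part of this diff):

package example

// A declaration like this is what the *ast.Ident branch turns into a
// ValueOverlay: marshaling code must cast to/from uint64, while keeping
// the AliasedPrimitive name on generated methods.
type AliasedPrimitive uint64

// A declaration like this is what the *ast.ArrayType branch handles,
// similar to the Bitlist/Bitvector types in github.com/prysmaticlabs/go-bitfield.
type Bitfield []byte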
362
sszgen/representer_test.go
Normal file
362
sszgen/representer_test.go
Normal file
@@ -0,0 +1,362 @@
package sszgen

import (
	"reflect"
	"testing"

	"github.com/prysmaticlabs/prysm/shared/testutil/require"
	"github.com/prysmaticlabs/prysm/sszgen/types"
)

func TestGetSimpleRepresentation(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	sourceFiles := []string{"testdata/simple.go"}
	pp, err := newTestPackageParser(packageName, sourceFiles)
	require.NoError(t, err)
	pi := newTestIndexer()
	pi.index[packageName] = pp
	rep := NewRepresenter(pi)
	structName := "NoImports"
	_, err = rep.GetDeclaration(packageName, structName)
	require.NoError(t, err)
}

func setupSimpleRepresenter() *Representer {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	sourceFiles := []string{"testdata/simple.go"}
	pp, _ := newTestPackageParser(packageName, sourceFiles)
	pi := newTestIndexer()
	pi.index[packageName] = pp
	return NewRepresenter(pi)
}

func TestPrimitiveAliasRepresentation(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	rep := setupSimpleRepresenter()
	typeName := "AliasedPrimitive"
	r, err := rep.GetDeclaration(packageName, typeName)
	require.NoError(t, err)
	require.Equal(t, typeName, r.TypeName())
	overlay, ok := r.(*types.ValueOverlay)
	require.Equal(t, true, ok, "type declaration over primitive type should result in a ValueOverlay")
	require.Equal(t, "uint64", overlay.Underlying.TypeName())
}

// TestSimpleStructRepresentation ensures that a type declaration like:
// type AliasedPrimitive uint64
// will be represented like ValueOverlay{Name: "AliasedPrimitive", Underlying: ValueUint{Name: "uint64"}}
func TestSimpleStructRepresentation(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	rep := setupSimpleRepresenter()
	typeName := "NoImports"
	r, err := rep.GetDeclaration(packageName, typeName)
	require.NoError(t, err)
	require.Equal(t, typeName, r.TypeName())
	container, ok := r.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(r))

	// test simple "overlay" values
	overlayValRep, err := container.GetField("MuhPrim")
	require.NoError(t, err)
	overlay, ok := overlayValRep.(*types.ValueOverlay)
	require.Equal(t, true, ok, "Expected the result to be a ValueOverlay type, got %v", typename(overlayValRep))
	require.Equal(t, "AliasedPrimitive", overlay.TypeName())
	require.Equal(t, overlay.Underlying.TypeName(), "uint64")

	uintValRep, err := container.GetField("GenesisTime")
	require.NoError(t, err)
	require.Equal(t, true, ok, "Expected \"GenesisTime\" to be in container")
	require.Equal(t, "uint64", uintValRep.TypeName())
	uintType, ok := uintValRep.(*types.ValueUint)
	require.Equal(t, true, ok, "Expected \"GenesisTime\" to be a ValueUint, got %v", typename(uintValRep))
	require.Equal(t, types.UintSize(64), uintType.Size)
}

// Tests that 1 and 2 dimensional vectors are represented as expected
func TestStructVectors(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	rep := setupSimpleRepresenter()
	typeName := "NoImports"
	r, err := rep.GetDeclaration(packageName, typeName)
	require.NoError(t, err)
	require.Equal(t, typeName, r.TypeName())
	container, ok := r.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(r))

	vectorValRep, err := container.GetField("GenesisValidatorsRoot")
	require.NoError(t, err)
	require.Equal(t, true, ok, "Expected \"GenesisValidatorsRoot\" to be in container")
	vector, ok := vectorValRep.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected the result to be a ValueVector type, got %v", typename(vectorValRep))
	require.Equal(t, "[]byte", vector.TypeName())
	byteVal, ok := vector.ElementValue.(*types.ValueByte)
	require.Equal(t, true, ok, "Expected the ElementValue to be a ValueByte type, got %v", typename(vector))
	require.Equal(t, byteVal.TypeName(), "byte")
	require.Equal(t, 32, vector.Size)

	vectorValRep2d, err := container.GetField("BlockRoots")
	require.NoError(t, err)
	vector2d, ok := vectorValRep2d.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected \"BlockRoots\" to be type ValueVector, got %v", typename(vector2d))
	require.Equal(t, 8192, vector2d.Size)
	vector1d, ok := vector2d.ElementValue.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected the element type of \"BlockRoots\" to be type ValueVector, got %v", typename(vector1d))
	require.Equal(t, 32, vector1d.Size)
	vector1dElement, ok := vector1d.ElementValue.(*types.ValueByte)
	require.Equal(t, true, ok, "Expected the element type of \"BlockRoots\" to be type ValueVector, got %v", typename(vector2d.ElementValue))
	require.Equal(t, "byte", vector1dElement.TypeName())
}

// tests that ssz dimensions are assigned correctly with a vector nested in a list
func TestVectorInListInStruct(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	rep := setupSimpleRepresenter()
	typeName := "NoImports"
	r, err := rep.GetDeclaration(packageName, typeName)
	require.NoError(t, err)
	require.Equal(t, typeName, r.TypeName())
	container, ok := r.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(r))

	listValRep, err := container.GetField("HistoricalRoots")
	require.NoError(t, err)
	require.Equal(t, "[][]byte", listValRep.TypeName())
	list, ok := listValRep.(*types.ValueList)
	require.Equal(t, true, ok, "Expected the result to be a ValueList type, got %v", typename(listValRep))
	require.Equal(t, 16777216, list.MaxSize, "Unexpected value for list max size based on parsed ssz tags")

	require.Equal(t, "[]byte", list.ElementValue.TypeName())
	vector, ok := list.ElementValue.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected the result to be a ValueVector type, got %v", typename(list.ElementValue))
	require.Equal(t, 32, vector.Size)

	require.Equal(t, "byte", vector.ElementValue.TypeName())
	_, ok = vector.ElementValue.(*types.ValueByte)
	require.Equal(t, true, ok, "Expected the ElementValue to be a ValueByte type, got %v", typename(vector))
}

func TestContainerField(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	rep := setupSimpleRepresenter()
	typeName := "NoImports"
	r, err := rep.GetDeclaration(packageName, typeName)
	require.NoError(t, err)
	require.Equal(t, typeName, r.TypeName())
	container, ok := r.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(r))

	fieldValRep, err := container.GetField("ContainerField")
	require.NoError(t, err)
	require.Equal(t, "ContainerType", fieldValRep.TypeName())
	field, ok := fieldValRep.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(fieldValRep))
	require.Equal(t, 1, len(field.Fields()))

	refFieldValRep, err := container.GetField("ContainerRefField")
	require.NoError(t, err)
	require.Equal(t, "AnotherContainerType", refFieldValRep.TypeName())
	refField, ok := refFieldValRep.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(refFieldValRep))
	require.Equal(t, 1, len(refField.Fields()))
}

func TestListContainers(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	rep := setupSimpleRepresenter()
	typeName := "NoImports"
	r, err := rep.GetDeclaration(packageName, typeName)
	require.NoError(t, err)
	require.Equal(t, typeName, r.TypeName())
	container, ok := r.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(r))

	conlistValRep, err := container.GetField("ContainerList")
	require.NoError(t, err)
	require.Equal(t, "[]ContainerType", conlistValRep.TypeName())
	conlist, ok := conlistValRep.(*types.ValueList)
	require.Equal(t, true, ok, "Expected the result to be a ValueList type, got %v", typename(conlistValRep))
	require.Equal(t, 23, conlist.MaxSize)
	require.Equal(t, "ContainerType", conlist.ElementValue.TypeName())

	conVecValRep, err := container.GetField("ContainerVector")
	require.NoError(t, err)
	require.Equal(t, "[]ContainerType", conVecValRep.TypeName())
	conVec, ok := conVecValRep.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected the result to be a ValueVector, got %v", typename(conVecValRep))
	require.Equal(t, 42, conVec.Size)
	require.Equal(t, "ContainerType", conVec.ElementValue.TypeName())

	conVecValRefRep, err := container.GetField("ContainerVectorRef")
	require.NoError(t, err)
	require.Equal(t, "[]*ContainerType", conVecValRefRep.TypeName())
	conVecRef, ok := conVecValRefRep.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected the result to be a ValueVector, got %v", typename(conVecValRefRep))
	conVecRefPointer, ok := conVecRef.ElementValue.(*types.ValuePointer)
	require.Equal(t, true, ok, "Expected the result to be a ValuePointer, got %v", typename(conVecRef.ElementValue))
	conVecReferent, ok := conVecRefPointer.Referent.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer, got %v", typename(conVecRefPointer.Referent))
	require.Equal(t, "ContainerType", conVecReferent.TypeName())
	require.Equal(t, 17, conVecRef.Size)

	conListValRefRep, err := container.GetField("ContainerListRef")
	require.NoError(t, err)
	require.Equal(t, "[]*ContainerType", conListValRefRep.TypeName())
	conListRef, ok := conListValRefRep.(*types.ValueList)
	require.Equal(t, true, ok, "Expected the result to be a ValueList, got %v", typename(conListValRefRep))
	conListRefPointer, ok := conListRef.ElementValue.(*types.ValuePointer)
	require.Equal(t, true, ok, "Expected the result to be a ValuePointer, got %v", typename(conListRef.ElementValue))
	conListReferent, ok := conListRefPointer.Referent.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer, got %v", typename(conListRefPointer.Referent))
	require.Equal(t, "ContainerType", conListReferent.TypeName())
	require.Equal(t, 9000, conListRef.MaxSize)
}

func TestListOfOverlays(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	rep := setupSimpleRepresenter()
	typeName := "NoImports"
	r, err := rep.GetDeclaration(packageName, typeName)
	require.NoError(t, err)
	require.Equal(t, typeName, r.TypeName())
	container, ok := r.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(r))

	overlayListRep, err := container.GetField("OverlayList")
	require.NoError(t, err)
	require.Equal(t, "[]AliasedPrimitive", overlayListRep.TypeName())
	overlayList, ok := overlayListRep.(*types.ValueList)
	require.Equal(t, true, ok, "Expected a ValueList, got %v", typename(overlayListRep))
	require.Equal(t, 11, overlayList.MaxSize)
	require.Equal(t, "AliasedPrimitive", overlayList.ElementValue.TypeName())
	overlay, ok := overlayList.ElementValue.(*types.ValueOverlay)
	require.Equal(t, true, ok, "Expected a ValueOverlay, got %v", typename(overlayList.ElementValue))
	require.Equal(t, "uint64", overlay.Underlying.TypeName())
	underlying, ok := overlay.Underlying.(*types.ValueUint)
	require.Equal(t, true, ok, "Expected a ValueUint, got %v", typename(overlay.Underlying))
	require.Equal(t, types.UintSize(64), underlying.Size)

	overlayListRefRep, err := container.GetField("OverlayListRef")
	require.NoError(t, err)
	require.Equal(t, "[]*AliasedPrimitive", overlayListRefRep.TypeName())
	overlayRefList, ok := overlayListRefRep.(*types.ValueList)
	require.Equal(t, true, ok, "Expected a ValueList, got %v", typename(overlayListRep))
	require.Equal(t, 58, overlayRefList.MaxSize)
	require.Equal(t, "*AliasedPrimitive", overlayRefList.ElementValue.TypeName())
	overlayPointer, ok := overlayRefList.ElementValue.(*types.ValuePointer)
	require.Equal(t, true, ok, "Expected a ValuePointer, got %v", typename(overlayRefList.ElementValue))
	require.Equal(t, "AliasedPrimitive", overlayPointer.Referent.TypeName())
	overlayRef, ok := overlayPointer.Referent.(*types.ValueOverlay)
	require.Equal(t, true, ok, "Expected a ValueOverlay, got %v", typename(overlayPointer.Referent))
	require.Equal(t, "uint64", overlayRef.Underlying.TypeName())
	underlyingRef, ok := overlayRef.Underlying.(*types.ValueUint)
	require.Equal(t, true, ok, "Expected a ValueUint, got %v", typename(overlayRef.Underlying))
	require.Equal(t, types.UintSize(64), underlyingRef.Size)
}

func TestVectorOfOverlays(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	rep := setupSimpleRepresenter()
	typeName := "NoImports"
	r, err := rep.GetDeclaration(packageName, typeName)
	require.NoError(t, err)
	require.Equal(t, typeName, r.TypeName())
	container, ok := r.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected the result to be a ValueContainer type, got %v", typename(r))

	overlayVectorRep, err := container.GetField("OverlayVector")
	require.NoError(t, err)
	require.Equal(t, "[]AliasedPrimitive", overlayVectorRep.TypeName())
	overlayVector, ok := overlayVectorRep.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected a ValueVector, got %v", typename(overlayVectorRep))
	require.Equal(t, 23, overlayVector.Size)
	require.Equal(t, "AliasedPrimitive", overlayVector.ElementValue.TypeName())
	overlay, ok := overlayVector.ElementValue.(*types.ValueOverlay)
	require.Equal(t, true, ok, "Expected a ValueOverlay, got %v", typename(overlayVector.ElementValue))
	require.Equal(t, "uint64", overlay.Underlying.TypeName())
	underlying, ok := overlay.Underlying.(*types.ValueUint)
	require.Equal(t, true, ok, "Expected a ValueUint, got %v", typename(overlay.Underlying))
	require.Equal(t, types.UintSize(64), underlying.Size)

	overlayVectorRefRep, err := container.GetField("OverlayVectorRef")
	require.NoError(t, err)
	require.Equal(t, "[]*AliasedPrimitive", overlayVectorRefRep.TypeName())
	overlayRefVector, ok := overlayVectorRefRep.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected a ValueVector, got %v", typename(overlayVectorRep))
	require.Equal(t, 13, overlayRefVector.Size)
	require.Equal(t, "*AliasedPrimitive", overlayRefVector.ElementValue.TypeName())
	overlayPointer, ok := overlayRefVector.ElementValue.(*types.ValuePointer)
	require.Equal(t, true, ok, "Expected a ValuePointer, got %v", typename(overlayRefVector.ElementValue))
	require.Equal(t, "AliasedPrimitive", overlayPointer.Referent.TypeName())
	overlayRef, ok := overlayPointer.Referent.(*types.ValueOverlay)
	require.Equal(t, true, ok, "Expected a ValueOverlay, got %v", typename(overlayPointer.Referent))
	require.Equal(t, "uint64", overlayRef.Underlying.TypeName())
	underlyingRef, ok := overlayRef.Underlying.(*types.ValueUint)
	require.Equal(t, true, ok, "Expected a ValueUint, got %v", typename(overlayRef.Underlying))
	require.Equal(t, types.UintSize(64), underlyingRef.Size)
}

// Test cross-package traversal

func TestGetRepresentationMultiPackage(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	sourceFiles := []string{"testdata/types.pb.go"}
	pp, err := newTestPackageParser(packageName, sourceFiles)
	require.NoError(t, err)
	pi := newTestIndexer()
	pi.index[packageName] = pp
	rep := NewRepresenter(pi)
	structName := "BeaconState"
	_, err = rep.GetDeclaration(packageName, structName)
	require.NoError(t, err)
}

func TestBitlist(t *testing.T) {
	packageName := "github.com/prysmaticlabs/prysm/proto/beacon/p2p/v1"
	sourceFiles := []string{"testdata/types.pb.go"}
	pp, err := newTestPackageParser(packageName, sourceFiles)
	require.NoError(t, err)
	pi := newTestIndexer()
	pi.index[packageName] = pp
	rep := NewRepresenter(pi)
	structName := "TestBitlist"
	testBitlist, err := rep.GetDeclaration(packageName, structName)
	require.NoError(t, err)

	container, ok := testBitlist.(*types.ValueContainer)
	require.Equal(t, true, ok, "Expected \"TestBitlist\" to be type ValueContainer, got %v", typename(testBitlist))

	overlayValRep, err := container.GetField("AggregationBits")
	require.NoError(t, err)
	overlay, ok := overlayValRep.(*types.ValueOverlay)
	require.Equal(t, true, ok, "Expected the result to be a ValueOverlay type, got %v", typename(overlayValRep))
	require.Equal(t, "Bitlist", overlay.TypeName())
	require.Equal(t, "[]byte", overlay.Underlying.TypeName())
	underlying, ok := overlay.Underlying.(*types.ValueList)
	require.Equal(t, true, ok, "Expected the result to be a ValueList type, got %v", typename(overlayValRep))
	require.Equal(t, 2048, underlying.MaxSize)
	require.Equal(t, "byte", underlying.ElementValue.TypeName())
	_, ok = underlying.ElementValue.(*types.ValueByte)
	require.Equal(t, true, ok, "Expected the result to be a ValueByte type, got %v", typename(underlying.ElementValue))

	overlayVecValRep, err := container.GetField("JustificationBits")
	require.NoError(t, err)
	overlayVec, ok := overlayVecValRep.(*types.ValueOverlay)
	require.Equal(t, true, ok, "Expected the result to be a ValueOverlay type, got %v", typename(overlayVec))
	require.Equal(t, "Bitvector4", overlayVec.TypeName())
	require.Equal(t, "[]byte", overlayVec.Underlying.TypeName())
	underlyingVec, ok := overlayVec.Underlying.(*types.ValueVector)
	require.Equal(t, true, ok, "Expected the result to be a ValueVector type, got %v", typename(overlayVecValRep))
	require.Equal(t, 1, underlyingVec.Size)
	require.Equal(t, "byte", underlyingVec.ElementValue.TypeName())
	_, ok = underlyingVec.ElementValue.(*types.ValueByte)
	require.Equal(t, true, ok, "Expected the result to be a ValueByte type, got %v", typename(underlyingVec.ElementValue))
}

func typename(v interface{}) string {
	ty := reflect.TypeOf(v)
	if ty.Kind() == reflect.Ptr {
		return "*" + ty.Elem().Name()
	} else {
		return ty.Name()
	}
}
154
sszgen/tagparse.go
Normal file
154
sszgen/tagparse.go
Normal file
@@ -0,0 +1,154 @@
package sszgen

import (
	"fmt"
	"strconv"
	"strings"
	"text/scanner"
)

type tokenState int

const (
	tsBegin tokenState = iota
	tsLabel
	tsValue
	tsCloseTick
)

type TagParser struct {
	sc scanner.Scanner
	buffer string
}

func (tp *TagParser) Init(tag string) {
	sr := strings.NewReader(tag)
	tp.sc = scanner.Scanner{}
	tp.sc.Init(sr)
	tp.sc.Filename = "tag"
	tp.sc.Mode ^= scanner.ScanRawStrings
}

func (tp TagParser) GetSSZTags() map[string]string {
	var labelStr string
	var state tokenState
	tags := make(map[string]string)
	for tok := tp.sc.Scan(); tok != scanner.EOF; tok = tp.sc.Scan() {
		if state == tsCloseTick {
			panic("undefined behavior when scanning beyond the end of the tag")
		}
		txt := tp.sc.TokenText()
		switch txt {
		case "`":
			if state == tsLabel {
				state = tsCloseTick
				continue
			}
			if state == tsBegin {
				state = tsLabel
				continue
			}
		case ":":
			if state == tsLabel {
				state = tsValue
				continue
			}
		case "\"":
			continue
		default:
			if state == tsValue {
				tags[labelStr] = trimQuotes(string(txt))
				state = tsLabel
				labelStr = ""
				continue
			}
			if state == tsLabel {
				labelStr += string(txt)
				continue
			}
		}
	}
	return tags
}

// cannot compare untyped nil to typed nil
// this value gives us a nil with type of *int
// to compare to ssz-size = '?' values
var nilInt *int

func extractSSZDimensions(tag string) ([]*SSZDimension, error) {
	tp := &TagParser{}
	tp.Init(tag)
	tags := tp.GetSSZTags()
	sszSizes, sizeDefined := tags["ssz-size"]
	sszMax, maxDefined := tags["ssz-max"]
	if !sizeDefined {
		if !maxDefined {
			return nil, fmt.Errorf("no ssz-size or ssz-max tags found for element")
		}
		max, err := strconv.Atoi(sszMax)
		if err != nil {
			return nil, err
		}
		return []*SSZDimension{{ListLength: &max}}, nil
	}
	dims := make([]*SSZDimension, 0)
	for _, sz := range strings.Split(sszSizes, ",") {
		if sz == "?" {
			if sszMax != "" {
				max, err := strconv.Atoi(sszMax)
				if err != nil {
					return nil, err
				}
				dims = append(dims, &SSZDimension{ListLength: &max})
				sszMax = ""
			} else {
				return nil, fmt.Errorf("more than one wildcard in ssz-size, or ssz-max undefined in tag %s", tag)
			}
		} else {
			vsize, err := strconv.Atoi(sz)
			if err != nil {
				return nil, err
			}
			dims = append(dims, &SSZDimension{VectorLength: &vsize})
		}
	}

	return dims, nil
}

type SSZDimension struct {
	VectorLength *int
	ListLength *int
}

func (dim *SSZDimension) IsVector() bool {
	return dim.VectorLength != nilInt
}

func (dim *SSZDimension) IsList() bool {
	return dim.ListLength != nilInt
}

func (dim *SSZDimension) ListLen() int {
	return *dim.ListLength
}

func (dim *SSZDimension) VectorLen() int {
	return *dim.VectorLength
}

type SSZListBounds struct {
	SSZSize []*int
	SSZMax *int
}

func trimQuotes(s string) string {
	if len(s) > 0 && s[0] == '"' {
		s = s[1:]
	}
	if len(s) > 0 && s[len(s)-1] == '"' {
		s = s[:len(s)-1]
	}
	return s
}
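For orientation, a minimal sketch (not part of the change set; exampleDims is a hypothetical helper) of what extractSSZDimensions produces for the wildcard tag exercised in the tests below: the "?" entry consumes ssz-max and becomes a list bound, while the literal 32 becomes a fixed vector bound.

package sszgen

import "fmt"

// exampleDims is a hypothetical illustration of the dimension semantics above.
func exampleDims() {
	tag := "`ssz-max:\"16777216\" ssz-size:\"?,32\"`"
	dims, err := extractSSZDimensions(tag)
	if err != nil {
		panic(err)
	}
	fmt.Println(dims[0].IsList(), dims[0].ListLen())     // true 16777216 (the "?" wildcard consumes ssz-max)
	fmt.Println(dims[1].IsVector(), dims[1].VectorLen()) // true 32 (a literal size becomes a vector bound)
}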
45
sszgen/tagparse_test.go
Normal file
45
sszgen/tagparse_test.go
Normal file
@@ -0,0 +1,45 @@
package sszgen

import (
	"testing"

	"github.com/prysmaticlabs/prysm/shared/testutil/require"
)

func TestTokens(t *testing.T) {
	testTag := "`protobuf:\"bytes,2004,rep,name=historical_roots,json=historicalRoots,proto3\" json:\"historical_roots,omitempty\" ssz-max:\"16777216\" ssz-size:\"?,32\"`"
	tp := &TagParser{}
	tp.Init(testTag)
	tags := tp.GetSSZTags()
	sszSize, ok := tags["ssz-size"]
	require.Equal(t, true, ok)
	require.Equal(t, "?,32", sszSize)
	sszMax, ok := tags["ssz-max"]
	require.Equal(t, true, ok)
	require.Equal(t, "16777216", sszMax)
}

func TestFullTag(t *testing.T) {
	tag := "`protobuf:\"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3\" json:\"genesis_validators_root,omitempty\" ssz-size:\"32\"`"
	_, err := extractSSZDimensions(tag)
	require.NoError(t, err)
}

func TestListOfVector(t *testing.T) {
	tag := "`protobuf:\"bytes,2004,rep,name=historical_roots,json=historicalRoots,proto3\" json:\"historical_roots,omitempty\" ssz-max:\"16777216\" ssz-size:\"?,32\"`"
	_, err := extractSSZDimensions(tag)
	require.NoError(t, err)
}

func TestWildcardSSZSize(t *testing.T) {
	tag := "`ssz-max:\"16777216\" ssz-size:\"?,32\"`"
	bounds, err := extractSSZDimensions(tag)
	require.NoError(t, err)
	require.Equal(t, 2, len(bounds))
	require.Equal(t, true, bounds[0].IsList())
	require.Equal(t, false, bounds[0].IsVector())
	require.Equal(t, 16777216, bounds[0].ListLen())
	require.Equal(t, false, bounds[1].IsList())
	require.Equal(t, true, bounds[1].IsVector())
	require.Equal(t, 32, bounds[1].VectorLen())
}
19
sszgen/testdata/BUILD.bazel
vendored
Normal file
19
sszgen/testdata/BUILD.bazel
vendored
Normal file
@@ -0,0 +1,19 @@
load("@prysm//tools/go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = [
        "simple.go",
        "types.pb.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/sszgen/testdata",
    visibility = ["//visibility:public"],
    deps = [
        "//proto/eth/v1alpha1:go_default_library",
        "@com_github_golang_protobuf//proto:go_default_library",
        "@com_github_prysmaticlabs_eth2_types//:go_default_library",
        "@com_github_prysmaticlabs_go_bitfield//:go_default_library",
        "@org_golang_google_protobuf//reflect/protoreflect:go_default_library",
        "@org_golang_google_protobuf//runtime/protoimpl:go_default_library",
    ],
)
33
sszgen/testdata/simple.go
vendored
Executable file
33
sszgen/testdata/simple.go
vendored
Executable file
@@ -0,0 +1,33 @@
package ethereum_beacon_p2p_v1

type NoImports struct {
	state int
	sizeCache int
	unknownFields int

	GenesisTime uint64 `protobuf:"varint,1001,opt,name=genesis_time,json=genesisTime,proto3" json:"genesis_time,omitempty"`
	GenesisValidatorsRoot []byte `protobuf:"bytes,1002,opt,name=genesis_validators_root,json=genesisValidatorsRoot,proto3" json:"genesis_validators_root,omitempty" ssz-size:"32"`
	BlockRoots [][]byte `protobuf:"bytes,2002,rep,name=block_roots,json=blockRoots,proto3" json:"block_roots,omitempty" ssz-size:"8192,32"`
	HistoricalRoots [][]byte `protobuf:"bytes,2004,rep,name=historical_roots,json=historicalRoots,proto3" json:"historical_roots,omitempty" ssz-max:"16777216" ssz-size:"?,32"`
	MuhPrim AliasedPrimitive
	ContainerField ContainerType
	ContainerRefField *AnotherContainerType
	ContainerList []ContainerType `ssz-max:"23"`
	ContainerVector []ContainerType `ssz-size:"42"`
	ContainerVectorRef []*ContainerType `ssz-size:"17"`
	ContainerListRef []*ContainerType `ssz-max:"9000"`
	OverlayList []AliasedPrimitive `ssz-max:"11"`
	OverlayListRef []*AliasedPrimitive `ssz-max:"58"`
	OverlayVector []AliasedPrimitive `ssz-size:"23"`
	OverlayVectorRef []*AliasedPrimitive `ssz-size:"13"`
}

type AliasedPrimitive uint64

type ContainerType struct {
	MuhPrim AliasedPrimitive
}

type AnotherContainerType struct {
	MuhPrim AliasedPrimitive
}
1118
sszgen/testdata/types.pb.go
generated
vendored
Executable file
1118
sszgen/testdata/types.pb.go
generated
vendored
Executable file
File diff suppressed because it is too large
Load Diff
22
sszgen/testutil/BUILD.bazel
Normal file
22
sszgen/testutil/BUILD.bazel
Normal file
@@ -0,0 +1,22 @@
load("@prysm//tools/go:def.bzl", "go_library", "go_test")

go_library(
    name = "go_default_library",
    srcs = ["render.go"],
    importpath = "github.com/prysmaticlabs/prysm/sszgen/testutil",
    visibility = ["//visibility:public"],
    deps = [
        "//sszgen/types:go_default_library",
        "@com_github_dave_jennifer//jen:go_default_library",
    ],
)

go_test(
    name = "go_default_test",
    srcs = ["render_test.go"],
    embed = [":go_default_library"],
    deps = [
        "//shared/testutil/require:go_default_library",
        "//sszgen/types:go_default_library",
    ],
)
109
sszgen/testutil/render.go
Normal file
109
sszgen/testutil/render.go
Normal file
@@ -0,0 +1,109 @@
package testutil

import (
	"go/format"

	jen "github.com/dave/jennifer/jen"
	"github.com/prysmaticlabs/prysm/sszgen/types"
)

func RenderIntermediate(vr types.ValRep) (string, error) {
	file := jen.NewFile("values")
	evr, err := encodeValRep(vr)
	if err != nil {
		return "", err
	}
	v := jen.Var().Id(vr.TypeName()).Id("types").Dot("ValRep").Op("=").Add(evr)
	file.Add(v)

	gs := file.GoString()
	b, err := format.Source([]byte(gs))
	return string(b), err
}

func encodeValRep(vr types.ValRep) (jen.Code, error) {
	var c jen.Code
	switch ty := vr.(type) {
	case *types.ValueByte:
		values := []jen.Code{jen.Id("Name").Op(":").Lit(ty.Name)}
		if ty.Package != "" {
			values = append(values, jen.Id("Package").Op(":").Lit(ty.Package))
		}
		s := jen.Op("&").Id("types").Dot("ValueByte").Values(values...)
		return s, nil
	case *types.ValueBool:
		values := []jen.Code{jen.Id("Name").Op(":").Lit(ty.Name)}
		if ty.Package != "" {
			values = append(values, jen.Id("Package").Op(":").Lit(ty.Package))
		}
		s := jen.Op("&").Id("types").Dot("ValueBool").Values(values...)
		return s, nil
	case *types.ValueUint:
		s := jen.Op("&").Id("types").Dot("ValueUint").Values(
			jen.Id("Name").Op(":").Lit(ty.Name),
			jen.Id("Size").Op(":").Lit(int(ty.Size)),
		)
		return s, nil
	case *types.ValueVector:
		ev, err := encodeValRep(ty.ElementValue)
		if err != nil {
			return nil, err
		}
		s := jen.Op("&").Id("types").Dot("ValueVector").Values(
			jen.Id("Size").Op(":").Lit(ty.Size),
			jen.Id("ElementValue").Op(":").Add(ev),
		)
		return s, nil
	case *types.ValueList:
		ev, err := encodeValRep(ty.ElementValue)
		if err != nil {
			return nil, err
		}
		s := jen.Op("&").Id("types").Dot("ValueList").Values(
			jen.Id("MaxSize").Op(":").Lit(ty.MaxSize),
			jen.Id("ElementValue").Op(":").Add(ev),
		)
		return s, nil
	case *types.ValueOverlay:
		underlying, err := encodeValRep(ty.Underlying)
		if err != nil {
			return nil, err
		}
		s := jen.Op("&").Id("types").Dot("ValueOverlay").Values(
			jen.Id("Name").Op(":").Lit(ty.Name),
			jen.Id("Package").Op(":").Lit(ty.Package),
			jen.Id("Underlying").Op(":").Add(underlying),
		)
		return s, nil
	case *types.ValuePointer:
		referent, err := encodeValRep(ty.Referent)
		if err != nil {
			return nil, err
		}
		s := jen.Op("&").Id("types").Dot("ValuePointer").Values(
			jen.Id("Referent").Op(":").Add(referent),
		)
		return s, nil
	case *types.ValueContainer:
		contents := make([]jen.Code, 0)
		for _, c := range ty.Contents {
			cvr, err := encodeValRep(c.Value)
			if err != nil {
				return nil, err
			}
			kv := jen.Values(jen.Id("Key").Op(":").Lit(c.Key),
				jen.Id("Value").Op(":").Add(cvr))
			contents = append(contents, kv)
		}
		fields := []jen.Code{
			jen.Id("Name").Op(":").Lit(ty.Name),
			jen.Id("Package").Op(":").Lit(ty.Package),
			jen.Id("Contents").Op(":").Index().Id("types").Dot("ContainerField").
				Values(contents...),
		}
		c = jen.Op("&").Id("types").Dot("ValueContainer").Values(fields...)
	case *types.ValueUnion:
		panic("not implemented")
	}
	return c, nil
}
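As a hedged sketch of the intended workflow (the Checkpoint value and output are illustrative, not taken from this change), RenderIntermediate turns an in-memory ValRep tree into gofmt-ed Go source declaring that value, which can then be pasted into a testdata file:

package main

import (
	"fmt"

	"github.com/prysmaticlabs/prysm/sszgen/testutil"
	"github.com/prysmaticlabs/prysm/sszgen/types"
)

func main() {
	// A tiny ValRep tree: a container holding a single uint64 field.
	vr := &types.ValueContainer{Name: "Checkpoint", Package: "example"}
	vr.Append("Epoch", &types.ValueUint{Name: "uint64", Size: types.Uint64})

	src, err := testutil.RenderIntermediate(vr)
	if err != nil {
		panic(err)
	}
	// src now contains a `var Checkpoint types.ValRep = ...` declaration.
	fmt.Println(src)
}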
34
sszgen/testutil/render_test.go
Normal file
34
sszgen/testutil/render_test.go
Normal file
@@ -0,0 +1,34 @@
package testutil

import (
	"testing"

	"github.com/prysmaticlabs/prysm/shared/testutil/require"
	"github.com/prysmaticlabs/prysm/sszgen/types"
)

func TestRenderIntermediate(t *testing.T) {
	s := &types.ValueContainer{
		Name: "testing",
		Package: "github.com/prysmaticlabs/derp",
		Contents: []types.ContainerField{
			{
				Key: "OverlayUint",
				Value: &types.ValuePointer{Referent:
					&types.ValueOverlay{
						Name: "FakeContainer",
						Package: "github.com/prysmaticlabs/derp/derp",
						Underlying: &types.ValueUint{
							Name: "uint8",
							Size: 8,
						},
					},
				},
			},
		},
	}
	expected := ""
	actual, err := RenderIntermediate(s)
	require.NoError(t, err)
	require.Equal(t, expected, actual)
}
9
sszgen/testutil/testdata/BUILD.bazel
vendored
Normal file
9
sszgen/testutil/testdata/BUILD.bazel
vendored
Normal file
@@ -0,0 +1,9 @@
load("@prysm//tools/go:def.bzl", "go_library")

go_library(
    name = "go_default_library",
    srcs = ["examplevars.go"],
    importpath = "github.com/prysmaticlabs/prysm/sszgen/testutil/testdata",
    visibility = ["//visibility:public"],
    deps = ["//sszgen/types:go_default_library"],
)
26
sszgen/testutil/testdata/examplevars.go
vendored
Normal file
26
sszgen/testutil/testdata/examplevars.go
vendored
Normal file
@@ -0,0 +1,26 @@
// This file exists just to give me some bootstrapping input to run through github.com/aloder/tojen
// to speed up the tedious process of writing jen code
package testdata

import (
	"github.com/prysmaticlabs/prysm/sszgen/types"
)

var testing types.ValRep = &types.ValueContainer{
	Name: "testing",
	Package: "github.com/prysmaticlabs/derp",
	Contents: []types.ContainerField{
		{
			Key: "OverlayUint",
			Value: &types.ValuePointer{
				Referent: &types.ValueOverlay{
					Name: "FakeContainer",
					Package: "github.com/prysmaticlabs/derp/derp",
					Underlying: &types.ValueUint{
						Name: "uint8",
						Size: 8,
					},
				},
			},
		},
	},
}
26
sszgen/types/BUILD.bazel
Normal file
26
sszgen/types/BUILD.bazel
Normal file
@@ -0,0 +1,26 @@
load("@prysm//tools/go:def.bzl", "go_library", "go_test")

go_library(
    name = "go_default_library",
    srcs = [
        "bool.go",
        "byte.go",
        "container.go",
        "list.go",
        "overlay.go",
        "pointer.go",
        "uint.go",
        "union.go",
        "valrep.go",
        "vector.go",
    ],
    importpath = "github.com/prysmaticlabs/prysm/sszgen/types",
    visibility = ["//visibility:public"],
)

go_test(
    name = "go_default_test",
    srcs = ["container_test.go"],
    embed = [":go_default_library"],
    deps = ["//shared/testutil/require:go_default_library"],
)
24
sszgen/types/bool.go
Normal file
24
sszgen/types/bool.go
Normal file
@@ -0,0 +1,24 @@
package types

type ValueBool struct {
	Name string
	Package string
}

func (vb *ValueBool) TypeName() string {
	return vb.Name
}

func (vb *ValueBool) PackagePath() string {
	return vb.Package
}

func (vb *ValueBool) FixedSize() int {
	return 1
}

func (vb *ValueBool) IsVariableSized() bool {
	return false
}

var _ ValRep = &ValueBool{}
24
sszgen/types/byte.go
Normal file
24
sszgen/types/byte.go
Normal file
@@ -0,0 +1,24 @@
package types

type ValueByte struct {
	Name string
	Package string
}

func (vb *ValueByte) TypeName() string {
	return vb.Name
}

func (vb *ValueByte) PackagePath() string {
	return vb.Package
}

func (vb *ValueByte) FixedSize() int {
	return 1
}

func (vb *ValueByte) IsVariableSized() bool {
	return false
}

var _ ValRep = &ValueByte{}
66
sszgen/types/container.go
Normal file
66
sszgen/types/container.go
Normal file
@@ -0,0 +1,66 @@
package types

import "fmt"

type ContainerField struct {
	Key string
	Value ValRep
}

type ValueContainer struct {
	Name string
	Package string
	Contents []ContainerField
	nameMap map[string]ValRep
}

func (vc *ValueContainer) Fields() []ContainerField {
	return vc.Contents
}

func (vc *ValueContainer) Append(name string, value ValRep) {
	vc.Contents = append(vc.Contents, ContainerField{name, value})
	if vc.nameMap == nil {
		vc.nameMap = make(map[string]ValRep)
	}
	vc.nameMap[name] = value
}

func (vc *ValueContainer) GetField(name string) (ValRep, error) {
	field, ok := vc.nameMap[name]
	if !ok {
		return nil, fmt.Errorf("field named %s not found in container value mapping", name)
	}
	return field, nil
}

func (vc *ValueContainer) TypeName() string {
	return vc.Name
}

func (vc *ValueContainer) PackagePath() string {
	return vc.Package
}

func (vc *ValueContainer) FixedSize() int {
	if vc.IsVariableSized() {
		return 4
	}
	total := 0
	for _, c := range vc.Contents {
		o := c.Value
		total += o.FixedSize()
	}
	return total
}

func (vc *ValueContainer) IsVariableSized() bool {
	for _, c := range vc.Contents {
		if c.Value.IsVariableSized() {
			return true
		}
	}
	return false
}

var _ ValRep = &ValueContainer{}
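A short illustrative sketch of the sizing rules above (field names invented for the example): fixed-size members contribute their byte widths, while any variable-size member collapses the container's own fixed footprint to the 4-byte offset placeholder.

package main

import (
	"fmt"

	"github.com/prysmaticlabs/prysm/sszgen/types"
)

func main() {
	// All-fixed container: 4 bytes of vector + 8 bytes of uint64.
	fixed := &types.ValueContainer{Name: "Fork"}
	fixed.Append("PreviousVersion", &types.ValueVector{Size: 4, ElementValue: &types.ValueByte{Name: "byte"}})
	fixed.Append("Epoch", &types.ValueUint{Name: "uint64", Size: types.Uint64})
	fmt.Println(fixed.IsVariableSized(), fixed.FixedSize()) // false 12

	// One list member makes the whole container variable-sized.
	variable := &types.ValueContainer{Name: "Wrapper"}
	variable.Append("Roots", &types.ValueList{MaxSize: 16, ElementValue: &types.ValueByte{Name: "byte"}})
	fmt.Println(variable.IsVariableSized(), variable.FixedSize()) // true 4
}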
89
sszgen/types/container_test.go
Normal file
89
sszgen/types/container_test.go
Normal file
File diff suppressed because one or more lines are too long
24
sszgen/types/list.go
Normal file
24
sszgen/types/list.go
Normal file
@@ -0,0 +1,24 @@
package types

type ValueList struct {
	ElementValue ValRep
	MaxSize int
}

func (vl *ValueList) TypeName() string {
	return "[]" + vl.ElementValue.TypeName()
}

func (vl *ValueList) PackagePath() string {
	return vl.ElementValue.PackagePath()
}

func (vl *ValueList) FixedSize() int {
	return 4
}

func (vl *ValueList) IsVariableSized() bool {
	return true
}

var _ ValRep = &ValueList{}
63
sszgen/types/overlay.go
Normal file
63
sszgen/types/overlay.go
Normal file
@@ -0,0 +1,63 @@
package types

import "fmt"

type ValueOverlay struct {
	Name string
	Package string
	Underlying ValRep
}

func (vo *ValueOverlay) TypeName() string {
	return vo.Name
}

func (vo *ValueOverlay) PackagePath() string {
	return vo.Package
}

func (vo *ValueOverlay) FixedSize() int {
	if vo.IsBitfield() {
		return vo.bitfieldFixedSize()
	}
	return vo.Underlying.FixedSize()
}

func (vo *ValueOverlay) IsVariableSized() bool {
	return vo.Underlying.IsVariableSized()
}

func (vo *ValueOverlay) IsBitfield() bool {
	if vo.Package == "github.com/prysmaticlabs/go-bitfield" {
		return true
	}
	return false
}

func (vo *ValueOverlay) bitfieldFixedSize() int {
	switch vo.Name {
	case "Bitlist":
		return 4
	case "Bitlist64":
		return 4
	case "Bitvector4":
		return 1
	case "Bitvector8":
		return 1
	case "Bitvector32":
		return 4
	case "Bitvector64":
		return 8
	case "Bitvector128":
		return 16
	case "Bitvector256":
		return 32
	case "Bitvector512":
		return 64
	case "Bitvector1024":
		return 128
	}
	panic(fmt.Sprintf("can't determine the correct size for bitfield type = %s", vo.Name))
}

var _ ValRep = &ValueOverlay{}
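An illustrative sketch of the overlay behavior (values invented for the example): an ordinary alias delegates sizing to its underlying value, while recognized go-bitfield types use the hard-coded widths above.

package main

import (
	"fmt"

	"github.com/prysmaticlabs/prysm/sszgen/types"
)

func main() {
	// An alias over uint64 inherits the underlying 8-byte size.
	epoch := &types.ValueOverlay{
		Name:       "Epoch",
		Package:    "github.com/prysmaticlabs/eth2-types",
		Underlying: &types.ValueUint{Name: "uint64", Size: types.Uint64},
	}
	fmt.Println(epoch.IsBitfield(), epoch.FixedSize()) // false 8

	// A go-bitfield Bitvector64 is sized by the lookup table instead.
	agg := &types.ValueOverlay{
		Name:       "Bitvector64",
		Package:    "github.com/prysmaticlabs/go-bitfield",
		Underlying: &types.ValueList{MaxSize: 8, ElementValue: &types.ValueByte{Name: "byte"}},
	}
	fmt.Println(agg.IsBitfield(), agg.FixedSize()) // true 8
}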
23
sszgen/types/pointer.go
Normal file
23
sszgen/types/pointer.go
Normal file
@@ -0,0 +1,23 @@
package types

type ValuePointer struct {
	Referent ValRep
}

func (vp *ValuePointer) TypeName() string {
	return "*" + vp.Referent.TypeName()
}

func (vp *ValuePointer) PackagePath() string {
	return vp.Referent.PackagePath()
}

func (vp *ValuePointer) FixedSize() int {
	return vp.Referent.FixedSize()
}

func (vp *ValuePointer) IsVariableSized() bool {
	return vp.Referent.IsVariableSized()
}

var _ ValRep = &ValuePointer{}
36
sszgen/types/uint.go
Normal file
36
sszgen/types/uint.go
Normal file
@@ -0,0 +1,36 @@
package types

type UintSize int

const (
	Uint8 UintSize = 8
	Uint16 UintSize = 16
	Uint32 UintSize = 32
	Uint64 UintSize = 64
	Uint128 UintSize = 128
	Uint256 UintSize = 256
)

type ValueUint struct {
	Name string
	Size UintSize
	Package string
}

func (vu *ValueUint) TypeName() string {
	return vu.Name
}

func (vu *ValueUint) PackagePath() string {
	return vu.Package
}

func (vu *ValueUint) FixedSize() int {
	return int(vu.Size) / 8
}

func (vu *ValueUint) IsVariableSized() bool {
	return false
}

var _ ValRep = &ValueUint{}
23
sszgen/types/union.go
Normal file
23
sszgen/types/union.go
Normal file
@@ -0,0 +1,23 @@
package types

type ValueUnion struct {
	Name string
}

func (vu *ValueUnion) TypeName() string {
	return vu.Name
}

func (vu *ValueUnion) PackagePath() string {
	panic("not implemented")
}

func (vu *ValueUnion) FixedSize() int {
	panic("not implemented")
}

func (vu *ValueUnion) IsVariableSized() bool {
	panic("not implemented")
}

var _ ValRep = &ValueUnion{}
8
sszgen/types/valrep.go
Normal file
8
sszgen/types/valrep.go
Normal file
@@ -0,0 +1,8 @@
package types

type ValRep interface {
	TypeName() string
	FixedSize() int
	PackagePath() string
	IsVariableSized() bool
}
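To show how a backend might consume this interface, here is a hypothetical sketch (fixedOffsets is not part of the change) that walks a container's fields and computes where each field's fixed-size slot begins, using only the ValRep methods above:

package main

import (
	"fmt"

	"github.com/prysmaticlabs/prysm/sszgen/types"
)

// fixedOffsets reports each field's starting offset within the fixed-size
// portion of the SSZ encoding; variable-size fields occupy a 4-byte offset
// slot, per FixedSize above.
func fixedOffsets(vc *types.ValueContainer) map[string]int {
	offsets := make(map[string]int)
	next := 0
	for _, f := range vc.Fields() {
		offsets[f.Key] = next
		next += f.Value.FixedSize()
	}
	return offsets
}

func main() {
	c := &types.ValueContainer{Name: "Example"}
	c.Append("Slot", &types.ValueUint{Name: "uint64", Size: types.Uint64})
	c.Append("Root", &types.ValueVector{Size: 32, ElementValue: &types.ValueByte{Name: "byte"}})
	c.Append("Extra", &types.ValueList{MaxSize: 10, ElementValue: &types.ValueByte{Name: "byte"}})
	fmt.Println(fixedOffsets(c)) // map[Extra:40 Root:8 Slot:0]
}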
24
sszgen/types/vector.go
Normal file
24
sszgen/types/vector.go
Normal file
@@ -0,0 +1,24 @@
package types

type ValueVector struct {
	ElementValue ValRep
	Size int
}

func (vv *ValueVector) TypeName() string {
	return "[]" + vv.ElementValue.TypeName()
}

func (vv *ValueVector) FixedSize() int {
	return vv.Size * vv.ElementValue.FixedSize()
}

func (vv *ValueVector) PackagePath() string {
	return vv.ElementValue.PackagePath()
}

func (vv *ValueVector) IsVariableSized() bool {
	return vv.ElementValue.IsVariableSized()
}

var _ ValRep = &ValueVector{}
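Finally, an illustrative sketch (names invented) of how the wrapper values compose: TypeName and FixedSize both delegate through pointers and vectors down to the leaf values.

package main

import (
	"fmt"

	"github.com/prysmaticlabs/prysm/sszgen/types"
)

func main() {
	checkpoint := &types.ValueContainer{Name: "Checkpoint", Package: "example"}
	checkpoint.Append("Epoch", &types.ValueUint{Name: "uint64", Size: types.Uint64})

	// A 2-element vector of pointers to the container above.
	vec := &types.ValueVector{
		Size:         2,
		ElementValue: &types.ValuePointer{Referent: checkpoint},
	}
	fmt.Println(vec.TypeName())  // []*Checkpoint
	fmt.Println(vec.FixedSize()) // 16 (2 elements x 8 fixed bytes each)
}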