diff --git a/.gitignore b/.gitignore
index f6e5694e17..612f4e09b1 100644
--- a/.gitignore
+++ b/.gitignore
@@ -33,3 +33,6 @@ dist
 # libfuzzer
 oom-*
 crash-*
+
+# deepsource cli
+bin
diff --git a/scripts/ci-coverage.sh b/scripts/ci-coverage.sh
new file mode 100755
index 0000000000..448db0e413
--- /dev/null
+++ b/scripts/ci-coverage.sh
@@ -0,0 +1,16 @@
+#!/bin/bash
+
+# Run coverage tests
+./bazel.sh --bazelrc=.buildkite-bazelrc coverage --config=remote-cache --features=norace --test_tag_filters="-race_on" --nocache_test_results -k //...
+
+# Collect all coverage results into a single file (for deepsource).
+find $(./bazel.sh --bazelrc=.buildkite-bazelrc info bazel-testlogs) -iname coverage.dat | xargs -t -rd '\n' ./bazel.sh --bazelrc=.buildkite-bazelrc run //tools/gocovmerge:gocovmerge -- > /tmp/cover.out
+
+# Download deepsource CLI
+curl https://deepsource.io/cli | sh
+
+# Upload to deepsource (requires DEEPSOURCE_DSN environment variable)
+./bin/deepsource report --analyzer test-coverage --key go --value-file /tmp/cover.out
+
+# Upload to codecov (requires CODECOV_TOKEN environment variable)
+bash <(curl -s https://codecov.io/bash) -s $(./bazel.sh info bazel-testlogs) -f '**/coverage.dat'
diff --git a/tools/gocovmerge/BUILD.bazel b/tools/gocovmerge/BUILD.bazel
new file mode 100644
index 0000000000..4221ddcb65
--- /dev/null
+++ b/tools/gocovmerge/BUILD.bazel
@@ -0,0 +1,16 @@
+load("@io_bazel_rules_go//go:def.bzl", "go_binary")
+load("@prysm//tools/go:def.bzl", "go_library")
+
+go_library(
+    name = "go_default_library",
+    srcs = ["main.go"],
+    importpath = "github.com/prysmaticlabs/prysm/tools/gocovmerge",
+    visibility = ["//visibility:private"],
+    deps = ["@org_golang_x_tools//cover:go_default_library"],
+)
+
+go_binary(
+    name = "gocovmerge",
+    embed = [":go_default_library"],
+    visibility = ["//visibility:public"],
+)
diff --git a/tools/gocovmerge/main.go b/tools/gocovmerge/main.go
new file mode 100644
index 0000000000..a79a8d8273
--- /dev/null
+++ b/tools/gocovmerge/main.go
@@ -0,0 +1,142 @@
+// gocovmerge takes the results from multiple `go test -coverprofile` runs and
+// merges them into one profile
+//
+// Copied, with minor changes, from https://github.com/wadey/gocovmerge under BSD-2-Clause License
+package main
+
+import (
+	"flag"
+	"fmt"
+	"io"
+	"log"
+	"os"
+	"sort"
+
+	"golang.org/x/tools/cover"
+)
+
+func mergeProfiles(p, merge *cover.Profile) {
+	if p.Mode != merge.Mode {
+		log.Fatalf("cannot merge profiles with different modes")
+
+	}
+	// Since the blocks are sorted, we can keep track of where the last block
+	// was inserted and only look at the blocks after that as targets for merge
+	startIndex := 0
+	for _, b := range merge.Blocks {
+		startIndex = mergeProfileBlock(p, b, startIndex)
+
+	}
+
+}
+
+func mergeProfileBlock(p *cover.Profile, pb cover.ProfileBlock, startIndex int) int {
+	sortFunc := func(i int) bool {
+		pi := p.Blocks[i+startIndex]
+		return pi.StartLine >= pb.StartLine && (pi.StartLine != pb.StartLine || pi.StartCol >= pb.StartCol)
+
+	}
+
+	i := 0
+	if !sortFunc(i) {
+		i = sort.Search(len(p.Blocks)-startIndex, sortFunc)
+
+	}
+	i += startIndex
+	if i < len(p.Blocks) && p.Blocks[i].StartLine == pb.StartLine && p.Blocks[i].StartCol == pb.StartCol {
+		if p.Blocks[i].EndLine != pb.EndLine || p.Blocks[i].EndCol != pb.EndCol {
+			log.Fatalf("OVERLAP MERGE: %v %v %v", p.FileName, p.Blocks[i], pb)
+
+		}
+		switch p.Mode {
+		case "set":
+			p.Blocks[i].Count |= pb.Count
+		case "count", "atomic":
+			p.Blocks[i].Count += pb.Count
+		default:
+			log.Fatalf("unsupported covermode: '%s'", p.Mode)
+
+		}
+
+	} else {
+		if i > 0 {
+			pa := p.Blocks[i-1]
+			if pa.EndLine >= pb.EndLine && (pa.EndLine != pb.EndLine || pa.EndCol > pb.EndCol) {
+				log.Fatalf("OVERLAP BEFORE: %v %v %v", p.FileName, pa, pb)
+
+			}
+
+		}
+		if i < len(p.Blocks)-1 {
+			pa := p.Blocks[i+1]
+			if pa.StartLine <= pb.StartLine && (pa.StartLine != pb.StartLine || pa.StartCol < pb.StartCol) {
+				log.Fatalf("OVERLAP AFTER: %v %v %v", p.FileName, pa, pb)
+
+			}
+
+		}
+		p.Blocks = append(p.Blocks, cover.ProfileBlock{})
+		copy(p.Blocks[i+1:], p.Blocks[i:])
+		p.Blocks[i] = pb
+
+	}
+	return i + 1
+
+}
+
+func addProfile(profiles []*cover.Profile, p *cover.Profile) []*cover.Profile {
+	i := sort.Search(len(profiles), func(i int) bool { return profiles[i].FileName >= p.FileName })
+	if i < len(profiles) && profiles[i].FileName == p.FileName {
+		mergeProfiles(profiles[i], p)
+
+	} else {
+		profiles = append(profiles, nil)
+		copy(profiles[i+1:], profiles[i:])
+		profiles[i] = p
+
+	}
+	return profiles
+
+}
+
+func dumpProfiles(profiles []*cover.Profile, out io.Writer) {
+	if len(profiles) == 0 {
+		return
+
+	}
+	if _, err := fmt.Fprintf(out, "mode: %s\n", profiles[0].Mode); err != nil {
+		panic(err)
+	}
+	for _, p := range profiles {
+		for _, b := range p.Blocks {
+			if _, err := fmt.Fprintf(out, "%s:%d.%d,%d.%d %d %d\n", p.FileName, b.StartLine, b.StartCol, b.EndLine, b.EndCol, b.NumStmt, b.Count); err != nil {
+				panic(err)
+			}
+
+		}
+
+	}
+
+}
+
+func main() {
+	flag.Parse()
+
+	var merged []*cover.Profile
+
+	for _, file := range flag.Args() {
+		profiles, err := cover.ParseProfiles(file)
+		if err != nil {
+			log.Fatalf("failed to parse profiles: %v", err)
+
+		}
+		for _, p := range profiles {
+			merged = addProfile(merged, p)
+
+		}
+
+	}
+
+	dumpProfiles(merged, os.Stdout)
+
+}