diff --git a/.github/actions/add_comment/action.yml b/.github/actions/add_comment/action.yml
new file mode 100644
index 000000000..5d579a492
--- /dev/null
+++ b/.github/actions/add_comment/action.yml
@@ -0,0 +1,41 @@
+name: Add Comment
+description: "Add or update a comment in the PR"
+inputs:
+  marker:
+    description: "Text used to find the comment to update"
+    required: true
+  markdown_path:
+    description: "Path to the file containing markdown"
+    required: true
+
+runs:
+  using: "composite"
+  steps:
+    - name: Add/Update Comment
+      uses: actions/github-script@v7
+      with:
+        script: |
+          const fs = require('fs');
+          const marker = "${{ inputs.marker }}";
+          const body = fs.readFileSync("${{ inputs.markdown_path }}", 'utf8');
+          const { data: comments } = await github.rest.issues.listComments({
+            owner: context.repo.owner,
+            repo: context.repo.repo,
+            issue_number: context.issue.number,
+          });
+          const existing = comments.find(c => c.body && c.body.startsWith(marker));
+          if (existing) {
+            await github.rest.issues.updateComment({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              comment_id: existing.id,
+              body,
+            });
+          } else {
+            await github.rest.issues.createComment({
+              owner: context.repo.owner,
+              repo: context.repo.repo,
+              issue_number: context.issue.number,
+              body,
+            });
+          }
\ No newline at end of file
diff --git a/.github/workflows/performance.yml b/.github/workflows/performance.yml
new file mode 100644
index 000000000..c3bac090f
--- /dev/null
+++ b/.github/workflows/performance.yml
@@ -0,0 +1,69 @@
+name: Performance
+
+on:
+  push:
+    branches:
+      - master
+  pull_request:
+  merge_group:
+  workflow_dispatch:
+
+concurrency:
+  group: ${{ github.workflow }}-${{ github.event.pull_request.number || github.ref }}
+  cancel-in-progress: true
+
+jobs:
+  examples:
+    timeout-minutes: 10
+    strategy:
+      fail-fast: false
+
+    defaults:
+      run:
+        shell: bash
+
+    name: "Performance"
+    runs-on: ubuntu-22.04
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+        with:
+          submodules: true
+
+      - name: Set up Docker Buildx
+        uses: docker/setup-buildx-action@v3
+
+      - name: Build Docker Image with cache
+        uses: docker/build-push-action@v6
+        with:
+          context: .
+          file: performance/Dockerfile
+          tags: test-node:latest
+          load: true
+          cache-from: type=gha
+          cache-to: type=gha,mode=max
+
+      - name: Run
+        run: |
+          ./performance/runner.sh
+
+      - name: Set up Nim for the aggregate script
+        uses: jiro4989/setup-nim-action@v2
+        with:
+          nim-version: "2.x"
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
+
+      - name: Aggregate and display summary
+        env:
+          MARKER: ""
+          PR_HEAD_SHA: ${{ github.event.pull_request.head.sha }}
+          COMMENT_SUMMARY_PATH: "/tmp/perf-summary.md"
+        run: |
+          nim c -r -d:release -o:/tmp/aggregate_stats ./performance/aggregate_stats.nim
+
+      - name: Post/Update PR Performance Comment
+        if: github.event_name == 'pull_request'
+        uses: ./.github/actions/add_comment
+        with:
+          marker: ""
+          markdown_path: "/tmp/perf-summary.md"
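Note on the empty MARKER above: aggregate_stats.nim prepends the marker to the generated report, and the add_comment action locates the previous summary with body.startsWith(marker), so the marker is expected to be a short, unique prefix that stays constant across runs. A hidden HTML comment such as "<!-- performance-summary -->" (a hypothetical value, not taken from this patch) is the usual choice; with an empty string, the action would simply update the first comment it finds on the PR.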
diff --git a/performance/aggregate_stats.nim b/performance/aggregate_stats.nim
new file mode 100644
index 000000000..d540a8894
--- /dev/null
+++ b/performance/aggregate_stats.nim
@@ -0,0 +1,130 @@
+import json
+import os
+import sequtils
+import strutils
+import strformat
+import tables
+import ./types
+
+const unknownFloat = -1.0
+
+proc parseJsonFiles*(outputDir: string): seq[JsonNode] =
+  var jsons: seq[JsonNode]
+
+  for kind, path in walkDir(outputDir):
+    if kind == pcFile and path.endsWith(".json"):
+      let content = readFile(path)
+      let json = parseJson(content)
+
+      jsons.add(json)
+
+  return jsons
+
+proc extractStats(scenario: JsonNode): Stats =
+  Stats(
+    scenarioName: scenario["scenarioName"].getStr(""),
+    totalSent: scenario["totalSent"].getInt(0),
+    totalReceived: scenario["totalReceived"].getInt(0),
+    latency: LatencyStats(
+      minLatencyMs: scenario["minLatencyMs"].getStr($unknownFloat).parseFloat(),
+      maxLatencyMs: scenario["maxLatencyMs"].getStr($unknownFloat).parseFloat(),
+      avgLatencyMs: scenario["avgLatencyMs"].getStr($unknownFloat).parseFloat(),
+    ),
+  )
+
+proc getJsonResults*(jsons: seq[JsonNode]): seq[Table[string, Stats]] =
+  jsons.mapIt(
+    it["results"]
+    .getElems(@[])
+    .mapIt(it.extractStats())
+    .mapIt((it.scenarioName, it)).toTable
+  )
+
+proc aggregateResults*(
+    jsonResults: seq[Table[string, Stats]]
+): (Table[string, Stats], Table[string, int]) =
+  var aggregated: Table[string, Stats]
+  var validNodes: Table[string, int]
+
+  for jsonResult in jsonResults:
+    for scenarioName, stats in jsonResult.pairs:
+      let startingStats = Stats(
+        scenarioName: scenarioName,
+        totalSent: 0,
+        totalReceived: 0,
+        latency: LatencyStats(minLatencyMs: Inf, maxLatencyMs: 0, avgLatencyMs: 0),
+      )
+      discard aggregated.hasKeyOrPut(scenarioName, startingStats)
+      discard validNodes.hasKeyOrPut(scenarioName, 0)
+
+      aggregated[scenarioName].totalSent += stats.totalSent
+      aggregated[scenarioName].totalReceived += stats.totalReceived
+
+      let minL = stats.latency.minLatencyMs
+      let maxL = stats.latency.maxLatencyMs
+      let avgL = stats.latency.avgLatencyMs
+      if minL != unknownFloat and maxL != unknownFloat and avgL != unknownFloat:
+        if minL < aggregated[scenarioName].latency.minLatencyMs:
+          aggregated[scenarioName].latency.minLatencyMs = minL
+
+        if maxL > aggregated[scenarioName].latency.maxLatencyMs:
+          aggregated[scenarioName].latency.maxLatencyMs = maxL
+
+        aggregated[scenarioName].latency.avgLatencyMs += avgL
+        # holds the sum of per-node averages; divided by the valid node count below
+
+        validNodes[scenarioName] += 1
+
+  for scenarioName, stats in aggregated.mpairs:
+    let nodes = validNodes[scenarioName]
+    let globalAvgLatency = stats.latency.avgLatencyMs / float(nodes)
+    stats.latency.avgLatencyMs = globalAvgLatency
+
+  return (aggregated, validNodes)
+
+proc getMarkdownReport*(
+    results: Table[string, Stats],
+    validNodes: Table[string, int],
+    marker: string,
+    commitSha: string,
+): string =
+  var output: seq[string]
+
+  output.add marker & "\n"
+  output.add "# 🏁 **Performance Summary**\n"
+
+  output.add fmt"**Commit:** `{commitSha}`"
+
+  output.add "| Scenario | Nodes | Total messages sent | Total messages received | Latency min (ms) | Latency max (ms) | Latency avg (ms) |"
+  output.add "|:---:|:---:|:---:|:---:|:---:|:---:|:---:|"
+
+  for scenarioName, stats in results.pairs:
+    let nodes = validNodes[scenarioName]
+    output.add fmt"| {stats.scenarioName} | {nodes} | {stats.totalSent} | {stats.totalReceived} | {stats.latency.minLatencyMs:.3f} | {stats.latency.maxLatencyMs:.3f} | {stats.latency.avgLatencyMs:.3f} |"
+
+  let markdown = output.join("\n")
+
+  return markdown
+
+proc main() =
+  let outputDir = "performance/output"
+  let parsedJsons = parseJsonFiles(outputDir)
+
+  let jsonResults = getJsonResults(parsedJsons)
+  let (aggregatedResults, validNodes) = aggregateResults(jsonResults)
+
+  let marker = getEnv("MARKER", "")
+  let commitSha = getEnv("PR_HEAD_SHA", getEnv("GITHUB_SHA", "unknown"))
+  let markdown = getMarkdownReport(aggregatedResults, validNodes, marker, commitSha)
+
+  echo markdown
+
+  # For GitHub summary
+  let summaryPath = getEnv("GITHUB_STEP_SUMMARY", "/tmp/summary.txt")
+  writeFile(summaryPath, markdown & "\n")
+
+  # For PR comment
+  let commentPath = getEnv("COMMENT_SUMMARY_PATH", "/tmp/summary.txt")
+  writeFile(commentPath, markdown & "\n")
+
+main()
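For reference, each node writes a JSON file to /output/<hostname>.json inside its container (see writeResultsToJson in the performance/utils.nim hunk further down), which the runner presumably maps into performance/output for the aggregator to walk. A sketch of one such file, with an illustrative scenario name and made-up numbers; note that the latency fields are formatted strings, which is why extractStats reads them with getStr and parseFloat:

{
  "results": [
    {
      "scenarioName": "baseTest",
      "totalSent": 1000,
      "totalReceived": 998,
      "minLatencyMs": "12.345",
      "maxLatencyMs": "98.765",
      "avgLatencyMs": "45.678"
    }
  ]
}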
diff --git a/performance/scenarios.nim b/performance/scenarios.nim
index be9234f5f..78f5d3de1 100644
--- a/performance/scenarios.nim
+++ b/performance/scenarios.nim
@@ -68,7 +68,7 @@ proc baseTest*() {.async.} =
   await sleepAsync(2.seconds)
 
   # --- Performance summary ---
-  let stats = getStats(receivedMessages[], sentMessages)
+  let stats = getStats(scenario, receivedMessages[], sentMessages)
   info "Performance summary", nodeId, stats = $stats
 
   let outputPath = "/output/" & hostname & ".json"
diff --git a/performance/types.nim b/performance/types.nim
new file mode 100644
index 000000000..dcb17f200
--- /dev/null
+++ b/performance/types.nim
@@ -0,0 +1,10 @@
+type LatencyStats* = object
+  minLatencyMs*: float
+  maxLatencyMs*: float
+  avgLatencyMs*: float
+
+type Stats* = object
+  scenarioName*: string
+  totalSent*: int
+  totalReceived*: int
+  latency*: LatencyStats
diff --git a/performance/utils.nim b/performance/utils.nim
index d4992502b..6b0167808 100644
--- a/performance/utils.nim
+++ b/performance/utils.nim
@@ -1,18 +1,19 @@
+import chronos
+import hashes
+import json
+import metrics
+import metrics/chronos_httpserver
+import sequtils
 import stew/byteutils
 import stew/endians2
 import strutils
 import strformat
-import sequtils
 import tables
-import hashes
-import metrics
-import metrics/chronos_httpserver
-import chronos
-import json
 import ../libp2p
 import ../libp2p/protocols/pubsub/rpc/messages
 import ../libp2p/muxers/mplex/lpchannel
 import ../libp2p/protocols/ping
+import ./types
 
 const topic* = "test"
 
@@ -170,11 +171,6 @@ proc publishMessagesWithWarmup*(
 
   return sentMessages
 
-type LatencyStats* = object
-  minLatencyMs*: float
-  maxLatencyMs*: float
-  avgLatencyMs*: float
-
 proc getLatencyStats*(latencies: seq[float]): LatencyStats =
   var minLatencyMs = 0.0
 
@@ -192,16 +188,20 @@ proc getLatencyStats*(latencies: seq[float]): LatencyStats =
   )
 
 type Stats* = object
+  scenarioName*: string
   totalSent*: int
   totalReceived*: int
   latency*: LatencyStats
 
 proc getStats*(
-    receivedMessages: Table[uint64, float], sentMessages: seq[uint64]
+    scenarioName: string,
+    receivedMessages: Table[uint64, float],
+    sentMessages: seq[uint64],
 ): Stats =
   let latencyStats = getLatencyStats(receivedMessages.values().toSeq())
 
   let stats = Stats(
+    scenarioName: scenarioName,
     totalSent: sentMessages.len,
     totalReceived: receivedMessages.len,
     latency: latencyStats,
@@ -221,12 +221,12 @@ proc writeResultsToJson*(outputPath: string, scenario: string, stats: Stats) =
     %*{
       "results": [
        {
-          "scenario": scenario,
+          "scenarioName": scenario,
           "totalSent": stats.totalSent,
           "totalReceived": stats.totalReceived,
-          "minLatency": formatLatencyMs(stats.latency.minLatencyMs),
-          "maxLatency": formatLatencyMs(stats.latency.maxLatencyMs),
-          "avgLatency": formatLatencyMs(stats.latency.avgLatencyMs),
+          "minLatencyMs": formatLatencyMs(stats.latency.minLatencyMs),
+          "maxLatencyMs": formatLatencyMs(stats.latency.maxLatencyMs),
+          "avgLatencyMs": formatLatencyMs(stats.latency.avgLatencyMs),
         }
       ]
     }
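Taken together, the per-scenario aggregation rule in aggregateResults is: sum the sent/received counts, take the minimum of the per-node minimums and the maximum of the per-node maximums, and report the mean of the per-node averages over the nodes that returned valid latencies. A minimal, self-contained Nim sketch of that rule (names and numbers below are illustrative, not part of this patch):

import strformat

# Per-node latency samples; the third node mirrors the unknownFloat (-1.0) sentinel.
let nodes = @[
  (minMs: 12.0, maxMs: 80.0, avgMs: 30.0),
  (minMs: 10.0, maxMs: 95.0, avgMs: 40.0),
  (minMs: -1.0, maxMs: -1.0, avgMs: -1.0)
]

var minMs = Inf
var maxMs = 0.0
var sumOfAvgs = 0.0
var validNodes = 0

for n in nodes:
  if n.minMs < 0 or n.maxMs < 0 or n.avgMs < 0:
    continue # skip nodes that could not report latency
  minMs = min(minMs, n.minMs)
  maxMs = max(maxMs, n.maxMs)
  sumOfAvgs += n.avgMs
  inc validNodes

# Mean of per-node averages, matching the division by validNodes in aggregateResults.
let avgMs = sumOfAvgs / float(validNodes)
echo fmt"nodes={validNodes} min={minMs:.3f} max={maxMs:.3f} avg={avgMs:.3f}"
# prints: nodes=2 min=10.000 max=95.000 avg=35.000

Averaging the per-node averages gives every node equal weight regardless of how many messages it received; a message-weighted latency would additionally need the per-node message counts folded into the average.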