feat(mix): sphinx (#1691)

richΛrd
2025-09-18 09:04:44 -04:00
committed by GitHub
parent 94ad1dcbc8
commit e09457da12
7 changed files with 637 additions and 2 deletions

View File

@@ -20,7 +20,7 @@ proc fieldElementToBytes*(fe: FieldElement): seq[byte] =
proc generateRandomFieldElement*(): Result[FieldElement, string] =
let rng = HmacDrbgContext.new()
if rng.isNil:
return err("Failed to creat HmacDrbgContext with system randomness")
return err("Failed to create HmacDrbgContext with system randomness")
ok(Curve25519Key.random(rng[]))
# Generate a key pair (private key and public key are both FieldElements)

View File

@@ -0,0 +1,97 @@
import results, sugar, sequtils, strutils
import ./serialization
import stew/[base58, endians2]
import ../../[multicodec, multiaddress, peerid]
const
PeerIdByteLen = 39 # ed25519 and secp256k1 multihash length
MinMultiAddrComponentLen = 3
MaxMultiAddrComponentLen = 6 # quic + circuit relay
# TODO: Add support for ipv6, dns, dns4, and ws/wss/sni
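# Byte layout produced by multiAddrToBytes below (zero-padded up to AddrSize):
#   bytes 0..3    IPv4 address octets
#   byte  4       transport flag: 0 = tcp, 1 = udp/quic-v1
#   bytes 5..6    port, big-endian uint16
#   bytes 7..45   PeerId multihash (the relay server when p2p-circuit is used)
#   bytes 46..84  destination PeerId multihash, only for p2p-circuit addresses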
proc multiAddrToBytes*(
multiAddr: MultiAddress
): Result[seq[byte], string] {.raises: [].} =
var ma = multiAddr
let sma = multiAddr.items().toSeq()
var res: seq[byte] = @[]
if not (sma.len >= MinMultiAddrComponentLen and sma.len <= MaxMultiAddrComponentLen):
return err("Invalid multiaddress format")
# Only IPv4 is supported
let isCircuitRelay = ?ma.contains(multiCodec("p2p-circuit"))
let baseP2PEndIdx = if isCircuitRelay: 4 else: 2
let baseAddr =
try:
if sma.len - 1 - baseP2PEndIdx < 0:
return err("Invalid multiaddress format")
sma[0 .. sma.len - baseP2PEndIdx].mapIt(it.tryGet()).foldl(a & b)
except LPError as exc:
return err("Could not obtain base address: " & exc.msg)
let isQuic = QUIC_V1_IP.match(baseAddr)
let isTCP = TCP_IP.match(baseAddr)
if not (isTCP or isQuic):
return err("Unsupported protocol")
# 4 bytes for the IP
let ip = ?ma.getPart(multiCodec("ip4")).value().protoArgument()
res.add(ip)
var port: string
if isQuic:
res.add(1.byte) # Protocol byte
let udpPortPart = ma.getPart(multiCodec("udp")).value()
port = $udpPortPart
elif isTCP:
res.add(0.byte) # Protocol byte
let tcpPortPart = ma.getPart(multiCodec("tcp")).value()
port = $tcpPortPart
let portNum = ?catch(port.split('/')[2].parseInt()).mapErr(x => x.msg)
res.add(portNum.uint16.toBytesBE())
# PeerID (39 bytes); if using circuit relay, this represents the relay server
let p2pPart = ?ma.getPart(multiCodec("p2p"))
let peerId = ?PeerId.init(?p2pPart.protoArgument()).mapErr(x => $x)
if peerId.data.len != PeerIdByteLen:
return err("unsupported PeerId key type")
res.add(peerId.data)
if isCircuitRelay:
let dstPart = ?sma[^1]
let dstPeerId = ?PeerId.init(?dstPart.protoArgument()).mapErr(x => $x)
if dstPeerId.data.len != PeerIdByteLen:
return err("unsupported PeerId key type")
res.add(dstPeerId.data)
if res.len > AddrSize:
return err("Address must be <= " & $AddrSize & " bytes")
return ok(res & newSeq[byte](AddrSize - res.len))
proc bytesToMultiAddr*(bytes: openArray[byte]): Result[MultiAddress, string] =
if bytes.len != AddrSize:
return err("Address must be exactly " & $AddrSize & " bytes")
let
ip = bytes[0 .. 3].mapIt($it).join(".")
protocol = if bytes[4] == 0: "tcp" else: "udp"
quic = if bytes[4] == 1: "/quic-v1" else: ""
port = uint16.fromBytesBE(bytes[5 .. 6])
# peerId1 is the relay server's peer ID for p2p-circuit addresses; otherwise it is the node's own peerId
peerId1 = "/p2p/" & Base58.encode(bytes[7 ..< 46])
peerId2Bytes = bytes[7 + PeerIdByteLen ..< 7 + (PeerIdByteLen * 2)]
# peerId2 will contain a value only if this is a p2p-circuit address
peerId2 =
if peerId2Bytes != newSeq[byte](PeerIdByteLen):
"/p2p-circuit/p2p/" & Base58.encode(peerId2Bytes)
else:
""
return MultiAddress
.init("/ip4/" & ip & "/" & protocol & "/" & $port & quic & peerId1 & peerId2)
.mapErr(x => $x)

View File

@@ -32,6 +32,9 @@ proc init*(
): T =
return T(Alpha: alpha, Beta: beta, Gamma: gamma)
proc get*(header: Header): (seq[byte], seq[byte], seq[byte]) =
(header.Alpha, header.Beta, header.Gamma)
proc serialize*(header: Header): seq[byte] =
doAssert header.Alpha.len == AlphaSize,
"Alpha must be exactly " & $AlphaSize & " bytes"
@@ -160,6 +163,9 @@ type SphinxPacket* = object
proc init*(T: typedesc[SphinxPacket], header: Header, payload: seq[byte]): T =
T(Hdr: header, Payload: payload)
proc get*(packet: SphinxPacket): (Header, seq[byte]) =
(packet.Hdr, packet.Payload)
proc serialize*(packet: SphinxPacket): seq[byte] =
let headerBytes = packet.Hdr.serialize()
return headerBytes & packet.Payload

View File

@@ -0,0 +1,285 @@
import results, sequtils, stew/endians2
import ./[crypto, curve25519, serialization, tag_manager]
type ProcessingStatus* = enum
Exit # Packet processed successfully at exit
Intermediate # Packet processed successfully at intermediate node
Duplicate # Packet was discarded due to duplicate tag
InvalidMAC # Packet was discarded due to MAC verification failure
proc computeAlpha(
publicKeys: openArray[FieldElement]
): Result[(seq[byte], seq[seq[byte]]), string] =
## Compute alpha, an ephemeral public value. Each mix node uses its private key and
## alpha to derive a shared session key for that hop.
## This session key is used to decrypt and process one layer of the packet.
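## A sketch of the recursion implemented below, with x the sender's ephemeral
## scalar, b_i the blinders, and pk_i the hop public keys:
##   alpha_0 = x * G
##   alpha_i = b_i * alpha_(i-1)
##   s_i     = (x * b_1 * ... * b_i) * pk_i   (shared secret for hop i)
##   b_(i+1) = H(alpha_i || s_i)              (blinder for the next hop)
## Only alpha_0 is placed in the packet; each hop re-derives its secret from the
## alpha it receives and its own private key, then blinds alpha for the next hop.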
if publicKeys.len == 0:
return err("No public keys provided")
var
s: seq[seq[byte]] = newSeq[seq[byte]](publicKeys.len)
alpha_0: seq[byte]
alpha: FieldElement
secret: FieldElement
blinders: seq[FieldElement] = @[]
let x = generateRandomFieldElement().valueOr:
return err("Generate field element error: " & error)
blinders.add(x)
for i in 0 ..< publicKeys.len:
if publicKeys[i].len != FieldElementSize:
return err("Invalid public key size: " & $i)
# Compute alpha, shared secret, and blinder
if i == 0:
alpha = multiplyBasePointWithScalars([blinders[i]]).valueOr:
return err("Multiply base point with scalars error: " & error)
alpha_0 = fieldElementToBytes(alpha)
else:
alpha = multiplyPointWithScalars(alpha, [blinders[i]])
# TODO: Optimize point multiplication by multiplying scalars first
secret = multiplyPointWithScalars(publicKeys[i], blinders)
let blinder = bytesToFieldElement(
sha256_hash(fieldElementToBytes(alpha) & fieldElementToBytes(secret))
).valueOr:
return err("Error in bytes to field element conversion: " & error)
blinders.add(blinder)
s[i] = fieldElementToBytes(secret)
return ok((alpha_0, s))
proc deriveKeyMaterial(keyName: string, s: seq[byte]): seq[byte] =
@(keyName.toOpenArrayByte(0, keyName.high)) & s
proc computeFillerStrings(s: seq[seq[byte]]): Result[seq[byte], string] =
var filler: seq[byte] = @[] # Start with an empty filler string
for i in 1 ..< s.len:
# Derive AES key and IV
let
aes_key = deriveKeyMaterial("aes_key", s[i - 1]).kdf()
iv = deriveKeyMaterial("iv", s[i - 1]).kdf()
# Compute filler string
let
fillerLength = (t + 1) * k
zeroPadding = newSeq[byte](fillerLength)
filler = aes_ctr_start_index(
aes_key, iv, filler & zeroPadding, (((t + 1) * (r - i)) + t + 2) * k
)
return ok(filler)
const paddingLength = (((t + 1) * (r - L)) + 1) * k
# Function to compute Beta and Gamma
proc computeBetaGamma(
s: seq[seq[byte]],
hop: openArray[Hop],
delay: openArray[seq[byte]],
destHop: Hop,
id: I,
): Result[tuple[beta: seq[byte], gamma: seq[byte]], string] =
## Calculates the following elements:
## - Beta: The nested encrypted routing information. It encodes the next hop address, the forwarding delay, integrity check Gamma for the next hop, and the Beta for subsequent hops.
## - Gamma: A message authentication code computed over Beta using the session key derived from Alpha. It ensures header integrity at each hop.
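## Working inward from the last hop, each iteration below computes
##   beta_i  = AES-CTR(aes_key_i, iv_i, nextHop_(i+1) || delay_i || gamma_(i+1) || beta_(i+1) prefix)
##   gamma_i = HMAC(mac_key_i, beta_i)
## For the final hop, beta instead encrypts the destination, delay, id, and zero
## padding, with the precomputed filler appended so beta keeps a fixed length.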
let sLen = s.len
var
beta: seq[byte]
gamma: seq[byte]
# Compute filler strings
let filler = computeFillerStrings(s).valueOr:
return err("Error in filler generation: " & error)
for i in countdown(sLen - 1, 0):
# Derive AES key, MAC key, and IV
let
beta_aes_key = deriveKeyMaterial("aes_key", s[i]).kdf()
mac_key = deriveKeyMaterial("mac_key", s[i]).kdf()
beta_iv = deriveKeyMaterial("iv", s[i]).kdf()
# Compute Beta and Gamma
if i == sLen - 1:
let destBytes = destHop.serialize()
let destPadding = destBytes & delay[i] & @id & newSeq[byte](paddingLength)
let aes = aes_ctr(beta_aes_key, beta_iv, destPadding)
beta = aes & filler
else:
let routingInfo = RoutingInfo.init(
hop[i + 1], delay[i], gamma, beta[0 .. (((r * (t + 1)) - t) * k) - 1]
)
let serializedRoutingInfo = routingInfo.serialize()
beta = aes_ctr(beta_aes_key, beta_iv, serializedRoutingInfo)
gamma = hmac(mac_key, beta).toSeq()
return ok((beta: beta, gamma: gamma))
proc computeDelta(s: seq[seq[byte]], msg: Message): Result[seq[byte], string] =
let sLen = s.len
var delta: seq[byte]
for i in countdown(sLen - 1, 0):
# Derive AES key and IV
let
delta_aes_key = deriveKeyMaterial("delta_aes_key", s[i]).kdf()
delta_iv = deriveKeyMaterial("delta_iv", s[i]).kdf()
# Compute Delta
if i == sLen - 1:
let serializedMsg = msg.serialize()
delta = aes_ctr(delta_aes_key, delta_iv, serializedMsg)
else:
delta = aes_ctr(delta_aes_key, delta_iv, delta)
return ok(delta)
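# Delta is the payload onion: the innermost AES-CTR layer uses the last hop's key
# and the outermost the first hop's. Since AES-CTR is an XOR keystream, a hop in
# processSphinxPacket strips its layer by applying aes_ctr again with its own key.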
proc wrapInSphinxPacket*(
msg: Message,
publicKeys: openArray[FieldElement],
delay: openArray[seq[byte]],
hop: openArray[Hop],
destHop: Hop,
): Result[seq[byte], string] =
# Compute alpha and shared secrets
let (alpha_0, s) = computeAlpha(publicKeys).valueOr:
return err("Error in alpha generation: " & error)
# Compute beta and gamma
let (beta_0, gamma_0) = computeBetaGamma(s, hop, delay, destHop, default(I)).valueOr:
return err("Error in beta and gamma generation: " & error)
# Compute delta
let delta_0 = computeDelta(s, msg).valueOr:
return err("Error in delta generation: " & error)
# Serialize sphinx packet
let sphinxPacket = SphinxPacket.init(Header.init(alpha_0, beta_0, gamma_0), delta_0)
let serialized = sphinxPacket.serialize()
return ok(serialized)
type ProcessedSphinxPacket* = object
case status*: ProcessingStatus
of ProcessingStatus.Exit:
destination*: Hop
messageChunk*: seq[byte]
of ProcessingStatus.Intermediate:
nextHop*: Hop
delayMs*: int
serializedSphinxPacket*: seq[byte]
else:
discard
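# Hypothetical caller sketch (deliverExit/forwardAfterDelay are placeholders and
# not part of this change), dispatching on the returned status:
#
#   let processed = ?processSphinxPacket(packet, privateKey, tm)
#   case processed.status
#   of Exit:
#     deliverExit(processed.destination, processed.messageChunk)
#   of Intermediate:
#     forwardAfterDelay(processed.nextHop, processed.delayMs,
#       processed.serializedSphinxPacket)
#   of Duplicate, InvalidMAC:
#     discard # drop the packet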
proc processSphinxPacket*(
sphinxPacket: SphinxPacket, privateKey: FieldElement, tm: var TagManager
): Result[ProcessedSphinxPacket, string] =
let
(header, payload) = sphinxPacket.get()
(alpha, beta, gamma) = header.get()
# Compute shared secret
let alphaFE = bytesToFieldElement(alpha).valueOr:
return err("Error in bytes to field element conversion: " & error)
let
s = multiplyPointWithScalars(alphaFE, [privateKey])
sBytes = fieldElementToBytes(s)
# Check if the tag has been seen
if isTagSeen(tm, s):
return ok(ProcessedSphinxPacket(status: Duplicate))
# Compute MAC
let mac_key = deriveKeyMaterial("mac_key", sBytes).kdf()
if not (hmac(mac_key, beta).toSeq() == gamma):
# If MAC not verified
return ok(ProcessedSphinxPacket(status: InvalidMAC))
# Store the tag as seen
addTag(tm, s)
# Derive AES key and IV
let
beta_aes_key = deriveKeyMaterial("aes_key", sBytes).kdf()
beta_iv = deriveKeyMaterial("iv", sBytes).kdf()
delta_aes_key = deriveKeyMaterial("delta_aes_key", sBytes).kdf()
delta_iv = deriveKeyMaterial("delta_iv", sBytes).kdf()
# Compute delta
let delta_prime = aes_ctr(delta_aes_key, delta_iv, payload)
# Compute B
var zeroPadding = newSeq[byte]((t + 1) * k)
let B = aes_ctr(beta_aes_key, beta_iv, beta & zeroPadding)
# Check if B has the required prefix for the original message
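# B is (beta & zero padding) with this hop's encryption layer removed. Its shape
# determines this node's role: if everything after the first routing slot is zero,
# this is the exit hop and the first AddrSize bytes hold the destination; if the
# slot starts with t*k zero bytes, the packet is a reply (not handled yet);
# otherwise B carries the routing info for the next hop.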
zeroPadding = newSeq[byte](paddingLength)
if B[((t + 1) * k) .. ((t + 1) * k) + paddingLength - 1] == zeroPadding:
let hop = Hop.deserialize(B[0 .. AddrSize - 1]).valueOr:
return err(error)
if B[AddrSize .. ((t + 1) * k) - 1] == newSeq[byte](k + 2):
if delta_prime[0 .. (k - 1)] == newSeq[byte](k):
let msg = Message.deserialize(delta_prime).valueOr:
return err("Message deserialization error: " & error)
return ok(
ProcessedSphinxPacket(
status: Exit, destination: hop, messageChunk: msg[0 .. MessageSize - 1]
)
)
else:
return err("delta_prime should be all zeros")
elif B[0 .. (t * k) - 1] == newSeq[byte](t * k):
# TODO: handle REPLY case
discard
else:
# Extract routing information from B
let routingInfo = RoutingInfo.deserialize(B).valueOr:
return err("Routing info deserialization error: " & error)
let (address, delay, gamma_prime, beta_prime) = routingInfo.getRoutingInfo()
# Compute alpha
let blinder = bytesToFieldElement(sha256_hash(alpha & sBytes)).valueOr:
return err("Error in bytes to field element conversion: " & error)
let alphaFE = bytesToFieldElement(alpha).valueOr:
return err("Error in bytes to field element conversion: " & error)
let alpha_prime = multiplyPointWithScalars(alphaFE, [blinder])
# Serialize sphinx packet
let sphinxPkt = SphinxPacket.init(
Header.init(fieldElementToBytes(alpha_prime), beta_prime, gamma_prime),
delta_prime,
)
return ok(
ProcessedSphinxPacket(
status: Intermediate,
nextHop: address,
delayMs: uint16.fromBytes(delay).int,
serializedSphinxPacket: sphinxPkt.serialize(),
)
)

View File

@@ -0,0 +1,59 @@
{.used.}
import results, unittest
import ../../libp2p/protocols/mix/[serialization, multiaddr]
import ../../libp2p/multiaddress
template maddr(ma: string): MultiAddress =
MultiAddress.init(ma).tryGet()
template maddrConversionShouldFail(ma: string, msg: string) =
test msg:
echo MultiAddress
.init(ma)
.expect("could not initialize multiaddr")
.multiAddrToBytes()
.error()
check:
MultiAddress
.init(ma)
.expect("could not initialize multiaddr")
.multiAddrToBytes().isErr
suite "Utils tests":
test "multi_addr_conversion":
let multiAddrs = [
"/ip4/0.0.0.0/tcp/4242/p2p/16Uiu2HAmFkwLVsVh6gGPmSm9R3X4scJ5thVdKfWYeJsKeVrbcgVC",
"/ip4/10.0.0.1/tcp/1234/p2p/16Uiu2HAmDHw4mwBdEjxjJPhrt8Eq1kvDjXAuwkqCmhNiz363AFV2",
"/ip4/192.168.1.1/udp/8080/quic-v1/p2p/16Uiu2HAm6WNzw8AssyPscYYi8x1bY5wXyQrGTShRH75bh5dPCjBQ",
"/ip4/10.0.0.1/tcp/1234/p2p/16Uiu2HAmDHw4mwBdEjxjJPhrt8Eq1kvDjXAuwkqCmhNiz363AFV2/p2p-circuit/p2p/16Uiu2HAm6WNzw8AssyPscYYi8x1bY5wXyQrGTShRH75bh5dPCjBQ",
"/ip4/10.0.0.1/udp/1234/quic-v1/p2p/16Uiu2HAmDHw4mwBdEjxjJPhrt8Eq1kvDjXAuwkqCmhNiz363AFV2/p2p-circuit/p2p/16Uiu2HAm6WNzw8AssyPscYYi8x1bY5wXyQrGTShRH75bh5dPCjBQ",
]
for multiAddr in multiAddrs:
let ma = maddr(multiAddr)
let multiAddrBytes = ma.multiAddrToBytes().expect("conversion failed")
check multiAddrBytes.len == AddrSize
let deserializedMa = bytesToMultiAddr(multiAddrBytes).expect("conversion failed")
check deserializedMa == ma
maddrConversionShouldFail(
"/ip4/0.0.0.0/tcp/4242/quic-v1/p2p/16Uiu2HAmFkwLVsVh6gGPmSm9R3X4scJ5thVdKfWYeJsKeVrbcgVC",
"invalid_protocol",
)
maddrConversionShouldFail(
"/ip4/0.0.0.0/tcp/4242/p2p/QmcycySVeRSftFQGM392xCqDh6UUbhSU9ykNpxrFBPX3gJ",
"invalid_peerid_length",
)
maddrConversionShouldFail("/ip4/0.0.0.0/tcp/4242", "invalid_multiaddress_format")
maddrConversionShouldFail(
"/ip4/0.0.0.0/tcp/4242/p2p-circuit", "invalid_multiaddress_format_circuit_relay"
)
test "invalid_addr_length":
let invalidBytes = newSeq[byte](AddrSize - 1)
check:
bytesToMultiAddr(invalidBytes).isErr

tests/mix/testsphinx.nim
View File

@@ -0,0 +1,188 @@
{.used.}
import random, results, unittest
import ../../libp2p/crypto/crypto
import ../../libp2p/protocols/mix/[curve25519, serialization, sphinx, tag_manager]
import bearssl/rand
# Helper function to pad/truncate message
proc padMessage(message: openArray[byte], size: int): seq[byte] =
if message.len >= size:
return message[0 .. size - 1] # Truncate if larger
else:
result = @message
let paddingLength = size - message.len
result.add(newSeq[byte](paddingLength)) # Pad with zeros
# Helper function to create dummy data
proc createDummyData(): (
Message, seq[FieldElement], seq[FieldElement], seq[seq[byte]], seq[Hop], Hop
) =
let (privateKey1, publicKey1) = generateKeyPair().expect("generate keypair error")
let (privateKey2, publicKey2) = generateKeyPair().expect("generate keypair error")
let (privateKey3, publicKey3) = generateKeyPair().expect("generate keypair error")
let
privateKeys = @[privateKey1, privateKey2, privateKey3]
publicKeys = @[publicKey1, publicKey2, publicKey3]
delay = @[newSeq[byte](DelaySize), newSeq[byte](DelaySize), newSeq[byte](DelaySize)]
hops =
@[
Hop.init(newSeq[byte](AddrSize)),
Hop.init(newSeq[byte](AddrSize)),
Hop.init(newSeq[byte](AddrSize)),
]
message = newSeq[byte](MessageSize)
dest = Hop.init(newSeq[byte](AddrSize))
return (message, privateKeys, publicKeys, delay, hops, dest)
proc randomI(): I =
newRng()[].generate(I)
# Unit tests for sphinx.nim
suite "Sphinx Tests":
var tm: TagManager
setup:
tm = TagManager.new()
teardown:
clearTags(tm)
test "sphinx_wrap_and_process":
let (message, privateKeys, publicKeys, delay, hops, dest) = createDummyData()
let packetBytes = wrapInSphinxPacket(message, publicKeys, delay, hops, dest).expect(
"sphinx wrap error"
)
check packetBytes.len == PacketSize
let packet = SphinxPacket.deserialize(packetBytes).expect("Sphinx wrap error")
let processedSP1 =
processSphinxPacket(packet, privateKeys[0], tm).expect("Sphinx processing error")
check:
processedSP1.status == Intermediate
processedSP1.serializedSphinxPacket.len == PacketSize
let processedPacket1 = SphinxPacket
.deserialize(processedSP1.serializedSphinxPacket)
.expect("Sphinx wrap error")
let processedSP2 = processSphinxPacket(processedPacket1, privateKeys[1], tm).expect(
"Sphinx processing error"
)
check:
processedSP2.status == Intermediate
processedSP2.serializedSphinxPacket.len == PacketSize
let processedPacket2 = SphinxPacket
.deserialize(processedSP2.serializedSphinxPacket)
.expect("Sphinx wrap error")
let processedSP3 = processSphinxPacket(processedPacket2, privateKeys[2], tm).expect(
"Sphinx processing error"
)
check:
processedSP3.status == Exit
processedSP3.messageChunk == message
test "sphinx_wrap_empty_public_keys":
let (message, _, _, delay, _, dest) = createDummyData()
check wrapInSphinxPacket(message, @[], delay, @[], dest).isErr
test "sphinx_process_invalid_mac":
let (message, privateKeys, publicKeys, delay, hops, dest) = createDummyData()
let packetBytes = wrapInSphinxPacket(message, publicKeys, delay, hops, dest).expect(
"Sphinx wrap error"
)
check packetBytes.len == PacketSize
# Corrupt the MAC for testing
var tamperedPacketBytes = packetBytes
tamperedPacketBytes[0] = packetBytes[0] xor 0x01
let tamperedPacket =
SphinxPacket.deserialize(tamperedPacketBytes).expect("Sphinx wrap error")
let invalidMacPkt = processSphinxPacket(tamperedPacket, privateKeys[0], tm).expect(
"Sphinx processing error"
)
check invalidMacPkt.status == InvalidMAC
test "sphinx_process_duplicate_tag":
let (message, privateKeys, publicKeys, delay, hops, dest) = createDummyData()
let packetBytes = wrapInSphinxPacket(message, publicKeys, delay, hops, dest).expect(
"Sphinx wrap error"
)
check packetBytes.len == PacketSize
let packet = SphinxPacket.deserialize(packetBytes).expect("Sphinx wrap error")
# Process the packet twice to test duplicate tag handling
let processedSP1 =
processSphinxPacket(packet, privateKeys[0], tm).expect("Sphinx processing error")
check processedSP1.status == Intermediate
let processedSP2 =
processSphinxPacket(packet, privateKeys[0], tm).expect("Sphinx processing error")
check processedSP2.status == Duplicate
test "sphinx_wrap_and_process_message_sizes":
let MessageSizes = @[32, 64, 128, 256, 512]
for size in MessageSizes:
let (_, privateKeys, publicKeys, delay, hops, dest) = createDummyData()
var message = newSeq[byte](size)
randomize()
for i in 0 ..< size:
message[i] = byte(rand(255)) # rand's upper bound is inclusive, so 255 keeps the value in byte range
let paddedMessage = padMessage(message, MessageSize)
let packetBytes = wrapInSphinxPacket(paddedMessage, publicKeys, delay, hops, dest)
.expect("Sphinx wrap error")
check packetBytes.len == PacketSize
let packet = SphinxPacket.deserialize(packetBytes).expect("Sphinx wrap error")
let processedSP1 = processSphinxPacket(packet, privateKeys[0], tm).expect(
"Sphinx processing error"
)
check:
processedSP1.status == Intermediate
processedSP1.serializedSphinxPacket.len == PacketSize
let processedPacket1 = SphinxPacket
.deserialize(processedSP1.serializedSphinxPacket)
.expect("Sphinx wrap error")
let processedSP2 = processSphinxPacket(processedPacket1, privateKeys[1], tm)
.expect("Sphinx processing error")
check:
processedSP2.status == Intermediate
processedSP2.serializedSphinxPacket.len == PacketSize
let processedPacket2 = SphinxPacket
.deserialize(processedSP2.serializedSphinxPacket)
.expect("Sphinx wrap error")
let processedSP3 = processSphinxPacket(processedPacket2, privateKeys[2], tm)
.expect("Error in Sphinx processing")
check:
processedSP3.status == Exit
processedSP3.messageChunk == paddedMessage

View File

@@ -45,5 +45,5 @@ when defined(libp2p_autotls_support):
import
mix/[
testcrypto, testcurve25519, testtagmanager, testseqnogenerator, testserialization,
- testmixmessage,
+ testmixmessage, testsphinx, testmultiaddr,
]