chore: replace hypersync with hyperrpc

moebius
2025-03-06 17:58:25 +01:00
parent e11a235b9a
commit 6bf4b9aa55
8 changed files with 422 additions and 492 deletions

View File

@@ -48,7 +48,6 @@
"test:cov": "vitest run --config vitest.config.ts --coverage"
},
"dependencies": {
"@envio-dev/hypersync-client": "0.6.3",
"@types/snarkjs": "0.7.9",
"@zk-kit/lean-imt": "2.2.2",
"maci-crypto": "2.5.0",

View File

@@ -1,6 +1,6 @@
import { poseidon } from "maci-crypto/build/ts/hashing.js";
import { Hash, Secret } from "../types/commitment.js";
import { Hex, bytesToNumber } from "viem";
import { Hex, bytesToBigInt, bytesToNumber } from "viem";
import { english, generateMnemonic, mnemonicToAccount } from "viem/accounts";
import { DataService } from "./data.service.js";
import {
@@ -204,7 +204,7 @@ export class AccountService {
label: Hash,
blockNumber: bigint,
timestamp: bigint,
txHash: Hash,
txHash: Hex,
): PoolAccount {
const precommitment = this._hashPrecommitment(nullifier, secret);
const commitment = this._hashCommitment(value, label, precommitment);
@@ -265,7 +265,7 @@ export class AccountService {
secret: Secret,
blockNumber: bigint,
timestamp: bigint,
txHash: Hash,
txHash: Hex,
): AccountCommitment {
// Update last update timestamp
if (timestamp > this.account.lastUpdateTimestamp) {
@@ -314,53 +314,6 @@ export class AccountService {
return newCommitment;
}
/**
* Process withdrawals for a given chain and block range
*
* @param chainId - The chain ID to process withdrawals for
* @param fromBlock - The starting block number
* @param foundAccounts - Map of accounts indexed by label
*/
private async _processWithdrawals(
chainId: number,
fromBlock: bigint,
foundAccounts: Map<Hash, PoolAccount>,
): Promise<void> {
const withdrawals = await this.dataService.getWithdrawals(chainId, {
fromBlock,
});
for (const withdrawal of withdrawals) {
for (const account of foundAccounts.values()) {
const isParentCommitment =
BigInt(account.deposit.nullifier) === BigInt(withdrawal.spentNullifier) ||
account.children.some(child => BigInt(child.nullifier) === BigInt(withdrawal.spentNullifier));
if (isParentCommitment) {
const parentCommitment = account.children.length > 0
? account.children[account.children.length - 1]
: account.deposit;
if (!parentCommitment) {
this.logger.warn(`No parent commitment found for withdrawal ${withdrawal.spentNullifier.toString()}`);
continue;
}
this.addWithdrawalCommitment(
parentCommitment,
withdrawal.withdrawn,
withdrawal.spentNullifier as unknown as Secret,
parentCommitment.secret,
withdrawal.blockNumber,
withdrawal.timestamp,
withdrawal.transactionHash,
);
break;
}
}
}
}
/**
* Retrieves the history of deposits and withdrawals for the given pools.
*
@@ -469,4 +422,44 @@ export class AccountService {
}),
);
}
private async _processWithdrawals(
chainId: number,
fromBlock: bigint,
foundAccounts: Map<Hash, PoolAccount>,
): Promise<void> {
const withdrawals = await this.dataService.getWithdrawals(chainId, {
fromBlock,
});
for (const withdrawal of withdrawals) {
for (const account of foundAccounts.values()) {
const isParentCommitment =
BigInt(account.deposit.nullifier) === BigInt(withdrawal.spentNullifier) ||
account.children.some(child => BigInt(child.nullifier) === BigInt(withdrawal.spentNullifier));
if (isParentCommitment) {
const parentCommitment = account.children.length > 0
? account.children[account.children.length - 1]
: account.deposit;
if (!parentCommitment) {
this.logger.warn(`No parent commitment found for withdrawal ${withdrawal.spentNullifier.toString()}`);
continue;
}
this.addWithdrawalCommitment(
parentCommitment,
withdrawal.withdrawn,
withdrawal.spentNullifier as unknown as Secret,
parentCommitment.secret,
withdrawal.blockNumber,
withdrawal.timestamp,
withdrawal.transactionHash,
);
break;
}
}
}
}
}
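For context, the relocated _processWithdrawals links a withdrawal to an account by comparing the spent nullifier against the deposit nullifier and every child nullifier, then chains the new commitment onto the account's latest state (the last child, or the deposit if there are no children). A hypothetical standalone helper expressing that selection is sketched below; the helper name and import paths are illustrative and not part of this commit.

import type { Hash } from "../types/commitment.js";
import type { AccountCommitment, PoolAccount } from "../types/account.js";

// Illustrative sketch only: returns the commitment a withdrawal with the given
// spent nullifier would be chained onto, mirroring the selection logic in
// _processWithdrawals (the deposit or any child may match the nullifier, but the
// last child, i.e. the account's most recent commitment, is used as the parent).
function findParentCommitment(
  account: PoolAccount,
  spentNullifier: Hash,
): AccountCommitment | undefined {
  const matches =
    BigInt(account.deposit.nullifier) === BigInt(spentNullifier) ||
    account.children.some(
      (child) => BigInt(child.nullifier) === BigInt(spentNullifier),
    );
  if (!matches) return undefined;
  return account.children.length > 0
    ? account.children[account.children.length - 1]
    : account.deposit;
}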

View File

@@ -1,10 +1,10 @@
import {
HypersyncClient,
presetQueryLogsOfEvent,
Query,
QueryResponse,
Log
} from "@envio-dev/hypersync-client";
type PublicClient,
createPublicClient,
http,
parseAbiItem,
type Address,
} from "viem";
import {
ChainConfig,
DepositEvent,
@@ -12,28 +12,33 @@ import {
WithdrawalEvent,
RagequitEvent,
} from "../types/events.js";
import { bigintToHash } from "../crypto.js";
import { Hash } from "../types/commitment.js";
import { Logger } from "../utils/logger.js";
import { DataError } from "../errors/data.error.js";
import { ErrorCode } from "../errors/base.error.js";
// Event signatures from the contract
const DEPOSIT_EVENT = parseAbiItem('event Deposited(address indexed _depositor, uint256 _commitment, uint256 _label, uint256 _value, uint256 _merkleRoot)');
const WITHDRAWAL_EVENT = parseAbiItem('event Withdrawn(address indexed _processooor, uint256 _value, uint256 _spentNullifier, uint256 _newCommitment)');
const RAGEQUIT_EVENT = parseAbiItem('event Ragequit(address indexed _ragequitter, uint256 _commitment, uint256 _label, uint256 _value)');
/**
* Service responsible for fetching and managing privacy pool events across multiple chains.
* Handles event retrieval, parsing, and validation for deposits, withdrawals, and ragequits.
*
* @remarks
* This service uses HypersyncClient to efficiently fetch and process blockchain events.
* This service uses viem's PublicClient to efficiently fetch and process blockchain events.
* It supports multiple chains and provides robust error handling and validation.
* All uint256 values from events are handled as bigints, with Hash type assertions for commitment-related fields.
*/
export class DataService {
private readonly clients: Map<number, HypersyncClient> = new Map();
private readonly clients: Map<number, PublicClient> = new Map();
private readonly logger: Logger;
/**
* Initialize the data service with chain configurations
*
* @param chainConfigs - Array of chain configurations
* @param chainConfigs - Array of chain configurations containing chainId, RPC URL, and API key
* @throws {DataError} If client initialization fails for any chain
*/
constructor(private readonly chainConfigs: ChainConfig[]) {
@@ -41,14 +46,19 @@ export class DataService {
try {
for (const config of chainConfigs) {
const client = HypersyncClient.new({
url: this.getHypersyncUrlForChain(config.chainId),
if (!config.rpcUrl || !config.apiKey) {
throw new Error(`Missing RPC URL or API key for chain ${config.chainId}`);
}
const client = createPublicClient({
transport: http(config.rpcUrl),
key: config.apiKey,
});
this.clients.set(config.chainId, client);
}
} catch (error) {
throw new DataError(
"Failed to initialize HypersyncClient",
"Failed to initialize PublicClient",
ErrorCode.NETWORK_ERROR,
{ error: error instanceof Error ? error.message : "Unknown error" },
);
@@ -56,12 +66,12 @@ export class DataService {
}
/**
* Get deposits for a specific chain
* Get deposit events for a specific chain
*
* @param chainId - Chain ID to fetch deposits from
* @param options - Event filter options
* @returns Array of deposit events
* @throws {DataError} If client is not configured or network error occurs
* @param chainId - Chain ID to fetch events from
* @param options - Event filter options including fromBlock, toBlock, and other filters
* @returns Array of deposit events with properly typed fields (bigint for numbers, Hash for commitments)
* @throws {DataError} If client is not configured, network error occurs, or event data is invalid
*/
async getDeposits(
chainId: number,
@@ -71,99 +81,54 @@ export class DataService {
const client = this.getClientForChain(chainId);
const config = this.getConfigForChain(chainId);
const fromBlock = options.fromBlock ?? config.startBlock;
const toBlock = options.toBlock ?? undefined;
this.logger.debug(
`Fetching deposits for chain ${chainId} from block ${fromBlock}${toBlock ? ` to ${toBlock}` : ""
}`,
);
const query = presetQueryLogsOfEvent(
config.privacyPoolAddress,
// topic0 is keccak256("Deposited(address,uint256,uint256,uint256,uint256)")
"0xe3b53cd1a44fbf11535e145d80b8ef1ed6d57a73bf5daa7e939b6b01657d6549",
Number(fromBlock),
toBlock ? Number(toBlock) : undefined,
);
if (options.depositor) {
const queryWithTopics = query as Query & { topics: (string | null)[] };
const topic0 = queryWithTopics.topics[0];
if (!topic0) {
throw DataError.invalidLog("deposit", "missing topic0");
}
queryWithTopics.topics = [
topic0,
`0x000000000000000000000000${options.depositor.slice(2)}`,
];
}
const res = await client.get(query);
// Create a map of block numbers to timestamps
const blockTimestamps = new Map(
res.data.blocks.map(block => [
block.number,
block.timestamp ? BigInt(block.timestamp) : 0n
])
);
return res.data.logs.map((log) => {
let a: Log = log;
if (!log.topics || log.topics.length < 2) {
throw DataError.invalidLog("deposit", "missing topics");
}
const depositorTopic = log.topics[1];
if (!depositorTopic) {
throw DataError.invalidLog("deposit", "missing depositor topic");
}
const depositor = BigInt(depositorTopic);
if (!log.data) {
throw DataError.invalidLog("deposit", "missing data");
}
const data = log.data.slice(2).match(/.{64}/g);
if (!data || data.length < 4) {
throw DataError.invalidLog("deposit", "insufficient data");
}
const commitment = BigInt("0x" + data[0]);
const label = BigInt("0x" + data[1]);
const value = BigInt("0x" + data[2]);
const precommitment = BigInt("0x" + data[3]);
if (
!depositor ||
!commitment ||
!label ||
!value ||
!log.blockNumber ||
!log.transactionHash
) {
throw DataError.invalidLog("deposit", "missing required fields");
}
const blockNumber = BigInt(log.blockNumber);
const timestamp = blockTimestamps.get(Number(blockNumber));
if (!timestamp) {
throw DataError.invalidLog("deposit", "missing block timestamp");
}
return {
depositor: `0x${depositor.toString(16).padStart(40, "0")}`,
commitment: bigintToHash(commitment),
label: bigintToHash(label),
value,
precommitment: bigintToHash(precommitment),
blockNumber,
timestamp,
transactionHash: log.transactionHash as unknown as Hash,
};
const logs = await client.getLogs({
address: config.privacyPoolAddress,
event: DEPOSIT_EVENT,
fromBlock: options.fromBlock ?? config.startBlock,
toBlock: options.toBlock,
}).catch(error => {
throw new DataError(
"Failed to fetch deposit logs",
ErrorCode.NETWORK_ERROR,
{ error: error instanceof Error ? error.message : "Unknown error" },
);
});
return await Promise.all(logs.map(async (log) => {
try {
const block = await client.getBlock({ blockNumber: log.blockNumber });
if (!log.args) {
throw DataError.invalidLog("deposit", "missing args");
}
const {
_depositor: depositor,
_commitment: commitment,
_label: label,
_value: value,
_merkleRoot: precommitment,
} = log.args;
if (!depositor || !commitment || !label || !value || !precommitment || !log.blockNumber || !log.transactionHash) {
throw DataError.invalidLog("deposit", "missing required fields");
}
return {
depositor: depositor.toLowerCase(),
commitment: commitment as Hash,
label: label as Hash,
value,
precommitment: precommitment as Hash,
blockNumber: BigInt(log.blockNumber),
timestamp: BigInt(block.timestamp),
transactionHash: log.transactionHash,
};
} catch (error) {
if (error instanceof DataError) throw error;
throw DataError.invalidLog("deposit", error instanceof Error ? error.message : "Unknown error");
}
}));
} catch (error) {
if (error instanceof DataError) throw error;
throw DataError.networkError(chainId, error instanceof Error ? error : new Error(String(error)));
@@ -171,12 +136,12 @@ export class DataService {
}
/**
* Get withdrawals for a specific chain
* Get withdrawal events for a specific chain
*
* @param chainId - Chain ID to fetch withdrawals from
* @param options - Event filter options
* @returns Array of withdrawal events
* @throws {DataError} If client is not configured or network error occurs
* @param chainId - Chain ID to fetch events from
* @param options - Event filter options including fromBlock, toBlock, and other filters
* @returns Array of withdrawal events with properly typed fields (bigint for numbers, Hash for commitments)
* @throws {DataError} If client is not configured, network error occurs, or event data is invalid
*/
async getWithdrawals(
chainId: number,
@@ -186,81 +151,50 @@ export class DataService {
const client = this.getClientForChain(chainId);
const config = this.getConfigForChain(chainId);
const fromBlock = options.fromBlock ?? config.startBlock;
const toBlock = options.toBlock ?? undefined;
this.logger.debug(
`Fetching withdrawals for chain ${chainId} from block ${fromBlock}${toBlock ? ` to ${toBlock}` : ""
}`,
);
const query = presetQueryLogsOfEvent(
config.privacyPoolAddress,
// topic0 is keccak256("Withdrawn(address,uint256,uint256,uint256)")
"0x75e161b3e824b114fc1a33274bd7091918dd4e639cede50b78b15a4eea956a21",
Number(fromBlock),
toBlock ? Number(toBlock) : undefined,
);
const res = await client.get(query);
// Create a map of block numbers to timestamps
const blockTimestamps = new Map(
res.data.blocks.map(block => [
block.number,
block.timestamp ? BigInt(block.timestamp) : 0n
])
);
return res.data.logs.map((log) => {
if (!log.topics || log.topics.length < 2) {
throw DataError.invalidLog("withdrawal", "missing topics");
}
const processorTopic = log.topics[1];
if (!processorTopic) {
throw DataError.invalidLog("withdrawal", "missing processor topic");
}
const processor = BigInt(processorTopic);
if (!log.data) {
throw DataError.invalidLog("withdrawal", "missing data");
}
const data = log.data.slice(2).match(/.{64}/g);
if (!data || data.length < 3) {
throw DataError.invalidLog("withdrawal", "insufficient data");
}
const value = BigInt("0x" + data[0]);
const spentNullifier = BigInt("0x" + data[1]);
const newCommitment = BigInt("0x" + data[2]);
if (
!value ||
!spentNullifier ||
!newCommitment ||
!log.blockNumber ||
!log.transactionHash
) {
throw DataError.invalidLog("withdrawal", "missing required fields");
}
const blockNumber = BigInt(log.blockNumber);
const timestamp = blockTimestamps.get(Number(blockNumber));
if (!timestamp) {
throw DataError.invalidLog("withdrawal", "missing block timestamp");
}
return {
withdrawn: value,
spentNullifier: bigintToHash(spentNullifier),
newCommitment: bigintToHash(newCommitment),
blockNumber,
timestamp,
transactionHash: log.transactionHash as unknown as Hash,
};
const logs = await client.getLogs({
address: config.privacyPoolAddress,
event: WITHDRAWAL_EVENT,
fromBlock: options.fromBlock ?? config.startBlock,
toBlock: options.toBlock,
}).catch(error => {
throw new DataError(
"Failed to fetch withdrawal logs",
ErrorCode.NETWORK_ERROR,
{ error: error instanceof Error ? error.message : "Unknown error" },
);
});
return await Promise.all(logs.map(async (log) => {
try {
const block = await client.getBlock({ blockNumber: log.blockNumber });
if (!log.args) {
throw DataError.invalidLog("withdrawal", "missing args");
}
const {
_value: value,
_spentNullifier: spentNullifier,
_newCommitment: newCommitment,
} = log.args;
if (!value || !spentNullifier || !newCommitment || !log.blockNumber || !log.transactionHash) {
throw DataError.invalidLog("withdrawal", "missing required fields");
}
return {
withdrawn: value,
spentNullifier: spentNullifier as Hash,
newCommitment: newCommitment as Hash,
blockNumber: BigInt(log.blockNumber),
timestamp: BigInt(block.timestamp),
transactionHash: log.transactionHash,
};
} catch (error) {
if (error instanceof DataError) throw error;
throw DataError.invalidLog("withdrawal", error instanceof Error ? error.message : "Unknown error");
}
}));
} catch (error) {
if (error instanceof DataError) throw error;
throw DataError.networkError(chainId, error instanceof Error ? error : new Error(String(error)));
@@ -270,10 +204,10 @@ export class DataService {
/**
* Get ragequit events for a specific chain
*
* @param chainId - Chain ID to fetch ragequits from
* @param options - Event filter options
* @returns Array of ragequit events
* @throws {DataError} If client is not configured or network error occurs
* @param chainId - Chain ID to fetch events from
* @param options - Event filter options including fromBlock, toBlock, and other filters
* @returns Array of ragequit events with properly typed fields (bigint for numbers, Hash for commitments)
* @throws {DataError} If client is not configured, network error occurs, or event data is invalid
*/
async getRagequits(
chainId: number,
@@ -283,121 +217,59 @@ export class DataService {
const client = this.getClientForChain(chainId);
const config = this.getConfigForChain(chainId);
const fromBlock = options.fromBlock ?? config.startBlock;
const toBlock = options.toBlock ?? undefined;
this.logger.debug(
`Fetching ragequits for chain ${chainId} from block ${fromBlock}${toBlock ? ` to ${toBlock}` : ""
}`,
);
const query = presetQueryLogsOfEvent(
config.privacyPoolAddress,
// topic0 is keccak256("Ragequit(address,uint256,uint256,uint256)")
"0xd2b3e868ae101106371f2bd93abc8d5a4de488b9fe47ed122c23625aa7172f13",
Number(fromBlock),
toBlock ? Number(toBlock) : undefined,
);
const res = await client.get(query);
// Create a map of block numbers to timestamps
const blockTimestamps = new Map(
res.data.blocks.map(block => [
block.number,
block.timestamp ? BigInt(block.timestamp) : 0n
])
);
return res.data.logs.map((log) => {
if (!log.topics || log.topics.length < 2) {
throw DataError.invalidLog("ragequit", "missing topics");
}
const ragequitterTopic = log.topics[1];
if (!ragequitterTopic) {
throw DataError.invalidLog("ragequit", "missing ragequitter topic");
}
const ragequitter = BigInt(ragequitterTopic);
if (!log.data) {
throw DataError.invalidLog("ragequit", "missing data");
}
const data = log.data.slice(2).match(/.{64}/g);
if (!data || data.length < 3) {
throw DataError.invalidLog("ragequit", "insufficient data");
}
const commitment = BigInt("0x" + data[0]);
const label = BigInt("0x" + data[1]);
const value = BigInt("0x" + data[2]);
if (
!ragequitter ||
!commitment ||
!label ||
!value ||
!log.blockNumber ||
!log.transactionHash
) {
throw DataError.invalidLog("ragequit", "missing required fields");
}
const blockNumber = BigInt(log.blockNumber);
const timestamp = blockTimestamps.get(Number(blockNumber));
if (!timestamp) {
throw DataError.invalidLog("ragequit", "missing block timestamp");
}
return {
ragequitter: `0x${ragequitter.toString(16).padStart(40, "0")}`,
commitment: bigintToHash(commitment),
label: bigintToHash(label),
value,
blockNumber,
timestamp,
transactionHash: log.transactionHash as unknown as Hash,
};
const logs = await client.getLogs({
address: config.privacyPoolAddress,
event: RAGEQUIT_EVENT,
fromBlock: options.fromBlock ?? config.startBlock,
toBlock: options.toBlock,
}).catch(error => {
throw new DataError(
"Failed to fetch ragequit logs",
ErrorCode.NETWORK_ERROR,
{ error: error instanceof Error ? error.message : "Unknown error" },
);
});
return await Promise.all(logs.map(async (log) => {
try {
const block = await client.getBlock({ blockNumber: log.blockNumber });
if (!log.args) {
throw DataError.invalidLog("ragequit", "missing args");
}
const {
_ragequitter: ragequitter,
_commitment: commitment,
_label: label,
_value: value,
} = log.args;
if (!ragequitter || !commitment || !label || !value || !log.blockNumber || !log.transactionHash) {
throw DataError.invalidLog("ragequit", "missing required fields");
}
return {
ragequitter: ragequitter.toLowerCase(),
commitment: commitment as Hash,
label: label as Hash,
value,
blockNumber: BigInt(log.blockNumber),
timestamp: BigInt(block.timestamp),
transactionHash: log.transactionHash,
};
} catch (error) {
if (error instanceof DataError) throw error;
throw DataError.invalidLog("ragequit", error instanceof Error ? error.message : "Unknown error");
}
}));
} catch (error) {
if (error instanceof DataError) throw error;
throw DataError.networkError(chainId, error instanceof Error ? error : new Error(String(error)));
}
}
/**
* Get all events (deposits and withdrawals) for a specific chain in chronological order
*
* @param chainId - Chain ID to fetch events from
* @param options - Event filter options
* @returns Array of events sorted by block number
* @throws {DataError} If client is not configured or network error occurs
*/
async getAllEvents(chainId: number, options: EventFilterOptions = {}) {
try {
const [deposits, withdrawals] = await Promise.all([
this.getDeposits(chainId, options),
this.getWithdrawals(chainId, options),
]);
return [
...deposits.map((d) => ({ type: "deposit" as const, ...d })),
...withdrawals.map((w) => ({ type: "withdrawal" as const, ...w })),
].sort((a, b) => {
const blockDiff = a.blockNumber - b.blockNumber;
if (blockDiff === 0n) {
return a.type === "deposit" ? -1 : 1;
}
return Number(blockDiff);
});
} catch (error) {
if (error instanceof DataError) throw error;
throw DataError.networkError(chainId, error instanceof Error ? error : new Error(String(error)));
}
}
private getClientForChain(chainId: number): HypersyncClient {
private getClientForChain(chainId: number): PublicClient {
const client = this.clients.get(chainId);
if (!client) {
throw DataError.chainNotConfigured(chainId);
@@ -406,27 +278,10 @@ export class DataService {
}
private getConfigForChain(chainId: number): ChainConfig {
const config = this.chainConfigs.find((c) => c.chainId === chainId);
const config = this.chainConfigs.find(c => c.chainId === chainId);
if (!config) {
throw DataError.chainNotConfigured(chainId);
}
return config;
}
private getHypersyncUrlForChain(chainId: number): string {
switch (chainId) {
case 1: // Ethereum Mainnet
return "https://eth.hypersync.xyz";
case 137: // Polygon
return "https://polygon.hypersync.xyz";
case 42161: // Arbitrum
return "https://arbitrum.hypersync.xyz";
case 10: // Optimism
return "https://optimism.hypersync.xyz";
case 11155111: // Sepolia
return "https://sepolia.hypersync.xyz";
default:
throw DataError.chainNotConfigured(chainId);
}
}
}
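For reference, the replacement boils down to the standard viem log-fetching pattern: create a public client over HTTP, query logs with a parsed ABI event so the arguments come back decoded, and fetch each log's block for its timestamp. The sketch below is a minimal, self-contained illustration of that flow; the rpcUrl argument, pool address parameter, and return shape are assumptions rather than values from this repository.

import { createPublicClient, http, parseAbiItem, type Address } from "viem";

// Same event signature the new DataService parses.
const DEPOSIT_EVENT = parseAbiItem(
  "event Deposited(address indexed _depositor, uint256 _commitment, uint256 _label, uint256 _value, uint256 _merkleRoot)",
);

// Minimal sketch of the getLogs + getBlock flow used by the new getDeposits.
async function fetchDeposits(rpcUrl: string, pool: Address, fromBlock: bigint) {
  const client = createPublicClient({ transport: http(rpcUrl) });

  // Passing a parsed ABI event makes viem decode topics and data into typed args,
  // so no manual topic slicing or 32-byte word splitting is needed.
  const logs = await client.getLogs({
    address: pool,
    event: DEPOSIT_EVENT,
    fromBlock,
  });

  return Promise.all(
    logs.map(async (log) => {
      if (log.blockNumber === null || log.transactionHash === null) {
        throw new Error("Received a pending log without block data");
      }
      // Each log's block is fetched to recover the timestamp, mirroring
      // the per-log getBlock calls in the new implementation.
      const block = await client.getBlock({ blockNumber: log.blockNumber });
      return {
        depositor: log.args._depositor,
        commitment: log.args._commitment,
        label: log.args._label,
        value: log.args._value,
        blockNumber: log.blockNumber,
        timestamp: block.timestamp,
        transactionHash: log.transactionHash,
      };
    }),
  );
}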

View File

@@ -1,110 +0,0 @@
import { DataService } from "../core/data.service.js";
import { ChainConfig, DepositEvent, WithdrawalEvent, RagequitEvent } from "../types/events.js";
async function main() {
// These values will be provided:
// - chainId of the testnet
// - contract address of the deployed privacy pool
// - deployment block
// - hypersync endpoint
const ENVIO_TOKEN = process.env.ENVIO_TOKEN;
if (!ENVIO_TOKEN) {
throw new Error("ENVIO_TOKEN environment variable is required");
}
const testnetConfig: ChainConfig = {
chainId: 11155111,
rpcUrl: "https://1rpc.io/sepolia",
privacyPoolAddress: "0x2A4Ba229D7EA7eeBF847df17778a5C5C78b3efF6",
startBlock: 7705830n,
envioToken: ENVIO_TOKEN,
};
console.log("Starting Envio event fetching test...");
console.log("Configuration:", {
chainId: testnetConfig.chainId,
privacyPoolAddress: testnetConfig.privacyPoolAddress,
startBlock: testnetConfig.startBlock,
});
// Initialize service
const dataService = new DataService([testnetConfig]);
try {
// Test deposit event fetching
console.log("\nFetching deposit events...");
const deposits = await dataService.getDeposits(testnetConfig.chainId, {
fromBlock: testnetConfig.startBlock,
});
console.log(`Found ${deposits.length} deposits`);
if (deposits.length > 0) {
console.log("\nDeposit events:");
deposits.forEach((deposit, i) => {
console.log(`\nDeposit ${i + 1}:`, {
blockNumber: deposit.blockNumber.toString(),
depositor: deposit.depositor,
value: deposit.value.toString(),
commitment: deposit.commitment,
label: deposit.label,
transactionHash: deposit.transactionHash,
});
});
}
// Test withdrawal event fetching
console.log("\nFetching withdrawal events...");
const withdrawals = await dataService.getWithdrawals(testnetConfig.chainId, {
fromBlock: testnetConfig.startBlock,
});
console.log(`Found ${withdrawals.length} withdrawals`);
if (withdrawals.length > 0) {
console.log("\nWithdrawal events:");
withdrawals.forEach((withdrawal, i) => {
console.log(`\nWithdrawal ${i + 1}:`, {
blockNumber: withdrawal.blockNumber.toString(),
withdrawn: withdrawal.withdrawn.toString(),
spentNullifier: withdrawal.spentNullifier,
newCommitment: withdrawal.newCommitment,
transactionHash: withdrawal.transactionHash,
});
});
}
// Test ragequit event fetching
console.log("\nFetching ragequit events...");
const ragequits = await dataService.getRagequits(testnetConfig.chainId, {
fromBlock: testnetConfig.startBlock,
});
console.log(`Found ${ragequits.length} ragequits`);
if (ragequits.length > 0) {
console.log("\nRagequit events:");
ragequits.forEach((ragequit, i) => {
console.log(`\nRagequit ${i + 1}:`, {
blockNumber: ragequit.blockNumber.toString(),
ragequitter: ragequit.ragequitter,
value: ragequit.value.toString(),
commitment: ragequit.commitment,
label: ragequit.label,
transactionHash: ragequit.transactionHash,
});
});
}
} catch (error) {
console.error("\nError during event fetching test:", error);
throw error;
}
}
// Only run if called directly
if (process.argv[1] === new URL(import.meta.url).pathname) {
main()
.then(() => process.exit(0))
.catch((error) => {
console.error("Integration test failed:", error);
process.exit(1);
});
}

View File

@@ -1,5 +1,5 @@
import { Hash, Secret } from "./commitment.js";
import { Address } from "viem";
import { Address, Hex } from "viem";
export interface PoolAccount {
label: Hash;
@@ -15,19 +15,19 @@ export interface AccountCommitment {
secret: Secret;
blockNumber: bigint;
timestamp: bigint;
txHash: Hash;
txHash: Hex;
}
export interface PrivacyPoolAccount {
masterKeys: [Secret, Secret];
poolAccounts: Map<bigint, PoolAccount[]>;
poolAccounts: Map<Hash, PoolAccount[]>;
creationTimestamp: bigint;
lastUpdateTimestamp: bigint;
}
export interface PoolInfo {
chainId: number;
address: Address;
address: string;
scope: Hash;
deploymentBlock: bigint;
}

View File

@@ -1,4 +1,4 @@
import { Address } from "viem";
import { Address, Hex } from "viem";
import { Hash } from "./commitment.js";
/**
@@ -12,7 +12,7 @@ export interface DepositEvent {
precommitment: Hash;
blockNumber: bigint;
timestamp: bigint;
transactionHash: Hash;
transactionHash: Hex;
}
/**
@@ -24,7 +24,7 @@ export interface WithdrawalEvent {
newCommitment: Hash;
blockNumber: bigint;
timestamp: bigint;
transactionHash: Hash;
transactionHash: Hex;
}
/**
@@ -37,7 +37,7 @@ export interface RagequitEvent {
value: bigint;
blockNumber: bigint;
timestamp: bigint;
transactionHash: Hash;
transactionHash: Hex;
}
/**
@@ -47,8 +47,8 @@ export interface ChainConfig {
chainId: number;
privacyPoolAddress: Address;
startBlock: bigint;
rpcUrl?: string;
envioToken?: string;
rpcUrl: string;
apiKey: string;
}
/**

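With rpcUrl and apiKey now required on ChainConfig, a chain entry is configured roughly as follows. The pool address, start block, RPC endpoint, and HYPERSYNC_API_KEY environment variable mirror the Sepolia values used by the new integration test; the import path is an assumption and depends on where the config is built.

import type { ChainConfig } from "../types/events.js";

// Hypothetical Sepolia configuration mirroring the integration test added in
// this commit; adapt the env var handling to your own secret management.
const sepoliaConfig: ChainConfig = {
  chainId: 11155111,
  privacyPoolAddress: "0xbbe3b00d54f0ee032eff07a47139da8d44095c96",
  startBlock: 7781496n,
  rpcUrl: "https://sepolia.rpc.hypersync.xyz",
  apiKey: process.env.HYPERSYNC_API_KEY ?? "",
};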
View File

@@ -12,6 +12,13 @@ function randomBigInt(): bigint {
return BigInt(Math.floor(Math.random() * Number.MAX_SAFE_INTEGER));
}
// Helper function to create mock transaction hashes
function mockTxHash(index: bigint): `0x${string}` {
// Pad the index to create a valid 32-byte hash
const paddedIndex = index.toString(16).padStart(64, '0');
return `0x${paddedIndex}`;
}
describe("AccountService", () => {
// Configuration for test data size
const NUM_DEPOSITS = 1; // Number of random deposits
@@ -79,7 +86,7 @@ describe("AccountService", () => {
precommitment,
blockNumber: POOL.deploymentBlock + BigInt(i * 100),
timestamp,
transactionHash: BigInt(i + 1) as Hash,
transactionHash: mockTxHash(BigInt(i + 1)),
};
depositEvents.push(deposit);
@@ -131,7 +138,7 @@ describe("AccountService", () => {
newCommitment,
blockNumber: currentCommitment.blockNumber + BigInt((j + 1) * 100),
timestamp: currentCommitment.timestamp + BigInt((j + 1) * 60), // Add 1 minute per withdrawal
transactionHash: BigInt(i * 100 + j + 2) as Hash,
transactionHash: mockTxHash(BigInt(i * 100 + j + 2)),
};
withdrawalEvents.push(withdrawal);

View File

@@ -0,0 +1,186 @@
import { describe, it, expect, beforeAll } from 'vitest';
import { DataService } from '../../src/core/data.service.js';
import { ChainConfig, DepositEvent, WithdrawalEvent, RagequitEvent } from '../../src/types/events.js';
import { Hash } from '../../src/types/commitment.js';
import { DataError } from '../../src/errors/data.error.js';
describe('DataService with Sepolia', () => {
let dataService: DataService;
const SEPOLIA_CHAIN_ID = 11155111;
const POOL_ADDRESS = '0xbbe3b00d54f0ee032eff07a47139da8d44095c96';
const START_BLOCK = 7781496n;
beforeAll(() => {
const apiKey = process.env.HYPERSYNC_API_KEY;
if (!apiKey) {
throw new Error('HYPERSYNC_API_KEY environment variable is required');
}
const config: ChainConfig = {
chainId: SEPOLIA_CHAIN_ID,
privacyPoolAddress: POOL_ADDRESS,
startBlock: START_BLOCK,
rpcUrl: 'https://sepolia.rpc.hypersync.xyz',
apiKey,
};
dataService = new DataService([config]);
});
it('should throw error when chain is not configured', async () => {
const invalidChainId = 1234;
await expect(dataService.getDeposits(invalidChainId)).rejects.toThrow(DataError);
await expect(dataService.getWithdrawals(invalidChainId)).rejects.toThrow(DataError);
await expect(dataService.getRagequits(invalidChainId)).rejects.toThrow(DataError);
});
it('should fetch deposit events', async () => {
const deposits = await dataService.getDeposits(SEPOLIA_CHAIN_ID);
expect(deposits.length).toBeGreaterThan(0);
expect(deposits[0]).toBeDefined();
// Verify the structure of a deposit event
const deposit = deposits[0] as DepositEvent;
expect(deposit).toEqual(
expect.objectContaining({
depositor: expect.stringMatching(/^0x[a-fA-F0-9]{40}$/),
commitment: expect.any(BigInt),
label: expect.any(BigInt),
value: expect.any(BigInt),
precommitment: expect.any(BigInt),
blockNumber: expect.any(BigInt),
timestamp: expect.any(BigInt),
transactionHash: expect.stringMatching(/^0x[a-fA-F0-9]{64}$/),
})
);
// Verify Hash type assertions and value ranges
expect(typeof deposit.commitment).toBe('bigint');
expect(deposit.commitment).toBeGreaterThan(0n);
expect(typeof deposit.label).toBe('bigint');
expect(deposit.label).toBeGreaterThan(0n);
expect(typeof deposit.precommitment).toBe('bigint');
expect(deposit.precommitment).toBeGreaterThan(0n);
expect(deposit.value).toBeGreaterThan(0n);
expect(deposit.blockNumber).toBeGreaterThanOrEqual(START_BLOCK);
expect(deposit.timestamp).toBeGreaterThan(0n);
expect(deposit.transactionHash).toMatch(/^0x[a-fA-F0-9]{64}$/);
// Log some useful information
console.log(`Found ${deposits.length} deposits`);
console.log('Sample deposit:', {
blockNumber: deposit.blockNumber.toString(),
depositor: deposit.depositor,
commitment: deposit.commitment.toString(),
label: deposit.label.toString(),
value: deposit.value.toString(),
precommitment: deposit.precommitment.toString(),
timestamp: new Date(Number(deposit.timestamp) * 1000).toISOString(),
transactionHash: deposit.transactionHash,
});
});
it('should fetch withdrawal events', async () => {
const withdrawals = await dataService.getWithdrawals(SEPOLIA_CHAIN_ID);
expect(withdrawals.length).toBeGreaterThan(0);
expect(withdrawals[0]).toBeDefined();
// Verify the structure of a withdrawal event
const withdrawal = withdrawals[0] as WithdrawalEvent;
expect(withdrawal).toEqual(
expect.objectContaining({
withdrawn: expect.any(BigInt),
spentNullifier: expect.any(BigInt),
newCommitment: expect.any(BigInt),
blockNumber: expect.any(BigInt),
timestamp: expect.any(BigInt),
transactionHash: expect.stringMatching(/^0x[a-fA-F0-9]{64}$/),
})
);
// Verify Hash type assertions and value ranges
expect(typeof withdrawal.spentNullifier).toBe('bigint');
expect(withdrawal.spentNullifier).toBeGreaterThan(0n);
expect(typeof withdrawal.newCommitment).toBe('bigint');
expect(withdrawal.newCommitment).toBeGreaterThan(0n);
expect(withdrawal.withdrawn).toBeGreaterThan(0n);
expect(withdrawal.blockNumber).toBeGreaterThanOrEqual(START_BLOCK);
expect(withdrawal.timestamp).toBeGreaterThan(0n);
expect(withdrawal.transactionHash).toMatch(/^0x[a-fA-F0-9]{64}$/);
// Log some useful information
console.log(`Found ${withdrawals.length} withdrawals`);
console.log('Sample withdrawal:', {
blockNumber: withdrawal.blockNumber.toString(),
withdrawn: withdrawal.withdrawn.toString(),
spentNullifier: withdrawal.spentNullifier.toString(),
newCommitment: withdrawal.newCommitment.toString(),
timestamp: new Date(Number(withdrawal.timestamp) * 1000).toISOString(),
transactionHash: withdrawal.transactionHash,
});
});
it('should fetch ragequit events', async () => {
const ragequits = await dataService.getRagequits(SEPOLIA_CHAIN_ID);
// Ragequits might not exist, so we don't assert on length
if (ragequits.length > 0) {
expect(ragequits[0]).toBeDefined();
// Verify the structure of a ragequit event
const ragequit = ragequits[0] as RagequitEvent;
expect(ragequit).toEqual(
expect.objectContaining({
ragequitter: expect.stringMatching(/^0x[a-fA-F0-9]{40}$/),
commitment: expect.any(BigInt),
label: expect.any(BigInt),
value: expect.any(BigInt),
blockNumber: expect.any(BigInt),
timestamp: expect.any(BigInt),
transactionHash: expect.stringMatching(/^0x[a-fA-F0-9]{64}$/),
})
);
// Verify Hash type assertions and value ranges
expect(typeof ragequit.commitment).toBe('bigint');
expect(ragequit.commitment).toBeGreaterThan(0n);
expect(typeof ragequit.label).toBe('bigint');
expect(ragequit.label).toBeGreaterThan(0n);
expect(ragequit.value).toBeGreaterThan(0n);
expect(ragequit.blockNumber).toBeGreaterThanOrEqual(START_BLOCK);
expect(ragequit.timestamp).toBeGreaterThan(0n);
expect(ragequit.transactionHash).toMatch(/^0x[a-fA-F0-9]{64}$/);
// Log some useful information
console.log(`Found ${ragequits.length} ragequits`);
console.log('Sample ragequit:', {
blockNumber: ragequit.blockNumber.toString(),
ragequitter: ragequit.ragequitter,
commitment: ragequit.commitment.toString(),
label: ragequit.label.toString(),
value: ragequit.value.toString(),
timestamp: new Date(Number(ragequit.timestamp) * 1000).toISOString(),
transactionHash: ragequit.transactionHash,
});
} else {
console.log('No ragequit events found');
}
});
it('should handle event filter options', async () => {
const fromBlock = START_BLOCK;
const toBlock = START_BLOCK + 1000n;
const deposits = await dataService.getDeposits(SEPOLIA_CHAIN_ID, { fromBlock, toBlock });
const withdrawals = await dataService.getWithdrawals(SEPOLIA_CHAIN_ID, { fromBlock, toBlock });
const ragequits = await dataService.getRagequits(SEPOLIA_CHAIN_ID, { fromBlock, toBlock });
// Verify that all events are within the block range
for (const event of [...deposits, ...withdrawals, ...ragequits]) {
expect(event.blockNumber).toBeGreaterThanOrEqual(fromBlock);
expect(event.blockNumber).toBeLessThanOrEqual(toBlock);
}
});
});